Add logging to log files for all snakemake workflow scripts. (#102)

* Add logging to log files for all snakemake workflow scripts.

* Fix missing quotation marks in Snakefile.

* Apply suggestions from code review

Co-Authored-By: Fabian Neumann <fabian.neumann@outlook.de>

* Apply suggestions from code review

Co-Authored-By: Fabian Neumann <fabian.neumann@outlook.de>

* doc: fix _ec_ filenames in docs

* Allow logging message format to be specified in config.yaml.

* Add logging for Snakemake rule 'retrieve_databundle'.

* Add limited logging to STDERR only for retrieve_*.py scripts.

* Import progressbar module only on demand.

* Fix logging to file and enable concurrent printing to STDERR for most scripts.

* Add new 'logging_format' option to Travis CI test config.yaml.

* Add missing parenthesis (bug fix) and cross-OS compatible paths.

* Fix typos in messages.

* Use correct log files for logging (bug fix).

* doc: fix line references

* config: logging_format in all configs

* doc: add doc for logging_format

* environment: update to powerplantmatching 0.4.3

* doc: update line references for tutorial.rst

* Change logging configuration scheme for config.yaml.

* Add helper function for doing basic logging configuration.

* Add logpath for prepare_links_p_nom rule.

* Outsource basic logging configuration for all scripts to the _helpers submodule.

* Update documentation for changed config.yaml structure.

The separate 'logging_level' and 'logging_format' options are replaced by a single 'logging' section with 'level' and 'format' subkeys.

* _helpers: Change configure_logging signature.
euronion 2019-11-28 08:22:52 +01:00 committed by Fabian Neumann
parent 7dc9bb2941
commit 85c356297a
35 changed files with 286 additions and 134 deletions
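
The unifying change across the script diffs below: the per-script call logging.basicConfig(level=snakemake.config['logging_level']) is replaced everywhere by a shared helper. A minimal sketch of the new calling convention (the snakemake object is injected by Snakemake at runtime; the log path in the comment is illustrative):

    import logging
    logger = logging.getLogger(__name__)
    from _helpers import configure_logging

    if __name__ == "__main__":
        # Wires up logging to STDERR and to the log file declared in the
        # rule's `log:` directive, e.g. "logs/add_electricity.log".
        configure_logging(snakemake)
        logger.info("This message reaches both the console and the log file.")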

Snakefile

@@ -29,6 +29,7 @@ rule solve_all_elec_networks:
 if config['enable'].get('prepare_links_p_nom', False):
     rule prepare_links_p_nom:
         output: 'data/links_p_nom.csv'
+        log: 'logs/prepare_links_p_nom.log'
         threads: 1
         resources: mem=500
         # group: 'nonfeedin_preparation'
@@ -48,6 +49,7 @@ if not config.get('tutorial', False):
 if config['enable'].get('retrieve_databundle', True):
     rule retrieve_databundle:
         output: expand('data/bundle/{file}', file=datafiles)
+        log: "logs/retrieve_databundle.log"
         script: 'scripts/retrieve_databundle.py'

 rule build_powerplants:
@@ -55,6 +57,7 @@ rule build_powerplants:
         base_network="networks/base.nc",
         custom_powerplants="data/custom_powerplants.csv"
     output: "resources/powerplants.csv"
+    log: "logs/build_powerplants.log"
     threads: 1
     resources: mem=500
     # group: 'nonfeedin_preparation'
@@ -74,6 +77,7 @@ rule base_network:
         offshore_shapes='resources/offshore_shapes.geojson',
         europe_shape='resources/europe_shape.geojson'
     output: "networks/base.nc"
+    log: "logs/base_network.log"
     benchmark: "benchmarks/base_network"
     threads: 1
     resources: mem=500
@@ -94,6 +98,7 @@ rule build_shapes:
         offshore_shapes='resources/offshore_shapes.geojson',
         europe_shape='resources/europe_shape.geojson',
         nuts3_shapes='resources/nuts3_shapes.geojson'
+    log: "logs/build_shapes.log"
     threads: 1
     resources: mem=500
     # group: 'nonfeedin_preparation'
@@ -107,6 +112,7 @@ rule build_bus_regions:
     output:
         regions_onshore="resources/regions_onshore.geojson",
         regions_offshore="resources/regions_offshore.geojson"
+    log: "logs/build_bus_regions.log"
     resources: mem=1000
     # group: 'nonfeedin_preparation'
     script: "scripts/build_bus_regions.py"
@@ -114,6 +120,7 @@ rule build_bus_regions:
 if config['enable'].get('build_cutout', False):
     rule build_cutout:
         output: directory("cutouts/{cutout}")
+        log: "logs/build_cutout.log"
         resources: mem=config['atlite'].get('nprocesses', 4) * 1000
         threads: config['atlite'].get('nprocesses', 4)
         benchmark: "benchmarks/build_cutout_{cutout}"
@@ -122,6 +129,7 @@ if config['enable'].get('build_cutout', False):
 else:
     rule retrieve_cutout:
         output: directory(expand("cutouts/{cutouts}", **config['atlite'])),
+        log: "logs/retrieve_cutout.log"
         script: 'scripts/retrieve_cutout.py'
@@ -131,10 +139,12 @@ if config['enable'].get('build_natura_raster', False):
             natura="data/bundle/natura/Natura2000_end2015.shp",
             cutouts=expand("cutouts/{cutouts}", **config['atlite'])
         output: "resources/natura.tiff"
+        log: "logs/build_natura_raster.log"
         script: "scripts/build_natura_raster.py"
 else:
     rule retrieve_natura_raster:
         output: "resources/natura.tiff"
+        log: "logs/retrieve_natura_raster.log"
         script: 'scripts/retrieve_natura_raster.py'

 rule build_renewable_profiles:
@@ -152,6 +162,7 @@ rule build_renewable_profiles:
                                     else "resources/regions_offshore.geojson"),
         cutout=lambda wildcards: "cutouts/" + config["renewable"][wildcards.technology]['cutout']
     output: profile="resources/profile_{technology}.nc",
+    log: "logs/build_renewable_profile_{technology}.log"
     resources: mem=config['atlite'].get('nprocesses', 2) * 5000
     threads: config['atlite'].get('nprocesses', 2)
     benchmark: "benchmarks/build_renewable_profiles_{technology}"
@@ -165,6 +176,7 @@ if 'hydro' in config['renewable'].keys():
             eia_hydro_generation='data/bundle/EIA_hydro_generation_2000_2014.csv',
             cutout="cutouts/" + config["renewable"]['hydro']['cutout']
         output: 'resources/profile_hydro.nc'
+        log: "logs/build_hydro_profile.log"
         resources: mem=5000
         # group: 'feedin_preparation'
         script: 'scripts/build_hydro_profile.py'
@@ -182,6 +194,7 @@ rule add_electricity:
         **{'profile_' + t: "resources/profile_" + t + ".nc"
            for t in config['renewable']}
     output: "networks/elec.nc"
+    log: "logs/add_electricity.log"
     benchmark: "benchmarks/add_electricity"
     threads: 1
     resources: mem=3000
@@ -199,6 +212,7 @@ rule simplify_network:
         regions_onshore="resources/regions_onshore_{network}_s{simpl}.geojson",
         regions_offshore="resources/regions_offshore_{network}_s{simpl}.geojson",
         clustermaps='resources/clustermaps_{network}_s{simpl}.h5'
+    log: "logs/simplify_network/{network}_s{simpl}.log"
     benchmark: "benchmarks/simplify_network/{network}_s{simpl}"
     threads: 1
     resources: mem=4000
@@ -217,6 +231,7 @@ rule cluster_network:
         regions_onshore="resources/regions_onshore_{network}_s{simpl}_{clusters}.geojson",
         regions_offshore="resources/regions_offshore_{network}_s{simpl}_{clusters}.geojson",
         clustermaps='resources/clustermaps_{network}_s{simpl}_{clusters}.h5'
+    log: "logs/cluster_network/{network}_s{simpl}_{clusters}.log"
     benchmark: "benchmarks/cluster_network/{network}_s{simpl}_{clusters}"
     threads: 1
     resources: mem=3000
@@ -229,6 +244,7 @@ rule add_extra_components:
         network='networks/{network}_s{simpl}_{clusters}.nc',
         tech_costs=COSTS,
     output: 'networks/{network}_s{simpl}_{clusters}_ec.nc'
+    log: "logs/add_extra_components/{network}_s{simpl}_{clusters}.log"
     benchmark: "benchmarks/add_extra_components/{network}_s{simpl}_{clusters}_ec"
     threads: 1
     resources: mem=3000
@@ -239,6 +255,7 @@ rule add_extra_components:
 rule prepare_network:
     input: 'networks/{network}_s{simpl}_{clusters}_ec.nc', tech_costs=COSTS
     output: 'networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc'
+    log: "logs/prepare_network/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.log"
     threads: 1
     resources: mem=1000
     # benchmark: "benchmarks/prepare_network/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}"
@@ -262,9 +279,9 @@ rule solve_network:
     output: "results/networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
     shadow: "shallow"
     log:
-        solver=normpath("logs/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_solver.log"),
-        python="logs/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_python.log",
-        memory="logs/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_memory.log"
+        solver=normpath("logs/solve_network/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_solver.log"),
+        python="logs/solve_network/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_python.log",
+        memory="logs/solve_network/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_memory.log"
     benchmark: "benchmarks/solve_network/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}"
     threads: 4
     resources: mem=memory
@@ -303,6 +320,7 @@ rule plot_network:
     output:
         only_map="results/plots/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{attr}.{ext}",
         ext="results/plots/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{attr}_ext.{ext}"
+    log: "logs/plot_network/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{attr}_{ext}.log"
     script: "scripts/plot_network.py"

 def input_make_summary(w):
@@ -323,11 +341,13 @@ def input_make_summary(w):
 rule make_summary:
     input: input_make_summary
     output: directory("results/summaries/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}")
+    log: "logs/make_summary/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}.log",
     script: "scripts/make_summary.py"

 rule plot_summary:
     input: "results/summaries/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}"
     output: "results/plots/summary_{summary}_{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}.{ext}"
+    log: "logs/plot_summary/{summary}_{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}_{ext}.log"
     script: "scripts/plot_summary.py"

 def input_plot_p_nom_max(wildcards):
@@ -337,6 +357,7 @@ def input_plot_p_nom_max(wildcards):
 rule plot_p_nom_max:
     input: input_plot_p_nom_max
     output: "results/plots/{network}_s{simpl}_cum_p_nom_max_{clusters}_{technology}_{country}.{ext}"
+    log: "logs/plot_p_nom_max/{network}_s{simpl}_{clusters}_{technology}_{country}_{ext}.log"
     script: "scripts/plot_p_nom_max.py"

 rule build_country_flh:
@@ -360,6 +381,7 @@ rule build_country_flh:
         uncorrected="resources/country_flh_uncorrected_{technology}.csv",
         plot="resources/country_flh_{technology}.pdf",
         exclusion=directory("resources/country_exclusion_{technology}")
+    log: "logs/build_country_flh_{technology}.log"
     resources: mem=10000
     benchmark: "benchmarks/build_country_flh_{technology}"
     # group: 'feedin_preparation'
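
How a rule's new `log:` entry reaches its script: Snakemake exposes the declared paths on snakemake.log, and configure_logging (added in scripts/_helpers.py below) prefers a named 'python' entry, then the first declared log, then a per-rule default. A standalone sketch of that fallback; the dict/list arguments are illustrative stand-ins for Snakemake's log object, which supports both named and positional access:

    def pick_logfile(log, rule):
        # Mirrors the choice inside configure_logging (scripts/_helpers.py):
        # prefer a log named 'python', then the first declared log, then a default.
        if hasattr(log, "get") and "python" in log:
            return log["python"]
        return log[0] if log else f"logs/{rule}.log"

    # rule solve_network declares named logs, so the 'python' entry wins:
    print(pick_logfile({"python": "logs/solve_network/elec_python.log"}, "solve_network"))
    # rule add_electricity declares a single log, which is used directly:
    print(pick_logfile(["logs/add_electricity.log"], "add_electricity"))
    # a rule without a log: directive falls back to logs/<rulename>.log:
    print(pick_logfile([], "build_shapes"))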

config.default.yaml

@@ -1,6 +1,9 @@
 version: 0.1
 tutorial: false
-logging_level: INFO
+logging:
+  level: INFO
+  format: '%(levelname)s:%(name)s:%(message)s'

 summary_dir: results
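
What the new `format` option produces, sketched with the default pattern from the diff above (module name and message are illustrative):

    import logging

    logging.basicConfig(format='%(levelname)s:%(name)s:%(message)s', level='INFO')
    logging.getLogger('add_electricity').info('Adding generators.')
    # emits: INFO:add_electricity:Adding generators.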

config.tutorial.yaml

@@ -1,6 +1,8 @@
 version: 0.1
 tutorial: true
-logging_level: INFO
+logging:
+  level: INFO
+  format: '%(levelname)s:%(name)s:%(message)s'

 summary_dir: results

doc/configtables/toplevel.csv

@@ -1,7 +1,9 @@
 ,Unit,Values,Description
 version,--,0.1,"Version of PyPSA-Eur"
 tutorial,bool,"{true, false}","Switch to retrieve the tutorial data set instead of the full data set."
-logging_level,--,"Any of {'INFO', 'WARNING', 'ERROR'}","Restrict console outputs to all infos, warning or errors only"
+logging,,,
+-- level,--,"Any of {'INFO', 'WARNING', 'ERROR'}","Restrict console outputs to all infos, warning or errors only"
+-- format,--,"e.g. ``%(levelname)s:%(name)s:%(message)s``","Custom format for log messages. See `LogRecord <https://docs.python.org/3/library/logging.html#logging.LogRecord>`_ attributes."
 summary_dir,--,"e.g. 'results'","Directory into which results are written."
 countries,--,"Subset of {'AL', 'AT', 'BA', 'BE', 'BG', 'CH', 'CZ', 'DE', 'DK', 'EE', 'ES', 'FI', 'FR', 'GB', 'GR', 'HR', 'HU', 'IE', 'IT', 'LT', 'LU', 'LV', 'ME', 'MK', 'NL', 'NO', 'PL', 'PT', 'RO', 'RS', 'SE', 'SI', 'SK'}","European countries defined by their `Two-letter country codes (ISO 3166-1) <https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2>`_ which should be included in the energy system model."
 focus_weights,--,"Keys should be two-digit country codes (e.g. DE) and values should range between 0 and 1","Ratio of total clusters for particular countries. the remaining weight is distributed according to mean load. An example: ``focus_weights: DE: 0.6 FR: 0.2``."


doc/configuration.rst

@@ -13,7 +13,7 @@ Top-level configuration
 .. literalinclude:: ../config.default.yaml
    :language: yaml
-   :lines: 1-6,14
+   :lines: 1-8,17

 .. csv-table::
    :header-rows: 1
@@ -37,7 +37,7 @@ facilitate running multiple scenarios through a single command

     snakemake solve_all_elec_networks

-For each wildcard, a **list of values** is provided. The rule ``solve_all_elec_networks`` will trigger the rules for creating ``results/networks/elec_s{simpl}_{clusters}_l{ll}_{opts}.nc`` for **all combinations** of the provided wildcard values as defined by Python's `itertools.product(...) <https://docs.python.org/2/library/itertools.html#itertools.product>`_ function that snakemake's `expand(...) function <https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#targets>`_ uses.
+For each wildcard, a **list of values** is provided. The rule ``solve_all_elec_networks`` will trigger the rules for creating ``results/networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc`` for **all combinations** of the provided wildcard values as defined by Python's `itertools.product(...) <https://docs.python.org/2/library/itertools.html#itertools.product>`_ function that snakemake's `expand(...) function <https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#targets>`_ uses.

 An exemplary dependency graph (starting from the simplification rules) then looks like this:
@@ -45,7 +45,7 @@ An exemplary dependency graph (starting from the simplification rules) then look
 .. literalinclude:: ../config.default.yaml
    :language: yaml
-   :lines: 7-12
+   :lines: 10-15

 .. csv-table::
    :header-rows: 1
@@ -61,7 +61,7 @@ Specifies the temporal range to build an energy system model for as arguments to
 .. literalinclude:: ../config.default.yaml
    :language: yaml
-   :lines: 16-19
+   :lines: 19-22

 .. csv-table::
    :header-rows: 1
@@ -75,7 +75,7 @@ Specifies the temporal range to build an energy system model for as arguments to
 .. literalinclude:: ../config.default.yaml
    :language: yaml
-   :lines: 26-42
+   :lines: 30-47

 .. csv-table::
    :header-rows: 1
@@ -92,7 +92,7 @@ Specifies the temporal range to build an energy system model for as arguments to
 .. literalinclude:: ../config.default.yaml
    :language: yaml
-   :lines: 50-63
+   :lines: 55-68

 .. csv-table::
    :header-rows: 1
@@ -109,7 +109,7 @@ Specifies the temporal range to build an energy system model for as arguments to
 .. literalinclude:: ../config.default.yaml
    :language: yaml
-   :lines: 65-82
+   :lines: 70-87

 .. csv-table::
    :header-rows: 1
@@ -121,7 +121,7 @@ Specifies the temporal range to build an energy system model for as arguments to
 .. literalinclude:: ../config.default.yaml
    :language: yaml
-   :lines: 65,83-95
+   :lines: 70,88-100

 .. csv-table::
    :header-rows: 1
@@ -133,7 +133,7 @@ Specifies the temporal range to build an energy system model for as arguments to
 .. literalinclude:: ../config.default.yaml
    :language: yaml
-   :lines: 65,96-109
+   :lines: 70,101-114

 .. csv-table::
    :header-rows: 1
@@ -145,7 +145,7 @@ Specifies the temporal range to build an energy system model for as arguments to
 .. literalinclude:: ../config.default.yaml
    :language: yaml
-   :lines: 65,110-129
+   :lines: 70,115-134

 .. csv-table::
    :header-rows: 1
@@ -157,7 +157,7 @@ Specifies the temporal range to build an energy system model for as arguments to
 .. literalinclude:: ../config.default.yaml
    :language: yaml
-   :lines: 65,130-136
+   :lines: 70,135-141

 .. csv-table::
    :header-rows: 1
@@ -171,7 +171,7 @@ Specifies the temporal range to build an energy system model for as arguments to
 .. literalinclude:: ../config.default.yaml
    :language: yaml
-   :lines: 138-145
+   :lines: 133-150

 .. csv-table::
    :header-rows: 1
@@ -185,7 +185,7 @@ Specifies the temporal range to build an energy system model for as arguments to
 .. literalinclude:: ../config.default.yaml
    :language: yaml
-   :lines: 147-150
+   :lines: 152-155

 .. csv-table::
    :header-rows: 1
@@ -199,7 +199,7 @@ Specifies the temporal range to build an energy system model for as arguments to
 .. literalinclude:: ../config.default.yaml
    :language: yaml
-   :lines: 152-155
+   :lines: 157-160

 .. csv-table::
    :header-rows: 1
@@ -213,7 +213,7 @@ Specifies the temporal range to build an energy system model for as arguments to
 .. literalinclude:: ../config.default.yaml
    :language: yaml
-   :lines: 157-158
+   :lines: 162-163

 .. csv-table::
    :header-rows: 1
@@ -227,7 +227,7 @@ Specifies the temporal range to build an energy system model for as arguments to
 .. literalinclude:: ../config.default.yaml
    :language: yaml
-   :lines: 160-172
+   :lines: 165-177

 .. csv-table::
    :header-rows: 1
@@ -249,7 +249,7 @@ Specifies the temporal range to build an energy system model for as arguments to
 .. literalinclude:: ../config.default.yaml
    :language: yaml
-   :lines: 174-182
+   :lines: 179-187

 .. csv-table::
    :header-rows: 1
@@ -261,7 +261,7 @@ Specifies the temporal range to build an energy system model for as arguments to
 .. literalinclude:: ../config.default.yaml
    :language: yaml
-   :lines: 174,183-199
+   :lines: 179,188-204

 .. csv-table::
    :header-rows: 1
@@ -275,7 +275,7 @@ Specifies the temporal range to build an energy system model for as arguments to
 .. literalinclude:: ../config.default.yaml
    :language: yaml
-   :lines: 201-335
+   :lines: 206-340

 .. csv-table::
    :header-rows: 1

doc/tutorial.rst

@@ -38,47 +38,47 @@ The model can be adapted to only include selected countries (e.g. Germany) inste
 .. literalinclude:: ../config.tutorial.yaml
    :language: yaml
-   :lines: 14
+   :lines: 15

 Likewise, the example's temporal scope can be restricted (e.g. to a single month).

 .. literalinclude:: ../config.tutorial.yaml
    :language: yaml
-   :lines: 16-19
+   :lines: 17-20

 It is also possible to allow less or more carbon-dioxide emissions. Here, we limit the emissions of Germany 100 Megatonnes per year.

 .. literalinclude:: ../config.tutorial.yaml
    :language: yaml
-   :lines: 28
+   :lines: 30

 PyPSA-Eur also includes a database of existing conventional powerplants.
 We can select which types of powerplants we like to be included with fixed capacities:

 .. literalinclude:: ../config.tutorial.yaml
    :language: yaml
-   :lines: 40
+   :lines: 43

 To accurately model the temporal and spatial availability of renewables such as wind and solar energy, we rely on historical weather data.
 It is advisable to adapt the required range of coordinates to the selection of countries.

 .. literalinclude:: ../config.tutorial.yaml
    :language: yaml
-   :lines: 42-50
+   :lines: 45-53

 We can also decide which weather data source should be used to calculate potentials and capacity factor time-series for each carrier.
 For example, we may want to use the ERA-5 dataset for solar and not the default SARAH-2 dataset.

 .. literalinclude:: ../config.tutorial.yaml
    :language: yaml
-   :lines: 52,95-96
+   :lines: 55,98-99

 Finally, it is possible to pick a solver. For instance, this tutorial uses the open-source solvers CBC and Ipopt and does not rely
 on the commercial solvers Gurobi or CPLEX (for which free academic licenses are available).

 .. literalinclude:: ../config.tutorial.yaml
    :language: yaml
-   :lines: 151,160-161
+   :lines: 154,163-164

 .. note::

@@ -110,8 +110,8 @@ orders ``snakemake`` to run the script ``solve_network`` that produces the solve
 .. code::

     rule solve_network:
-        input: "networks/{network}_s{simpl}_{clusters}_l{ll}_{opts}.nc"
-        output: "results/networks/{network}_s{simpl}_{clusters}_l{ll}_{opts}.nc"
+        input: "networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
+        output: "results/networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
         [...]
         script: "scripts/solve_network.py"

environment.docs.yaml

@@ -17,7 +17,7 @@ dependencies:
   - memory_profiler
   - yaml
   - pytables
-  - powerplantmatching>=0.4.2
+  - powerplantmatching>=0.4.3
   # Second order dependencies which should really be deps of atlite
   - xarray

environment.yaml

@@ -18,7 +18,7 @@ dependencies:
   - memory_profiler
   - yaml
   - pytables
-  - powerplantmatching>=0.4.2
+  - powerplantmatching>=0.4.3
   # Second order dependencies which should really be deps of atlite
   - xarray

scripts/_helpers.py

@@ -1,17 +1,48 @@
 import pandas as pd
-from six import iterkeys, itervalues
-import urllib
-from progressbar import ProgressBar
-import pypsa
-
-from add_electricity import load_costs, update_transmission_costs
+
+def configure_logging(snakemake, skip_handlers=False):
+    """
+    Configure the basic behaviour for the logging module.
+
+    Note: Must only be called once from the __main__ section of a script.
+
+    The setup includes printing log messages to STDERR and to a log file defined
+    by either (in priority order): snakemake.log.python, snakemake.log[0] or
+    "logs/{rulename}.log".
+
+    Additional keywords from logging.basicConfig are accepted via the snakemake
+    configuration file under snakemake.config.logging.
+
+    Parameters
+    ----------
+    snakemake : snakemake object
+        Your snakemake object containing a snakemake.config and snakemake.log.
+    skip_handlers : True | False (default)
+        Do (not) skip the default handlers created for redirecting output to
+        STDERR and file.
+    """
+
+    import logging
+
+    kwargs = snakemake.config.get('logging', dict())
+    kwargs.setdefault("level", "INFO")
+
+    if skip_handlers is False:
+        kwargs.update(
+            {'handlers': [
+                # Prefer the 'python' log, otherwise take the first log for each
+                # Snakemake rule
+                logging.FileHandler(snakemake.log.get('python', snakemake.log[0] if snakemake.log else f"logs/{snakemake.rule}.log")),
+                logging.StreamHandler()
+                ]
+            })
+    logging.basicConfig(**kwargs)

 def pdbcast(v, h):
     return pd.DataFrame(v.values.reshape((-1, 1)) * h.values,
                         index=v.index, columns=h.index)

 def load_network(fn, tech_costs, config, combine_hydro_ps=True):
+    import pypsa
+    from add_electricity import update_transmission_costs, load_costs
+
     opts = config['plotting']

     n = pypsa.Network(fn)
@@ -73,6 +104,8 @@ def aggregate_p_curtailed(n):
         ])

 def aggregate_costs(n, flatten=False, opts=None, existing_only=False):
+    from six import iterkeys, itervalues
+
     components = dict(Link=("p_nom", "p0"),
                       Generator=("p_nom", "p"),
                       StorageUnit=("p_nom", "p"),
@@ -107,6 +140,9 @@ def aggregate_costs(n, flatten=False, opts=None, existing_only=False):
     return costs

 def progress_retrieve(url, file):
+    import urllib
+    from progressbar import ProgressBar
+
     pbar = ProgressBar(0, 100)

     def dlProgress(count, blockSize, totalSize):
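
A usage sketch for the helper added above; everything except the logging module is mocked (SimpleNamespace and NamedLogList stand in for the object Snakemake injects, and 'demo.log' is an arbitrary path):

    import logging
    from types import SimpleNamespace

    class NamedLogList(list):
        # Illustrative stand-in for Snakemake's log object, which supports
        # positional indexing plus .get() for named entries.
        def get(self, key, default=None):
            return default

    snakemake = SimpleNamespace(
        config={'logging': {'level': 'INFO',
                            'format': '%(levelname)s:%(name)s:%(message)s'}},
        log=NamedLogList(['demo.log']),
        rule='demo')

    # configure_logging(snakemake) then reduces to roughly:
    kwargs = dict(snakemake.config.get('logging', {}))
    kwargs.setdefault('level', 'INFO')
    kwargs['handlers'] = [logging.FileHandler(snakemake.log.get('python', snakemake.log[0])),
                          logging.StreamHandler()]
    logging.basicConfig(**kwargs)
    logging.getLogger(__name__).info("Written to STDERR and to demo.log.")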

scripts/add_electricity.py

@@ -91,6 +91,9 @@ from vresutils.load import timeseries_opsd
 from vresutils import transfer as vtransfer

 import logging
+logger = logging.getLogger(__name__)
+
+from _helpers import configure_logging

 import pandas as pd
 import numpy as np
 import xarray as xr
@@ -99,7 +102,6 @@ import pypsa
 import powerplantmatching as ppm

 idx = pd.IndexSlice
-logger = logging.getLogger(__name__)

 def normed(s): return s/s.sum()
@@ -522,7 +524,7 @@ if __name__ == "__main__":
                 for t in snakemake.config['renewable']})
         )

-    logging.basicConfig(level=snakemake.config['logging_level'])
+    configure_logging(snakemake)

     n = pypsa.Network(snakemake.input.base_network)
     Nyears = n.snapshot_weightings.sum()/8760.

scripts/add_extra_components.py

@@ -45,16 +45,16 @@ The rule :mod:`add_extra_components` attaches additional extendable components t
 - ``Stores`` of carrier 'H2' and/or 'battery' in combination with ``Links``. If this option is chosen, the script adds extra buses with corresponding carrier where energy ``Stores`` are attached and which are connected to the corresponding power buses via two links, one each for charging and discharging. This leads to three investment variables for the energy capacity, charging and discharging capacity of the storage unit.
 """

 import logging
+logger = logging.getLogger(__name__)
+
+from _helpers import configure_logging

 import pandas as pd
 import pypsa

 from add_electricity import (load_costs, normed, add_nice_carrier_names,
                              _add_missing_carriers_from_costs)

 idx = pd.IndexSlice
-logger = logging.getLogger(__name__)

 def attach_storageunits(n, costs):
     elec_opts = snakemake.config['electricity']
@@ -150,7 +150,7 @@ if __name__ == "__main__":
             Dict(network='networks/elec_s_5.nc',
                  tech_costs='data/costs.csv'))

-    logging.basicConfig(level=snakemake.config['logging_level'])
+    configure_logging(snakemake)

     n = pypsa.Network(snakemake.input.network)
     Nyears = n.snapshot_weightings.sum()/8760.

scripts/base_network.py

@@ -58,6 +58,10 @@ Description
 """

+import logging
+logger = logging.getLogger(__name__)
+from _helpers import configure_logging
+
 import yaml
 import pandas as pd
 import geopandas as gpd
@@ -72,9 +76,6 @@ import shapely, shapely.prepared, shapely.wkt
 import networkx as nx

-import logging
-logger = logging.getLogger(__name__)
-
 import pypsa

 def _get_oid(df):
@@ -569,7 +570,7 @@ if __name__ == "__main__":
             output = ['networks/base.nc']
         )

-    logging.basicConfig(level=snakemake.config['logging_level'])
+    configure_logging(snakemake)

     n = base_network()
     n.export_to_netcdf(snakemake.output[0])

scripts/build_bus_regions.py

@@ -36,6 +36,11 @@ Description
 -----------

 """

+import logging
+logger = logging.getLogger(__name__)
+from _helpers import configure_logging
+
 from vresutils.graph import voronoi_partition_pts

 import os
@@ -43,12 +48,10 @@ import os
 import pandas as pd
 import geopandas as gpd
 import pypsa
-import logging

 if __name__ == "__main__":
-    logging.basicConfig(level=snakemake.config["logging_level"])
+    configure_logging(snakemake)

     countries = snakemake.config['countries']

scripts/build_country_flh.py

@@ -57,6 +57,10 @@ Description
 """

+import logging
+logger = logging.getLogger(__name__)
+from _helpers import configure_logging
+
 import os
 import atlite
 import numpy as np
@@ -72,8 +76,6 @@ from vresutils import landuse as vlanduse
 from vresutils.array import spdiag

 import progressbar as pgb
-import logging
-logger = logging.getLogger(__name__)

 from build_renewable_profiles import init_globals, calculate_potential
@@ -175,7 +177,8 @@ if __name__ == '__main__':
                                snakemake.config["renewable"][snakemake.wildcards.technology]['cutout'])

     pgb.streams.wrap_stderr()
-    logging.basicConfig(level=snakemake.config['logging_level'])
+
+    configure_logging(snakemake)

     config = snakemake.config['renewable'][snakemake.wildcards.technology]

scripts/build_cutout.py

@@ -86,13 +86,16 @@ Description
 -----------

 """

-import os
-import atlite
 import logging
 logger = logging.getLogger(__name__)
+from _helpers import configure_logging
+
+import os
+import atlite

 if __name__ == "__main__":
-    logging.basicConfig(level=snakemake.config['logging_level'])
+    configure_logging(snakemake)

     cutout_params = snakemake.config['atlite']['cutouts'][snakemake.wildcards.cutout]
     for p in ('xs', 'ys', 'years', 'months'):

scripts/build_hydro_profile.py

@@ -54,15 +54,17 @@ Description
 :mod:`build_renewable_profiles`
 """

+import logging
+logger = logging.getLogger(__name__)
+from _helpers import configure_logging
+
 import os
 import atlite
 import geopandas as gpd
 from vresutils import hydro as vhydro
-import logging

 if __name__ == "__main__":
-    logging.basicConfig(level=snakemake.config['logging_level'])
+    configure_logging(snakemake)

     config = snakemake.config['renewable']['hydro']
     cutout = atlite.Cutout(config['cutout'],

scripts/build_natura_raster.py

@@ -35,6 +35,10 @@ Description
 """

+import logging
+logger = logging.getLogger(__name__)
+from _helpers import configure_logging
+
 import numpy as np
 import atlite
 import geokit as gk
@@ -47,6 +51,8 @@ def determine_cutout_xXyY(cutout_name):
     return [x - dx/2., X + dx/2., y - dy/2., Y + dy/2.]

 if __name__ == "__main__":
+    configure_logging(snakemake)
+
     cutout_names = np.unique([res['cutout'] for res in snakemake.config['renewable'].values()])
     xs, Xs, ys, Ys = zip(*(determine_cutout_xXyY(cutout) for cutout in cutout_names))
     xXyY = min(xs), max(Xs), min(ys), max(Ys)

scripts/build_powerplants.py

@@ -68,15 +68,15 @@ The configuration options ``electricity: powerplants_filter`` and ``electricity:
 """

 import logging
+logger = logging.getLogger(__name__)
+from _helpers import configure_logging

 from scipy.spatial import cKDTree as KDTree

 import pypsa
 import powerplantmatching as pm
 import pandas as pd

-logger = logging.getLogger(__name__)
-
 def add_custom_powerplants(ppl):
     custom_ppl_query = snakemake.config['electricity']['custom_powerplants']
     if not custom_ppl_query:
@@ -88,6 +88,7 @@ def add_custom_powerplants(ppl):

 if __name__ == "__main__":
     if 'snakemake' not in globals():
         from vresutils.snakemake import MockSnakemake, Dict
@@ -97,7 +98,7 @@ if __name__ == "__main__":
             output=['resources/powerplants.csv']
         )

-    logging.basicConfig(level=snakemake.config['logging_level'])
+    configure_logging(snakemake)

     n = pypsa.Network(snakemake.input.base_network)
     countries = n.buses.country.unique()

scripts/build_renewable_profiles.py

@@ -150,6 +150,9 @@ node (`p_nom_max`): ``simple`` and ``conservative``:
     reached.
 """

+import logging
+logger = logging.getLogger(__name__)
+from _helpers import configure_logging

 import matplotlib.pyplot as plt
@@ -170,8 +173,6 @@ from vresutils import landuse as vlanduse
 from vresutils.array import spdiag

 import progressbar as pgb
-import logging
-logger = logging.getLogger(__name__)

 bounds = dx = dy = config = paths = gebco = clc = natura = None

 def init_globals(bounds_xXyY, n_dx, n_dy, n_config, n_paths):
@@ -239,7 +240,8 @@ def calculate_potential(gid, save_map=None):

 if __name__ == '__main__':
     pgb.streams.wrap_stderr()
-    logging.basicConfig(level=snakemake.config['logging_level'])
+
+    configure_logging(snakemake)

     config = snakemake.config['renewable'][snakemake.wildcards.technology]

scripts/build_shapes.py

@@ -63,6 +63,10 @@ Description
 """

+import logging
+logger = logging.getLogger(__name__)
+from _helpers import configure_logging
+
 import os
 import numpy as np
 from operator import attrgetter
@@ -221,6 +225,8 @@ if __name__ == "__main__":
         )
     )

+    configure_logging(snakemake)
+
     country_shapes = countries()
     save_to_geojson(country_shapes, snakemake.output.country_shapes)

scripts/cluster_network.py

@@ -91,11 +91,12 @@ Description
 """

-import pandas as pd
-idx = pd.IndexSlice
-
 import logging
 logger = logging.getLogger(__name__)
+from _helpers import configure_logging
+
+import pandas as pd
+idx = pd.IndexSlice

 import os
 import numpy as np
@@ -312,7 +313,7 @@ if __name__ == "__main__":
         )
     )

-    logging.basicConfig(level=snakemake.config['logging_level'])
+    configure_logging(snakemake)

     n = pypsa.Network(snakemake.input.network)

scripts/make_summary.py

@@ -49,7 +49,12 @@ Replacing '/summaries/' with '/plots/' creates nice colored maps of the results.
 """

+import logging
+logger = logging.getLogger(__name__)
+from _helpers import configure_logging
+
 import os
 from six import iteritems
 import pandas as pd
@@ -478,7 +483,9 @@ if __name__ == "__main__":
         else:
             ll = [snakemake.wildcards.ll]

-    networks_dict = {(simpl,clusters,l,opts) : ('results/networks/{network}_s{simpl}_{clusters}_l{ll}_{opts}.nc'
+    configure_logging(snakemake)
+
+    networks_dict = {(simpl,clusters,l,opts) : ('results/networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc'
                         .format(network=snakemake.wildcards.network,
                                 simpl=simpl,
                                 clusters=clusters,

scripts/plot_network.py

@@ -15,12 +15,13 @@ Description
 """

-from _helpers import load_network, aggregate_p, aggregate_costs
+import logging
+logger = logging.getLogger(__name__)
+from _helpers import load_network, aggregate_p, aggregate_costs, configure_logging

 import pandas as pd
 import numpy as np

 from six.moves import zip
-import logging

 import cartopy.crs as ccrs
 import matplotlib.pyplot as plt
@@ -257,7 +258,7 @@ if __name__ == "__main__":
                ext="results/plots/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_{attr}_ext.{ext}")
         )

-    logging.basicConfig(level=snakemake.config['logging_level'])
+    configure_logging(snakemake)

     set_plot_style()

scripts/plot_p_nom_max.py

@@ -14,8 +14,12 @@ Description
 -----------

 """

+import logging
+logger = logging.getLogger(__name__)
+from _helpers import configure_logging
+
 import pypsa
 import pandas as pd
 import matplotlib.pyplot as plt
@@ -57,7 +61,7 @@ if __name__ == "__main__":
             output=['results/plots/cum_p_nom_max_{clusters}_{country}.pdf']
         )

-    logging.basicConfig(level=snakemake.config['logging_level'])
+    configure_logging(snakemake)

     plot_kwds = dict(drawstyle="steps-post")

scripts/plot_summary.py

@@ -16,6 +16,9 @@ Description
 """

 import os
+import logging
+logger = logging.getLogger(__name__)

 import pandas as pd
 import matplotlib.pyplot as plt
@@ -178,6 +181,9 @@ def plot_energy(infn, fn=None):

 if __name__ == "__main__":
+    configure_logging(snakemake)
+
     summary = snakemake.wildcards.summary

     try:
         func = globals()[f"plot_{summary}"]

scripts/prepare_links_p_nom.py

@@ -31,9 +31,16 @@ Description
 """

+import logging
+logger = logging.getLogger(__name__)
+from _helpers import configure_logging
+
 import pandas as pd

 if __name__ == "__main__":
+    configure_logging(snakemake)
+
     links_p_nom = pd.read_html('https://en.wikipedia.org/wiki/List_of_HVDC_projects', header=0, match="SwePol")[0]

     def extract_coordinates(s):

scripts/prepare_network.py

@@ -38,7 +38,7 @@ Inputs
 Outputs
 -------

-- ``networks/{network}_s{simpl}_{clusters}_l{ll}_{opts}.nc``: Complete PyPSA network that will be handed to the ``solve_network`` rule.
+- ``networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: Complete PyPSA network that will be handed to the ``solve_network`` rule.

 Description
 -----------
@@ -50,6 +50,10 @@ Description
 """

+import logging
+logger = logging.getLogger(__name__)
+from _helpers import configure_logging
+
 from add_electricity import load_costs, update_transmission_costs
 from six import iteritems

@@ -57,10 +61,8 @@ import numpy as np
 import re
 import pypsa
 import pandas as pd
-import logging

 idx = pd.IndexSlice
-logger = logging.getLogger(__name__)

 def add_co2limit(n, Nyears=1., factor=None):
@@ -182,10 +184,10 @@ if __name__ == "__main__":
     snakemake = MockSnakemake(
         wildcards=dict(network='elec', simpl='', clusters='37', ll='v2', opts='Co2L-3H'),
         input=['networks/{network}_s{simpl}_{clusters}.nc'],
-        output=['networks/{network}_s{simpl}_{clusters}_l{ll}_{opts}.nc']
+        output=['networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc']
     )

-    logging.basicConfig(level=snakemake.config['logging_level'])
+    configure_logging(snakemake)

     opts = snakemake.wildcards.opts.split('-')

scripts/retrieve_cutout.py

@@ -37,22 +37,31 @@ The :ref:`tutorial` uses smaller `cutouts <https://zenodo.org/record/3518020/fil
 """

-import logging, os, tarfile
-from _helpers import progress_retrieve
+import logging
 logger = logging.getLogger(__name__)
+
+from pathlib import Path
+import tarfile
+
+from _helpers import progress_retrieve, configure_logging

 if __name__ == "__main__":
+    configure_logging(snakemake) # TODO Make logging compatible with progressbar (see PR #102)
+
     if snakemake.config['tutorial']:
         url = "https://zenodo.org/record/3518020/files/pypsa-eur-tutorial-cutouts.tar.xz"
     else:
         url = "https://zenodo.org/record/3517949/files/pypsa-eur-cutouts.tar.xz"

-    tarball_fn = "./cutouts.tar.xz"
+    # Save location
+    tarball_fn = Path("./cutouts.tar.xz")

+    logger.info(f"Downloading cutouts from '{url}'.")
     progress_retrieve(url, tarball_fn)

+    logger.info(f"Extracting cutouts.")
     tarfile.open(tarball_fn).extractall()

-    os.remove(tarball_fn)
+    tarball_fn.unlink()
+
+    logger.info(f"Cutouts available in '{Path(tarball_fn.stem).stem}'.")

scripts/retrieve_databundle.py

@@ -28,22 +28,32 @@ The :ref:`tutorial` uses a smaller `data bundle <https://zenodo.org/record/35179
 """

-import logging, os, tarfile
-from _helpers import progress_retrieve
+import logging
 logger = logging.getLogger(__name__)
+
+from _helpers import progress_retrieve, configure_logging
+from pathlib import Path
+import tarfile

 if __name__ == "__main__":
+    configure_logging(snakemake) # TODO Make logging compatible with progressbar (see PR #102)
+
     if snakemake.config['tutorial']:
         url = "https://zenodo.org/record/3517921/files/pypsa-eur-tutorial-data-bundle.tar.xz"
     else:
         url = "https://zenodo.org/record/3517935/files/pypsa-eur-data-bundle.tar.xz"

-    tarball_fn = "./bundle.tar.xz"
+    # Save locations
+    tarball_fn = Path("./bundle.tar.xz")
+    to_fn = Path("./data")

+    logger.info(f"Downloading databundle from '{url}'.")
     progress_retrieve(url, tarball_fn)

-    tarfile.open(tarball_fn).extractall('./data')
+    logger.info(f"Extracting databundle.")
+    tarfile.open(tarball_fn).extractall(to_fn)

-    os.remove(tarball_fn)
+    tarball_fn.unlink()
+
+    logger.info(f"Databundle available in '{to_fn}'.")

scripts/retrieve_natura_raster.py

@@ -26,17 +26,23 @@ This rule, as a substitute for :mod:`build_natura_raster`, downloads an already
 """

-import logging, os
-from _helpers import progress_retrieve
+import logging
 logger = logging.getLogger(__name__)
+
+from pathlib import Path
+from _helpers import progress_retrieve, configure_logging

 if __name__ == "__main__":
-    d = './resources'
-    if not os.path.exists(d):
-        os.makedirs(d)
+    configure_logging(snakemake) # TODO Make logging compatible with progressbar (see PR #102)

-    progress_retrieve("https://zenodo.org/record/3518215/files/natura.tiff",
-                      "resources/natura.tiff")
+    # Save location, ensure folder existence
+    to_fn = Path("resources/natura.tiff")
+    to_fn.parent.mkdir(parents=True, exist_ok=True)
+
+    url = "https://zenodo.org/record/3518215/files/natura.tiff"
+
+    logger.info(f"Downloading natura raster from '{url}'.")
+    progress_retrieve(url, to_fn)
+
+    logger.info(f"Natura raster available as '{to_fn}'.")

scripts/simplify_network.py

@@ -78,10 +78,13 @@ The rule :mod:`simplify_network` does up to four things:

 4. Optionally, if an integer were provided for the wildcard ``{simpl}`` (e.g. ``networks/elec_s500.nc``), the network is clustered to this number of clusters with the routines from the ``cluster_network`` rule with the function ``cluster_network.cluster(...)``. This step is usually skipped!
 """

+import logging
+logger = logging.getLogger(__name__)
+from _helpers import configure_logging
+
 from cluster_network import clustering_for_n_clusters, cluster_regions
 from add_electricity import load_costs

-import logging
 import pandas as pd
 import numpy as np
 import scipy as sp
@@ -94,8 +97,6 @@ import pypsa
 from pypsa.io import import_components_from_dataframe, import_series_from_dataframe
 from pypsa.networkclustering import busmap_by_stubs, aggregategenerators, aggregateoneport

-logger = logging.getLogger(__name__)
-
 idx = pd.IndexSlice

 def simplify_network_to_380(n):
@@ -350,7 +351,7 @@ if __name__ == "__main__":
         )
     )

-    logging.basicConfig(level=snakemake.config['logging_level'])
+    configure_logging(snakemake)

     n = pypsa.Network(snakemake.input.network)

scripts/solve_network.py

@@ -34,12 +34,12 @@ Relevant Settings
 Inputs
 ------

-- ``networks/{network}_s{simpl}_{clusters}_l{ll}_{opts}.nc``: confer :ref:`prepare`
+- ``networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: confer :ref:`prepare`

 Outputs
 -------

-- ``results/networks/{network}_s{simpl}_{clusters}_l{ll}_{opts}.nc``: Solved PyPSA network including optimisation results
+- ``results/networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: Solved PyPSA network including optimisation results

 .. image:: ../img/results.png
     :scale: 40 %
@@ -77,10 +77,12 @@ Details (and errors made through this heuristic) are discussed in the paper

 """

-import numpy as np
-import pandas as pd
 import logging
 logger = logging.getLogger(__name__)
+from _helpers import configure_logging
+
+import numpy as np
+import pandas as pd

 import gc
 import pypsa
@@ -379,13 +381,12 @@ if __name__ == "__main__":
                  python="logs/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_python.log")
         )

+    configure_logging(snakemake)
+
     tmpdir = snakemake.config['solving'].get('tmpdir')
     if tmpdir is not None:
         patch_pyomo_tmpdir(tmpdir)

-    logging.basicConfig(filename=snakemake.log.python,
-                        level=snakemake.config['logging_level'])
-
     with memory_logger(filename=getattr(snakemake.log, 'memory', None), interval=30.) as mem:
         n = pypsa.Network(snakemake.input[0])

scripts/solve_operations_network.py

@@ -29,23 +29,25 @@ Inputs
 ------

 - ``networks/{network}_s{simpl}_{clusters}.nc``: confer :ref:`cluster`
-- ``results/networks/{network}_s{simpl}_{clusters}_l{ll}_{opts}.nc``: confer :ref:`solve`
+- ``results/networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: confer :ref:`solve`

 Outputs
 -------

-- ``results/networks/{network}_s{simpl}_{clusters}_l{ll}_{opts}_op.nc``: Solved PyPSA network for optimal dispatch including optimisation results
+- ``results/networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_op.nc``: Solved PyPSA network for optimal dispatch including optimisation results

 Description
 -----------

 """

+import logging
+logger = logging.getLogger(__name__)
+from _helpers import configure_logging
+
 import pypsa
 import numpy as np
 import re
-import logging
-logger = logging.getLogger(__name__)

 from vresutils.benchmark import memory_logger
 from solve_network import patch_pyomo_tmpdir, solve_network, prepare_network
@@ -93,8 +95,7 @@ if __name__ == "__main__":
     if tmpdir is not None:
         patch_pyomo_tmpdir(tmpdir)

-    logging.basicConfig(filename=snakemake.log.python,
-                        level=snakemake.config['logging_level'])
+    configure_logging(snakemake)

     n = pypsa.Network(snakemake.input.unprepared)

scripts/trace_solve_network.py

@@ -28,25 +28,26 @@ Relevant Settings
 Inputs
 ------

-- ``networks/{network}_s{simpl}_{clusters}_l{ll}_{opts}.nc``: confer :ref:`prepare`
+- ``networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: confer :ref:`prepare`

 Outputs
 -------

-- ``results/networks/{network}_s{simpl}_{clusters}_l{ll}_{opts}_trace.nc``: Solved PyPSA network including optimisation results (with trace)
+- ``results/networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_trace.nc``: Solved PyPSA network including optimisation results (with trace)

 Description
 -----------

 """

+import logging
+logger = logging.getLogger(__name__)
+from _helpers import configure_logging
+
 from solve_network import patch_pyomo_tmpdir, prepare_network, solve_network
-import logging

 import pypsa

-logger = logging.getLogger(__name__)
-
 if __name__ == "__main__":
     # Detect running outside of snakemake and mock snakemake for testing
     if 'snakemake' not in globals():
@@ -62,8 +63,7 @@ if __name__ == "__main__":
     if tmpdir is not None:
         patch_pyomo_tmpdir(tmpdir)

-    logging.basicConfig(filename=snakemake.log.python,
-                        level=snakemake.config['logging_level'])
+    configure_logging(snakemake)

     n = pypsa.Network(snakemake.input[0])

test/config.test1.yaml

@@ -1,6 +1,8 @@
 version: 0.1
 tutorial: true
-logging_level: INFO
+logging:
+  level: INFO
+  format: '%(levelname)s:%(name)s:%(message)s'

 summary_dir: results