From e7836246ceb6e5ded89dfcf1404bc5466786b9be Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 26 Apr 2023 12:02:59 +0200 Subject: [PATCH 01/76] scenario management: draft scenario yaml creator --- config/create_scenarios.py | 31 +++++++++++++++++++++++++++++++ config/scenarios.yaml | 0 2 files changed, 31 insertions(+) create mode 100644 config/create_scenarios.py create mode 100644 config/scenarios.yaml diff --git a/config/create_scenarios.py b/config/create_scenarios.py new file mode 100644 index 00000000..9a5f9a98 --- /dev/null +++ b/config/create_scenarios.py @@ -0,0 +1,31 @@ +# -*- coding: utf-8 -*- +# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors +# +# SPDX-License-Identifier: MIT + +import itertools + +# Insert your config values that should be altered in the template. +template = """ +scenario{scenario_number}: + sector: + carbon_: {config_value} + + config_section2: + config_key2: {config_value2} +""" + +# Define all possible combinations of config values. +# This must define all config values that are used in the template. 
+config_values = dict(config_values=["true", "false"], config_values2=[1, 2, 3, 4, 5]) + +combinations = [ + dict(zip(config_values.keys(), values)) + for values in itertools.product(*config_values.values()) +] + +# write the scenarios to a file +filename = "scenarios.yaml" +with open(filename, "w") as f: + for i, config in enumerate(combinations): + f.write(template.format(scenario_number=i, **config)) diff --git a/config/scenarios.yaml b/config/scenarios.yaml new file mode 100644 index 00000000..e69de29b From e28ae59375e9561304905bec34460f8aa87a4568 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 15 Aug 2023 15:02:41 +0200 Subject: [PATCH 02/76] introduce scenario-management --- .github/workflows/ci.yaml | 1 + .gitignore | 17 +-- Snakefile | 19 ++- config/config.default.yaml | 2 + config/create_scenarios.py | 31 ----- config/scenarios.yaml | 12 ++ config/test/config.scenarios.electricity.yaml | 89 ++++++++++++++ config/test/scenarios.electricity.yaml | 14 +++ doc/configtables/run.csv | 3 +- doc/configtables/toplevel.csv | 1 + rules/build_electricity.smk | 110 +++++++++--------- rules/build_sector.smk | 78 +++++++------ rules/collect.smk | 29 +++-- rules/common.smk | 51 ++++++++ rules/postprocess.smk | 28 ++--- rules/retrieve.smk | 16 +-- scripts/_helpers.py | 8 ++ scripts/add_electricity.py | 3 +- scripts/add_extra_components.py | 3 +- scripts/base_network.py | 3 +- scripts/build_bus_regions.py | 3 +- scripts/build_cross_border_flows.py | 3 +- scripts/build_cutout.py | 3 +- scripts/build_electricity_demand.py | 3 +- scripts/build_electricity_prices.py | 3 +- scripts/build_electricity_production.py | 3 +- scripts/build_hydro_profile.py | 3 +- scripts/build_line_rating.py | 3 +- scripts/build_monthly_prices.py | 3 +- scripts/build_natura_raster.py | 3 +- scripts/build_powerplants.py | 3 +- scripts/build_renewable_profiles.py | 3 +- scripts/build_shapes.py | 3 +- scripts/build_ship_raster.py | 20 ++-- scripts/cluster_network.py | 3 +- scripts/plot_statistics.py | 3 
+- scripts/plot_validation_cross_border_flows.py | 3 +- scripts/plot_validation_electricity_prices.py | 3 +- .../plot_validation_electricity_production.py | 3 +- scripts/prepare_links_p_nom.py | 3 +- scripts/prepare_network.py | 3 +- scripts/retrieve_databundle.py | 2 +- scripts/retrieve_monthly_fuel_prices.py | 3 +- scripts/retrieve_sector_databundle.py | 3 +- scripts/simplify_network.py | 3 +- scripts/solve_network.py | 7 +- scripts/solve_operations_network.py | 7 +- 47 files changed, 419 insertions(+), 204 deletions(-) delete mode 100644 config/create_scenarios.py create mode 100644 config/test/config.scenarios.electricity.yaml create mode 100644 config/test/scenarios.electricity.yaml diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index c2be3909..ff481b46 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -83,6 +83,7 @@ jobs: snakemake -call solve_elec_networks --configfile config/test/config.electricity.yaml --rerun-triggers=mtime snakemake -call all --configfile config/test/config.overnight.yaml --rerun-triggers=mtime snakemake -call all --configfile config/test/config.myopic.yaml --rerun-triggers=mtime + snakemake -call all --configfile config/test/config.electricity.scenario.yaml - name: Upload artifacts uses: actions/upload-artifact@v3 diff --git a/.gitignore b/.gitignore index 0adf0ae6..67cab0c9 100644 --- a/.gitignore +++ b/.gitignore @@ -19,10 +19,15 @@ gurobi.log /notebooks /data /cutouts - +/tmp doc/_build +/scripts/old +/scripts/create_scenarios.py + config.yaml +config/scenario.yaml + dconf /data/links_p_nom.csv @@ -51,25 +56,15 @@ publications.jrc.ec.europa.eu/ *.nc *~ -/scripts/old *.pyc -/cutouts -/tmp -/pypsa *.xlsx -config.yaml - -doc/_build - *.xls *.geojson *.ipynb -data/costs_* - merger-todos.md diff --git a/Snakefile b/Snakefile index 0e783beb..e495e7d3 100644 --- a/Snakefile +++ b/Snakefile @@ -4,14 +4,13 @@ from os.path import normpath, exists from shutil import copyfile, move, rmtree - +from 
pathlib import Path +import yaml from snakemake.remote.HTTP import RemoteProvider as HTTPRemoteProvider - -HTTP = HTTPRemoteProvider() - from snakemake.utils import min_version min_version("7.7") +HTTP = HTTPRemoteProvider() if not exists("config/config.yaml"): @@ -24,8 +23,16 @@ configfile: "config/config.yaml" COSTS = f"data/costs_{config['costs']['year']}.csv" ATLITE_NPROCESSES = config["atlite"].get("nprocesses", 4) -run = config.get("run", {}) -RDIR = run["name"] + "/" if run.get("name") else "" +run = config["run"] +if run.get("scenarios", False): + if run["shared_resources"]: + raise ValueError("Cannot use shared resources with scenarios") + scenarios = yaml.safe_load(Path(config["scenariofile"]).read_text()) + RDIR = "{run}/" +elif run["name"]: + RDIR = run["name"] + "/" +else: + RDIR = "" CDIR = RDIR if not run.get("shared_cutouts") else "" LOGS = "logs/" + RDIR diff --git a/config/config.default.yaml b/config/config.default.yaml index b162b75d..5357db8d 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -5,6 +5,7 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#top-level-configuration version: 0.8.1 tutorial: false +scenariofile: config/scenarios.yaml logging: level: INFO @@ -21,6 +22,7 @@ remote: # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: name: "" + scenarios: false disable_progressbar: false shared_resources: false shared_cutouts: true diff --git a/config/create_scenarios.py b/config/create_scenarios.py deleted file mode 100644 index 9a5f9a98..00000000 --- a/config/create_scenarios.py +++ /dev/null @@ -1,31 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors -# -# SPDX-License-Identifier: MIT - -import itertools - -# Insert your config values that should be altered in the template. 
-template = """ -scenario{scenario_number}: - sector: - carbon_: {config_value} - - config_section2: - config_key2: {config_value2} -""" - -# Define all possible combinations of config values. -# This must define all config values that are used in the template. -config_values = dict(config_values=["true", "false"], config_values2=[1, 2, 3, 4, 5]) - -combinations = [ - dict(zip(config_values.keys(), values)) - for values in itertools.product(*config_values.values()) -] - -# write the scenarios to a file -filename = "scenarios.yaml" -with open(filename, "w") as f: - for i, config in enumerate(combinations): - f.write(template.format(scenario_number=i, **config)) diff --git a/config/scenarios.yaml b/config/scenarios.yaml index e69de29b..37d32243 100644 --- a/config/scenarios.yaml +++ b/config/scenarios.yaml @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors +# +# SPDX-License-Identifier: MIT + +# This file is used to define the scenarios that are run by snakemake. Each entry on the first level is a scenario. Each scenario can contain configuration overrides with respect to the config/config.yaml settings. 
+# +# Example +# +# custom-scenario: # name of the scenario +# electricity: +# renewable_carriers: [wind, solar] # override the list of renewable carriers diff --git a/config/test/config.scenarios.electricity.yaml b/config/test/config.scenarios.electricity.yaml new file mode 100644 index 00000000..0e4ced04 --- /dev/null +++ b/config/test/config.scenarios.electricity.yaml @@ -0,0 +1,89 @@ +# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors +# +# SPDX-License-Identifier: CC0-1.0 + +tutorial: true +scenariofile: "config/test/scenarios.electricity.yaml" + +run: + name: + - test-elec-no-offshore-wind + - test-elec-no-onshore-wind + scenarios: true + disable_progressbar: true + shared_resources: false # cannot be true if scenarios is true + shared_cutouts: true + +scenario: + clusters: + - 5 + opts: + - Co2L-24H + +countries: ['BE'] + +snapshots: + start: "2013-03-01" + end: "2013-03-08" + +electricity: + co2limit: 100.e+6 + + extendable_carriers: + Generator: [OCGT] + StorageUnit: [battery] + Store: [H2] + Link: [H2 pipeline] + + renewable_carriers: [solar, onwind, offwind-ac, offwind-dc] + + +atlite: + default_cutout: be-03-2013-era5 + cutouts: + be-03-2013-era5: + module: era5 + x: [4., 15.] + y: [46., 56.] + time: ["2013-03-01", "2013-03-08"] + +renewable: + onwind: + cutout: be-03-2013-era5 + offwind-ac: + cutout: be-03-2013-era5 + max_depth: false + offwind-dc: + cutout: be-03-2013-era5 + max_depth: false + solar: + cutout: be-03-2013-era5 + + +clustering: + exclude_carriers: ["OCGT", "offwind-ac", "coal"] + +lines: + dynamic_line_rating: + activate: true + cutout: be-03-2013-era5 + max_line_rating: 1.3 + + +solving: + solver: + name: glpk + options: "glpk-default" + + +plotting: + map: + boundaries: + eu_node_location: + x: -5.5 + y: 46. 
+ costs_max: 1000 + costs_threshold: 0.0000001 + energy_max: + energy_min: + energy_threshold: 0.000001 diff --git a/config/test/scenarios.electricity.yaml b/config/test/scenarios.electricity.yaml new file mode 100644 index 00000000..e9893479 --- /dev/null +++ b/config/test/scenarios.electricity.yaml @@ -0,0 +1,14 @@ +# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors +# +# SPDX-License-Identifier: CC0-1.0 + +test-elec-no-offshore-wind: + electricity: + renewable_carriers: [solar, onwind] + + +test-elec-no-onshore-wind: + electricity: + extendable_carriers: + Generator: [OCGT] + renewable_carriers: [solar, offwind-ac, offwind-dc] diff --git a/doc/configtables/run.csv b/doc/configtables/run.csv index 90cf65ad..2d5cf5d9 100644 --- a/doc/configtables/run.csv +++ b/doc/configtables/run.csv @@ -1,5 +1,6 @@ ,Unit,Values,Description -name,--,"any string","Specify a name for your run. Results will be stored under this name." +name,--,str/list,"Specify a name for your run. Results will be stored under this name. If ``scenarios`` is set to ``true``, the name must contain a subset of scenario names defined in ``scenariofile``." +scenarios,--,bool,"{true, false}","Switch to select whether workflow should generate scenarios based on ``scenariofile``." disable_progrssbar,bool,"{true, false}","Switch to select whether progressbar should be disabled." shared_resources,bool,"{true, false}","Switch to select whether resources should be shared across runs." shared_cutouts,bool,"{true, false}","Switch to select whether cutouts should be shared across runs." diff --git a/doc/configtables/toplevel.csv b/doc/configtables/toplevel.csv index 67954389..8cbb3e56 100644 --- a/doc/configtables/toplevel.csv +++ b/doc/configtables/toplevel.csv @@ -1,6 +1,7 @@ ,Unit,Values,Description version,--,0.x.x,Version of PyPSA-Eur. Descriptive only. tutorial,bool,"{true, false}",Switch to retrieve the tutorial data set instead of the full data set. 
+scenariofile,str,,Path to the scenario yaml file. The scenario file contains config overrides for each scenario. In order to be taken account, ``run:scenarios`` has to be set to ``true`` and ``run:name`` has to be a subset of top level keys given in the scenario file. In order to automatically create a `scenario.yaml` file based on a combindation of settings, alter and use the ``create_scenarios.py`` script in ``scripts``. logging,,, -- level,--,"Any of {'INFO', 'WARNING', 'ERROR'}","Restrict console outputs to all infos, warning or errors only" -- format,--,,Custom format for log messages. See `LogRecord `_ attributes. diff --git a/rules/build_electricity.smk b/rules/build_electricity.smk index 383951bd..4d59c058 100644 --- a/rules/build_electricity.smk +++ b/rules/build_electricity.smk @@ -20,9 +20,9 @@ if config["enable"].get("prepare_links_p_nom", False): rule build_electricity_demand: params: - snapshots=config["snapshots"], - countries=config["countries"], - load=config["load"], + snapshots=config_provider("snapshots"), + countries=config_provider("countries"), + load=config_provider("load"), input: ancient("data/load_raw.csv"), output: @@ -39,9 +39,9 @@ rule build_electricity_demand: rule build_powerplants: params: - powerplants_filter=config["electricity"]["powerplants_filter"], - custom_powerplants=config["electricity"]["custom_powerplants"], - countries=config["countries"], + powerplants_filter=config_provider("electricity", "powerplants_filter"), + custom_powerplants=config_provider("electricity", "custom_powerplants"), + countries=config_provider("countries"), input: base_network=RESOURCES + "networks/base.nc", custom_powerplants="data/custom_powerplants.csv", @@ -60,11 +60,11 @@ rule build_powerplants: rule base_network: params: - countries=config["countries"], - snapshots=config["snapshots"], - lines=config["lines"], - links=config["links"], - transformers=config["transformers"], + countries=config_provider("countries"), + 
snapshots=config_provider("snapshots"), + lines=config_provider("lines"), + links=config_provider("links"), + transformers=config_provider("transformers"), input: eg_buses="data/entsoegridkit/buses.csv", eg_lines="data/entsoegridkit/lines.csv", @@ -94,7 +94,7 @@ rule base_network: rule build_shapes: params: - countries=config["countries"], + countries=config_provider("countries"), input: naturalearth=ancient("data/bundle/naturalearth/ne_10m_admin_0_countries.shp"), eez=ancient("data/bundle/eez/World_EEZ_v8_2014.shp"), @@ -121,7 +121,7 @@ rule build_shapes: rule build_bus_regions: params: - countries=config["countries"], + countries=config_provider("countries"), input: country_shapes=RESOURCES + "country_shapes.geojson", offshore_shapes=RESOURCES + "offshore_shapes.geojson", @@ -144,8 +144,8 @@ if config["enable"].get("build_cutout", False): rule build_cutout: params: - snapshots=config["snapshots"], - cutouts=config["atlite"]["cutouts"], + snapshots=config_provider("snapshots"), + cutouts=config_provider("atlite", "cutouts"), input: regions_onshore=RESOURCES + "regions_onshore.geojson", regions_offshore=RESOURCES + "regions_offshore.geojson", @@ -208,7 +208,7 @@ rule build_ship_raster: rule build_renewable_profiles: params: - renewable=config["renewable"], + renewable=config_provider("renewable"), input: base_network=RESOURCES + "networks/base.nc", corine=ancient("data/bundle/corine/g250_clc06_V18_5.tif"), @@ -277,8 +277,8 @@ rule build_monthly_prices: rule build_hydro_profile: params: - hydro=config["renewable"]["hydro"], - countries=config["countries"], + hydro=config_provider("renewable", "hydro"), + countries=config_provider("countries"), input: country_shapes=RESOURCES + "country_shapes.geojson", eia_hydro_generation="data/eia_hydro_annual_generation.csv", @@ -321,13 +321,13 @@ if config["lines"]["dynamic_line_rating"]["activate"]: rule add_electricity: params: - length_factor=config["lines"]["length_factor"], - scaling_factor=config["load"]["scaling_factor"], 
- countries=config["countries"], - renewable=config["renewable"], - electricity=config["electricity"], - conventional=config["conventional"], - costs=config["costs"], + length_factor=config_provider("lines", "length_factor"), + scaling_factor=config_provider("load", "scaling_factor"), + countries=config_provider("countries"), + renewable=config_provider("renewable"), + electricity=config_provider("electricity"), + conventional=config_provider("conventional"), + costs=config_provider("costs"), input: **{ f"profile_{tech}": RESOURCES + f"profile_{tech}.nc" @@ -370,14 +370,16 @@ rule add_electricity: rule simplify_network: params: - simplify_network=config["clustering"]["simplify_network"], - aggregation_strategies=config["clustering"].get("aggregation_strategies", {}), - focus_weights=config.get("focus_weights", None), - renewable_carriers=config["electricity"]["renewable_carriers"], - max_hours=config["electricity"]["max_hours"], - length_factor=config["lines"]["length_factor"], - p_max_pu=config["links"].get("p_max_pu", 1.0), - costs=config["costs"], + simplify_network=config_provider("clustering", "simplify_network"), + aggregation_strategies=config_provider( + "clustering", "aggregation_strategies", default={} + ), + focus_weights=config_provider("focus_weights", default=None), + renewable_carriers=config_provider("electricity", "renewable_carriers"), + max_hours=config_provider("electricity", "max_hours"), + length_factor=config_provider("lines", "length_factor"), + p_max_pu=config_provider("links", "p_max_pu", default=1.0), + costs=config_provider("costs"), input: network=RESOURCES + "networks/elec.nc", tech_costs=COSTS, @@ -404,15 +406,19 @@ rule simplify_network: rule cluster_network: params: - cluster_network=config["clustering"]["cluster_network"], - aggregation_strategies=config["clustering"].get("aggregation_strategies", {}), - custom_busmap=config["enable"].get("custom_busmap", False), - focus_weights=config.get("focus_weights", None), - 
renewable_carriers=config["electricity"]["renewable_carriers"], - conventional_carriers=config["electricity"].get("conventional_carriers", []), - max_hours=config["electricity"]["max_hours"], - length_factor=config["lines"]["length_factor"], - costs=config["costs"], + cluster_network=config_provider("clustering", "cluster_network"), + aggregation_strategies=config_provider( + "clustering", "aggregation_strategies", default={} + ), + custom_busmap=config_provider("enable", "custom_busmap", default=False), + focus_weights=config_provider("focus_weights", default=None), + renewable_carriers=config_provider("electricity", "renewable_carriers"), + conventional_carriers=config_provider( + "electricity", "conventional_carriers", default=[] + ), + max_hours=config_provider("electricity", "max_hours"), + length_factor=config_provider("lines", "length_factor"), + costs=config_provider("costs"), input: network=RESOURCES + "networks/elec_s{simpl}.nc", regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}.geojson", @@ -445,9 +451,9 @@ rule cluster_network: rule add_extra_components: params: - extendable_carriers=config["electricity"]["extendable_carriers"], - max_hours=config["electricity"]["max_hours"], - costs=config["costs"], + extendable_carriers=config_provider("electricity", "extendable_carriers"), + max_hours=config_provider("electricity", "max_hours"), + costs=config_provider("costs"), input: network=RESOURCES + "networks/elec_s{simpl}_{clusters}.nc", tech_costs=COSTS, @@ -468,13 +474,13 @@ rule add_extra_components: rule prepare_network: params: - links=config["links"], - lines=config["lines"], - co2base=config["electricity"]["co2base"], - co2limit=config["electricity"]["co2limit"], - gaslimit=config["electricity"].get("gaslimit"), - max_hours=config["electricity"]["max_hours"], - costs=config["costs"], + links=config_provider("links"), + lines=config_provider("lines"), + co2base=config_provider("electricity", "co2base"), + co2limit=config_provider("electricity", 
"co2limit"), + gaslimit=config_provider("electricity", "gaslimit"), + max_hours=config_provider("electricity", "max_hours"), + costs=config_provider("costs"), input: RESOURCES + "networks/elec_s{simpl}_{clusters}_ec.nc", tech_costs=COSTS, diff --git a/rules/build_sector.smk b/rules/build_sector.smk index 356abdc5..cc80ab64 100644 --- a/rules/build_sector.smk +++ b/rules/build_sector.smk @@ -141,7 +141,7 @@ if not (config["sector"]["gas_network"] or config["sector"]["H2_retrofit"]): rule build_heat_demands: params: - snapshots=config["snapshots"], + snapshots=config_provider("snapshots"), input: pop_layout=RESOURCES + "pop_layout_{scope}.nc", regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", @@ -163,7 +163,7 @@ rule build_heat_demands: rule build_temperature_profiles: params: - snapshots=config["snapshots"], + snapshots=config_provider("snapshots"), input: pop_layout=RESOURCES + "pop_layout_{scope}.nc", regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", @@ -186,7 +186,7 @@ rule build_temperature_profiles: rule build_cop_profiles: params: - heat_pump_sink_T=config["sector"]["heat_pump_sink_T"], + heat_pump_sink_T=config_provider("sector", "heat_pump_sink_T"), input: temp_soil_total=RESOURCES + "temp_soil_total_elec_s{simpl}_{clusters}.nc", temp_soil_rural=RESOURCES + "temp_soil_rural_elec_s{simpl}_{clusters}.nc", @@ -215,8 +215,8 @@ rule build_cop_profiles: rule build_solar_thermal_profiles: params: - snapshots=config["snapshots"], - solar_thermal=config["solar_thermal"], + snapshots=config_provider("snapshots"), + solar_thermal=config_provider("solar_thermal"), input: pop_layout=RESOURCES + "pop_layout_{scope}.nc", regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", @@ -238,8 +238,8 @@ rule build_solar_thermal_profiles: rule build_energy_totals: params: - countries=config["countries"], - energy=config["energy"], + countries=config_provider("countries"), + 
energy=config_provider("energy"), input: nuts3_shapes=RESOURCES + "nuts3_shapes.geojson", co2="data/eea/UNFCCC_v23.csv", @@ -266,7 +266,7 @@ rule build_energy_totals: rule build_biomass_potentials: params: - biomass=config["biomass"], + biomass=config_provider("biomass"), input: enspreso_biomass=HTTP.remote( "https://cidportal.jrc.ec.europa.eu/ftp/jrc-opendata/ENSPRESO/ENSPRESO_BIOMASS.xlsx", @@ -329,9 +329,9 @@ if config["sector"]["regional_co2_sequestration_potential"]["enable"]: rule build_sequestration_potentials: params: - sequestration_potential=config["sector"][ - "regional_co2_sequestration_potential" - ], + sequestration_potential=config_provider( + "sector", "regional_co2_sequestration_potential" + ), input: sequestration_potential=HTTP.remote( "https://raw.githubusercontent.com/ericzhou571/Co2Storage/main/resources/complete_map_2020_unit_Mt.geojson", @@ -386,7 +386,7 @@ rule build_salt_cavern_potentials: rule build_ammonia_production: params: - countries=config["countries"], + countries=config_provider("countries"), input: usgs="data/myb1-2017-nitro.xls", output: @@ -406,8 +406,8 @@ rule build_ammonia_production: rule build_industry_sector_ratios: params: - industry=config["industry"], - ammonia=config["sector"].get("ammonia", False), + industry=config_provider("industry"), + ammonia=config_provider("sector", "ammonia", default=False), input: ammonia_production=RESOURCES + "ammonia_production.csv", idees="data/jrc-idees-2015", @@ -428,8 +428,8 @@ rule build_industry_sector_ratios: rule build_industrial_production_per_country: params: - industry=config["industry"], - countries=config["countries"], + industry=config_provider("industry"), + countries=config_provider("countries"), input: ammonia_production=RESOURCES + "ammonia_production.csv", jrc="data/jrc-idees-2015", @@ -452,7 +452,7 @@ rule build_industrial_production_per_country: rule build_industrial_production_per_country_tomorrow: params: - industry=config["industry"], + 
industry=config_provider("industry"), input: industrial_production_per_country=RESOURCES + "industrial_production_per_country.csv", @@ -478,8 +478,10 @@ rule build_industrial_production_per_country_tomorrow: rule build_industrial_distribution_key: params: - hotmaps_locate_missing=config["industry"].get("hotmaps_locate_missing", False), - countries=config["countries"], + hotmaps_locate_missing=config_provider( + "industry", "hotmaps_locate_missing", default=False + ), + countries=config_provider("countries"), input: regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv", @@ -555,8 +557,8 @@ rule build_industrial_energy_demand_per_node: rule build_industrial_energy_demand_per_country_today: params: - countries=config["countries"], - industry=config["industry"], + countries=config_provider("countries"), + industry=config_provider("industry"), input: jrc="data/jrc-idees-2015", ammonia_production=RESOURCES + "ammonia_production.csv", @@ -604,8 +606,8 @@ if config["sector"]["retrofitting"]["retro_endogen"]: rule build_retro_cost: params: - retrofitting=config["sector"]["retrofitting"], - countries=config["countries"], + retrofitting=config_provider("sector", "retrofitting"), + countries=config_provider("countries"), input: building_stock="data/retro/data_building_stock.csv", data_tabula="data/retro/tabula-calculator-calcsetbuilding.csv", @@ -677,8 +679,8 @@ rule build_shipping_demand: rule build_transport_demand: params: - snapshots=config["snapshots"], - sector=config["sector"], + snapshots=config_provider("snapshots"), + sector=config_provider("sector"), input: clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv", pop_weighted_energy_totals=RESOURCES @@ -705,18 +707,20 @@ rule build_transport_demand: rule prepare_sector_network: params: - co2_budget=config["co2_budget"], - conventional_carriers=config["existing_capacities"]["conventional_carriers"], 
- foresight=config["foresight"], - costs=config["costs"], - sector=config["sector"], - industry=config["industry"], - pypsa_eur=config["pypsa_eur"], - length_factor=config["lines"]["length_factor"], - planning_horizons=config["scenario"]["planning_horizons"], - countries=config["countries"], - emissions_scope=config["energy"]["emissions"], - eurostat_report_year=config["energy"]["eurostat_report_year"], + co2_budget=config_provider("co2_budget"), + conventional_carriers=config_provider( + "existing_capacities", "conventional_carriers" + ), + foresight=config_provider("foresight"), + costs=config_provider("costs"), + sector=config_provider("sector"), + industry=config_provider("industry"), + pypsa_eur=config_provider("pypsa_eur"), + length_factor=config_provider("lines", "length_factor"), + planning_horizons=config_provider("scenario", "planning_horizons"), + countries=config_provider("countries"), + emissions_scope=config_provider("energy", "emissions"), + eurostat_report_year=config_provider("energy", "eurostat_report_year"), RDIR=RDIR, input: **build_retro_cost_output, diff --git a/rules/collect.smk b/rules/collect.smk index 74f26ccb..a29aa715 100644 --- a/rules/collect.smk +++ b/rules/collect.smk @@ -22,13 +22,19 @@ rule all: rule cluster_networks: input: - expand(RESOURCES + "networks/elec_s{simpl}_{clusters}.nc", **config["scenario"]), + expand( + RESOURCES + "networks/elec_s{simpl}_{clusters}.nc", + **config["scenario"], + run=config["run"]["name"] + ), rule extra_components_networks: input: expand( - RESOURCES + "networks/elec_s{simpl}_{clusters}_ec.nc", **config["scenario"] + RESOURCES + "networks/elec_s{simpl}_{clusters}_ec.nc", + **config["scenario"], + run=config["run"]["name"] ), @@ -36,7 +42,8 @@ rule prepare_elec_networks: input: expand( RESOURCES + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", - **config["scenario"] + **config["scenario"], + run=config["run"]["name"] ), @@ -45,7 +52,8 @@ rule prepare_sector_networks: expand( RESULTS + 
"prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", - **config["scenario"] + **config["scenario"], + run=config["run"]["name"] ), @@ -53,7 +61,8 @@ rule solve_elec_networks: input: expand( RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", - **config["scenario"] + **config["scenario"], + run=config["run"]["name"] ), @@ -62,7 +71,8 @@ rule solve_sector_networks: expand( RESULTS + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", - **config["scenario"] + **config["scenario"], + run=config["run"]["name"] ), @@ -71,7 +81,8 @@ rule plot_networks: expand( RESULTS + "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf", - **config["scenario"] + **config["scenario"], + run=config["run"]["name"] ), @@ -80,11 +91,13 @@ rule validate_elec_networks: expand( RESULTS + "figures/.statistics_plots_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}", - **config["scenario"] + **config["scenario"], + run=config["run"]["name"] ), expand( RESULTS + "figures/.validation_{kind}_plots_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}", **config["scenario"], + run=config["run"]["name"], kind=["production", "prices", "cross_border"] ), diff --git a/rules/common.smk b/rules/common.smk index ec5be355..5677f577 100644 --- a/rules/common.smk +++ b/rules/common.smk @@ -2,6 +2,57 @@ # # SPDX-License-Identifier: MIT +import copy + + +def get_config(keys, config, default=None): + """Retrieve a nested value from a dictionary using a tuple of keys.""" + value = config + for key in keys: + value = value.get(key, default) + if value == default: + return default + return value + + +def merge_configs(base_config, scenario_config): + """Merge base config with a specific scenario without modifying the original.""" + merged = copy.deepcopy(base_config) + for key, value in scenario_config.items(): + if key in merged and isinstance(merged[key], dict): + merged[key] = merge_configs(merged[key], 
value) + else: + merged[key] = value + return merged + + +def config_provider(*keys, default=None): + """Dynamically provide config values based on 'run' -> 'name'. + + Usage in Snakemake rules would look something like: + params: + my_param=config_provider("key1", "key2", default="some_default_value") + """ + + def static_getter(wildcards): + """Getter function for static config values.""" + return get_config(keys, config, default) + + def dynamic_getter(wildcards): + """Getter function for dynamic config values based on scenario.""" + scenario_name = wildcards.run + if scenario_name not in scenarios: + raise ValueError( + f"Scenario {scenario_name} not found in file {config['scenariofile']}." + ) + merged_config = merge_configs(config, scenarios[scenario_name]) + return get_config(keys, merged_config, default) + + if config["run"].get("scenarios", False): + return dynamic_getter + else: + return static_getter + def memory(w): factor = 3.0 diff --git a/rules/postprocess.smk b/rules/postprocess.smk index 2618680e..fccda6e4 100644 --- a/rules/postprocess.smk +++ b/rules/postprocess.smk @@ -10,8 +10,8 @@ localrules: rule plot_network: params: - foresight=config["foresight"], - plotting=config["plotting"], + foresight=config_provider("foresight"), + plotting=config_provider("plotting"), input: network=RESULTS + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", @@ -53,16 +53,17 @@ rule copy_config: rule make_summary: params: - foresight=config["foresight"], - costs=config["costs"], - snapshots=config["snapshots"], - scenario=config["scenario"], + foresight=config_provider("foresight"), + costs=config_provider("costs"), + snapshots=config_provider("snapshots"), + scenario=config_provider("scenario"), RDIR=RDIR, input: networks=expand( RESULTS + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", - **config["scenario"] + **config["scenario"], + run=config["run"]["name"] ), 
costs="data/costs_{}.csv".format(config["costs"]["year"]) if config["foresight"] == "overnight" @@ -70,7 +71,8 @@ rule make_summary: plots=expand( RESULTS + "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf", - **config["scenario"] + **config["scenario"], + run=config["run"]["name"] ), output: nodal_costs=RESULTS + "csvs/nodal_costs.csv", @@ -103,10 +105,10 @@ rule make_summary: rule plot_summary: params: - countries=config["countries"], - planning_horizons=config["scenario"]["planning_horizons"], - sector_opts=config["scenario"]["sector_opts"], - plotting=config["plotting"], + countries=config_provider("countries"), + planning_horizons=config_provider("scenario", "planning_horizons"), + sector_opts=config_provider("scenario", "sector_opts"), + plotting=config_provider("plotting"), RDIR=RDIR, input: costs=RESULTS + "csvs/costs.csv", @@ -145,7 +147,7 @@ STATISTICS_BARPLOTS = [ rule plot_elec_statistics: params: - plotting=config["plotting"], + plotting=config_provider("plotting"), barplots=STATISTICS_BARPLOTS, input: network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", diff --git a/rules/retrieve.smk b/rules/retrieve.smk index 0b60ee2e..4b9e9542 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -29,7 +29,7 @@ if config["enable"]["retrieve"] and config["enable"].get("retrieve_databundle", output: expand("data/bundle/{file}", file=datafiles), log: - LOGS + "retrieve_databundle.log", + "logs/retrieve_databundle.log", resources: mem_mb=1000, retries: 2 @@ -72,7 +72,7 @@ if config["enable"]["retrieve"] and config["enable"].get("retrieve_cost_data", T output: "data/costs_{year}.csv", log: - LOGS + "retrieve_cost_data_{year}.log", + "logs/retrieve_cost_data_{year}.log", resources: mem_mb=1000, retries: 2 @@ -123,7 +123,7 @@ if config["enable"]["retrieve"] and config["enable"].get( output: *datafiles, log: - LOGS + "retrieve_sector_databundle.log", + "logs/retrieve_sector_databundle.log", retries: 2 
conda: "../envs/environment.yaml" @@ -145,7 +145,7 @@ if config["enable"]["retrieve"] and ( output: expand("data/gas_network/scigrid-gas/data/{files}", files=datafiles), log: - LOGS + "retrieve_gas_infrastructure_data.log", + "logs/retrieve_gas_infrastructure_data.log", retries: 2 conda: "../envs/environment.yaml" @@ -169,7 +169,7 @@ if config["enable"]["retrieve"]: output: "data/load_raw.csv", log: - LOGS + "retrieve_electricity_demand.log", + "logs/retrieve_electricity_demand.log", resources: mem_mb=5000, retries: 2 @@ -189,7 +189,7 @@ if config["enable"]["retrieve"]: output: "data/shipdensity_global.zip", log: - LOGS + "retrieve_ship_raster.log", + "logs/retrieve_ship_raster.log", resources: mem_mb=5000, retries: 2 @@ -209,7 +209,7 @@ if config["enable"]["retrieve"]: output: "data/validation/emission-spot-primary-market-auction-report-2019-data.xls", log: - LOGS + "retrieve_monthly_co2_prices.log", + "logs/retrieve_monthly_co2_prices.log", resources: mem_mb=5000, retries: 2 @@ -223,7 +223,7 @@ if config["enable"]["retrieve"]: output: "data/validation/energy-price-trends-xlsx-5619002.xlsx", log: - LOGS + "retrieve_monthly_fuel_prices.log", + "logs/retrieve_monthly_fuel_prices.log", resources: mem_mb=5000, retries: 2 diff --git a/scripts/_helpers.py b/scripts/_helpers.py index fc7bc9e0..c66d708e 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -14,6 +14,7 @@ import pytz import yaml from pypsa.components import component_attrs, components from pypsa.descriptors import Dict +from snakemake.utils import update_config from tqdm import tqdm logger = logging.getLogger(__name__) @@ -29,6 +30,13 @@ def mute_print(): yield +def set_scenario_config(snakemake): + if "scenario_config" in snakemake.input: + with open(snakemake.input.scenario_config, "r") as f: + scenario_config = yaml.safe_load(f) + update_config(snakemake.config, scenario_config) + + def configure_logging(snakemake, skip_handlers=False): """ Configure the basic behaviour for the logging module. 
diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index 56375800..ff5e950e 100755 --- a/scripts/add_electricity.py +++ b/scripts/add_electricity.py @@ -92,7 +92,7 @@ import powerplantmatching as pm import pypsa import scipy.sparse as sparse import xarray as xr -from _helpers import configure_logging, update_p_nom_max +from _helpers import configure_logging, set_scenario_config, update_p_nom_max from powerplantmatching.export import map_country_bus from shapely.prepared import prep @@ -809,6 +809,7 @@ if __name__ == "__main__": snakemake = mock_snakemake("add_electricity") configure_logging(snakemake) + set_scenario_config(snakemake) params = snakemake.params diff --git a/scripts/add_extra_components.py b/scripts/add_extra_components.py index e00e1e5f..9fe20066 100644 --- a/scripts/add_extra_components.py +++ b/scripts/add_extra_components.py @@ -55,7 +55,7 @@ import logging import numpy as np import pandas as pd import pypsa -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config from add_electricity import load_costs, sanitize_carriers idx = pd.IndexSlice @@ -231,6 +231,7 @@ if __name__ == "__main__": snakemake = mock_snakemake("add_extra_components", simpl="", clusters=5) configure_logging(snakemake) + set_scenario_config(snakemake) n = pypsa.Network(snakemake.input.network) extendable_carriers = snakemake.params.extendable_carriers diff --git a/scripts/base_network.py b/scripts/base_network.py index b4ac1d8c..b5304109 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -77,7 +77,7 @@ import shapely import shapely.prepared import shapely.wkt import yaml -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config from scipy import spatial from scipy.sparse import csgraph from shapely.geometry import LineString, Point @@ -745,6 +745,7 @@ if __name__ == "__main__": snakemake = mock_snakemake("base_network") configure_logging(snakemake) + 
set_scenario_config(snakemake) n = base_network( snakemake.input.eg_buses, diff --git a/scripts/build_bus_regions.py b/scripts/build_bus_regions.py index a6500bb0..76a57f5e 100644 --- a/scripts/build_bus_regions.py +++ b/scripts/build_bus_regions.py @@ -47,7 +47,7 @@ import geopandas as gpd import numpy as np import pandas as pd import pypsa -from _helpers import REGION_COLS, configure_logging +from _helpers import REGION_COLS, configure_logging, set_scenario_config from scipy.spatial import Voronoi from shapely.geometry import Polygon @@ -115,6 +115,7 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_bus_regions") configure_logging(snakemake) + set_scenario_config(snakemake) countries = snakemake.params.countries diff --git a/scripts/build_cross_border_flows.py b/scripts/build_cross_border_flows.py index b9fc3fe8..743f1742 100644 --- a/scripts/build_cross_border_flows.py +++ b/scripts/build_cross_border_flows.py @@ -8,7 +8,7 @@ import logging import pandas as pd import pypsa -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config from entsoe import EntsoePandasClient from entsoe.exceptions import InvalidBusinessParameterError, NoMatchingDataError from requests import HTTPError @@ -21,6 +21,7 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_cross_border_flows") configure_logging(snakemake) + set_scenario_config(snakemake) api_key = snakemake.config["private"]["keys"]["entsoe_api"] client = EntsoePandasClient(api_key=api_key) diff --git a/scripts/build_cutout.py b/scripts/build_cutout.py index 9a7f9e00..f9f951b5 100644 --- a/scripts/build_cutout.py +++ b/scripts/build_cutout.py @@ -95,7 +95,7 @@ import logging import atlite import geopandas as gpd import pandas as pd -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config logger = logging.getLogger(__name__) @@ -105,6 +105,7 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_cutout", 
cutout="europe-2013-era5") configure_logging(snakemake) + set_scenario_config(snakemake) cutout_params = snakemake.params.cutouts[snakemake.wildcards.cutout] diff --git a/scripts/build_electricity_demand.py b/scripts/build_electricity_demand.py index 38c75544..60d40e1e 100755 --- a/scripts/build_electricity_demand.py +++ b/scripts/build_electricity_demand.py @@ -45,7 +45,7 @@ logger = logging.getLogger(__name__) import dateutil import numpy as np import pandas as pd -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config from pandas import Timedelta as Delta @@ -288,6 +288,7 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_electricity_demand") configure_logging(snakemake) + set_scenario_config(snakemake) powerstatistics = snakemake.params.load["power_statistics"] interpolate_limit = snakemake.params.load["interpolate_limit"] diff --git a/scripts/build_electricity_prices.py b/scripts/build_electricity_prices.py index 353ea7e3..48361afe 100644 --- a/scripts/build_electricity_prices.py +++ b/scripts/build_electricity_prices.py @@ -7,7 +7,7 @@ import logging import pandas as pd -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config from entsoe import EntsoePandasClient from entsoe.exceptions import NoMatchingDataError @@ -19,6 +19,7 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_cross_border_flows") configure_logging(snakemake) + set_scenario_config(snakemake) api_key = snakemake.config["private"]["keys"]["entsoe_api"] client = EntsoePandasClient(api_key=api_key) diff --git a/scripts/build_electricity_production.py b/scripts/build_electricity_production.py index beb859bd..38be2ba0 100644 --- a/scripts/build_electricity_production.py +++ b/scripts/build_electricity_production.py @@ -7,7 +7,7 @@ import logging import pandas as pd -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config from entsoe import 
EntsoePandasClient from entsoe.exceptions import NoMatchingDataError @@ -39,6 +39,7 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_electricity_production") configure_logging(snakemake) + set_scenario_config(snakemake) api_key = snakemake.config["private"]["keys"]["entsoe_api"] client = EntsoePandasClient(api_key=api_key) diff --git a/scripts/build_hydro_profile.py b/scripts/build_hydro_profile.py index bed666f2..883f33d2 100644 --- a/scripts/build_hydro_profile.py +++ b/scripts/build_hydro_profile.py @@ -65,7 +65,7 @@ import atlite import country_converter as coco import geopandas as gpd import pandas as pd -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config cc = coco.CountryConverter() @@ -129,6 +129,7 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_hydro_profile") configure_logging(snakemake) + set_scenario_config(snakemake) params_hydro = snakemake.params.hydro cutout = atlite.Cutout(snakemake.input.cutout) diff --git a/scripts/build_line_rating.py b/scripts/build_line_rating.py index 7f842d43..abc6b286 100755 --- a/scripts/build_line_rating.py +++ b/scripts/build_line_rating.py @@ -59,7 +59,7 @@ import numpy as np import pandas as pd import pypsa import xarray as xr -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config from shapely.geometry import LineString as Line from shapely.geometry import Point @@ -147,6 +147,7 @@ if __name__ == "__main__": opts="Co2L-4H", ) configure_logging(snakemake) + set_scenario_config(snakemake) n = pypsa.Network(snakemake.input.base_network) time = pd.date_range(freq="h", **snakemake.config["snapshots"]) diff --git a/scripts/build_monthly_prices.py b/scripts/build_monthly_prices.py index c2e88972..89edde79 100644 --- a/scripts/build_monthly_prices.py +++ b/scripts/build_monthly_prices.py @@ -46,7 +46,7 @@ Data was accessed at 16.5.2023 import logging import pandas as pd -from _helpers import 
configure_logging +from _helpers import configure_logging, set_scenario_config logger = logging.getLogger(__name__) @@ -114,6 +114,7 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_monthly_prices") configure_logging(snakemake) + set_scenario_config(snakemake) fuel_price = get_fuel_price() fuel_price.to_csv(snakemake.output.fuel_price) diff --git a/scripts/build_natura_raster.py b/scripts/build_natura_raster.py index 8fdb4ea3..79418d5c 100644 --- a/scripts/build_natura_raster.py +++ b/scripts/build_natura_raster.py @@ -46,7 +46,7 @@ import logging import atlite import geopandas as gpd import rasterio as rio -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config from rasterio.features import geometry_mask from rasterio.warp import transform_bounds @@ -92,6 +92,7 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_natura_raster") configure_logging(snakemake) + set_scenario_config(snakemake) cutouts = snakemake.input.cutouts xs, Xs, ys, Ys = zip(*(determine_cutout_xXyY(cutout) for cutout in cutouts)) diff --git a/scripts/build_powerplants.py b/scripts/build_powerplants.py index cbe94505..2ad1e010 100755 --- a/scripts/build_powerplants.py +++ b/scripts/build_powerplants.py @@ -80,7 +80,7 @@ import logging import pandas as pd import powerplantmatching as pm import pypsa -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config from powerplantmatching.export import map_country_bus logger = logging.getLogger(__name__) @@ -115,6 +115,7 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_powerplants") configure_logging(snakemake) + set_scenario_config(snakemake) n = pypsa.Network(snakemake.input.base_network) countries = snakemake.params.countries diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index 7b08325b..40b3151d 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py 
@@ -188,7 +188,7 @@ import geopandas as gpd import numpy as np import pandas as pd import xarray as xr -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config from dask.distributed import Client from pypsa.geo import haversine from shapely.geometry import LineString @@ -202,6 +202,7 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_renewable_profiles", technology="solar") configure_logging(snakemake) + set_scenario_config(snakemake) nprocesses = int(snakemake.threads) noprogress = snakemake.config["run"].get("disable_progressbar", True) diff --git a/scripts/build_shapes.py b/scripts/build_shapes.py index eb837409..571a7282 100644 --- a/scripts/build_shapes.py +++ b/scripts/build_shapes.py @@ -77,7 +77,7 @@ import geopandas as gpd import numpy as np import pandas as pd import pycountry as pyc -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config from shapely.geometry import MultiPolygon, Polygon logger = logging.getLogger(__name__) @@ -254,6 +254,7 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_shapes") configure_logging(snakemake) + set_scenario_config(snakemake) country_shapes = countries(snakemake.input.naturalearth, snakemake.params.countries) country_shapes.reset_index().to_file(snakemake.output.country_shapes) diff --git a/scripts/build_ship_raster.py b/scripts/build_ship_raster.py index 90e006b0..25bebcca 100644 --- a/scripts/build_ship_raster.py +++ b/scripts/build_ship_raster.py @@ -44,9 +44,10 @@ Description import logging import os import zipfile +from pathlib import Path import rioxarray -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config from build_natura_raster import determine_cutout_xXyY logger = logging.getLogger(__name__) @@ -57,16 +58,19 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_ship_raster") configure_logging(snakemake) + set_scenario_config(snakemake) 
cutouts = snakemake.input.cutouts xs, Xs, ys, Ys = zip(*(determine_cutout_xXyY(cutout) for cutout in cutouts)) with zipfile.ZipFile(snakemake.input.ship_density) as zip_f: - zip_f.extract("shipdensity_global.tif") - with rioxarray.open_rasterio("shipdensity_global.tif") as ship_density: - ship_density = ship_density.drop(["band"]).sel( - x=slice(min(xs), max(Xs)), y=slice(max(Ys), min(ys)) - ) - ship_density.rio.to_raster(snakemake.output[0]) + resources = Path(snakemake.output[0]).parent + fn = "shipdensity_global.tif" + zip_f.extract(fn, resources) + with rioxarray.open_rasterio(resources / fn) as ship_density: + ship_density = ship_density.drop(["band"]).sel( + x=slice(min(xs), max(Xs)), y=slice(max(Ys), min(ys)) + ) + ship_density.rio.to_raster(snakemake.output[0]) - os.remove("shipdensity_global.tif") + (resources / fn).unlink() diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 884b6a2b..b0ce4796 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -133,7 +133,7 @@ import pandas as pd import pyomo.environ as po import pypsa import seaborn as sns -from _helpers import configure_logging, update_p_nom_max +from _helpers import configure_logging, set_scenario_config, update_p_nom_max from pypsa.clustering.spatial import ( busmap_by_greedy_modularity, busmap_by_hac, @@ -463,6 +463,7 @@ if __name__ == "__main__": snakemake = mock_snakemake("cluster_network", simpl="", clusters="37") configure_logging(snakemake) + set_scenario_config(snakemake) params = snakemake.params solver_name = snakemake.config["solving"]["solver"]["name"] diff --git a/scripts/plot_statistics.py b/scripts/plot_statistics.py index 1e75203f..a0a3e71d 100644 --- a/scripts/plot_statistics.py +++ b/scripts/plot_statistics.py @@ -7,7 +7,7 @@ import matplotlib.pyplot as plt import pypsa import seaborn as sns -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config sns.set_theme("paper", style="whitegrid") @@ 
-24,6 +24,7 @@ if __name__ == "__main__": ll="v1.0", ) configure_logging(snakemake) + set_scenario_config(snakemake) n = pypsa.Network(snakemake.input.network) diff --git a/scripts/plot_validation_cross_border_flows.py b/scripts/plot_validation_cross_border_flows.py index 43ed45e9..8b063d8c 100644 --- a/scripts/plot_validation_cross_border_flows.py +++ b/scripts/plot_validation_cross_border_flows.py @@ -9,7 +9,7 @@ import matplotlib.pyplot as plt import pandas as pd import pypsa import seaborn as sns -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config sns.set_theme("paper", style="whitegrid") @@ -195,6 +195,7 @@ if __name__ == "__main__": ll="v1.0", ) configure_logging(snakemake) + set_scenario_config(snakemake) countries = snakemake.params.countries diff --git a/scripts/plot_validation_electricity_prices.py b/scripts/plot_validation_electricity_prices.py index 2a187b9f..c229e382 100644 --- a/scripts/plot_validation_electricity_prices.py +++ b/scripts/plot_validation_electricity_prices.py @@ -8,7 +8,7 @@ import matplotlib.pyplot as plt import pandas as pd import pypsa import seaborn as sns -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config from pypsa.statistics import get_bus_and_carrier sns.set_theme("paper", style="whitegrid") @@ -25,6 +25,7 @@ if __name__ == "__main__": ll="v1.0", ) configure_logging(snakemake) + set_scenario_config(snakemake) n = pypsa.Network(snakemake.input.network) n.loads.carrier = "load" diff --git a/scripts/plot_validation_electricity_production.py b/scripts/plot_validation_electricity_production.py index 5c5569d0..3e81faff 100644 --- a/scripts/plot_validation_electricity_production.py +++ b/scripts/plot_validation_electricity_production.py @@ -8,7 +8,7 @@ import matplotlib.pyplot as plt import pandas as pd import pypsa import seaborn as sns -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config 
from pypsa.statistics import get_bus_and_carrier sns.set_theme("paper", style="whitegrid") @@ -35,6 +35,7 @@ if __name__ == "__main__": ll="v1.0", ) configure_logging(snakemake) + set_scenario_config(snakemake) n = pypsa.Network(snakemake.input.network) n.loads.carrier = "load" diff --git a/scripts/prepare_links_p_nom.py b/scripts/prepare_links_p_nom.py index 4b915d22..450f3227 100644 --- a/scripts/prepare_links_p_nom.py +++ b/scripts/prepare_links_p_nom.py @@ -40,7 +40,7 @@ Description import logging import pandas as pd -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config logger = logging.getLogger(__name__) @@ -69,6 +69,7 @@ if __name__ == "__main__": snakemake = mock_snakemake("prepare_links_p_nom", simpl="") configure_logging(snakemake) + set_scenario_config(snakemake) links_p_nom = pd.read_html( "https://en.wikipedia.org/wiki/List_of_HVDC_projects", header=0, match="SwePol" diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index a5a00a3c..a7f1ddf3 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -63,7 +63,7 @@ import re import numpy as np import pandas as pd import pypsa -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config from add_electricity import load_costs, update_transmission_costs from pypsa.descriptors import expand_series @@ -283,6 +283,7 @@ if __name__ == "__main__": "prepare_network", simpl="", clusters="37", ll="v1.0", opts="Ept" ) configure_logging(snakemake) + set_scenario_config(snakemake) opts = snakemake.wildcards.opts.split("-") diff --git a/scripts/retrieve_databundle.py b/scripts/retrieve_databundle.py index 75d8519e..cb3bdc11 100644 --- a/scripts/retrieve_databundle.py +++ b/scripts/retrieve_databundle.py @@ -36,7 +36,7 @@ import logging import tarfile from pathlib import Path -from _helpers import configure_logging, progress_retrieve +from _helpers import configure_logging, progress_retrieve, 
set_scenario_config logger = logging.getLogger(__name__) diff --git a/scripts/retrieve_monthly_fuel_prices.py b/scripts/retrieve_monthly_fuel_prices.py index 11e351ce..887014cc 100644 --- a/scripts/retrieve_monthly_fuel_prices.py +++ b/scripts/retrieve_monthly_fuel_prices.py @@ -12,7 +12,7 @@ logger = logging.getLogger(__name__) from pathlib import Path -from _helpers import configure_logging, progress_retrieve +from _helpers import configure_logging, progress_retrieve, set_scenario_config if __name__ == "__main__": if "snakemake" not in globals(): @@ -23,6 +23,7 @@ if __name__ == "__main__": else: rootpath = "." configure_logging(snakemake) + set_scenario_config(snakemake) url = "https://www.destatis.de/EN/Themes/Economy/Prices/Publications/Downloads-Energy-Price-Trends/energy-price-trends-xlsx-5619002.xlsx?__blob=publicationFile" diff --git a/scripts/retrieve_sector_databundle.py b/scripts/retrieve_sector_databundle.py index 0d172c8d..1beed478 100644 --- a/scripts/retrieve_sector_databundle.py +++ b/scripts/retrieve_sector_databundle.py @@ -13,7 +13,7 @@ logger = logging.getLogger(__name__) import tarfile from pathlib import Path -from _helpers import configure_logging, progress_retrieve +from _helpers import configure_logging, progress_retrieve, set_scenario_config if __name__ == "__main__": if "snakemake" not in globals(): @@ -24,6 +24,7 @@ if __name__ == "__main__": else: rootpath = "." 
configure_logging(snakemake) + set_scenario_config(snakemake) url = "https://zenodo.org/record/5824485/files/pypsa-eur-sec-data-bundle.tar.gz" diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index cac25647..440145ff 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -92,7 +92,7 @@ import numpy as np import pandas as pd import pypsa import scipy as sp -from _helpers import configure_logging, update_p_nom_max +from _helpers import configure_logging, set_scenario_config, update_p_nom_max from add_electricity import load_costs from cluster_network import cluster_regions, clustering_for_n_clusters from pypsa.clustering.spatial import ( @@ -531,6 +531,7 @@ if __name__ == "__main__": snakemake = mock_snakemake("simplify_network", simpl="") configure_logging(snakemake) + set_scenario_config(snakemake) params = snakemake.params solver_name = snakemake.config["solving"]["solver"]["name"] diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 8eccef19..37b05286 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -33,7 +33,11 @@ import numpy as np import pandas as pd import pypsa import xarray as xr -from _helpers import configure_logging, update_config_with_sector_opts +from _helpers import ( + configure_logging, + set_scenario_config, + update_config_with_sector_opts, +) logger = logging.getLogger(__name__) pypsa.pf.logger.setLevel(logging.WARNING) @@ -657,6 +661,7 @@ if __name__ == "__main__": planning_horizons="2020", ) configure_logging(snakemake) + set_scenario_config(snakemake) if "sector_opts" in snakemake.wildcards.keys(): update_config_with_sector_opts( snakemake.config, snakemake.wildcards.sector_opts diff --git a/scripts/solve_operations_network.py b/scripts/solve_operations_network.py index 1a3855a9..064d735a 100644 --- a/scripts/solve_operations_network.py +++ b/scripts/solve_operations_network.py @@ -11,7 +11,11 @@ import logging import numpy as np import pypsa -from _helpers 
import configure_logging, update_config_with_sector_opts +from _helpers import ( + configure_logging, + set_scenario_config, + update_config_with_sector_opts, +) from solve_network import prepare_network, solve_network logger = logging.getLogger(__name__) @@ -33,6 +37,7 @@ if __name__ == "__main__": ) configure_logging(snakemake) + set_scenario_config(snakemake) update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts) opts = (snakemake.wildcards.opts + "-" + snakemake.wildcards.sector_opts).split("-") From 1f38c69b7634e6facbfe6dcb3d0dcd19caca3ee0 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 15 Aug 2023 15:07:34 +0200 Subject: [PATCH 03/76] untrack scenarios.yaml --- config/scenarios.yaml | 12 ------------ 1 file changed, 12 deletions(-) delete mode 100644 config/scenarios.yaml diff --git a/config/scenarios.yaml b/config/scenarios.yaml deleted file mode 100644 index 37d32243..00000000 --- a/config/scenarios.yaml +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors -# -# SPDX-License-Identifier: MIT - -# This file is used to define the scenarios that are run by snakemake. Each entry on the first level is a scenario. Each scenario can contain configuration overrides with respect to the config/config.yaml settings. 
-# -# Example -# -# custom-scenario: # name of the scenario -# electricity: -# renewable_carriers: [wind, solar] # override the list of renewable carriers From 56fe245eb4ae03839f3fd61b33a7319237a596b0 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 15 Aug 2023 15:08:22 +0200 Subject: [PATCH 04/76] gitignore: fix typo --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 67cab0c9..e79d129d 100644 --- a/.gitignore +++ b/.gitignore @@ -26,7 +26,7 @@ doc/_build /scripts/create_scenarios.py config.yaml -config/scenario.yaml +config/scenarios.yaml dconf From 6df2742cb996fb62a97851dd671ec00861e88fc2 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 15 Aug 2023 15:14:55 +0200 Subject: [PATCH 05/76] update release notes --- doc/release_notes.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 70a73e2f..918bb6dd 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -11,6 +11,8 @@ Upcoming Release ================ * Updated Global Energy Monitor LNG terminal data to March 2023 version. +* PyPSA-Eur now supports the simultaneous execution of multiple scenarios. For this purpose, a scenarios.yaml file has been introduced which contains customizable scenario names with corresponding configuration overrides. To enable it, set the ``run: scenarios:`` key to ``True`` and define the scenario names to run under ``run: name:`` in the configuration file. The latter must be a subset of top-level keys in the scenario file.
+ PyPSA-Eur 0.8.1 (27th July 2023) ================================ From 16d08ec100ee334daf729dcd5dc1bdb93feba1d1 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 15 Aug 2023 15:29:43 +0200 Subject: [PATCH 06/76] CI: fix config scenarios file spec --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index ff481b46..c09d05c2 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -83,7 +83,7 @@ jobs: snakemake -call solve_elec_networks --configfile config/test/config.electricity.yaml --rerun-triggers=mtime snakemake -call all --configfile config/test/config.overnight.yaml --rerun-triggers=mtime snakemake -call all --configfile config/test/config.myopic.yaml --rerun-triggers=mtime - snakemake -call all --configfile config/test/config.electricity.scenario.yaml + snakemake -call all --configfile config/test/config.scenarios.electricity.yaml - name: Upload artifacts uses: actions/upload-artifact@v3 From 3462080b896ba71b04653b73453024b750de7721 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 15 Aug 2023 15:44:54 +0200 Subject: [PATCH 07/76] ci: adjust scenario test run --- .github/workflows/ci.yaml | 2 +- rules/collect.smk | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index c09d05c2..6cbee85c 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -83,7 +83,7 @@ jobs: snakemake -call solve_elec_networks --configfile config/test/config.electricity.yaml --rerun-triggers=mtime snakemake -call all --configfile config/test/config.overnight.yaml --rerun-triggers=mtime snakemake -call all --configfile config/test/config.myopic.yaml --rerun-triggers=mtime - snakemake -call all --configfile config/test/config.scenarios.electricity.yaml + snakemake -call solve_elec_networks --configfile config/test/config.scenarios.electricity.yaml - name: Upload artifacts uses: 
actions/upload-artifact@v3 diff --git a/rules/collect.smk b/rules/collect.smk index a29aa715..8a64b577 100644 --- a/rules/collect.smk +++ b/rules/collect.smk @@ -16,7 +16,7 @@ localrules: rule all: input: - RESULTS + "graphs/costs.pdf", + expand(RESULTS + "graphs/costs.pdf", run=config["run"]["name"]), default_target: True From 5036964dbea41eaabf3695bdad1fad089ca8ef62 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 15 Aug 2023 15:49:57 +0200 Subject: [PATCH 08/76] copy_config: ensure to store updated config --- scripts/copy_config.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/copy_config.py b/scripts/copy_config.py index a549d893..40025342 100644 --- a/scripts/copy_config.py +++ b/scripts/copy_config.py @@ -13,10 +13,12 @@ import yaml if __name__ == "__main__": if "snakemake" not in globals(): - from _helpers import mock_snakemake + from _helpers import mock_snakemake, set_scenario_config snakemake = mock_snakemake("copy_config") + set_scenario_config(snakemake) + with open(snakemake.output[0], "w") as yaml_file: yaml.dump( snakemake.config, From b40a8926ff06fbcd501d2fdef8cd33407d8cba50 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 15 Aug 2023 16:51:35 +0200 Subject: [PATCH 09/76] copy_config: fix import --- scripts/copy_config.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/copy_config.py b/scripts/copy_config.py index 40025342..d6908a62 100644 --- a/scripts/copy_config.py +++ b/scripts/copy_config.py @@ -10,10 +10,11 @@ from pathlib import Path from shutil import copy import yaml +from _helpers import set_scenario_config if __name__ == "__main__": if "snakemake" not in globals(): - from _helpers import mock_snakemake, set_scenario_config + from _helpers import mock_snakemake snakemake = mock_snakemake("copy_config") From b9f3df385611d8f7c1f22575b8222cda1951c7b3 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 15 Aug 2023 17:28:12 +0200 Subject: [PATCH 10/76] ci: only dry-run scenarios --- 
.github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 6cbee85c..68bffc4c 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -83,7 +83,7 @@ jobs: snakemake -call solve_elec_networks --configfile config/test/config.electricity.yaml --rerun-triggers=mtime snakemake -call all --configfile config/test/config.overnight.yaml --rerun-triggers=mtime snakemake -call all --configfile config/test/config.myopic.yaml --rerun-triggers=mtime - snakemake -call solve_elec_networks --configfile config/test/config.scenarios.electricity.yaml + snakemake -call solve_elec_networks --configfile config/test/config.scenarios.electricity.yaml -n - name: Upload artifacts uses: actions/upload-artifact@v3 From b3a6e2c2816087b2b22eb05877ac2ec82d93084f Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 16 Aug 2023 10:50:09 +0200 Subject: [PATCH 11/76] common: make cache config_getter, use partial functions --- rules/common.smk | 44 ++++++++++++++++++++++++++------------------ 1 file changed, 26 insertions(+), 18 deletions(-) diff --git a/rules/common.smk b/rules/common.smk index 5677f577..f24301c8 100644 --- a/rules/common.smk +++ b/rules/common.smk @@ -3,9 +3,10 @@ # SPDX-License-Identifier: MIT import copy +from functools import partial, lru_cache -def get_config(keys, config, default=None): +def get_config(config, keys, default=None): """Retrieve a nested value from a dictionary using a tuple of keys.""" value = config for key in keys: @@ -26,6 +27,27 @@ def merge_configs(base_config, scenario_config): return merged +@lru_cache +def scenario_config(scenario_name): + """Retrieve a scenario config based on the overrides from the scenario file.""" + return merge_configs(config, scenarios[scenario_name]) + + +def static_getter(wildcards, keys, default): + """Getter function for static config values.""" + return get_config(config, keys, default) + + +def dynamic_getter(wildcards, 
keys, default): + """Getter function for dynamic config values based on scenario.""" + scenario_name = wildcards.run + if scenario_name not in scenarios: + raise ValueError( + f"Scenario {scenario_name} not found in file {config['scenariofile']}." + ) + return get_config(scenario_config(scenario_name), keys, default) + + def config_provider(*keys, default=None): """Dynamically provide config values based on 'run' -> 'name'. @@ -33,25 +55,11 @@ def config_provider(*keys, default=None): params: my_param=config_provider("key1", "key2", default="some_default_value") """ - - def static_getter(wildcards): - """Getter function for static config values.""" - return get_config(keys, config, default) - - def dynamic_getter(wildcards): - """Getter function for dynamic config values based on scenario.""" - scenario_name = wildcards.run - if scenario_name not in scenarios: - raise ValueError( - f"Scenario {scenario_name} not found in file {config['scenariofile']}." - ) - merged_config = merge_configs(config, scenarios[scenario_name]) - return get_config(keys, merged_config, default) - + # Using functools.partial to freeze certain arguments in our getter functions. 
if config["run"].get("scenarios", False): - return dynamic_getter + return partial(dynamic_getter, keys=keys, default=default) else: - return static_getter + return partial(static_getter, keys=keys, default=default) def memory(w): From 750b74db46bc6ac656145df2802b62cfa8fb801d Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 16 Aug 2023 10:51:03 +0200 Subject: [PATCH 12/76] build_electricity: optionally download dynamic fuel prices --- rules/build_electricity.smk | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/rules/build_electricity.smk b/rules/build_electricity.smk index 4d59c058..b359868f 100644 --- a/rules/build_electricity.smk +++ b/rules/build_electricity.smk @@ -350,7 +350,9 @@ rule add_electricity: hydro_capacities=ancient("data/bundle/hydro_capacities.csv"), geth_hydro_capacities="data/geth2015_hydro_capacities.csv", unit_commitment="data/unit_commitment.csv", - fuel_price=RESOURCES + "monthly_fuel_price.csv", + fuel_price=RESOURCES + "monthly_fuel_price.csv" + if config["conventional"]["dynamic_fuel_price"] + else [], load=RESOURCES + "load.csv", nuts3_shapes=RESOURCES + "nuts3_shapes.geojson", output: From 9cd449cf3c24bc935b43c0bb1dcb3ff80015822e Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 16 Aug 2023 11:41:48 +0200 Subject: [PATCH 13/76] retrieve electricity demand: use script in order to concat time-series --- rules/postprocess.smk | 1 - rules/retrieve.smk | 14 ++-------- scripts/retrieve_electricity_demand.py | 37 ++++++++++++++++++++++++++ 3 files changed, 39 insertions(+), 13 deletions(-) create mode 100644 scripts/retrieve_electricity_demand.py diff --git a/rules/postprocess.smk b/rules/postprocess.smk index fccda6e4..4c833b4f 100644 --- a/rules/postprocess.smk +++ b/rules/postprocess.smk @@ -5,7 +5,6 @@ localrules: copy_config, - copy_conda_env, rule plot_network: diff --git a/rules/retrieve.smk b/rules/retrieve.smk index 4b9e9542..d7e22e71 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -156,16 +156,6 @@ if 
config["enable"]["retrieve"] and ( if config["enable"]["retrieve"]: rule retrieve_electricity_demand: - input: - HTTP.remote( - "data.open-power-system-data.org/time_series/{version}/time_series_60min_singleindex.csv".format( - version="2019-06-05" - if config["snapshots"]["end"] < "2019" - else "2020-10-06" - ), - keep_local=True, - static=True, - ), output: "data/load_raw.csv", log: @@ -173,8 +163,8 @@ if config["enable"]["retrieve"]: resources: mem_mb=5000, retries: 2 - run: - move(input[0], output[0]) + script: + "../scripts/retrieve_electricity_demand.py" if config["enable"]["retrieve"]: diff --git a/scripts/retrieve_electricity_demand.py b/scripts/retrieve_electricity_demand.py new file mode 100644 index 00000000..58615755 --- /dev/null +++ b/scripts/retrieve_electricity_demand.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# SPDX-FileCopyrightText: : 2023 The PyPSA-Eur Authors +# +# SPDX-License-Identifier: MIT +""" +Retrieve monthly fuel prices from Destatis. +""" + +import logging + +import pandas as pd + +logger = logging.getLogger(__name__) + +from pathlib import Path + +from _helpers import configure_logging, set_scenario_config + +if __name__ == "__main__": + if "snakemake" not in globals(): + from _helpers import mock_snakemake + + snakemake = mock_snakemake("retrieve_eletricity_demand") + rootpath = ".." + else: + rootpath = "." 
+ configure_logging(snakemake) + set_scenario_config(snakemake) + + versions = ["2019-06-05", "2020-10-06"] + url = "https://data.open-power-system-data.org/time_series/{version}/time_series_60min_singleindex.csv" + + df1, df2 = [ + pd.read_csv(url.format(version=version), index_col=0) for version in versions + ] + res = pd.concat([df1, df2[df2.index > df1.index[-1]]], join="inner") + res.to_csv(snakemake.output[0]) From 32eb114b613d8e0c88c3c992d0b1ddb4ecf71b4c Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 16 Aug 2023 11:47:27 +0200 Subject: [PATCH 14/76] retrieve electricity demand: make online version snakemake params --- rules/retrieve.smk | 2 ++ scripts/build_industrial_distribution_key.py | 4 ++-- scripts/retrieve_electricity_demand.py | 4 ++-- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/rules/retrieve.smk b/rules/retrieve.smk index d7e22e71..5af0ffb5 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -156,6 +156,8 @@ if config["enable"]["retrieve"] and ( if config["enable"]["retrieve"]: rule retrieve_electricity_demand: + params: + versions=["2019-06-05", "2020-10-06"], output: "data/load_raw.csv", log: diff --git a/scripts/build_industrial_distribution_key.py b/scripts/build_industrial_distribution_key.py index 979a1493..24cb6fa0 100644 --- a/scripts/build_industrial_distribution_key.py +++ b/scripts/build_industrial_distribution_key.py @@ -116,7 +116,7 @@ def build_nodal_distribution_key(hotmaps, regions, countries): if not facilities.empty: emissions = facilities["Emissions_ETS_2014"].fillna( - hotmaps["Emissions_EPRTR_2014"] + hotmaps["Emissions_EPRTR_2014"].dropna() ) if emissions.sum() == 0: key = pd.Series(1 / len(facilities), facilities.index) @@ -140,7 +140,7 @@ if __name__ == "__main__": snakemake = mock_snakemake( "build_industrial_distribution_key", simpl="", - clusters=48, + clusters=128, ) logging.basicConfig(level=snakemake.config["logging"]["level"]) diff --git a/scripts/retrieve_electricity_demand.py 
b/scripts/retrieve_electricity_demand.py index 58615755..58511857 100644 --- a/scripts/retrieve_electricity_demand.py +++ b/scripts/retrieve_electricity_demand.py @@ -27,11 +27,11 @@ if __name__ == "__main__": configure_logging(snakemake) set_scenario_config(snakemake) - versions = ["2019-06-05", "2020-10-06"] url = "https://data.open-power-system-data.org/time_series/{version}/time_series_60min_singleindex.csv" df1, df2 = [ - pd.read_csv(url.format(version=version), index_col=0) for version in versions + pd.read_csv(url.format(version=version), index_col=0) + for version in snakemake.params.versions ] res = pd.concat([df1, df2[df2.index > df1.index[-1]]], join="inner") res.to_csv(snakemake.output[0]) From 59fe66561651ff64730b27cf0647c78caea9030d Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 16 Aug 2023 13:25:52 +0200 Subject: [PATCH 15/76] add exemplary scenarios.yaml add create_scenarios.py --- config/scenarios.yaml | 12 ++++++++++++ scripts/create_scenarios.py | 36 ++++++++++++++++++++++++++++++++++++ 2 files changed, 48 insertions(+) create mode 100644 config/scenarios.yaml create mode 100644 scripts/create_scenarios.py diff --git a/config/scenarios.yaml b/config/scenarios.yaml new file mode 100644 index 00000000..37d32243 --- /dev/null +++ b/config/scenarios.yaml @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors +# +# SPDX-License-Identifier: MIT + +# This file is used to define the scenarios that are run by snakemake. Each entry on the first level is a scenario. Each scenario can contain configuration overrides with respect to the config/config.yaml settings. 
+# +# Example +# +# custom-scenario: # name of the scenario +# electricity: +# renewable_carriers: [wind, solar] # override the list of renewable carriers diff --git a/scripts/create_scenarios.py b/scripts/create_scenarios.py new file mode 100644 index 00000000..40a3c331 --- /dev/null +++ b/scripts/create_scenarios.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors +# +# SPDX-License-Identifier: MIT + +# This script helps to generate a scenarios.yaml file for PyPSA-Eur. +# You can modify the template to your needs and define all possible combinations of config values that should be considered. + + +import itertools + +# Insert your config values that should be altered in the template. +# Change `config_section` and `config_section2` to the actual config sections. +template = """ +scenario{scenario_number}: + config_section: + config_value: {config_value} + + config_section2: + config_key2: {config_value2} +""" + +# Define all possible combinations of config values. +# This must define all config values that are used in the template. 
+config_values = dict(config_values=["true", "false"], config_values2=[1, 2, 3, 4, 5]) + +combinations = [ + dict(zip(config_values.keys(), values)) + for values in itertools.product(*config_values.values()) +] + +# write the scenarios to a file +filename = "../config/scenarios.yaml" +with open(filename, "w") as f: + for i, config in enumerate(combinations): + f.write(template.format(scenario_number=i, **config)) From af2fb1a82e728c7d0de2a73c1222ba5479fb6540 Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 16 Aug 2023 13:37:10 +0200 Subject: [PATCH 16/76] uptrack changes in scenarios.yaml/create_scenarios.py --- config/scenarios.yaml | 12 ------------ scripts/create_scenarios.py | 36 ------------------------------------ 2 files changed, 48 deletions(-) delete mode 100644 config/scenarios.yaml delete mode 100644 scripts/create_scenarios.py diff --git a/config/scenarios.yaml b/config/scenarios.yaml deleted file mode 100644 index 37d32243..00000000 --- a/config/scenarios.yaml +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors -# -# SPDX-License-Identifier: MIT - -# This file is used to define the scenarios that are run by snakemake. Each entry on the first level is a scenario. Each scenario can contain configuration overrides with respect to the config/config.yaml settings. -# -# Example -# -# custom-scenario: # name of the scenario -# electricity: -# renewable_carriers: [wind, solar] # override the list of renewable carriers diff --git a/scripts/create_scenarios.py b/scripts/create_scenarios.py deleted file mode 100644 index 40a3c331..00000000 --- a/scripts/create_scenarios.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors -# -# SPDX-License-Identifier: MIT - -# This script helps to generate a scenarios.yaml file for PyPSA-Eur. 
-# You can modify the template to your needs and define all possible combinations of config values that should be considered. - - -import itertools - -# Insert your config values that should be altered in the template. -# Change `config_section` and `config_section2` to the actual config sections. -template = """ -scenario{scenario_number}: - config_section: - config_value: {config_value} - - config_section2: - config_key2: {config_value2} -""" - -# Define all possible combinations of config values. -# This must define all config values that are used in the template. -config_values = dict(config_values=["true", "false"], config_values2=[1, 2, 3, 4, 5]) - -combinations = [ - dict(zip(config_values.keys(), values)) - for values in itertools.product(*config_values.values()) -] - -# write the scenarios to a file -filename = "../config/scenarios.yaml" -with open(filename, "w") as f: - for i, config in enumerate(combinations): - f.write(template.format(scenario_number=i, **config)) From 672d7b9538ed9c1ad811a43dd056ff7cb60d202a Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 16 Aug 2023 13:45:59 +0200 Subject: [PATCH 17/76] force add scenarios.yaml / create_scenarios --- config/scenarios.yaml | 12 ++++++++++++ scripts/create_scenarios.py | 36 ++++++++++++++++++++++++++++++++++++ 2 files changed, 48 insertions(+) create mode 100644 config/scenarios.yaml create mode 100644 scripts/create_scenarios.py diff --git a/config/scenarios.yaml b/config/scenarios.yaml new file mode 100644 index 00000000..37d32243 --- /dev/null +++ b/config/scenarios.yaml @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors +# +# SPDX-License-Identifier: MIT + +# This file is used to define the scenarios that are run by snakemake. Each entry on the first level is a scenario. Each scenario can contain configuration overrides with respect to the config/config.yaml settings. 
+# +# Example +# +# custom-scenario: # name of the scenario +# electricity: +# renewable_carriers: [wind, solar] # override the list of renewable carriers diff --git a/scripts/create_scenarios.py b/scripts/create_scenarios.py new file mode 100644 index 00000000..40a3c331 --- /dev/null +++ b/scripts/create_scenarios.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors +# +# SPDX-License-Identifier: MIT + +# This script helps to generate a scenarios.yaml file for PyPSA-Eur. +# You can modify the template to your needs and define all possible combinations of config values that should be considered. + + +import itertools + +# Insert your config values that should be altered in the template. +# Change `config_section` and `config_section2` to the actual config sections. +template = """ +scenario{scenario_number}: + config_section: + config_value: {config_value} + + config_section2: + config_key2: {config_value2} +""" + +# Define all possible combinations of config values. +# This must define all config values that are used in the template. 
+config_values = dict(config_values=["true", "false"], config_values2=[1, 2, 3, 4, 5]) + +combinations = [ + dict(zip(config_values.keys(), values)) + for values in itertools.product(*config_values.values()) +] + +# write the scenarios to a file +filename = "../config/scenarios.yaml" +with open(filename, "w") as f: + for i, config in enumerate(combinations): + f.write(template.format(scenario_number=i, **config)) From 9d4ce430cc6abc73941ab9acebb35d9b0db919c4 Mon Sep 17 00:00:00 2001 From: Fabian Date: Thu, 17 Aug 2023 10:17:12 +0200 Subject: [PATCH 18/76] electricity demand: remove powerstastics flag, merge sources in retrieve_electricity_demand --- doc/configtables/load.csv | 1 - doc/retrieve.rst | 2 +- rules/build_electricity.smk | 2 +- rules/retrieve.smk | 2 +- scripts/build_electricity_demand.py | 172 ++++++++++--------------- scripts/retrieve_electricity_demand.py | 16 ++- 6 files changed, 85 insertions(+), 110 deletions(-) diff --git a/doc/configtables/load.csv b/doc/configtables/load.csv index 6e98f881..ac666947 100644 --- a/doc/configtables/load.csv +++ b/doc/configtables/load.csv @@ -1,5 +1,4 @@ ,Unit,Values,Description -power_statistics,bool,"{true, false}",Whether to load the electricity consumption data of the ENTSOE power statistics (only for files from 2019 and before) or from the ENTSOE transparency data (only has load data from 2015 onwards). interpolate_limit,hours,integer,"Maximum gap size (consecutive nans) which interpolated linearly." time_shift_for_large_gaps,string,string,"Periods which are used for copying time-slices in order to fill large gaps of nans. Have to be valid ``pandas`` period strings." manual_adjustments,bool,"{true, false}","Whether to adjust the load data manually according to the function in :func:`manual_adjustment`." diff --git a/doc/retrieve.rst b/doc/retrieve.rst index 4786581e..66c996f5 100644 --- a/doc/retrieve.rst +++ b/doc/retrieve.rst @@ -91,7 +91,7 @@ None. 
**Outputs** -- ``data/load_raw.csv`` +- ``data/electricity_demand.csv`` Rule ``retrieve_cost_data`` diff --git a/rules/build_electricity.smk b/rules/build_electricity.smk index b359868f..2e7a0c30 100644 --- a/rules/build_electricity.smk +++ b/rules/build_electricity.smk @@ -24,7 +24,7 @@ rule build_electricity_demand: countries=config_provider("countries"), load=config_provider("load"), input: - ancient("data/load_raw.csv"), + ancient("data/electricity_demand.csv"), output: RESOURCES + "load.csv", log: diff --git a/rules/retrieve.smk b/rules/retrieve.smk index 5af0ffb5..34e2eb7c 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -159,7 +159,7 @@ if config["enable"]["retrieve"]: params: versions=["2019-06-05", "2020-10-06"], output: - "data/load_raw.csv", + "data/electricity_demand.csv", log: "logs/retrieve_electricity_demand.log", resources: diff --git a/scripts/build_electricity_demand.py b/scripts/build_electricity_demand.py index 60d40e1e..3fd9d605 100755 --- a/scripts/build_electricity_demand.py +++ b/scripts/build_electricity_demand.py @@ -31,7 +31,7 @@ Relevant Settings Inputs ------ -- ``data/load_raw.csv``: +- ``data/electricity_demand.csv``: Outputs ------- @@ -49,7 +49,7 @@ from _helpers import configure_logging, set_scenario_config from pandas import Timedelta as Delta -def load_timeseries(fn, years, countries, powerstatistics=True): +def load_timeseries(fn, years, countries): """ Read load data from OPSD time-series package version 2020-10-06. @@ -62,10 +62,6 @@ def load_timeseries(fn, years, countries, powerstatistics=True): File name or url location (file format .csv) countries : listlike Countries for which to read load data. - powerstatistics: bool - Whether the electricity consumption data of the ENTSOE power - statistics (if true) or of the ENTSOE transparency map (if false) - should be parsed. 
Returns ------- @@ -74,17 +70,9 @@ def load_timeseries(fn, years, countries, powerstatistics=True): """ logger.info(f"Retrieving load data from '{fn}'.") - pattern = "power_statistics" if powerstatistics else "transparency" - pattern = f"_load_actual_entsoe_{pattern}" - - def rename(s): - return s[: -len(pattern)] - return ( pd.read_csv(fn, index_col=0, parse_dates=[0]) .tz_localize(None) - .filter(like=pattern) - .rename(columns=rename) .dropna(how="all", axis=0) .rename(columns={"GB_UKM": "GB"}) .filter(items=countries) @@ -149,17 +137,18 @@ def copy_timeslice(load, cntry, start, stop, delta, fn_load=None): ].values elif fn_load is not None: duration = pd.date_range(freq="h", start=start - delta, end=stop - delta) - load_raw = load_timeseries(fn_load, duration, [cntry], powerstatistics) + load_raw = load_timeseries(fn_load, duration, [cntry]) load.loc[start:stop, cntry] = load_raw.loc[ start - delta : stop - delta, cntry ].values -def manual_adjustment(load, fn_load, powerstatistics): +def manual_adjustment(load, fn_load): """ Adjust gaps manual for load data from OPSD time-series package. - 1. For the ENTSOE power statistics load data (if powerstatistics is True) + 1. For years later than 2015 for which the load data is mainly taken from the + ENTSOE power statistics Kosovo (KV) and Albania (AL) do not exist in the data set. Kosovo gets the same load curve as Serbia and Albania the same as Macdedonia, both scaled @@ -167,7 +156,8 @@ def manual_adjustment(load, fn_load, powerstatistics): IEA Data browser [0] for the year 2013. - 2. For the ENTSOE transparency load data (if powerstatistics is False) + 2. For years earlier than 2015 for which the load data is mainly taken from the + ENTSOE transparency platforms Albania (AL) and Macedonia (MK) do not exist in the data set. 
Both get the same load curve as Montenegro, scaled by the corresponding ratio of total energy @@ -183,9 +173,6 @@ def manual_adjustment(load, fn_load, powerstatistics): ---------- load : pd.DataFrame Load time-series with UTC timestamps x ISO-2 countries - powerstatistics: bool - Whether argument load comprises the electricity consumption data of - the ENTSOE power statistics or of the ENTSOE transparency map load_fn: str File name or url location (file format .csv) @@ -195,88 +182,66 @@ def manual_adjustment(load, fn_load, powerstatistics): Manual adjusted and interpolated load time-series with UTC timestamps x ISO-2 countries """ - if powerstatistics: - if "MK" in load.columns: - if "AL" not in load.columns or load.AL.isnull().values.all(): - load["AL"] = load["MK"] * (4.1 / 7.4) - if "RS" in load.columns: - if "KV" not in load.columns or load.KV.isnull().values.all(): - load["KV"] = load["RS"] * (4.8 / 27.0) + if "MK" in load: + if "AL" not in load or load.AL.isnull().values.all(): + load["AL"] = load["MK"] * (4.1 / 7.4) + if "RS" in load: + if "KV" not in load or load.KV.isnull().values.all(): + load["KV"] = load["RS"] * (4.8 / 27.0) + if "ME" in load: + if "AL" not in load and "AL" in countries: + load["AL"] = load.ME * (5.7 / 2.9) + if "MK" not in load and "MK" in countries: + load["MK"] = load.ME * (6.7 / 2.9) + if "BA" not in load and "BA" in countries: + load["BA"] = load.HR * (11.0 / 16.2) - copy_timeslice( - load, "GR", "2015-08-11 21:00", "2015-08-15 20:00", Delta(weeks=1) - ) - copy_timeslice( - load, "AT", "2018-12-31 22:00", "2019-01-01 22:00", Delta(days=2) - ) - copy_timeslice( - load, "CH", "2010-01-19 07:00", "2010-01-19 22:00", Delta(days=1) - ) - copy_timeslice( - load, "CH", "2010-03-28 00:00", "2010-03-28 21:00", Delta(days=1) - ) - # is a WE, so take WE before - copy_timeslice( - load, "CH", "2010-10-08 13:00", "2010-10-10 21:00", Delta(weeks=1) - ) - copy_timeslice( - load, "CH", "2010-11-04 04:00", "2010-11-04 22:00", Delta(days=1) - ) - 
copy_timeslice( - load, "NO", "2010-12-09 11:00", "2010-12-09 18:00", Delta(days=1) - ) - # whole january missing - copy_timeslice( - load, - "GB", - "2010-01-01 00:00", - "2010-01-31 23:00", - Delta(days=-365), - fn_load, - ) - # 1.1. at midnight gets special treatment - copy_timeslice( - load, - "IE", - "2016-01-01 00:00", - "2016-01-01 01:00", - Delta(days=-366), - fn_load, - ) - copy_timeslice( - load, - "PT", - "2016-01-01 00:00", - "2016-01-01 01:00", - Delta(days=-366), - fn_load, - ) - copy_timeslice( - load, - "GB", - "2016-01-01 00:00", - "2016-01-01 01:00", - Delta(days=-366), - fn_load, - ) + copy_timeslice(load, "GR", "2015-08-11 21:00", "2015-08-15 20:00", Delta(weeks=1)) + copy_timeslice(load, "AT", "2018-12-31 22:00", "2019-01-01 22:00", Delta(days=2)) + copy_timeslice(load, "CH", "2010-01-19 07:00", "2010-01-19 22:00", Delta(days=1)) + copy_timeslice(load, "CH", "2010-03-28 00:00", "2010-03-28 21:00", Delta(days=1)) + # is a WE, so take WE before + copy_timeslice(load, "CH", "2010-10-08 13:00", "2010-10-10 21:00", Delta(weeks=1)) + copy_timeslice(load, "CH", "2010-11-04 04:00", "2010-11-04 22:00", Delta(days=1)) + copy_timeslice(load, "NO", "2010-12-09 11:00", "2010-12-09 18:00", Delta(days=1)) + # whole january missing + copy_timeslice( + load, + "GB", + "2010-01-01 00:00", + "2010-01-31 23:00", + Delta(days=-365), + fn_load, + ) + # 1.1. 
at midnight gets special treatment + copy_timeslice( + load, + "IE", + "2016-01-01 00:00", + "2016-01-01 01:00", + Delta(days=-366), + fn_load, + ) + copy_timeslice( + load, + "PT", + "2016-01-01 00:00", + "2016-01-01 01:00", + Delta(days=-366), + fn_load, + ) + copy_timeslice( + load, + "GB", + "2016-01-01 00:00", + "2016-01-01 01:00", + Delta(days=-366), + fn_load, + ) - else: - if "ME" in load: - if "AL" not in load and "AL" in countries: - load["AL"] = load.ME * (5.7 / 2.9) - if "MK" not in load and "MK" in countries: - load["MK"] = load.ME * (6.7 / 2.9) - if "BA" not in load and "BA" in countries: - load["BA"] = load.HR * (11.0 / 16.2) - copy_timeslice( - load, "BG", "2018-10-27 21:00", "2018-10-28 22:00", Delta(weeks=1) - ) - copy_timeslice( - load, "LU", "2019-01-02 11:00", "2019-01-05 05:00", Delta(weeks=-1) - ) - copy_timeslice( - load, "LU", "2019-02-05 20:00", "2019-02-06 19:00", Delta(weeks=-1) - ) + copy_timeslice(load, "BG", "2018-10-27 21:00", "2018-10-28 22:00", Delta(weeks=1)) + copy_timeslice(load, "LU", "2019-01-02 11:00", "2019-01-05 05:00", Delta(weeks=-1)) + copy_timeslice(load, "LU", "2019-02-05 20:00", "2019-02-06 19:00", Delta(weeks=-1)) return load @@ -290,17 +255,16 @@ if __name__ == "__main__": configure_logging(snakemake) set_scenario_config(snakemake) - powerstatistics = snakemake.params.load["power_statistics"] interpolate_limit = snakemake.params.load["interpolate_limit"] countries = snakemake.params.countries snapshots = pd.date_range(freq="h", **snakemake.params.snapshots) years = slice(snapshots[0], snapshots[-1]) time_shift = snakemake.params.load["time_shift_for_large_gaps"] - load = load_timeseries(snakemake.input[0], years, countries, powerstatistics) + load = load_timeseries(snakemake.input[0], years, countries) if snakemake.params.load["manual_adjustments"]: - load = manual_adjustment(load, snakemake.input[0], powerstatistics) + load = manual_adjustment(load, snakemake.input[0]) if load.empty: logger.warning("Build 
electricity demand time series is empty.") diff --git a/scripts/retrieve_electricity_demand.py b/scripts/retrieve_electricity_demand.py index 58511857..01dc4aa8 100644 --- a/scripts/retrieve_electricity_demand.py +++ b/scripts/retrieve_electricity_demand.py @@ -20,7 +20,7 @@ if __name__ == "__main__": if "snakemake" not in globals(): from _helpers import mock_snakemake - snakemake = mock_snakemake("retrieve_eletricity_demand") + snakemake = mock_snakemake("retrieve_electricity_demand") rootpath = ".." else: rootpath = "." @@ -33,5 +33,17 @@ if __name__ == "__main__": pd.read_csv(url.format(version=version), index_col=0) for version in snakemake.params.versions ] - res = pd.concat([df1, df2[df2.index > df1.index[-1]]], join="inner") + combined = pd.concat([df1, df2[df2.index > df1.index[-1]]]) + + pattern = "_load_actual_entsoe_transparency" + transparency = combined.filter(like=pattern).rename( + columns=lambda x: x.replace(pattern, "") + ) + pattern = "_load_actual_entsoe_power_statistics" + powerstatistics = combined.filter(like=pattern).rename( + columns=lambda x: x.replace(pattern, "") + ) + + res = transparency.fillna(powerstatistics) + res.to_csv(snakemake.output[0]) From 91eff472a7748dc617d19cd9ad788bf81144c356 Mon Sep 17 00:00:00 2001 From: Fabian Date: Thu, 17 Aug 2023 12:31:07 +0200 Subject: [PATCH 19/76] scenario-management: fix set_scenario_config function; apply config_provider to some direct inputs --- rules/build_electricity.smk | 8 ++++---- scripts/_helpers.py | 8 +++++--- scripts/base_network.py | 2 +- scripts/build_electricity_demand.py | 32 ++++++++++++++++------------- scripts/build_renewable_profiles.py | 4 +++- 5 files changed, 31 insertions(+), 23 deletions(-) diff --git a/rules/build_electricity.smk b/rules/build_electricity.smk index 2e7a0c30..5f44d2ef 100644 --- a/rules/build_electricity.smk +++ b/rules/build_electricity.smk @@ -214,19 +214,19 @@ rule build_renewable_profiles: corine=ancient("data/bundle/corine/g250_clc06_V18_5.tif"), 
natura=lambda w: ( RESOURCES + "natura.tiff" - if config["renewable"][w.technology]["natura"] + if config_provider("renewable", w.technology, "natura")(w) else [] ), gebco=ancient( lambda w: ( "data/bundle/GEBCO_2014_2D.nc" - if config["renewable"][w.technology].get("max_depth") + if config_provider("renewable", w.technology)(w).get("max_depth") else [] ) ), ship_density=lambda w: ( RESOURCES + "shipdensity_raster.tif" - if "ship_threshold" in config["renewable"][w.technology].keys() + if "ship_threshold" in config_provider("renewable", w.technology)(w).keys() else [] ), country_shapes=RESOURCES + "country_shapes.geojson", @@ -238,7 +238,7 @@ rule build_renewable_profiles: ), cutout=lambda w: "cutouts/" + CDIR - + config["renewable"][w.technology]["cutout"] + + config_provider("renewable", w.technology, "cutout")(w) + ".nc", output: profile=RESOURCES + "profile_{technology}.nc", diff --git a/scripts/_helpers.py b/scripts/_helpers.py index c66d708e..7a356a44 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -31,10 +31,12 @@ def mute_print(): def set_scenario_config(snakemake): - if "scenario_config" in snakemake.input: - with open(snakemake.input.scenario_config, "r") as f: + if snakemake.config["run"]["scenarios"]: + script_dir = Path(__file__).parent.resolve() + root_dir = script_dir.parent + with open(root_dir / snakemake.config["scenariofile"], "r") as f: scenario_config = yaml.safe_load(f) - update_config(snakemake.config, scenario_config) + update_config(snakemake.config, scenario_config[snakemake.wildcards.run]) def configure_logging(snakemake, skip_handlers=False): diff --git a/scripts/base_network.py b/scripts/base_network.py index b5304109..32b54d28 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -743,7 +743,7 @@ if __name__ == "__main__": if "snakemake" not in globals(): from _helpers import mock_snakemake - snakemake = mock_snakemake("base_network") + snakemake = mock_snakemake("base_network", run="network2019") 
configure_logging(snakemake) set_scenario_config(snakemake) diff --git a/scripts/build_electricity_demand.py b/scripts/build_electricity_demand.py index 3fd9d605..376af247 100755 --- a/scripts/build_electricity_demand.py +++ b/scripts/build_electricity_demand.py @@ -68,8 +68,6 @@ def load_timeseries(fn, years, countries): load : pd.DataFrame Load time-series with UTC timestamps x ISO-2 countries """ - logger.info(f"Retrieving load data from '{fn}'.") - return ( pd.read_csv(fn, index_col=0, parse_dates=[0]) .tz_localize(None) @@ -182,20 +180,26 @@ def manual_adjustment(load, fn_load): Manual adjusted and interpolated load time-series with UTC timestamps x ISO-2 countries """ - if "MK" in load: - if "AL" not in load or load.AL.isnull().values.all(): - load["AL"] = load["MK"] * (4.1 / 7.4) - if "RS" in load: - if "KV" not in load or load.KV.isnull().values.all(): - load["KV"] = load["RS"] * (4.8 / 27.0) - if "ME" in load: - if "AL" not in load and "AL" in countries: + + if "AL" not in load and "AL" in countries: + if "ME" in load: load["AL"] = load.ME * (5.7 / 2.9) - if "MK" not in load and "MK" in countries: - load["MK"] = load.ME * (6.7 / 2.9) - if "BA" not in load and "BA" in countries: + elif "MK" in load: + load["AL"] = load["MK"] * (4.1 / 7.4) + + if "MK" in countries: + if "MK" not in load or load.MK.isnull().sum() > len(load) / 2: + if "ME" in load: + load["MK"] = load.ME * (6.7 / 2.9) + + if "BA" not in load and "BA" in countries: + if "ME" in load: load["BA"] = load.HR * (11.0 / 16.2) + if "KV" not in load or load.KV.isnull().values.all(): + if "RS" in load: + load["KV"] = load["RS"] * (4.8 / 27.0) + copy_timeslice(load, "GR", "2015-08-11 21:00", "2015-08-15 20:00", Delta(weeks=1)) copy_timeslice(load, "AT", "2018-12-31 22:00", "2019-01-01 22:00", Delta(days=2)) copy_timeslice(load, "CH", "2010-01-19 07:00", "2010-01-19 22:00", Delta(days=1)) @@ -250,7 +254,7 @@ if __name__ == "__main__": if "snakemake" not in globals(): from _helpers import mock_snakemake - 
snakemake = mock_snakemake("build_electricity_demand") + snakemake = mock_snakemake("build_electricity_demand", run="network2019") configure_logging(snakemake) set_scenario_config(snakemake) diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index 40b3151d..c6d42e6b 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -200,7 +200,9 @@ if __name__ == "__main__": if "snakemake" not in globals(): from _helpers import mock_snakemake - snakemake = mock_snakemake("build_renewable_profiles", technology="solar") + snakemake = mock_snakemake( + "build_renewable_profiles", technology="solar", run="network2019" + ) configure_logging(snakemake) set_scenario_config(snakemake) From ebb5da655ef8627d534e1995df18fdeac93b54b2 Mon Sep 17 00:00:00 2001 From: Fabian Date: Thu, 17 Aug 2023 12:56:34 +0200 Subject: [PATCH 20/76] scenarios: add collect function plot_elec_networks --- rules/collect.smk | 10 ++++++++++ scripts/plot_statistics.py | 4 ++-- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/rules/collect.smk b/rules/collect.smk index 8a64b577..70dc4641 100644 --- a/rules/collect.smk +++ b/rules/collect.smk @@ -76,6 +76,16 @@ rule solve_sector_networks: ), +rule plot_elec_networks: + input: + expand( + RESULTS + + "figures/.statistics_plots_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}", + **config["scenario"], + run=config["run"]["name"] + ), + + rule plot_networks: input: expand( diff --git a/scripts/plot_statistics.py b/scripts/plot_statistics.py index a0a3e71d..11293c08 100644 --- a/scripts/plot_statistics.py +++ b/scripts/plot_statistics.py @@ -61,7 +61,7 @@ if __name__ == "__main__": fig, ax = plt.subplots() ds = n.statistics.installed_capacity().dropna() ds = ds.drop("Line") - ds = ds.drop(("Generator", "Load")) + ds = ds.drop(("Generator", "Load"), errors="ignore") ds = ds / 1e3 ds.attrs["unit"] = "GW" plot_static_per_carrier(ds, ax) @@ -70,7 +70,7 @@ if __name__ == "__main__": fig, ax = 
plt.subplots() ds = n.statistics.optimal_capacity() ds = ds.drop("Line") - ds = ds.drop(("Generator", "Load")) + ds = ds.drop(("Generator", "Load"), errors="ignore") ds = ds / 1e3 ds.attrs["unit"] = "GW" plot_static_per_carrier(ds, ax) From 1810bbd4b3243ffbf516ccaa58fe5b92adf463d1 Mon Sep 17 00:00:00 2001 From: Fabian Date: Mon, 21 Aug 2023 12:24:27 +0200 Subject: [PATCH 21/76] make set_scenario_config robust against mock_snakemake and subworkflow --- scripts/_helpers.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/scripts/_helpers.py b/scripts/_helpers.py index 7a356a44..b41ed60d 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -31,11 +31,16 @@ def mute_print(): def set_scenario_config(snakemake): - if snakemake.config["run"]["scenarios"]: - script_dir = Path(__file__).parent.resolve() - root_dir = script_dir.parent - with open(root_dir / snakemake.config["scenariofile"], "r") as f: - scenario_config = yaml.safe_load(f) + if snakemake.config["run"]["scenarios"] and "run" in snakemake.wildcards: + try: + with open(snakemake.config["scenariofile"], "r") as f: + scenario_config = yaml.safe_load(f) + except FileNotFoundError: + # fallback for mock_snakemake + script_dir = Path(__file__).parent.resolve() + root_dir = script_dir.parent + with open(root_dir / snakemake.config["scenariofile"], "r") as f: + scenario_config = yaml.safe_load(f) update_config(snakemake.config, scenario_config[snakemake.wildcards.run]) From 7b9ab155f47f156f2900b711e9a6ebd893a512c0 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 22 Aug 2023 18:15:50 +0200 Subject: [PATCH 22/76] helpers: fix condition in set_scenario_config prepare_sector: insert set_scenario_config --- scripts/_helpers.py | 2 +- scripts/prepare_sector_network.py | 8 +++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/scripts/_helpers.py b/scripts/_helpers.py index b41ed60d..3951be18 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -31,7 +31,7 @@ 
def mute_print(): def set_scenario_config(snakemake): - if snakemake.config["run"]["scenarios"] and "run" in snakemake.wildcards: + if snakemake.config["run"]["scenarios"] and "run" in snakemake.wildcards.keys(): try: with open(snakemake.config["scenariofile"], "r") as f: scenario_config = yaml.safe_load(f) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 0c1faacc..1ee42a72 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -17,7 +17,12 @@ import numpy as np import pandas as pd import pypsa import xarray as xr -from _helpers import generate_periodic_profiles, update_config_with_sector_opts +from _helpers import ( + configure_logging, + generate_periodic_profiles, + set_scenario_config, + update_config_with_sector_opts, +) from add_electricity import calculate_annuity, sanitize_carriers from build_energy_totals import build_co2_totals, build_eea_co2, build_eurostat_co2 from networkx.algorithms import complement @@ -3286,6 +3291,7 @@ if __name__ == "__main__": ) logging.basicConfig(level=snakemake.config["logging"]["level"]) + set_scenario_config(snakemake) update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts) From 62c61438636494a9dc2a917e1706ae0df25e2d8c Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 23 Aug 2023 17:14:57 +0200 Subject: [PATCH 23/76] scenario-management: reenable shared resources, make shared resources wildcards dependent --- .github/workflows/ci.yaml | 2 +- Snakefile | 26 +- config/config.default.yaml | 5 +- config/scenarios.yaml | 26 + config/test/config.scenarios.electricity.yaml | 7 +- doc/configtables/run.csv | 8 +- doc/configtables/toplevel.csv | 1 - rules/build_electricity.smk | 184 +++---- rules/build_sector.smk | 483 ++++++++++-------- rules/collect.smk | 6 +- rules/common.smk | 4 +- rules/postprocess.smk | 2 +- rules/retrieve.smk | 4 +- rules/solve_electricity.smk | 2 +- rules/solve_myopic.smk | 16 +- rules/solve_overnight.smk | 3 +- 
rules/validate.smk | 20 +- scripts/__init__.py | 4 + scripts/_helpers.py | 47 +- 19 files changed, 479 insertions(+), 371 deletions(-) create mode 100644 scripts/__init__.py diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 68bffc4c..6cbee85c 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -83,7 +83,7 @@ jobs: snakemake -call solve_elec_networks --configfile config/test/config.electricity.yaml --rerun-triggers=mtime snakemake -call all --configfile config/test/config.overnight.yaml --rerun-triggers=mtime snakemake -call all --configfile config/test/config.myopic.yaml --rerun-triggers=mtime - snakemake -call solve_elec_networks --configfile config/test/config.scenarios.electricity.yaml -n + snakemake -call solve_elec_networks --configfile config/test/config.scenarios.electricity.yaml - name: Upload artifacts uses: actions/upload-artifact@v3 diff --git a/Snakefile b/Snakefile index e495e7d3..55b90d5d 100644 --- a/Snakefile +++ b/Snakefile @@ -8,6 +8,7 @@ from pathlib import Path import yaml from snakemake.remote.HTTP import RemoteProvider as HTTPRemoteProvider from snakemake.utils import min_version +from scripts._helpers import path_provider min_version("7.7") HTTP = HTTPRemoteProvider() @@ -24,20 +25,23 @@ COSTS = f"data/costs_{config['costs']['year']}.csv" ATLITE_NPROCESSES = config["atlite"].get("nprocesses", 4) run = config["run"] -if run.get("scenarios", False): - if run["shared_resources"]: - raise ValueError("Cannot use shared resources with scenarios") - scenarios = yaml.safe_load(Path(config["scenariofile"]).read_text()) +scenario = run.get("scenario", {}) +if run["name"]: + if scenario.get("enable"): + fn = Path(scenario["file"]) + scenarios = yaml.safe_load(fn.read_text()) RDIR = "{run}/" -elif run["name"]: - RDIR = run["name"] + "/" else: RDIR = "" -CDIR = RDIR if not run.get("shared_cutouts") else "" +# for possibly shared resources +logs = path_provider("logs/", RDIR, run["shared_resources"]) +benchmarks 
= path_provider("benchmarks/", RDIR, run["shared_resources"]) +resources = path_provider("resources/", RDIR, run["shared_resources"]) + +CDIR = "" if run["shared_cutouts"] else RDIR LOGS = "logs/" + RDIR BENCHMARKS = "benchmarks/" + RDIR -RESOURCES = "resources/" + RDIR if not run.get("shared_resources") else "resources/" RESULTS = "results/" + RDIR @@ -86,9 +90,9 @@ rule dag: message: "Creating DAG of workflow." output: - dot=RESOURCES + "dag.dot", - pdf=RESOURCES + "dag.pdf", - png=RESOURCES + "dag.png", + dot=resources("dag.dot"), + pdf=resources("dag.pdf"), + png=resources("dag.png"), conda: "envs/environment.yaml" shell: diff --git a/config/config.default.yaml b/config/config.default.yaml index 5357db8d..238c3d41 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -5,7 +5,6 @@ # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#top-level-configuration version: 0.8.1 tutorial: false -scenariofile: config/scenarios.yaml logging: level: INFO @@ -22,7 +21,9 @@ remote: # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run run: name: "" - scenarios: false + scenarios: + enable: false + file: config/scenarios.yaml disable_progressbar: false shared_resources: false shared_cutouts: true diff --git a/config/scenarios.yaml b/config/scenarios.yaml index 37d32243..c493311f 100644 --- a/config/scenarios.yaml +++ b/config/scenarios.yaml @@ -10,3 +10,29 @@ # custom-scenario: # name of the scenario # electricity: # renewable_carriers: [wind, solar] # override the list of renewable carriers + + +network2013: + snapshots: + start: "2013-01-01" + end: "2014-01-01" + inclusive: 'left' + + +network2019: + snapshots: + start: "2019-01-01" + end: "2020-01-01" + inclusive: 'left' + + renewable: + onwind: + cutout: europe-2019-era5 + offwind-ac: + cutout: europe-2019-era5 + offwind-dc: + cutout: europe-2019-era5 + solar: + cutout: europe-2019-era5 + hydro: + cutout: europe-2019-era5 diff --git 
a/config/test/config.scenarios.electricity.yaml b/config/test/config.scenarios.electricity.yaml index 0e4ced04..63b1892b 100644 --- a/config/test/config.scenarios.electricity.yaml +++ b/config/test/config.scenarios.electricity.yaml @@ -3,15 +3,16 @@ # SPDX-License-Identifier: CC0-1.0 tutorial: true -scenariofile: "config/test/scenarios.electricity.yaml" run: name: - test-elec-no-offshore-wind - test-elec-no-onshore-wind - scenarios: true + scenario: + enable: true + file: "config/test/scenarios.electricity.yaml" disable_progressbar: true - shared_resources: false # cannot be true if scenarios is true + shared_resources: base shared_cutouts: true scenario: diff --git a/doc/configtables/run.csv b/doc/configtables/run.csv index 2d5cf5d9..3d8e4e8c 100644 --- a/doc/configtables/run.csv +++ b/doc/configtables/run.csv @@ -1,6 +1,8 @@ ,Unit,Values,Description -name,--,str/list,"Specify a name for your run. Results will be stored under this name. If ``scenarios`` is set to ``true``, the name must contain a subset of scenario names defined in ``scenariofile``." -scenarios,--,bool,"{true, false}","Switch to select whether workflow should generate scenarios based on ``scenariofile``." +name,--,str/list,"Specify a name for your run. Results will be stored under this name. If ``scenario: enable`` is set to ``true``, the name must contain a subset of scenario names defined in ``scenario: file``." +scenario,,, +-- enable,bool,"{true, false}","Switch to select whether workflow should generate scenarios based on ``file``." +-- file,str,,Path to the scenario yaml file. The scenario file contains config overrides for each scenario. In order to be taken account, ``run:scenarios`` has to be set to ``true`` and ``run:name`` has to be a subset of top level keys given in the scenario file. In order to automatically create a `scenario.yaml` file based on a combindation of settings, alter and use the ``create_scenarios.py`` script in ``scripts``. 
disable_progrssbar,bool,"{true, false}","Switch to select whether progressbar should be disabled." -shared_resources,bool,"{true, false}","Switch to select whether resources should be shared across runs." +shared_resources,bool/str,,"Switch to select whether resources should be shared across runs. If a string is passed, it is assumed to be a wildcard or 'base' that indicates the cutoff after which resources are no longer shared. If 'base' is passed, resources before creating the elec.nc file are shared." shared_cutouts,bool,"{true, false}","Switch to select whether cutouts should be shared across runs." diff --git a/doc/configtables/toplevel.csv b/doc/configtables/toplevel.csv index 8cbb3e56..67954389 100644 --- a/doc/configtables/toplevel.csv +++ b/doc/configtables/toplevel.csv @@ -1,7 +1,6 @@ ,Unit,Values,Description version,--,0.x.x,Version of PyPSA-Eur. Descriptive only. tutorial,bool,"{true, false}",Switch to retrieve the tutorial data set instead of the full data set. -scenariofile,str,,Path to the scenario yaml file. The scenario file contains config overrides for each scenario. In order to be taken account, ``run:scenarios`` has to be set to ``true`` and ``run:name`` has to be a subset of top level keys given in the scenario file. In order to automatically create a `scenario.yaml` file based on a combindation of settings, alter and use the ``create_scenarios.py`` script in ``scripts``. logging,,, -- level,--,"Any of {'INFO', 'WARNING', 'ERROR'}","Restrict console outputs to all infos, warning or errors only" -- format,--,,Custom format for log messages. See `LogRecord `_ attributes. 
diff --git a/rules/build_electricity.smk b/rules/build_electricity.smk index 5f44d2ef..be72be6d 100644 --- a/rules/build_electricity.smk +++ b/rules/build_electricity.smk @@ -8,7 +8,7 @@ if config["enable"].get("prepare_links_p_nom", False): output: "data/links_p_nom.csv", log: - LOGS + "prepare_links_p_nom.log", + logs("prepare_links_p_nom.log"), threads: 1 resources: mem_mb=1500, @@ -26,9 +26,9 @@ rule build_electricity_demand: input: ancient("data/electricity_demand.csv"), output: - RESOURCES + "load.csv", + resources("load.csv"), log: - LOGS + "build_electricity_demand.log", + logs("build_electricity_demand.log"), resources: mem_mb=5000, conda: @@ -43,12 +43,12 @@ rule build_powerplants: custom_powerplants=config_provider("electricity", "custom_powerplants"), countries=config_provider("countries"), input: - base_network=RESOURCES + "networks/base.nc", + base_network=resources("networks/base.nc"), custom_powerplants="data/custom_powerplants.csv", output: - RESOURCES + "powerplants.csv", + resources("powerplants.csv"), log: - LOGS + "build_powerplants.log", + logs("build_powerplants.log"), threads: 1 resources: mem_mb=5000, @@ -74,15 +74,15 @@ rule base_network: parameter_corrections="data/parameter_corrections.yaml", links_p_nom="data/links_p_nom.csv", links_tyndp="data/links_tyndp.csv", - country_shapes=RESOURCES + "country_shapes.geojson", - offshore_shapes=RESOURCES + "offshore_shapes.geojson", - europe_shape=RESOURCES + "europe_shape.geojson", + country_shapes=resources("country_shapes.geojson"), + offshore_shapes=resources("offshore_shapes.geojson"), + europe_shape=resources("europe_shape.geojson"), output: - RESOURCES + "networks/base.nc", + resources("networks/base.nc"), log: - LOGS + "base_network.log", + logs("base_network.log"), benchmark: - BENCHMARKS + "base_network" + benchmarks("base_network") threads: 1 resources: mem_mb=1500, @@ -104,12 +104,12 @@ rule build_shapes: ch_cantons=ancient("data/bundle/ch_cantons.csv"), 
ch_popgdp=ancient("data/bundle/je-e-21.03.02.xls"), output: - country_shapes=RESOURCES + "country_shapes.geojson", - offshore_shapes=RESOURCES + "offshore_shapes.geojson", - europe_shape=RESOURCES + "europe_shape.geojson", - nuts3_shapes=RESOURCES + "nuts3_shapes.geojson", + country_shapes=resources("country_shapes.geojson"), + offshore_shapes=resources("offshore_shapes.geojson"), + europe_shape=resources("europe_shape.geojson"), + nuts3_shapes=resources("nuts3_shapes.geojson"), log: - LOGS + "build_shapes.log", + logs("build_shapes.log"), threads: 1 resources: mem_mb=1500, @@ -123,14 +123,14 @@ rule build_bus_regions: params: countries=config_provider("countries"), input: - country_shapes=RESOURCES + "country_shapes.geojson", - offshore_shapes=RESOURCES + "offshore_shapes.geojson", - base_network=RESOURCES + "networks/base.nc", + country_shapes=resources("country_shapes.geojson"), + offshore_shapes=resources("offshore_shapes.geojson"), + base_network=resources("networks/base.nc"), output: - regions_onshore=RESOURCES + "regions_onshore.geojson", - regions_offshore=RESOURCES + "regions_offshore.geojson", + regions_onshore=resources("regions_onshore.geojson"), + regions_offshore=resources("regions_offshore.geojson"), log: - LOGS + "build_bus_regions.log", + logs("build_bus_regions.log"), threads: 1 resources: mem_mb=1000, @@ -147,8 +147,8 @@ if config["enable"].get("build_cutout", False): snapshots=config_provider("snapshots"), cutouts=config_provider("atlite", "cutouts"), input: - regions_onshore=RESOURCES + "regions_onshore.geojson", - regions_offshore=RESOURCES + "regions_offshore.geojson", + regions_onshore=resources("regions_onshore.geojson"), + regions_offshore=resources("regions_offshore.geojson"), output: protected("cutouts/" + CDIR + "{cutout}.nc"), log: @@ -171,11 +171,11 @@ if config["enable"].get("build_natura_raster", False): natura=ancient("data/bundle/natura/Natura2000_end2015.shp"), cutouts=expand("cutouts/" + CDIR + "{cutouts}.nc", 
**config["atlite"]), output: - RESOURCES + "natura.tiff", + resources("natura.tiff"), resources: mem_mb=5000, log: - LOGS + "build_natura_raster.log", + logs("build_natura_raster.log"), conda: "../envs/environment.yaml" script: @@ -193,13 +193,13 @@ rule build_ship_raster: ], ), output: - RESOURCES + "shipdensity_raster.tif", + resources("shipdensity_raster.tif"), log: - LOGS + "build_ship_raster.log", + logs("build_ship_raster.log"), resources: mem_mb=5000, benchmark: - BENCHMARKS + "build_ship_raster" + benchmarks("build_ship_raster") conda: "../envs/environment.yaml" script: @@ -210,10 +210,10 @@ rule build_renewable_profiles: params: renewable=config_provider("renewable"), input: - base_network=RESOURCES + "networks/base.nc", + base_network=resources("networks/base.nc"), corine=ancient("data/bundle/corine/g250_clc06_V18_5.tif"), natura=lambda w: ( - RESOURCES + "natura.tiff" + resources("natura.tiff") if config_provider("renewable", w.technology, "natura")(w) else [] ), @@ -225,27 +225,27 @@ rule build_renewable_profiles: ) ), ship_density=lambda w: ( - RESOURCES + "shipdensity_raster.tif" + resources("shipdensity_raster.tif") if "ship_threshold" in config_provider("renewable", w.technology)(w).keys() else [] ), - country_shapes=RESOURCES + "country_shapes.geojson", - offshore_shapes=RESOURCES + "offshore_shapes.geojson", + country_shapes=resources("country_shapes.geojson"), + offshore_shapes=resources("offshore_shapes.geojson"), regions=lambda w: ( - RESOURCES + "regions_onshore.geojson" + resources("regions_onshore.geojson") if w.technology in ("onwind", "solar") - else RESOURCES + "regions_offshore.geojson" + else resources("regions_offshore.geojson") ), cutout=lambda w: "cutouts/" + CDIR + config_provider("renewable", w.technology, "cutout")(w) + ".nc", output: - profile=RESOURCES + "profile_{technology}.nc", + profile=resources("profile_{technology}.nc"), log: - LOGS + "build_renewable_profile_{technology}.log", + 
logs("build_renewable_profile_{technology}.log"), benchmark: - BENCHMARKS + "build_renewable_profiles_{technology}" + benchmarks("build_renewable_profiles_{technology}") threads: ATLITE_NPROCESSES resources: mem_mb=ATLITE_NPROCESSES * 5000, @@ -262,10 +262,10 @@ rule build_monthly_prices: co2_price_raw="data/validation/emission-spot-primary-market-auction-report-2019-data.xls", fuel_price_raw="data/validation/energy-price-trends-xlsx-5619002.xlsx", output: - co2_price=RESOURCES + "co2_price.csv", - fuel_price=RESOURCES + "monthly_fuel_price.csv", + co2_price=resources("co2_price.csv"), + fuel_price=resources("monthly_fuel_price.csv"), log: - LOGS + "build_monthly_prices.log", + logs("build_monthly_prices.log"), threads: 1 resources: mem_mb=5000, @@ -280,13 +280,13 @@ rule build_hydro_profile: hydro=config_provider("renewable", "hydro"), countries=config_provider("countries"), input: - country_shapes=RESOURCES + "country_shapes.geojson", + country_shapes=resources("country_shapes.geojson"), eia_hydro_generation="data/eia_hydro_annual_generation.csv", cutout=f"cutouts/" + CDIR + config["renewable"]["hydro"]["cutout"] + ".nc", output: - RESOURCES + "profile_hydro.nc", + resources("profile_hydro.nc"), log: - LOGS + "build_hydro_profile.log", + logs("build_hydro_profile.log"), resources: mem_mb=5000, conda: @@ -299,17 +299,17 @@ if config["lines"]["dynamic_line_rating"]["activate"]: rule build_line_rating: input: - base_network=RESOURCES + "networks/base.nc", + base_network=resources("networks/base.nc"), cutout="cutouts/" + CDIR + config["lines"]["dynamic_line_rating"]["cutout"] + ".nc", output: - output=RESOURCES + "networks/line_rating.nc", + output=resources("networks/line_rating.nc"), log: - LOGS + "build_line_rating.log", + logs("build_line_rating.log"), benchmark: - BENCHMARKS + "build_line_rating" + benchmarks("build_line_rating") threads: ATLITE_NPROCESSES resources: mem_mb=ATLITE_NPROCESSES * 1000, @@ -330,7 +330,7 @@ rule add_electricity: 
costs=config_provider("costs"), input: **{ - f"profile_{tech}": RESOURCES + f"profile_{tech}.nc" + f"profile_{tech}": resources(f"profile_{tech}.nc") for tech in config["electricity"]["renewable_carriers"] }, **{ @@ -340,27 +340,27 @@ rule add_electricity: for attr, fn in d.items() if str(fn).startswith("data/") }, - base_network=RESOURCES + "networks/base.nc", - line_rating=RESOURCES + "networks/line_rating.nc" + base_network=resources("networks/base.nc"), + line_rating=resources("networks/line_rating.nc") if config["lines"]["dynamic_line_rating"]["activate"] - else RESOURCES + "networks/base.nc", + else resources("networks/base.nc"), tech_costs=COSTS, - regions=RESOURCES + "regions_onshore.geojson", - powerplants=RESOURCES + "powerplants.csv", + regions=resources("regions_onshore.geojson"), + powerplants=resources("powerplants.csv"), hydro_capacities=ancient("data/bundle/hydro_capacities.csv"), geth_hydro_capacities="data/geth2015_hydro_capacities.csv", unit_commitment="data/unit_commitment.csv", - fuel_price=RESOURCES + "monthly_fuel_price.csv" + fuel_price=resources("monthly_fuel_price.csv") if config["conventional"]["dynamic_fuel_price"] else [], - load=RESOURCES + "load.csv", - nuts3_shapes=RESOURCES + "nuts3_shapes.geojson", + load=resources("load.csv"), + nuts3_shapes=resources("nuts3_shapes.geojson"), output: - RESOURCES + "networks/elec.nc", + resources("networks/elec.nc"), log: - LOGS + "add_electricity.log", + logs("add_electricity.log"), benchmark: - BENCHMARKS + "add_electricity" + benchmarks("add_electricity") threads: 1 resources: mem_mb=10000, @@ -383,20 +383,20 @@ rule simplify_network: p_max_pu=config_provider("links", "p_max_pu", default=1.0), costs=config_provider("costs"), input: - network=RESOURCES + "networks/elec.nc", + network=resources("networks/elec.nc"), tech_costs=COSTS, - regions_onshore=RESOURCES + "regions_onshore.geojson", - regions_offshore=RESOURCES + "regions_offshore.geojson", + 
regions_onshore=resources("regions_onshore.geojson"), + regions_offshore=resources("regions_offshore.geojson"), output: - network=RESOURCES + "networks/elec_s{simpl}.nc", - regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}.geojson", - regions_offshore=RESOURCES + "regions_offshore_elec_s{simpl}.geojson", - busmap=RESOURCES + "busmap_elec_s{simpl}.csv", - connection_costs=RESOURCES + "connection_costs_s{simpl}.csv", + network=resources("networks/elec_s{simpl}.nc"), + regions_onshore=resources("regions_onshore_elec_s{simpl}.geojson"), + regions_offshore=resources("regions_offshore_elec_s{simpl}.geojson"), + busmap=resources("busmap_elec_s{simpl}.csv"), + connection_costs=resources("connection_costs_s{simpl}.csv"), log: - LOGS + "simplify_network/elec_s{simpl}.log", + logs("simplify_network/elec_s{simpl}.log"), benchmark: - BENCHMARKS + "simplify_network/elec_s{simpl}" + benchmarks("simplify_network/elec_s{simpl}") threads: 1 resources: mem_mb=12000, @@ -422,10 +422,10 @@ rule cluster_network: length_factor=config_provider("lines", "length_factor"), costs=config_provider("costs"), input: - network=RESOURCES + "networks/elec_s{simpl}.nc", - regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}.geojson", - regions_offshore=RESOURCES + "regions_offshore_elec_s{simpl}.geojson", - busmap=ancient(RESOURCES + "busmap_elec_s{simpl}.csv"), + network=resources("networks/elec_s{simpl}.nc"), + regions_onshore=resources("regions_onshore_elec_s{simpl}.geojson"), + regions_offshore=resources("regions_offshore_elec_s{simpl}.geojson"), + busmap=ancient(resources("busmap_elec_s{simpl}.csv")), custom_busmap=( "data/custom_busmap_elec_s{simpl}_{clusters}.csv" if config["enable"].get("custom_busmap", False) @@ -433,15 +433,15 @@ rule cluster_network: ), tech_costs=COSTS, output: - network=RESOURCES + "networks/elec_s{simpl}_{clusters}.nc", - regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", - regions_offshore=RESOURCES + 
"regions_offshore_elec_s{simpl}_{clusters}.geojson", - busmap=RESOURCES + "busmap_elec_s{simpl}_{clusters}.csv", - linemap=RESOURCES + "linemap_elec_s{simpl}_{clusters}.csv", + network=resources("networks/elec_s{simpl}_{clusters}.nc"), + regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), + regions_offshore=resources("regions_offshore_elec_s{simpl}_{clusters}.geojson"), + busmap=resources("busmap_elec_s{simpl}_{clusters}.csv"), + linemap=resources("linemap_elec_s{simpl}_{clusters}.csv"), log: - LOGS + "cluster_network/elec_s{simpl}_{clusters}.log", + logs("cluster_network/elec_s{simpl}_{clusters}.log"), benchmark: - BENCHMARKS + "cluster_network/elec_s{simpl}_{clusters}" + benchmarks("cluster_network/elec_s{simpl}_{clusters}") threads: 1 resources: mem_mb=10000, @@ -457,14 +457,14 @@ rule add_extra_components: max_hours=config_provider("electricity", "max_hours"), costs=config_provider("costs"), input: - network=RESOURCES + "networks/elec_s{simpl}_{clusters}.nc", + network=resources("networks/elec_s{simpl}_{clusters}.nc"), tech_costs=COSTS, output: - RESOURCES + "networks/elec_s{simpl}_{clusters}_ec.nc", + resources("networks/elec_s{simpl}_{clusters}_ec.nc"), log: - LOGS + "add_extra_components/elec_s{simpl}_{clusters}.log", + logs("add_extra_components/elec_s{simpl}_{clusters}.log"), benchmark: - BENCHMARKS + "add_extra_components/elec_s{simpl}_{clusters}_ec" + benchmarks("add_extra_components/elec_s{simpl}_{clusters}_ec") threads: 1 resources: mem_mb=4000, @@ -484,15 +484,15 @@ rule prepare_network: max_hours=config_provider("electricity", "max_hours"), costs=config_provider("costs"), input: - RESOURCES + "networks/elec_s{simpl}_{clusters}_ec.nc", + resources("networks/elec_s{simpl}_{clusters}_ec.nc"), tech_costs=COSTS, - co2_price=RESOURCES + "co2_price.csv", + co2_price=resources("co2_price.csv"), output: - RESOURCES + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", + 
resources("networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"), log: - LOGS + "prepare_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.log", + logs("prepare_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.log"), benchmark: - (BENCHMARKS + "prepare_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}") + (benchmarks("prepare_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}")) threads: 1 resources: mem_mb=4000, diff --git a/rules/build_sector.smk b/rules/build_sector.smk index cc80ab64..483dcdb7 100644 --- a/rules/build_sector.smk +++ b/rules/build_sector.smk @@ -5,19 +5,19 @@ rule build_population_layouts: input: - nuts3_shapes=RESOURCES + "nuts3_shapes.geojson", + nuts3_shapes=resources("nuts3_shapes.geojson"), urban_percent="data/urban_percent.csv", cutout="cutouts/" + CDIR + config["atlite"]["default_cutout"] + ".nc", output: - pop_layout_total=RESOURCES + "pop_layout_total.nc", - pop_layout_urban=RESOURCES + "pop_layout_urban.nc", - pop_layout_rural=RESOURCES + "pop_layout_rural.nc", + pop_layout_total=resources("pop_layout_total.nc"), + pop_layout_urban=resources("pop_layout_urban.nc"), + pop_layout_rural=resources("pop_layout_rural.nc"), log: - LOGS + "build_population_layouts.log", + logs("build_population_layouts.log"), resources: mem_mb=20000, benchmark: - BENCHMARKS + "build_population_layouts" + benchmarks("build_population_layouts") threads: 8 conda: "../envs/environment.yaml" @@ -27,19 +27,19 @@ rule build_population_layouts: rule build_clustered_population_layouts: input: - pop_layout_total=RESOURCES + "pop_layout_total.nc", - pop_layout_urban=RESOURCES + "pop_layout_urban.nc", - pop_layout_rural=RESOURCES + "pop_layout_rural.nc", - regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", + pop_layout_total=resources("pop_layout_total.nc"), + pop_layout_urban=resources("pop_layout_urban.nc"), + pop_layout_rural=resources("pop_layout_rural.nc"), + regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), 
cutout="cutouts/" + CDIR + config["atlite"]["default_cutout"] + ".nc", output: - clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv", + clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), log: - LOGS + "build_clustered_population_layouts_{simpl}_{clusters}.log", + logs("build_clustered_population_layouts_{simpl}_{clusters}.log"), resources: mem_mb=10000, benchmark: - BENCHMARKS + "build_clustered_population_layouts/s{simpl}_{clusters}" + benchmarks("build_clustered_population_layouts/s{simpl}_{clusters}") conda: "../envs/environment.yaml" script: @@ -48,19 +48,19 @@ rule build_clustered_population_layouts: rule build_simplified_population_layouts: input: - pop_layout_total=RESOURCES + "pop_layout_total.nc", - pop_layout_urban=RESOURCES + "pop_layout_urban.nc", - pop_layout_rural=RESOURCES + "pop_layout_rural.nc", - regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}.geojson", + pop_layout_total=resources("pop_layout_total.nc"), + pop_layout_urban=resources("pop_layout_urban.nc"), + pop_layout_rural=resources("pop_layout_rural.nc"), + regions_onshore=resources("regions_onshore_elec_s{simpl}.geojson"), cutout="cutouts/" + CDIR + config["atlite"]["default_cutout"] + ".nc", output: - clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}.csv", + clustered_pop_layout=resources("pop_layout_elec_s{simpl}.csv"), resources: mem_mb=10000, log: - LOGS + "build_simplified_population_layouts_{simpl}", + logs("build_simplified_population_layouts_{simpl}"), benchmark: - BENCHMARKS + "build_simplified_population_layouts/s{simpl}" + benchmarks("build_simplified_population_layouts/s{simpl}") conda: "../envs/environment.yaml" script: @@ -73,11 +73,11 @@ if config["sector"]["gas_network"] or config["sector"]["H2_retrofit"]: input: gas_network="data/gas_network/scigrid-gas/data/IGGIELGN_PipeSegments.geojson", output: - cleaned_gas_network=RESOURCES + "gas_network.csv", + cleaned_gas_network=resources("gas_network.csv"), 
resources: mem_mb=4000, log: - LOGS + "build_gas_network.log", + logs("build_gas_network.log"), conda: "../envs/environment.yaml" script: @@ -91,19 +91,21 @@ if config["sector"]["gas_network"] or config["sector"]["H2_retrofit"]: ), entry="data/gas_network/scigrid-gas/data/IGGIELGN_BorderPoints.geojson", production="data/gas_network/scigrid-gas/data/IGGIELGN_Productions.geojson", - regions_onshore=RESOURCES - + "regions_onshore_elec_s{simpl}_{clusters}.geojson", - regions_offshore=RESOURCES - + "regions_offshore_elec_s{simpl}_{clusters}.geojson", + regions_onshore=resources( + "regions_onshore_elec_s{simpl}_{clusters}.geojson" + ), + regions_offshore=resources( + "regions_offshore_elec_s{simpl}_{clusters}.geojson" + ), output: - gas_input_nodes=RESOURCES - + "gas_input_locations_s{simpl}_{clusters}.geojson", - gas_input_nodes_simplified=RESOURCES - + "gas_input_locations_s{simpl}_{clusters}_simplified.csv", + gas_input_nodes=resources("gas_input_locations_s{simpl}_{clusters}.geojson"), + gas_input_nodes_simplified=resources( + "gas_input_locations_s{simpl}_{clusters}_simplified.csv" + ), resources: mem_mb=2000, log: - LOGS + "build_gas_input_locations_s{simpl}_{clusters}.log", + logs("build_gas_input_locations_s{simpl}_{clusters}.log"), conda: "../envs/environment.yaml" script: @@ -111,17 +113,19 @@ if config["sector"]["gas_network"] or config["sector"]["H2_retrofit"]: rule cluster_gas_network: input: - cleaned_gas_network=RESOURCES + "gas_network.csv", - regions_onshore=RESOURCES - + "regions_onshore_elec_s{simpl}_{clusters}.geojson", - regions_offshore=RESOURCES - + "regions_offshore_elec_s{simpl}_{clusters}.geojson", + cleaned_gas_network=resources("gas_network.csv"), + regions_onshore=resources( + "regions_onshore_elec_s{simpl}_{clusters}.geojson" + ), + regions_offshore=resources( + "regions_offshore_elec_s{simpl}_{clusters}.geojson" + ), output: - clustered_gas_network=RESOURCES + "gas_network_elec_s{simpl}_{clusters}.csv", + 
clustered_gas_network=resources("gas_network_elec_s{simpl}_{clusters}.csv"), resources: mem_mb=4000, log: - LOGS + "cluster_gas_network_s{simpl}_{clusters}.log", + logs("cluster_gas_network_s{simpl}_{clusters}.log"), conda: "../envs/environment.yaml" script: @@ -143,18 +147,18 @@ rule build_heat_demands: params: snapshots=config_provider("snapshots"), input: - pop_layout=RESOURCES + "pop_layout_{scope}.nc", - regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", + pop_layout=resources("pop_layout_{scope}.nc"), + regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), cutout="cutouts/" + CDIR + config["atlite"]["default_cutout"] + ".nc", output: - heat_demand=RESOURCES + "heat_demand_{scope}_elec_s{simpl}_{clusters}.nc", + heat_demand=resources("heat_demand_{scope}_elec_s{simpl}_{clusters}.nc"), resources: mem_mb=20000, threads: 8 log: - LOGS + "build_heat_demands_{scope}_{simpl}_{clusters}.loc", + logs("build_heat_demands_{scope}_{simpl}_{clusters}.loc"), benchmark: - BENCHMARKS + "build_heat_demands/{scope}_s{simpl}_{clusters}" + benchmarks("build_heat_demands/{scope}_s{simpl}_{clusters}") conda: "../envs/environment.yaml" script: @@ -165,19 +169,19 @@ rule build_temperature_profiles: params: snapshots=config_provider("snapshots"), input: - pop_layout=RESOURCES + "pop_layout_{scope}.nc", - regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", + pop_layout=resources("pop_layout_{scope}.nc"), + regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), cutout="cutouts/" + CDIR + config["atlite"]["default_cutout"] + ".nc", output: - temp_soil=RESOURCES + "temp_soil_{scope}_elec_s{simpl}_{clusters}.nc", - temp_air=RESOURCES + "temp_air_{scope}_elec_s{simpl}_{clusters}.nc", + temp_soil=resources("temp_soil_{scope}_elec_s{simpl}_{clusters}.nc"), + temp_air=resources("temp_air_{scope}_elec_s{simpl}_{clusters}.nc"), resources: mem_mb=20000, threads: 8 log: - LOGS + 
"build_temperature_profiles_{scope}_{simpl}_{clusters}.log", + logs("build_temperature_profiles_{scope}_{simpl}_{clusters}.log"), benchmark: - BENCHMARKS + "build_temperature_profiles/{scope}_s{simpl}_{clusters}" + benchmarks("build_temperature_profiles/{scope}_s{simpl}_{clusters}") conda: "../envs/environment.yaml" script: @@ -188,25 +192,25 @@ rule build_cop_profiles: params: heat_pump_sink_T=config_provider("sector", "heat_pump_sink_T"), input: - temp_soil_total=RESOURCES + "temp_soil_total_elec_s{simpl}_{clusters}.nc", - temp_soil_rural=RESOURCES + "temp_soil_rural_elec_s{simpl}_{clusters}.nc", - temp_soil_urban=RESOURCES + "temp_soil_urban_elec_s{simpl}_{clusters}.nc", - temp_air_total=RESOURCES + "temp_air_total_elec_s{simpl}_{clusters}.nc", - temp_air_rural=RESOURCES + "temp_air_rural_elec_s{simpl}_{clusters}.nc", - temp_air_urban=RESOURCES + "temp_air_urban_elec_s{simpl}_{clusters}.nc", + temp_soil_total=resources("temp_soil_total_elec_s{simpl}_{clusters}.nc"), + temp_soil_rural=resources("temp_soil_rural_elec_s{simpl}_{clusters}.nc"), + temp_soil_urban=resources("temp_soil_urban_elec_s{simpl}_{clusters}.nc"), + temp_air_total=resources("temp_air_total_elec_s{simpl}_{clusters}.nc"), + temp_air_rural=resources("temp_air_rural_elec_s{simpl}_{clusters}.nc"), + temp_air_urban=resources("temp_air_urban_elec_s{simpl}_{clusters}.nc"), output: - cop_soil_total=RESOURCES + "cop_soil_total_elec_s{simpl}_{clusters}.nc", - cop_soil_rural=RESOURCES + "cop_soil_rural_elec_s{simpl}_{clusters}.nc", - cop_soil_urban=RESOURCES + "cop_soil_urban_elec_s{simpl}_{clusters}.nc", - cop_air_total=RESOURCES + "cop_air_total_elec_s{simpl}_{clusters}.nc", - cop_air_rural=RESOURCES + "cop_air_rural_elec_s{simpl}_{clusters}.nc", - cop_air_urban=RESOURCES + "cop_air_urban_elec_s{simpl}_{clusters}.nc", + cop_soil_total=resources("cop_soil_total_elec_s{simpl}_{clusters}.nc"), + cop_soil_rural=resources("cop_soil_rural_elec_s{simpl}_{clusters}.nc"), + 
cop_soil_urban=resources("cop_soil_urban_elec_s{simpl}_{clusters}.nc"), + cop_air_total=resources("cop_air_total_elec_s{simpl}_{clusters}.nc"), + cop_air_rural=resources("cop_air_rural_elec_s{simpl}_{clusters}.nc"), + cop_air_urban=resources("cop_air_urban_elec_s{simpl}_{clusters}.nc"), resources: mem_mb=20000, log: - LOGS + "build_cop_profiles_s{simpl}_{clusters}.log", + logs("build_cop_profiles_s{simpl}_{clusters}.log"), benchmark: - BENCHMARKS + "build_cop_profiles/s{simpl}_{clusters}" + benchmarks("build_cop_profiles/s{simpl}_{clusters}") conda: "../envs/environment.yaml" script: @@ -218,18 +222,18 @@ rule build_solar_thermal_profiles: snapshots=config_provider("snapshots"), solar_thermal=config_provider("solar_thermal"), input: - pop_layout=RESOURCES + "pop_layout_{scope}.nc", - regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", + pop_layout=resources("pop_layout_{scope}.nc"), + regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), cutout="cutouts/" + CDIR + config["atlite"]["default_cutout"] + ".nc", output: - solar_thermal=RESOURCES + "solar_thermal_{scope}_elec_s{simpl}_{clusters}.nc", + solar_thermal=resources("solar_thermal_{scope}_elec_s{simpl}_{clusters}.nc"), resources: mem_mb=20000, threads: 16 log: - LOGS + "build_solar_thermal_profiles_{scope}_s{simpl}_{clusters}.log", + logs("build_solar_thermal_profiles_{scope}_s{simpl}_{clusters}.log"), benchmark: - BENCHMARKS + "build_solar_thermal_profiles/{scope}_s{simpl}_{clusters}" + benchmarks("build_solar_thermal_profiles/{scope}_s{simpl}_{clusters}") conda: "../envs/environment.yaml" script: @@ -241,23 +245,23 @@ rule build_energy_totals: countries=config_provider("countries"), energy=config_provider("energy"), input: - nuts3_shapes=RESOURCES + "nuts3_shapes.geojson", + nuts3_shapes=resources("nuts3_shapes.geojson"), co2="data/eea/UNFCCC_v23.csv", swiss="data/switzerland-sfoe/switzerland-new_format.csv", idees="data/jrc-idees-2015", 
district_heat_share="data/district_heat_share.csv", eurostat=input_eurostat, output: - energy_name=RESOURCES + "energy_totals.csv", - co2_name=RESOURCES + "co2_totals.csv", - transport_name=RESOURCES + "transport_data.csv", + energy_name=resources("energy_totals.csv"), + co2_name=resources("co2_totals.csv"), + transport_name=resources("transport_data.csv"), threads: 16 resources: mem_mb=10000, log: - LOGS + "build_energy_totals.log", + logs("build_energy_totals.log"), benchmark: - BENCHMARKS + "build_energy_totals" + benchmarks("build_energy_totals") conda: "../envs/environment.yaml" script: @@ -273,22 +277,23 @@ rule build_biomass_potentials: keep_local=True, ), nuts2="data/nuts/NUTS_RG_10M_2013_4326_LEVL_2.geojson", # https://gisco-services.ec.europa.eu/distribution/v2/nuts/download/#nuts21 - regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", + regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), nuts3_population=ancient("data/bundle/nama_10r_3popgdp.tsv.gz"), swiss_cantons=ancient("data/bundle/ch_cantons.csv"), swiss_population=ancient("data/bundle/je-e-21.03.02.xls"), - country_shapes=RESOURCES + "country_shapes.geojson", + country_shapes=resources("country_shapes.geojson"), output: - biomass_potentials_all=RESOURCES - + "biomass_potentials_all_s{simpl}_{clusters}.csv", - biomass_potentials=RESOURCES + "biomass_potentials_s{simpl}_{clusters}.csv", + biomass_potentials_all=resources( + "biomass_potentials_all_s{simpl}_{clusters}.csv" + ), + biomass_potentials=resources("biomass_potentials_s{simpl}_{clusters}.csv"), threads: 1 resources: mem_mb=1000, log: - LOGS + "build_biomass_potentials_s{simpl}_{clusters}.log", + logs("build_biomass_potentials_s{simpl}_{clusters}.log"), benchmark: - BENCHMARKS + "build_biomass_potentials_s{simpl}_{clusters}" + benchmarks("build_biomass_potentials_s{simpl}_{clusters}") conda: "../envs/environment.yaml" script: @@ -304,14 +309,14 @@ if config["sector"]["biomass_transport"] 
or config["sector"]["biomass_spatial"]: keep_local=True, ), output: - biomass_transport_costs=RESOURCES + "biomass_transport_costs.csv", + biomass_transport_costs=resources("biomass_transport_costs.csv"), threads: 1 resources: mem_mb=1000, log: - LOGS + "build_biomass_transport_costs.log", + logs("build_biomass_transport_costs.log"), benchmark: - BENCHMARKS + "build_biomass_transport_costs" + benchmarks("build_biomass_transport_costs") conda: "../envs/environment.yaml" script: @@ -337,20 +342,23 @@ if config["sector"]["regional_co2_sequestration_potential"]["enable"]: "https://raw.githubusercontent.com/ericzhou571/Co2Storage/main/resources/complete_map_2020_unit_Mt.geojson", keep_local=True, ), - regions_onshore=RESOURCES - + "regions_onshore_elec_s{simpl}_{clusters}.geojson", - regions_offshore=RESOURCES - + "regions_offshore_elec_s{simpl}_{clusters}.geojson", + regions_onshore=resources( + "regions_onshore_elec_s{simpl}_{clusters}.geojson" + ), + regions_offshore=resources( + "regions_offshore_elec_s{simpl}_{clusters}.geojson" + ), output: - sequestration_potential=RESOURCES - + "co2_sequestration_potential_elec_s{simpl}_{clusters}.csv", + sequestration_potential=resources( + "co2_sequestration_potential_elec_s{simpl}_{clusters}.csv" + ), threads: 1 resources: mem_mb=4000, log: - LOGS + "build_sequestration_potentials_s{simpl}_{clusters}.log", + logs("build_sequestration_potentials_s{simpl}_{clusters}.log"), benchmark: - BENCHMARKS + "build_sequestration_potentials_s{simpl}_{clusters}" + benchmarks("build_sequestration_potentials_s{simpl}_{clusters}") conda: "../envs/environment.yaml" script: @@ -367,17 +375,17 @@ if not config["sector"]["regional_co2_sequestration_potential"]["enable"]: rule build_salt_cavern_potentials: input: salt_caverns="data/h2_salt_caverns_GWh_per_sqkm.geojson", - regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", - regions_offshore=RESOURCES + "regions_offshore_elec_s{simpl}_{clusters}.geojson", + 
regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), + regions_offshore=resources("regions_offshore_elec_s{simpl}_{clusters}.geojson"), output: - h2_cavern_potential=RESOURCES + "salt_cavern_potentials_s{simpl}_{clusters}.csv", + h2_cavern_potential=resources("salt_cavern_potentials_s{simpl}_{clusters}.csv"), threads: 1 resources: mem_mb=2000, log: - LOGS + "build_salt_cavern_potentials_s{simpl}_{clusters}.log", + logs("build_salt_cavern_potentials_s{simpl}_{clusters}.log"), benchmark: - BENCHMARKS + "build_salt_cavern_potentials_s{simpl}_{clusters}" + benchmarks("build_salt_cavern_potentials_s{simpl}_{clusters}") conda: "../envs/environment.yaml" script: @@ -390,14 +398,14 @@ rule build_ammonia_production: input: usgs="data/myb1-2017-nitro.xls", output: - ammonia_production=RESOURCES + "ammonia_production.csv", + ammonia_production=resources("ammonia_production.csv"), threads: 1 resources: mem_mb=1000, log: - LOGS + "build_ammonia_production.log", + logs("build_ammonia_production.log"), benchmark: - BENCHMARKS + "build_ammonia_production" + benchmarks("build_ammonia_production") conda: "../envs/environment.yaml" script: @@ -409,17 +417,17 @@ rule build_industry_sector_ratios: industry=config_provider("industry"), ammonia=config_provider("sector", "ammonia", default=False), input: - ammonia_production=RESOURCES + "ammonia_production.csv", + ammonia_production=resources("ammonia_production.csv"), idees="data/jrc-idees-2015", output: - industry_sector_ratios=RESOURCES + "industry_sector_ratios.csv", + industry_sector_ratios=resources("industry_sector_ratios.csv"), threads: 1 resources: mem_mb=1000, log: - LOGS + "build_industry_sector_ratios.log", + logs("build_industry_sector_ratios.log"), benchmark: - BENCHMARKS + "build_industry_sector_ratios" + benchmarks("build_industry_sector_ratios") conda: "../envs/environment.yaml" script: @@ -431,19 +439,20 @@ rule build_industrial_production_per_country: industry=config_provider("industry"), 
countries=config_provider("countries"), input: - ammonia_production=RESOURCES + "ammonia_production.csv", + ammonia_production=resources("ammonia_production.csv"), jrc="data/jrc-idees-2015", eurostat="data/eurostat-energy_balances-may_2018_edition", output: - industrial_production_per_country=RESOURCES - + "industrial_production_per_country.csv", + industrial_production_per_country=resources( + "industrial_production_per_country.csv" + ), threads: 8 resources: mem_mb=1000, log: - LOGS + "build_industrial_production_per_country.log", + logs("build_industrial_production_per_country.log"), benchmark: - BENCHMARKS + "build_industrial_production_per_country" + benchmarks("build_industrial_production_per_country") conda: "../envs/environment.yaml" script: @@ -454,21 +463,23 @@ rule build_industrial_production_per_country_tomorrow: params: industry=config_provider("industry"), input: - industrial_production_per_country=RESOURCES - + "industrial_production_per_country.csv", + industrial_production_per_country=resources( + "industrial_production_per_country.csv" + ), output: - industrial_production_per_country_tomorrow=RESOURCES - + "industrial_production_per_country_tomorrow_{planning_horizons}.csv", + industrial_production_per_country_tomorrow=resources( + "industrial_production_per_country_tomorrow_{planning_horizons}.csv" + ), threads: 1 resources: mem_mb=1000, log: - LOGS - + "build_industrial_production_per_country_tomorrow_{planning_horizons}.log", + logs("build_industrial_production_per_country_tomorrow_{planning_horizons}.log"), benchmark: ( - BENCHMARKS - + "build_industrial_production_per_country_tomorrow_{planning_horizons}" + benchmarks( + "build_industrial_production_per_country_tomorrow_{planning_horizons}" + ) ) conda: "../envs/environment.yaml" @@ -483,19 +494,20 @@ rule build_industrial_distribution_key: ), countries=config_provider("countries"), input: - regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", - 
clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv", + regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), + clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), hotmaps_industrial_database="data/Industrial_Database.csv", output: - industrial_distribution_key=RESOURCES - + "industrial_distribution_key_elec_s{simpl}_{clusters}.csv", + industrial_distribution_key=resources( + "industrial_distribution_key_elec_s{simpl}_{clusters}.csv" + ), threads: 1 resources: mem_mb=1000, log: - LOGS + "build_industrial_distribution_key_s{simpl}_{clusters}.log", + logs("build_industrial_distribution_key_s{simpl}_{clusters}.log"), benchmark: - BENCHMARKS + "build_industrial_distribution_key/s{simpl}_{clusters}" + benchmarks("build_industrial_distribution_key/s{simpl}_{clusters}") conda: "../envs/environment.yaml" script: @@ -504,23 +516,28 @@ rule build_industrial_distribution_key: rule build_industrial_production_per_node: input: - industrial_distribution_key=RESOURCES - + "industrial_distribution_key_elec_s{simpl}_{clusters}.csv", - industrial_production_per_country_tomorrow=RESOURCES - + "industrial_production_per_country_tomorrow_{planning_horizons}.csv", + industrial_distribution_key=resources( + "industrial_distribution_key_elec_s{simpl}_{clusters}.csv" + ), + industrial_production_per_country_tomorrow=resources( + "industrial_production_per_country_tomorrow_{planning_horizons}.csv" + ), output: - industrial_production_per_node=RESOURCES - + "industrial_production_elec_s{simpl}_{clusters}_{planning_horizons}.csv", + industrial_production_per_node=resources( + "industrial_production_elec_s{simpl}_{clusters}_{planning_horizons}.csv" + ), threads: 1 resources: mem_mb=1000, log: - LOGS - + "build_industrial_production_per_node_s{simpl}_{clusters}_{planning_horizons}.log", + logs( + "build_industrial_production_per_node_s{simpl}_{clusters}_{planning_horizons}.log" + ), benchmark: ( - BENCHMARKS - + 
"build_industrial_production_per_node/s{simpl}_{clusters}_{planning_horizons}" + benchmarks( + "build_industrial_production_per_node/s{simpl}_{clusters}_{planning_horizons}" + ) ) conda: "../envs/environment.yaml" @@ -530,24 +547,29 @@ rule build_industrial_production_per_node: rule build_industrial_energy_demand_per_node: input: - industry_sector_ratios=RESOURCES + "industry_sector_ratios.csv", - industrial_production_per_node=RESOURCES - + "industrial_production_elec_s{simpl}_{clusters}_{planning_horizons}.csv", - industrial_energy_demand_per_node_today=RESOURCES - + "industrial_energy_demand_today_elec_s{simpl}_{clusters}.csv", + industry_sector_ratios=resources("industry_sector_ratios.csv"), + industrial_production_per_node=resources( + "industrial_production_elec_s{simpl}_{clusters}_{planning_horizons}.csv" + ), + industrial_energy_demand_per_node_today=resources( + "industrial_energy_demand_today_elec_s{simpl}_{clusters}.csv" + ), output: - industrial_energy_demand_per_node=RESOURCES - + "industrial_energy_demand_elec_s{simpl}_{clusters}_{planning_horizons}.csv", + industrial_energy_demand_per_node=resources( + "industrial_energy_demand_elec_s{simpl}_{clusters}_{planning_horizons}.csv" + ), threads: 1 resources: mem_mb=1000, log: - LOGS - + "build_industrial_energy_demand_per_node_s{simpl}_{clusters}_{planning_horizons}.log", + logs( + "build_industrial_energy_demand_per_node_s{simpl}_{clusters}_{planning_horizons}.log" + ), benchmark: ( - BENCHMARKS - + "build_industrial_energy_demand_per_node/s{simpl}_{clusters}_{planning_horizons}" + benchmarks( + "build_industrial_energy_demand_per_node/s{simpl}_{clusters}_{planning_horizons}" + ) ) conda: "../envs/environment.yaml" @@ -561,19 +583,21 @@ rule build_industrial_energy_demand_per_country_today: industry=config_provider("industry"), input: jrc="data/jrc-idees-2015", - ammonia_production=RESOURCES + "ammonia_production.csv", - industrial_production_per_country=RESOURCES - + 
"industrial_production_per_country.csv", + ammonia_production=resources("ammonia_production.csv"), + industrial_production_per_country=resources( + "industrial_production_per_country.csv" + ), output: - industrial_energy_demand_per_country_today=RESOURCES - + "industrial_energy_demand_per_country_today.csv", + industrial_energy_demand_per_country_today=resources( + "industrial_energy_demand_per_country_today.csv" + ), threads: 8 resources: mem_mb=1000, log: - LOGS + "build_industrial_energy_demand_per_country_today.log", + logs("build_industrial_energy_demand_per_country_today.log"), benchmark: - BENCHMARKS + "build_industrial_energy_demand_per_country_today" + benchmarks("build_industrial_energy_demand_per_country_today") conda: "../envs/environment.yaml" script: @@ -582,20 +606,23 @@ rule build_industrial_energy_demand_per_country_today: rule build_industrial_energy_demand_per_node_today: input: - industrial_distribution_key=RESOURCES - + "industrial_distribution_key_elec_s{simpl}_{clusters}.csv", - industrial_energy_demand_per_country_today=RESOURCES - + "industrial_energy_demand_per_country_today.csv", + industrial_distribution_key=resources( + "industrial_distribution_key_elec_s{simpl}_{clusters}.csv" + ), + industrial_energy_demand_per_country_today=resources( + "industrial_energy_demand_per_country_today.csv" + ), output: - industrial_energy_demand_per_node_today=RESOURCES - + "industrial_energy_demand_today_elec_s{simpl}_{clusters}.csv", + industrial_energy_demand_per_node_today=resources( + "industrial_energy_demand_today_elec_s{simpl}_{clusters}.csv" + ), threads: 1 resources: mem_mb=1000, log: - LOGS + "build_industrial_energy_demand_per_node_today_s{simpl}_{clusters}.log", + logs("build_industrial_energy_demand_per_node_today_s{simpl}_{clusters}.log"), benchmark: - BENCHMARKS + "build_industrial_energy_demand_per_node_today/s{simpl}_{clusters}" + benchmarks("build_industrial_energy_demand_per_node_today/s{simpl}_{clusters}") conda: 
"../envs/environment.yaml" script: @@ -611,23 +638,23 @@ if config["sector"]["retrofitting"]["retro_endogen"]: input: building_stock="data/retro/data_building_stock.csv", data_tabula="data/retro/tabula-calculator-calcsetbuilding.csv", - air_temperature=RESOURCES + "temp_air_total_elec_s{simpl}_{clusters}.nc", + air_temperature=resources("temp_air_total_elec_s{simpl}_{clusters}.nc"), u_values_PL="data/retro/u_values_poland.csv", tax_w="data/retro/electricity_taxes_eu.csv", construction_index="data/retro/comparative_level_investment.csv", floor_area_missing="data/retro/floor_area_missing.csv", - clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv", + clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), cost_germany="data/retro/retro_cost_germany.csv", window_assumptions="data/retro/window_assumptions.csv", output: - retro_cost=RESOURCES + "retro_cost_elec_s{simpl}_{clusters}.csv", - floor_area=RESOURCES + "floor_area_elec_s{simpl}_{clusters}.csv", + retro_cost=resources("retro_cost_elec_s{simpl}_{clusters}.csv"), + floor_area=resources("floor_area_elec_s{simpl}_{clusters}.csv"), resources: mem_mb=1000, log: - LOGS + "build_retro_cost_s{simpl}_{clusters}.log", + logs("build_retro_cost_s{simpl}_{clusters}.log"), benchmark: - BENCHMARKS + "build_retro_cost/s{simpl}_{clusters}" + benchmarks("build_retro_cost/s{simpl}_{clusters}") conda: "../envs/environment.yaml" script: @@ -643,15 +670,15 @@ if not config["sector"]["retrofitting"]["retro_endogen"]: rule build_population_weighted_energy_totals: input: - energy_totals=RESOURCES + "energy_totals.csv", - clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv", + energy_totals=resources("energy_totals.csv"), + clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), output: - RESOURCES + "pop_weighted_energy_totals_s{simpl}_{clusters}.csv", + resources("pop_weighted_energy_totals_s{simpl}_{clusters}.csv"), threads: 1 resources: mem_mb=2000, log: 
- LOGS + "build_population_weighted_energy_totals_s{simpl}_{clusters}.log", + logs("build_population_weighted_energy_totals_s{simpl}_{clusters}.log"), conda: "../envs/environment.yaml" script: @@ -661,16 +688,16 @@ rule build_population_weighted_energy_totals: rule build_shipping_demand: input: ports="data/attributed_ports.json", - scope=RESOURCES + "europe_shape.geojson", - regions=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", - demand=RESOURCES + "energy_totals.csv", + scope=resources("europe_shape.geojson"), + regions=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), + demand=resources("energy_totals.csv"), output: - RESOURCES + "shipping_demand_s{simpl}_{clusters}.csv", + resources("shipping_demand_s{simpl}_{clusters}.csv"), threads: 1 resources: mem_mb=2000, log: - LOGS + "build_shipping_demand_s{simpl}_{clusters}.log", + logs("build_shipping_demand_s{simpl}_{clusters}.log"), conda: "../envs/environment.yaml" script: @@ -682,23 +709,24 @@ rule build_transport_demand: snapshots=config_provider("snapshots"), sector=config_provider("sector"), input: - clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv", - pop_weighted_energy_totals=RESOURCES - + "pop_weighted_energy_totals_s{simpl}_{clusters}.csv", - transport_data=RESOURCES + "transport_data.csv", + clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), + pop_weighted_energy_totals=resources( + "pop_weighted_energy_totals_s{simpl}_{clusters}.csv" + ), + transport_data=resources("transport_data.csv"), traffic_data_KFZ="data/emobility/KFZ__count", traffic_data_Pkw="data/emobility/Pkw__count", - temp_air_total=RESOURCES + "temp_air_total_elec_s{simpl}_{clusters}.nc", + temp_air_total=resources("temp_air_total_elec_s{simpl}_{clusters}.nc"), output: - transport_demand=RESOURCES + "transport_demand_s{simpl}_{clusters}.csv", - transport_data=RESOURCES + "transport_data_s{simpl}_{clusters}.csv", - avail_profile=RESOURCES + 
"avail_profile_s{simpl}_{clusters}.csv", - dsm_profile=RESOURCES + "dsm_profile_s{simpl}_{clusters}.csv", + transport_demand=resources("transport_demand_s{simpl}_{clusters}.csv"), + transport_data=resources("transport_data_s{simpl}_{clusters}.csv"), + avail_profile=resources("avail_profile_s{simpl}_{clusters}.csv"), + dsm_profile=resources("dsm_profile_s{simpl}_{clusters}.csv"), threads: 1 resources: mem_mb=2000, log: - LOGS + "build_transport_demand_s{simpl}_{clusters}.log", + logs("build_transport_demand_s{simpl}_{clusters}.log"), conda: "../envs/environment.yaml" script: @@ -727,57 +755,62 @@ rule prepare_sector_network: **build_biomass_transport_costs_output, **gas_infrastructure, **build_sequestration_potentials_output, - network=RESOURCES + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", - energy_totals_name=RESOURCES + "energy_totals.csv", + network=resources("networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"), + energy_totals_name=resources("energy_totals.csv"), eurostat=input_eurostat, - pop_weighted_energy_totals=RESOURCES - + "pop_weighted_energy_totals_s{simpl}_{clusters}.csv", - shipping_demand=RESOURCES + "shipping_demand_s{simpl}_{clusters}.csv", - transport_demand=RESOURCES + "transport_demand_s{simpl}_{clusters}.csv", - transport_data=RESOURCES + "transport_data_s{simpl}_{clusters}.csv", - avail_profile=RESOURCES + "avail_profile_s{simpl}_{clusters}.csv", - dsm_profile=RESOURCES + "dsm_profile_s{simpl}_{clusters}.csv", - co2_totals_name=RESOURCES + "co2_totals.csv", + pop_weighted_energy_totals=resources( + "pop_weighted_energy_totals_s{simpl}_{clusters}.csv" + ), + shipping_demand=resources("shipping_demand_s{simpl}_{clusters}.csv"), + transport_demand=resources("transport_demand_s{simpl}_{clusters}.csv"), + transport_data=resources("transport_data_s{simpl}_{clusters}.csv"), + avail_profile=resources("avail_profile_s{simpl}_{clusters}.csv"), + dsm_profile=resources("dsm_profile_s{simpl}_{clusters}.csv"), + 
co2_totals_name=resources("co2_totals.csv"), co2="data/eea/UNFCCC_v23.csv", - biomass_potentials=RESOURCES + "biomass_potentials_s{simpl}_{clusters}.csv", + biomass_potentials=resources("biomass_potentials_s{simpl}_{clusters}.csv"), heat_profile="data/heat_load_profile_BDEW.csv", costs="data/costs_{}.csv".format(config["costs"]["year"]) if config["foresight"] == "overnight" else "data/costs_{planning_horizons}.csv", - profile_offwind_ac=RESOURCES + "profile_offwind-ac.nc", - profile_offwind_dc=RESOURCES + "profile_offwind-dc.nc", - h2_cavern=RESOURCES + "salt_cavern_potentials_s{simpl}_{clusters}.csv", - busmap_s=RESOURCES + "busmap_elec_s{simpl}.csv", - busmap=RESOURCES + "busmap_elec_s{simpl}_{clusters}.csv", - clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv", - simplified_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}.csv", - industrial_demand=RESOURCES - + "industrial_energy_demand_elec_s{simpl}_{clusters}_{planning_horizons}.csv", - heat_demand_urban=RESOURCES + "heat_demand_urban_elec_s{simpl}_{clusters}.nc", - heat_demand_rural=RESOURCES + "heat_demand_rural_elec_s{simpl}_{clusters}.nc", - heat_demand_total=RESOURCES + "heat_demand_total_elec_s{simpl}_{clusters}.nc", - temp_soil_total=RESOURCES + "temp_soil_total_elec_s{simpl}_{clusters}.nc", - temp_soil_rural=RESOURCES + "temp_soil_rural_elec_s{simpl}_{clusters}.nc", - temp_soil_urban=RESOURCES + "temp_soil_urban_elec_s{simpl}_{clusters}.nc", - temp_air_total=RESOURCES + "temp_air_total_elec_s{simpl}_{clusters}.nc", - temp_air_rural=RESOURCES + "temp_air_rural_elec_s{simpl}_{clusters}.nc", - temp_air_urban=RESOURCES + "temp_air_urban_elec_s{simpl}_{clusters}.nc", - cop_soil_total=RESOURCES + "cop_soil_total_elec_s{simpl}_{clusters}.nc", - cop_soil_rural=RESOURCES + "cop_soil_rural_elec_s{simpl}_{clusters}.nc", - cop_soil_urban=RESOURCES + "cop_soil_urban_elec_s{simpl}_{clusters}.nc", - cop_air_total=RESOURCES + "cop_air_total_elec_s{simpl}_{clusters}.nc", - 
cop_air_rural=RESOURCES + "cop_air_rural_elec_s{simpl}_{clusters}.nc", - cop_air_urban=RESOURCES + "cop_air_urban_elec_s{simpl}_{clusters}.nc", - solar_thermal_total=RESOURCES - + "solar_thermal_total_elec_s{simpl}_{clusters}.nc" + profile_offwind_ac=resources("profile_offwind-ac.nc"), + profile_offwind_dc=resources("profile_offwind-dc.nc"), + h2_cavern=resources("salt_cavern_potentials_s{simpl}_{clusters}.csv"), + busmap_s=resources("busmap_elec_s{simpl}.csv"), + busmap=resources("busmap_elec_s{simpl}_{clusters}.csv"), + clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), + simplified_pop_layout=resources("pop_layout_elec_s{simpl}.csv"), + industrial_demand=resources( + "industrial_energy_demand_elec_s{simpl}_{clusters}_{planning_horizons}.csv" + ), + heat_demand_urban=resources("heat_demand_urban_elec_s{simpl}_{clusters}.nc"), + heat_demand_rural=resources("heat_demand_rural_elec_s{simpl}_{clusters}.nc"), + heat_demand_total=resources("heat_demand_total_elec_s{simpl}_{clusters}.nc"), + temp_soil_total=resources("temp_soil_total_elec_s{simpl}_{clusters}.nc"), + temp_soil_rural=resources("temp_soil_rural_elec_s{simpl}_{clusters}.nc"), + temp_soil_urban=resources("temp_soil_urban_elec_s{simpl}_{clusters}.nc"), + temp_air_total=resources("temp_air_total_elec_s{simpl}_{clusters}.nc"), + temp_air_rural=resources("temp_air_rural_elec_s{simpl}_{clusters}.nc"), + temp_air_urban=resources("temp_air_urban_elec_s{simpl}_{clusters}.nc"), + cop_soil_total=resources("cop_soil_total_elec_s{simpl}_{clusters}.nc"), + cop_soil_rural=resources("cop_soil_rural_elec_s{simpl}_{clusters}.nc"), + cop_soil_urban=resources("cop_soil_urban_elec_s{simpl}_{clusters}.nc"), + cop_air_total=resources("cop_air_total_elec_s{simpl}_{clusters}.nc"), + cop_air_rural=resources("cop_air_rural_elec_s{simpl}_{clusters}.nc"), + cop_air_urban=resources("cop_air_urban_elec_s{simpl}_{clusters}.nc"), + solar_thermal_total=resources( + "solar_thermal_total_elec_s{simpl}_{clusters}.nc" + 
) if config["sector"]["solar_thermal"] else [], - solar_thermal_urban=RESOURCES - + "solar_thermal_urban_elec_s{simpl}_{clusters}.nc" + solar_thermal_urban=resources( + "solar_thermal_urban_elec_s{simpl}_{clusters}.nc" + ) if config["sector"]["solar_thermal"] else [], - solar_thermal_rural=RESOURCES - + "solar_thermal_rural_elec_s{simpl}_{clusters}.nc" + solar_thermal_rural=resources( + "solar_thermal_rural_elec_s{simpl}_{clusters}.nc" + ) if config["sector"]["solar_thermal"] else [], output: diff --git a/rules/collect.smk b/rules/collect.smk index 70dc4641..e0f19a4c 100644 --- a/rules/collect.smk +++ b/rules/collect.smk @@ -23,7 +23,7 @@ rule all: rule cluster_networks: input: expand( - RESOURCES + "networks/elec_s{simpl}_{clusters}.nc", + resources("networks/elec_s{simpl}_{clusters}.nc"), **config["scenario"], run=config["run"]["name"] ), @@ -32,7 +32,7 @@ rule cluster_networks: rule extra_components_networks: input: expand( - RESOURCES + "networks/elec_s{simpl}_{clusters}_ec.nc", + resources("networks/elec_s{simpl}_{clusters}_ec.nc"), **config["scenario"], run=config["run"]["name"] ), @@ -41,7 +41,7 @@ rule extra_components_networks: rule prepare_elec_networks: input: expand( - RESOURCES + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", + resources("networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"), **config["scenario"], run=config["run"]["name"] ), diff --git a/rules/common.smk b/rules/common.smk index f24301c8..0467d560 100644 --- a/rules/common.smk +++ b/rules/common.smk @@ -40,10 +40,12 @@ def static_getter(wildcards, keys, default): def dynamic_getter(wildcards, keys, default): """Getter function for dynamic config values based on scenario.""" + if "run" not in wildcards: + return get_config(config, keys, default) scenario_name = wildcards.run if scenario_name not in scenarios: raise ValueError( - f"Scenario {scenario_name} not found in file {config['scenariofile']}." 
+ f"Scenario {scenario_name} not found in file {config['run']['scenario']['file']}." ) return get_config(scenario_config(scenario_name), keys, default) diff --git a/rules/postprocess.smk b/rules/postprocess.smk index 4c833b4f..e09103cc 100644 --- a/rules/postprocess.smk +++ b/rules/postprocess.smk @@ -14,7 +14,7 @@ rule plot_network: input: network=RESULTS + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", - regions=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", + regions=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), output: map=RESULTS + "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf", diff --git a/rules/retrieve.smk b/rules/retrieve.smk index 34e2eb7c..1c454633 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -92,9 +92,9 @@ if config["enable"]["retrieve"] and config["enable"].get( static=True, ), output: - RESOURCES + "natura.tiff", + resources("natura.tiff"), log: - LOGS + "retrieve_natura_raster.log", + logs("retrieve_natura_raster.log"), resources: mem_mb=5000, retries: 2 diff --git a/rules/solve_electricity.smk b/rules/solve_electricity.smk index c396ebd5..cfdb1da0 100644 --- a/rules/solve_electricity.smk +++ b/rules/solve_electricity.smk @@ -12,7 +12,7 @@ rule solve_network: "co2_sequestration_potential", 200 ), input: - network=RESOURCES + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", + network=resources("networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"), config=RESULTS + "config.yaml", output: network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", diff --git a/rules/solve_myopic.smk b/rules/solve_myopic.smk index 8a93d24a..214733b7 100644 --- a/rules/solve_myopic.smk +++ b/rules/solve_myopic.smk @@ -12,13 +12,13 @@ rule add_existing_baseyear: input: network=RESULTS + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", - powerplants=RESOURCES + 
"powerplants.csv", - busmap_s=RESOURCES + "busmap_elec_s{simpl}.csv", - busmap=RESOURCES + "busmap_elec_s{simpl}_{clusters}.csv", - clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv", + powerplants=resources("powerplants.csv"), + busmap_s=resources("busmap_elec_s{simpl}.csv"), + busmap=resources("busmap_elec_s{simpl}_{clusters}.csv"), + clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), costs="data/costs_{}.csv".format(config["scenario"]["planning_horizons"][0]), - cop_soil_total=RESOURCES + "cop_soil_total_elec_s{simpl}_{clusters}.nc", - cop_air_total=RESOURCES + "cop_air_total_elec_s{simpl}_{clusters}.nc", + cop_soil_total=resources("cop_soil_total_elec_s{simpl}_{clusters}.nc"), + cop_air_total=resources("cop_air_total_elec_s{simpl}_{clusters}.nc"), existing_heating="data/existing_infrastructure/existing_heating_raw.csv", existing_solar="data/existing_infrastructure/solar_capacity_IRENA.csv", existing_onwind="data/existing_infrastructure/onwind_capacity_IRENA.csv", @@ -55,8 +55,8 @@ rule add_brownfield: + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", network_p=solved_previous_horizon, #solved network at previous time step costs="data/costs_{planning_horizons}.csv", - cop_soil_total=RESOURCES + "cop_soil_total_elec_s{simpl}_{clusters}.nc", - cop_air_total=RESOURCES + "cop_air_total_elec_s{simpl}_{clusters}.nc", + cop_soil_total=resources("cop_soil_total_elec_s{simpl}_{clusters}.nc"), + cop_air_total=resources("cop_air_total_elec_s{simpl}_{clusters}.nc"), output: RESULTS + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", diff --git a/rules/solve_overnight.smk b/rules/solve_overnight.smk index c7700760..d8476868 100644 --- a/rules/solve_overnight.smk +++ b/rules/solve_overnight.smk @@ -31,8 +31,7 @@ rule solve_sector_network: walltime=config["solving"].get("walltime", "12:00:00"), benchmark: ( - RESULTS - + BENCHMARKS + 
BENCHMARKS + "solve_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" ) conda: diff --git a/rules/validate.smk b/rules/validate.smk index cfb8c959..09344673 100644 --- a/rules/validate.smk +++ b/rules/validate.smk @@ -20,9 +20,9 @@ rule build_electricity_production: snapshots=config["snapshots"], countries=config["countries"], output: - RESOURCES + "historical_electricity_production.csv", + resources("historical_electricity_production.csv"), log: - LOGS + "build_electricity_production.log", + logs("build_electricity_production.log"), resources: mem_mb=5000, script: @@ -38,11 +38,11 @@ rule build_cross_border_flows: snapshots=config["snapshots"], countries=config["countries"], input: - network=RESOURCES + "networks/base.nc", + network=resources("networks/base.nc"), output: - RESOURCES + "historical_cross_border_flows.csv", + resources("historical_cross_border_flows.csv"), log: - LOGS + "build_cross_border_flows.log", + logs("build_cross_border_flows.log"), resources: mem_mb=5000, script: @@ -58,9 +58,9 @@ rule build_electricity_prices: snapshots=config["snapshots"], countries=config["countries"], output: - RESOURCES + "historical_electricity_prices.csv", + resources("historical_electricity_prices.csv"), log: - LOGS + "build_electricity_prices.log", + logs("build_electricity_prices.log"), resources: mem_mb=5000, script: @@ -70,7 +70,7 @@ rule build_electricity_prices: rule plot_validation_electricity_production: input: network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", - electricity_production=RESOURCES + "historical_electricity_production.csv", + electricity_production=resources("historical_electricity_production.csv"), output: **{ plot: RESULTS @@ -88,7 +88,7 @@ rule plot_validation_cross_border_flows: countries=config["countries"], input: network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", - cross_border_flows=RESOURCES + "historical_cross_border_flows.csv", + 
cross_border_flows=resources("historical_cross_border_flows.csv"), output: **{ plot: RESULTS @@ -104,7 +104,7 @@ rule plot_validation_cross_border_flows: rule plot_validation_electricity_prices: input: network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", - electricity_prices=RESOURCES + "historical_electricity_prices.csv", + electricity_prices=resources("historical_electricity_prices.csv"), output: **{ plot: RESULTS diff --git a/scripts/__init__.py b/scripts/__init__.py new file mode 100644 index 00000000..fc781c2f --- /dev/null +++ b/scripts/__init__.py @@ -0,0 +1,4 @@ +# -*- coding: utf-8 -*- +# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors +# +# SPDX-License-Identifier: MIT diff --git a/scripts/_helpers.py b/scripts/_helpers.py index 3951be18..c166c61c 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -6,14 +6,13 @@ import contextlib import logging import os +import re import urllib from pathlib import Path import pandas as pd import pytz import yaml -from pypsa.components import component_attrs, components -from pypsa.descriptors import Dict from snakemake.utils import update_config from tqdm import tqdm @@ -22,6 +21,43 @@ logger = logging.getLogger(__name__) REGION_COLS = ["geometry", "name", "x", "y", "country"] +def path_provider(dir, rdir, shared_resources): + """ + Dynamically provide paths based on shared resources. + + Use this function whenever there is an input or output to a + snakemake rule that should, optionally, be either shared across runs + or created individually for each run. If shared_resources is a + string, it is assumed to be the wildcard that indicates the cutoff + after which resources are no longer shared. The function returns a + function which takes a filename and returns a path that is either + shared or individual to each run. 
+ """ + + def path(fn): + pattern = r"\{([^{}]+)\}" + existing_wildcards = list(re.findall(pattern, fn)) + if shared_resources == "base": + # special case for shared "base" resources + no_relevant_wildcards = not len(set(existing_wildcards) - {"technology"}) + no_elec_rule = not fn.startswith("networks/elec") and not fn.startswith( + "add_electricity" + ) + is_shared = no_relevant_wildcards and no_elec_rule + elif isinstance(shared_resources, str): + final_wildcard = shared_resources + is_shared = final_wildcard not in existing_wildcards[:-1] + else: + is_shared = shared_resources + + if is_shared: + return f"{dir}{fn}" + else: + return f"{dir}{rdir}{fn}" + + return path + + # Define a context manager to temporarily mute print statements @contextlib.contextmanager def mute_print(): @@ -31,15 +67,16 @@ def mute_print(): def set_scenario_config(snakemake): - if snakemake.config["run"]["scenarios"] and "run" in snakemake.wildcards.keys(): + scenario = snakemake.config["run"].get("scenario", {}) + if scenario.get("enable") and "run" in snakemake.wildcards.keys(): try: - with open(snakemake.config["scenariofile"], "r") as f: + with open(scenario["file"], "r") as f: scenario_config = yaml.safe_load(f) except FileNotFoundError: # fallback for mock_snakemake script_dir = Path(__file__).parent.resolve() root_dir = script_dir.parent - with open(root_dir / snakemake.config["scenariofile"], "r") as f: + with open(root_dir / scenario["file"], "r") as f: scenario_config = yaml.safe_load(f) update_config(snakemake.config, scenario_config[snakemake.wildcards.run]) From b91a7b9c514f6a380f7cea758325156f095cf8c7 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 24 Aug 2023 08:19:38 +0000 Subject: [PATCH 24/76] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/add_existing_baseyear.py | 7 +++++-- scripts/build_industrial_distribution_key.py | 3 ++- 2 files 
changed, 7 insertions(+), 3 deletions(-) diff --git a/scripts/add_existing_baseyear.py b/scripts/add_existing_baseyear.py index 269705a7..08810470 100644 --- a/scripts/add_existing_baseyear.py +++ b/scripts/add_existing_baseyear.py @@ -447,8 +447,11 @@ def add_heating_capacities_installed_before_baseyear( ) # if rural heating demand for one of the nodes doesn't exist, # then columns were dropped before and heating demand share should be 0.0 - if all(f"{node} {service} rural heat" in p_set_sum.index for service in ["residential", "services"]) - else 0. + if all( + f"{node} {service} rural heat" in p_set_sum.index + for service in ["residential", "services"] + ) + else 0.0 for node in nodal_df.index ], index=nodal_df.index, diff --git a/scripts/build_industrial_distribution_key.py b/scripts/build_industrial_distribution_key.py index c5cbd19c..e6d515b0 100644 --- a/scripts/build_industrial_distribution_key.py +++ b/scripts/build_industrial_distribution_key.py @@ -13,13 +13,14 @@ logger = logging.getLogger(__name__) import uuid from itertools import product +import country_converter as coco import geopandas as gpd import pandas as pd from packaging.version import Version, parse -import country_converter as coco cc = coco.CountryConverter() + def locate_missing_industrial_sites(df): """ Locate industrial sites without valid locations based on city and From 04a2bea176475c64efc15a85923996311c9af437 Mon Sep 17 00:00:00 2001 From: Fabian Date: Thu, 24 Aug 2023 13:17:44 +0200 Subject: [PATCH 25/76] scenario-management: use list for including wildcards with shared resource --- Snakefile | 6 +- config/test/config.scenarios.electricity.yaml | 2 +- doc/configtables/run.csv | 4 +- rules/common.smk | 2 +- scripts/_helpers.py | 96 ++++++++++++------- 5 files changed, 71 insertions(+), 39 deletions(-) diff --git a/Snakefile b/Snakefile index 55b90d5d..c9fdbb34 100644 --- a/Snakefile +++ b/Snakefile @@ -25,10 +25,10 @@ COSTS = f"data/costs_{config['costs']['year']}.csv" 
ATLITE_NPROCESSES = config["atlite"].get("nprocesses", 4) run = config["run"] -scenario = run.get("scenario", {}) +scenarios = run.get("scenarios", {}) if run["name"]: - if scenario.get("enable"): - fn = Path(scenario["file"]) + if scenarios.get("enable"): + fn = Path(scenarios["file"]) scenarios = yaml.safe_load(fn.read_text()) RDIR = "{run}/" else: diff --git a/config/test/config.scenarios.electricity.yaml b/config/test/config.scenarios.electricity.yaml index 63b1892b..185dcda4 100644 --- a/config/test/config.scenarios.electricity.yaml +++ b/config/test/config.scenarios.electricity.yaml @@ -8,7 +8,7 @@ run: name: - test-elec-no-offshore-wind - test-elec-no-onshore-wind - scenario: + scenarios: enable: true file: "config/test/scenarios.electricity.yaml" disable_progressbar: true diff --git a/doc/configtables/run.csv b/doc/configtables/run.csv index 3d8e4e8c..718867a6 100644 --- a/doc/configtables/run.csv +++ b/doc/configtables/run.csv @@ -1,8 +1,8 @@ ,Unit,Values,Description name,--,str/list,"Specify a name for your run. Results will be stored under this name. If ``scenario: enable`` is set to ``true``, the name must contain a subset of scenario names defined in ``scenario: file``." -scenario,,, +scenarios,,, -- enable,bool,"{true, false}","Switch to select whether workflow should generate scenarios based on ``file``." -- file,str,,Path to the scenario yaml file. The scenario file contains config overrides for each scenario. In order to be taken account, ``run:scenarios`` has to be set to ``true`` and ``run:name`` has to be a subset of top level keys given in the scenario file. In order to automatically create a `scenario.yaml` file based on a combindation of settings, alter and use the ``create_scenarios.py`` script in ``scripts``. disable_progrssbar,bool,"{true, false}","Switch to select whether progressbar should be disabled." -shared_resources,bool/str,,"Switch to select whether resources should be shared across runs. 
If a string is passed, it is assumed to be a wildcard or 'base' that indicates the cutoff after which resources are no longer shared. If 'base' is passed, resources before creating the elec.nc file are shared." +shared_resources,bool/str/list,,"Switch to select whether resources should be shared across runs. If a string or list is passed, it is assumed to be wildcard(s) which indicates up to which set of wildcards the resource folder should be shared. If set to 'base', only resources before creating the elec.nc file are shared." shared_cutouts,bool,"{true, false}","Switch to select whether cutouts should be shared across runs." diff --git a/rules/common.smk b/rules/common.smk index 0467d560..dab15c0d 100644 --- a/rules/common.smk +++ b/rules/common.smk @@ -58,7 +58,7 @@ def config_provider(*keys, default=None): my_param=config_provider("key1", "key2", default="some_default_value") """ # Using functools.partial to freeze certain arguments in our getter functions. - if config["run"].get("scenarios", False): + if config["run"].get("scenarios", {}).get("enable", False): return partial(dynamic_getter, keys=keys, default=default) else: return partial(static_getter, keys=keys, default=default) diff --git a/scripts/_helpers.py b/scripts/_helpers.py index c166c61c..ed46db77 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -8,6 +8,7 @@ import logging import os import re import urllib +from functools import partial from pathlib import Path import pandas as pd @@ -21,41 +22,72 @@ logger = logging.getLogger(__name__) REGION_COLS = ["geometry", "name", "x", "y", "country"] +def get_run_path(fn, dir, rdir, shared_resources): + """ + Dynamically provide paths based on shared resources and filename. + + Use this function for snakemake rule inputs or outputs that should be + optionally shared across runs or created individually for each run. + + Parameters + ---------- + fn : str + The filename for the path to be generated. + dir : str + The base directory. 
+ rdir : str + Relative directory for non-shared resources. + shared_resources : str, list, or bool + Specifies which resources should be shared. + - If string or list, assumed to be superset of wildcards for sharing. + - If "base", special handling for shared "base" resources. + - If boolean, directly specifies if the resource is shared. + + Returns + ------- + str + Full path where the resource should be stored. + + Notes + ----- + Special case for "base" allows no wildcards other than + "technology" and excludes filenames starting with "networks/elec" or + "add_electricity". + """ + pattern = r"\{([^{}]+)\}" + existing_wildcards = list(re.findall(pattern, fn)) + if shared_resources == "base": + # special case for shared "base" resources + no_relevant_wildcards = not len(set(existing_wildcards) - {"technology"}) + no_elec_rule = not fn.startswith("networks/elec") and not fn.startswith( + "add_electricity" + ) + is_shared = no_relevant_wildcards and no_elec_rule + elif isinstance(shared_resources, (str, list)): + if isinstance(shared_resources, str): + shared_resources = [shared_resources] + is_shared = set(existing_wildcards).issubset(shared_resources) + else: + is_shared = shared_resources + + if is_shared: + return f"{dir}{fn}" + else: + return f"{dir}{rdir}{fn}" + + def path_provider(dir, rdir, shared_resources): """ - Dynamically provide paths based on shared resources. + Returns a partial function that dynamically provides paths based on shared + resources and the filename. - Use this function whenever there is an input or output to a - snakemake rule that should, optionally, be either shared across runs - or created individually for each run. If shared_resources is a - string, it is assumed to be the wildcard that indicates the cutoff - after which resources are no longer shared. The function returns a - function which takes a filename and returns a path that is either - shared or individual to each run. 
+ Returns + ------- + partial function + A partial function that takes a filename as input and + returns the path to the file based on the shared_resources parameter. """ - - def path(fn): - pattern = r"\{([^{}]+)\}" - existing_wildcards = list(re.findall(pattern, fn)) - if shared_resources == "base": - # special case for shared "base" resources - no_relevant_wildcards = not len(set(existing_wildcards) - {"technology"}) - no_elec_rule = not fn.startswith("networks/elec") and not fn.startswith( - "add_electricity" - ) - is_shared = no_relevant_wildcards and no_elec_rule - elif isinstance(shared_resources, str): - final_wildcard = shared_resources - is_shared = final_wildcard not in existing_wildcards[:-1] - else: - is_shared = shared_resources - - if is_shared: - return f"{dir}{fn}" - else: - return f"{dir}{rdir}{fn}" - - return path + return partial(get_run_path, dir=dir, rdir=rdir, shared_resources=shared_resources) # Define a context manager to temporarily mute print statements @@ -67,7 +99,7 @@ def mute_print(): def set_scenario_config(snakemake): - scenario = snakemake.config["run"].get("scenario", {}) + scenario = snakemake.config["run"].get("scenarios", {}) if scenario.get("enable") and "run" in snakemake.wildcards.keys(): try: with open(scenario["file"], "r") as f: From c7e6d36014ef7e16aafadb0b3eaa8a69900e3240 Mon Sep 17 00:00:00 2001 From: Fabian Date: Thu, 24 Aug 2023 13:33:26 +0200 Subject: [PATCH 26/76] fix separators in run.csv --- doc/configtables/run.csv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/configtables/run.csv b/doc/configtables/run.csv index 718867a6..925c2dea 100644 --- a/doc/configtables/run.csv +++ b/doc/configtables/run.csv @@ -2,7 +2,7 @@ name,--,str/list,"Specify a name for your run. Results will be stored under this name. If ``scenario: enable`` is set to ``true``, the name must contain a subset of scenario names defined in ``scenario: file``." 
scenarios,,, -- enable,bool,"{true, false}","Switch to select whether workflow should generate scenarios based on ``file``." --- file,str,,Path to the scenario yaml file. The scenario file contains config overrides for each scenario. In order to be taken account, ``run:scenarios`` has to be set to ``true`` and ``run:name`` has to be a subset of top level keys given in the scenario file. In order to automatically create a `scenario.yaml` file based on a combindation of settings, alter and use the ``create_scenarios.py`` script in ``scripts``. +-- file,str,,"Path to the scenario yaml file. The scenario file contains config overrides for each scenario. In order to be taken into account, ``run:scenarios`` has to be set to ``true`` and ``run:name`` has to be a subset of top level keys given in the scenario file. In order to automatically create a `scenario.yaml` file based on a combination of settings, alter and use the ``create_scenarios.py`` script in ``scripts``." disable_progrssbar,bool,"{true, false}","Switch to select whether progressbar should be disabled." shared_resources,bool/str/list,,"Switch to select whether resources should be shared across runs. If a string or list is passed, it is assumed to be wildcard(s) which indicates up to which set of wildcards the resource folder should be shared. If set to 'base', only resources before creating the elec.nc file are shared." shared_cutouts,bool,"{true, false}","Switch to select whether cutouts should be shared across runs." 
From f96c2d05899bb8d51087223f54a384410e4579df Mon Sep 17 00:00:00 2001 From: Fabian Date: Fri, 22 Sep 2023 13:58:08 +0200 Subject: [PATCH 27/76] update solve smk files to properly consider scenarios --- rules/common.smk | 7 +++++-- rules/solve_electricity.smk | 16 ++++++++-------- rules/solve_myopic.smk | 36 ++++++++++++++++++---------------- rules/solve_overnight.smk | 14 +++++++------- scripts/solve_network.py | 8 ++++---- 5 files changed, 44 insertions(+), 37 deletions(-) diff --git a/rules/common.smk b/rules/common.smk index dab15c0d..c4b8e6d2 100644 --- a/rules/common.smk +++ b/rules/common.smk @@ -10,7 +10,10 @@ def get_config(config, keys, default=None): """Retrieve a nested value from a dictionary using a tuple of keys.""" value = config for key in keys: - value = value.get(key, default) + if isinstance(value, list): + value = value[key] + else: + value = value.get(key, default) if value == default: return default return value @@ -40,7 +43,7 @@ def static_getter(wildcards, keys, default): def dynamic_getter(wildcards, keys, default): """Getter function for dynamic config values based on scenario.""" - if "run" not in wildcards: + if "run" not in wildcards.keys(): return get_config(config, keys, default) scenario_name = wildcards.run if scenario_name not in scenarios: diff --git a/rules/solve_electricity.smk b/rules/solve_electricity.smk index cfdb1da0..424748e2 100644 --- a/rules/solve_electricity.smk +++ b/rules/solve_electricity.smk @@ -5,11 +5,11 @@ rule solve_network: params: - solving=config["solving"], - foresight=config["foresight"], - planning_horizons=config["scenario"]["planning_horizons"], - co2_sequestration_potential=config["sector"].get( - "co2_sequestration_potential", 200 + solving=config_provider("solving"), + foresight=config_provider("foresight"), + planning_horizons=config_provider("scenario", "planning_horizons"), + co2_sequestration_potential=config_provider( + "sector", "co2_sequestration_potential", default=200 ), input: 
network=resources("networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"), @@ -27,7 +27,7 @@ rule solve_network: threads: 4 resources: mem_mb=memory, - walltime=config["solving"].get("walltime", "12:00:00"), + walltime=config_provider("solving", "walltime", default="12:00:00"), shadow: "minimal" conda: @@ -38,7 +38,7 @@ rule solve_network: rule solve_operations_network: params: - options=config["solving"]["options"], + options=config_provider("solving", "options"), input: network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", output: @@ -58,7 +58,7 @@ rule solve_operations_network: threads: 4 resources: mem_mb=(lambda w: 10000 + 372 * int(w.clusters)), - walltime=config["solving"].get("walltime", "12:00:00"), + walltime=config_provider("solving", "walltime", default="12:00:00"), shadow: "minimal" conda: diff --git a/rules/solve_myopic.smk b/rules/solve_myopic.smk index 214733b7..7f851326 100644 --- a/rules/solve_myopic.smk +++ b/rules/solve_myopic.smk @@ -5,10 +5,10 @@ rule add_existing_baseyear: params: - baseyear=config["scenario"]["planning_horizons"][0], - sector=config["sector"], - existing_capacities=config["existing_capacities"], - costs=config["costs"], + baseyear=config_provider("scenario", "planning_horizons", 0), + sector=config_provider("sector"), + existing_capacities=config_provider("existing_capacities"), + costs=config_provider("costs"), input: network=RESULTS + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", @@ -16,7 +16,9 @@ rule add_existing_baseyear: busmap_s=resources("busmap_elec_s{simpl}.csv"), busmap=resources("busmap_elec_s{simpl}_{clusters}.csv"), clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), - costs="data/costs_{}.csv".format(config["scenario"]["planning_horizons"][0]), + costs=lambda w: "data/costs_{}.csv".format( + config_provider("scenario", "planning_horizons", 0)(w) + ), cop_soil_total=resources("cop_soil_total_elec_s{simpl}_{clusters}.nc"), 
cop_air_total=resources("cop_air_total_elec_s{simpl}_{clusters}.nc"), existing_heating="data/existing_infrastructure/existing_heating_raw.csv", @@ -27,7 +29,7 @@ rule add_existing_baseyear: RESULTS + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", wildcard_constraints: - planning_horizons=config["scenario"]["planning_horizons"][0], #only applies to baseyear + planning_horizons=config_provider("scenario", "planning_horizons", 0), #only applies to baseyear threads: 1 resources: mem_mb=2000, @@ -47,9 +49,11 @@ rule add_brownfield: params: - H2_retrofit=config["sector"]["H2_retrofit"], - H2_retrofit_capacity_per_CH4=config["sector"]["H2_retrofit_capacity_per_CH4"], - threshold_capacity=config["existing_capacities"]["threshold_capacity"], + H2_retrofit=config_provider("sector", "H2_retrofit"), + H2_retrofit_capacity_per_CH4=config_provider( + "sector", "H2_retrofit_capacity_per_CH4" + ), + threshold_capacity=config_provider("existing_capacities", "threshold_capacity"), input: network=RESULTS + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", @@ -82,11 +86,11 @@ ruleorder: add_existing_baseyear > add_brownfield rule solve_sector_network_myopic: params: - solving=config["solving"], - foresight=config["foresight"], - planning_horizons=config["scenario"]["planning_horizons"], - co2_sequestration_potential=config["sector"].get( - "co2_sequestration_potential", 200 + solving=config_provider("solving"), + foresight=config_provider("foresight"), + planning_horizons=config_provider("scenario", "planning_horizons"), + co2_sequestration_potential=config_provider( + "sector", "co2_sequestration_potential", default=200 ), input: network=RESULTS @@ -105,8 +109,8 @@ rule solve_sector_network_myopic: + "elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_python.log", threads: 4 resources: - mem_mb=config["solving"]["mem"], - 
walltime=config["solving"].get("walltime", "12:00:00"), + mem_mb=config_provider("solving", "mem"), + walltime=config_provider("solving", "walltime", default="12:00:00"), benchmark: ( BENCHMARKS diff --git a/rules/solve_overnight.smk b/rules/solve_overnight.smk index d8476868..8f2ff139 100644 --- a/rules/solve_overnight.smk +++ b/rules/solve_overnight.smk @@ -5,11 +5,11 @@ rule solve_sector_network: params: - solving=config["solving"], - foresight=config["foresight"], - planning_horizons=config["scenario"]["planning_horizons"], - co2_sequestration_potential=config["sector"].get( - "co2_sequestration_potential", 200 + solving=config_provider("solving"), + foresight=config_provider("foresight"), + planning_horizons=config_provider("scenario", "planning_horizons"), + co2_sequestration_potential=config_provider( + "sector", "co2_sequestration_potential", default=200 ), input: network=RESULTS @@ -27,8 +27,8 @@ rule solve_sector_network: + "elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_python.log", threads: config["solving"]["solver"].get("threads", 4) resources: - mem_mb=config["solving"]["mem"], - walltime=config["solving"].get("walltime", "12:00:00"), + mem_mb=config_provider("solving", "mem"), + walltime=config_provider("solving", "walltime", default="12:00:00"), benchmark: ( BENCHMARKS diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 1e1e738b..bf860054 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -137,10 +137,10 @@ def add_co2_sequestration_limit(n, limit=200): n.add( "GlobalConstraint", "co2_sequestration_limit", - sense="<=", - constant=limit, - type="primary_energy", - carrier_attribute="co2_absorptions", + sense=">=", + constant=-limit, + type="operational_limit", + carrier_attribute="co2 sequestered", ) From e838b63ded1eb2084964639c132b24c91f1bc123 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sat, 10 Feb 2024 16:22:36 
+0000 Subject: [PATCH 28/76] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- rules/build_electricity.smk | 27 ++++++++++------- rules/build_sector.smk | 42 ++++++++++++++++++--------- rules/collect.smk | 18 ++++++------ rules/postprocess.smk | 10 +++---- rules/solve_myopic.smk | 6 ++-- scripts/cluster_network.py | 2 +- scripts/prepare_sector_network.py | 2 +- scripts/retrieve_sector_databundle.py | 2 +- scripts/solve_network.py | 2 +- 9 files changed, 67 insertions(+), 44 deletions(-) diff --git a/rules/build_electricity.smk b/rules/build_electricity.smk index fd925d1f..4c6650bc 100644 --- a/rules/build_electricity.smk +++ b/rules/build_electricity.smk @@ -20,7 +20,7 @@ if config["enable"].get("prepare_links_p_nom", False): rule build_electricity_demand: params: - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config provider + snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config provider countries=config_provider("countries"), load=config_provider("load"), input: @@ -62,7 +62,7 @@ rule build_powerplants: rule base_network: params: countries=config_provider("countries"), - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config provider + snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config provider lines=config_provider("lines"), links=config_provider("links"), transformers=config_provider("transformers"), @@ -145,7 +145,7 @@ if config["enable"].get("build_cutout", False): rule build_cutout: params: - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config provider + snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config provider cutouts=config_provider("atlite", "cutouts"), input: regions_onshore=resources("regions_onshore.geojson"), @@ -259,7 +259,7 @@ 
else: rule build_renewable_profiles: params: - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config provider + snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config provider renewable=config_provider("renewable"), input: **opt, @@ -401,18 +401,22 @@ rule add_electricity: if str(fn).startswith("data/") }, base_network=resources("networks/base.nc"), - line_rating=resources("networks/line_rating.nc") - if config["lines"]["dynamic_line_rating"]["activate"] - else resources("networks/base.nc"), + line_rating=( + resources("networks/line_rating.nc") + if config["lines"]["dynamic_line_rating"]["activate"] + else resources("networks/base.nc") + ), tech_costs=COSTS, regions=resources("regions_onshore.geojson"), powerplants=resources("powerplants.csv"), hydro_capacities=ancient("data/bundle/hydro_capacities.csv"), geth_hydro_capacities="data/geth2015_hydro_capacities.csv", unit_commitment="data/unit_commitment.csv", - fuel_price=resources("monthly_fuel_price.csv") - if config["conventional"]["dynamic_fuel_price"] - else [], + fuel_price=( + resources("monthly_fuel_price.csv") + if config["conventional"]["dynamic_fuel_price"] + else [] + ), load=resources("load.csv"), nuts3_shapes=resources("nuts3_shapes.geojson"), ua_md_gdp="data/GDP_PPP_30arcsec_v3_mapped_default.csv", @@ -540,7 +544,8 @@ rule prepare_network: snapshots={ "resolution": config["snapshots"].get("resolution", False), "segmentation": config["snapshots"].get("segmentation", False), - }, # TODO: use config provider + }, + # TODO: use config provider links=config_provider("links"), lines=config_provider("lines"), co2base=config_provider("electricity", "co2base"), diff --git a/rules/build_sector.smk b/rules/build_sector.smk index f0081d32..aea16519 100644 --- a/rules/build_sector.smk +++ b/rules/build_sector.smk @@ -94,7 +94,9 @@ rule build_gas_input_locations: 
regions_offshore=resources("regions_offshore_elec_s{simpl}_{clusters}.geojson"), output: gas_input_nodes=resources("gas_input_locations_s{simpl}_{clusters}.geojson"), - gas_input_nodes_simplified=resources("gas_input_locations_s{simpl}_{clusters}_simplified.csv"), + gas_input_nodes_simplified=resources( + "gas_input_locations_s{simpl}_{clusters}_simplified.csv" + ), resources: mem_mb=2000, log: @@ -124,7 +126,7 @@ rule cluster_gas_network: rule build_daily_heat_demand: params: - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config_provider + snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config_provider input: pop_layout=resources("pop_layout_{scope}.nc"), regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), @@ -167,7 +169,7 @@ rule build_hourly_heat_demand: rule build_temperature_profiles: params: - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config_provider + snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config_provider input: pop_layout=resources("pop_layout_{scope}.nc"), regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), @@ -219,7 +221,7 @@ rule build_cop_profiles: rule build_solar_thermal_profiles: params: - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO use config_provider + snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO use config_provider solar_thermal=config_provider("solar_thermal"), input: pop_layout=resources("pop_layout_{scope}.nc"), @@ -287,7 +289,9 @@ rule build_biomass_potentials: biomass_potentials_all=resources( "biomass_potentials_all_s{simpl}_{clusters}_{planning_horizons}.csv" ), - biomass_potentials=resources("biomass_potentials_s{simpl}_{clusters}_{planning_horizons}.csv"), + biomass_potentials=resources( + 
"biomass_potentials_s{simpl}_{clusters}_{planning_horizons}.csv" + ), threads: 1 resources: mem_mb=1000, @@ -707,7 +711,7 @@ rule build_shipping_demand: rule build_transport_demand: params: - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config_provider + snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config_provider sector=config_provider("sector"), input: clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), @@ -814,7 +818,9 @@ rule prepare_sector_network: network=resources("networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"), energy_totals_name=resources("energy_totals.csv"), eurostat=input_eurostat, - pop_weighted_energy_totals=resources("pop_weighted_energy_totals_s{simpl}_{clusters}.csv"), + pop_weighted_energy_totals=resources( + "pop_weighted_energy_totals_s{simpl}_{clusters}.csv" + ), shipping_demand=resources("shipping_demand_s{simpl}_{clusters}.csv"), transport_demand=resources("transport_demand_s{simpl}_{clusters}.csv"), transport_data=resources("transport_data_s{simpl}_{clusters}.csv"), @@ -823,10 +829,14 @@ rule prepare_sector_network: co2_totals_name=resources("co2_totals.csv"), co2="data/bundle-sector/eea/UNFCCC_v23.csv", biomass_potentials=( - resources("biomass_potentials_s{simpl}_{clusters}_" - + "{}.csv".format(config["biomass"]["year"])) + resources( + "biomass_potentials_s{simpl}_{clusters}_" + + "{}.csv".format(config["biomass"]["year"]) + ) if config["foresight"] == "overnight" - else resources("biomass_potentials_s{simpl}_{clusters}_{planning_horizons}.csv") + else resources( + "biomass_potentials_s{simpl}_{clusters}_{planning_horizons}.csv" + ) ), costs=( "data/costs_{}.csv".format(config["costs"]["year"]) @@ -840,9 +850,15 @@ rule prepare_sector_network: busmap=resources("busmap_elec_s{simpl}_{clusters}.csv"), clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), 
simplified_pop_layout=resources("pop_layout_elec_s{simpl}.csv"), - industrial_demand=resources("industrial_energy_demand_elec_s{simpl}_{clusters}_{planning_horizons}.csv"), - hourly_heat_demand_total=resources("hourly_heat_demand_total_elec_s{simpl}_{clusters}.nc"), - district_heat_share=resources("district_heat_share_elec_s{simpl}_{clusters}_{planning_horizons}.csv"), + industrial_demand=resources( + "industrial_energy_demand_elec_s{simpl}_{clusters}_{planning_horizons}.csv" + ), + hourly_heat_demand_total=resources( + "hourly_heat_demand_total_elec_s{simpl}_{clusters}.nc" + ), + district_heat_share=resources( + "district_heat_share_elec_s{simpl}_{clusters}_{planning_horizons}.csv" + ), temp_soil_total=resources("temp_soil_total_elec_s{simpl}_{clusters}.nc"), temp_soil_rural=resources("temp_soil_rural_elec_s{simpl}_{clusters}.nc"), temp_soil_urban=resources("temp_soil_urban_elec_s{simpl}_{clusters}.nc"), diff --git a/rules/collect.smk b/rules/collect.smk index 0fb19165..8a451d7a 100644 --- a/rules/collect.smk +++ b/rules/collect.smk @@ -24,7 +24,7 @@ rule cluster_networks: expand( resources("networks/elec_s{simpl}_{clusters}.nc"), **config["scenario"], - run=config["run"]["name"] + run=config["run"]["name"], ), @@ -33,7 +33,7 @@ rule extra_components_networks: expand( resources("networks/elec_s{simpl}_{clusters}_ec.nc"), **config["scenario"], - run=config["run"]["name"] + run=config["run"]["name"], ), @@ -42,7 +42,7 @@ rule prepare_elec_networks: expand( resources("networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"), **config["scenario"], - run=config["run"]["name"] + run=config["run"]["name"], ), @@ -52,7 +52,7 @@ rule prepare_sector_networks: RESULTS + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", **config["scenario"], - run=config["run"]["name"] + run=config["run"]["name"], ), @@ -61,7 +61,7 @@ rule solve_elec_networks: expand( RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", **config["scenario"], 
- run=config["run"]["name"] + run=config["run"]["name"], ), @@ -71,7 +71,7 @@ rule solve_sector_networks: RESULTS + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", **config["scenario"], - run=config["run"]["name"] + run=config["run"]["name"], ), @@ -81,7 +81,7 @@ rule solve_sector_networks_perfect: RESULTS + "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf", **config["scenario"], - run=config["run"]["name"] + run=config["run"]["name"], ), @@ -91,12 +91,12 @@ rule validate_elec_networks: RESULTS + "figures/.statistics_plots_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}", **config["scenario"], - run=config["run"]["name"] + run=config["run"]["name"], ), expand( RESULTS + "figures/.validation_{kind}_plots_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}", **config["scenario"], run=config["run"]["name"], - kind=["production", "prices", "cross_border"] + kind=["production", "prices", "cross_border"], ), diff --git a/rules/postprocess.smk b/rules/postprocess.smk index 19dc34be..7e269688 100644 --- a/rules/postprocess.smk +++ b/rules/postprocess.smk @@ -162,7 +162,7 @@ rule make_summary: params: foresight=config_provider("foresight"), costs=config_provider("costs"), - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config_provider + snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config_provider scenario=config_provider("scenario"), RDIR=RDIR, input: @@ -174,7 +174,7 @@ rule make_summary: RESULTS + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", **config["scenario"], - run=config["run"]["name"] + run=config["run"]["name"], ), costs=( "data/costs_{}.csv".format(config["costs"]["year"]) @@ -189,7 +189,7 @@ rule make_summary: RESULTS + "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf", **config["scenario"], - run=config["run"]["name"] + 
run=config["run"]["name"], ), h2_plot=expand( ( @@ -199,7 +199,7 @@ rule make_summary: else [] ), **config["scenario"], - run=config["run"]["name"] + run=config["run"]["name"], ), ch4_plot=expand( ( @@ -209,7 +209,7 @@ rule make_summary: else [] ), **config["scenario"], - run=config["run"]["name"] + run=config["run"]["name"], ), output: nodal_costs=RESULTS + "csvs/nodal_costs.csv", diff --git a/rules/solve_myopic.smk b/rules/solve_myopic.smk index fac58456..7035f1c1 100644 --- a/rules/solve_myopic.smk +++ b/rules/solve_myopic.smk @@ -21,7 +21,9 @@ rule add_existing_baseyear: ), cop_soil_total=resources("cop_soil_total_elec_s{simpl}_{clusters}.nc"), cop_air_total=resources("cop_air_total_elec_s{simpl}_{clusters}.nc"), - existing_heating_distribution=resources("existing_heating_distribution_elec_s{simpl}_{clusters}_{planning_horizons}.csv"), + existing_heating_distribution=resources( + "existing_heating_distribution_elec_s{simpl}_{clusters}_{planning_horizons}.csv" + ), existing_solar="data/existing_infrastructure/solar_capacity_IRENA.csv", existing_onwind="data/existing_infrastructure/onwind_capacity_IRENA.csv", existing_offwind="data/existing_infrastructure/offwind_capacity_IRENA.csv", @@ -54,7 +56,7 @@ rule add_brownfield: "sector", "H2_retrofit_capacity_per_CH4" ), threshold_capacity=config_provider("existing_capacities", " threshold_capacity"), - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config_provider + snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config_provider carriers=config_provider("electricity", "renewable_carriers"), input: **{ diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 44b83e99..bd00a258 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -133,7 +133,7 @@ import numpy as np import pandas as pd import pypsa import seaborn as sns -from _helpers import configure_logging, update_p_nom_max, set_scenario_config 
+from _helpers import configure_logging, set_scenario_config, update_p_nom_max from add_electricity import load_costs from packaging.version import Version, parse from pypsa.clustering.spatial import ( diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 06aea9ec..b1161c19 100755 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -18,7 +18,7 @@ import numpy as np import pandas as pd import pypsa import xarray as xr -from _helpers import update_config_with_sector_opts, set_scenario_config +from _helpers import set_scenario_config, update_config_with_sector_opts from add_electricity import calculate_annuity, sanitize_carriers, sanitize_locations from build_energy_totals import build_co2_totals, build_eea_co2, build_eurostat_co2 from networkx.algorithms import complement diff --git a/scripts/retrieve_sector_databundle.py b/scripts/retrieve_sector_databundle.py index defc806c..f95986b4 100644 --- a/scripts/retrieve_sector_databundle.py +++ b/scripts/retrieve_sector_databundle.py @@ -13,8 +13,8 @@ from pathlib import Path from _helpers import ( configure_logging, progress_retrieve, - validate_checksum, set_scenario_config, + validate_checksum, ) logger = logging.getLogger(__name__) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 5ca0ec82..b828bbd2 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -40,8 +40,8 @@ from _benchmark import memory_logger from _helpers import ( configure_logging, get_opt, - update_config_with_sector_opts, set_scenario_config, + update_config_with_sector_opts, ) from pypsa.descriptors import get_activity_mask from pypsa.descriptors import get_switchable_as_dense as get_as_dense From a9dad3f34ece29af49481738b1d0b6ddd43d67d1 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sat, 10 Feb 2024 18:09:23 +0100 Subject: [PATCH 29/76] add new resources(), logs(), benchmarks(), config_providers() --- Snakefile | 2 +- 
rules/build_electricity.smk | 63 +++++++++++++------------- rules/build_sector.smk | 90 ++++++++++++++++--------------------- rules/collect.smk | 6 --- rules/common.smk | 10 ++--- rules/postprocess.smk | 68 ++++++++++------------------ rules/retrieve.smk | 4 +- rules/solve_electricity.smk | 18 +++----- rules/solve_myopic.smk | 37 +++++---------- rules/solve_overnight.smk | 5 +-- rules/solve_perfect.smk | 84 +++++++++++++++------------------- rules/validate.smk | 14 +++--- 12 files changed, 163 insertions(+), 238 deletions(-) diff --git a/Snakefile b/Snakefile index e0949ed9..7df61162 100644 --- a/Snakefile +++ b/Snakefile @@ -91,7 +91,7 @@ if config["foresight"] == "perfect": rule all: input: - RESULTS + "graphs/costs.pdf", + expand(RESULTS + "graphs/costs.pdf", run=config["run"]["name"]), default_target: True diff --git a/rules/build_electricity.smk b/rules/build_electricity.smk index 4c6650bc..951d3331 100644 --- a/rules/build_electricity.smk +++ b/rules/build_electricity.smk @@ -20,7 +20,7 @@ if config["enable"].get("prepare_links_p_nom", False): rule build_electricity_demand: params: - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config provider + snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, countries=config_provider("countries"), load=config_provider("load"), input: @@ -62,7 +62,7 @@ rule build_powerplants: rule base_network: params: countries=config_provider("countries"), - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config provider + snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, lines=config_provider("lines"), links=config_provider("links"), transformers=config_provider("transformers"), @@ -145,7 +145,7 @@ if config["enable"].get("build_cutout", False): rule build_cutout: params: - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config provider + 
snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, cutouts=config_provider("atlite", "cutouts"), input: regions_onshore=resources("regions_onshore.geojson"), @@ -170,7 +170,7 @@ if config["enable"].get("build_natura_raster", False): rule build_natura_raster: input: natura=ancient("data/bundle/natura/Natura2000_end2015.shp"), - cutouts=expand("cutouts/" + CDIR + "{cutouts}.nc", **config["atlite"]), + cutouts=expand("cutouts/" + CDIR + "{cutouts}.nc", **config_provider("atlite")), output: resources("natura.tiff"), resources: @@ -189,8 +189,8 @@ rule build_ship_raster: cutouts=expand( "cutouts/" + CDIR + "{cutout}.nc", cutout=[ - config["renewable"][k]["cutout"] - for k in config["electricity"]["renewable_carriers"] + config_provider("renewable", k, "cutout") + for k in config_provider("electricity", "renewable_carriers") ], ), output: @@ -214,30 +214,30 @@ rule determine_availability_matrix_MD_UA: wdpa_marine="data/WDPA_WDOECM_marine.gpkg", gebco=lambda w: ( "data/bundle/GEBCO_2014_2D.nc" - if "max_depth" in config["renewable"][w.technology].keys() + if config_provider("renewable", w.technology)(w).get("max_depth") else [] ), ship_density=lambda w: ( - RESOURCES + "shipdensity_raster.tif" - if "ship_threshold" in config["renewable"][w.technology].keys() + resources("shipdensity_raster.tif") + if "ship_threshold" in config_provider("renewable", w.technology)(w).keys() else [] ), - country_shapes=RESOURCES + "country_shapes.geojson", - offshore_shapes=RESOURCES + "offshore_shapes.geojson", + country_shapes=resources("country_shapes.geojson"), + offshore_shapes=resources("offshore_shapes.geojson"), regions=lambda w: ( - RESOURCES + "regions_onshore.geojson" + resources("regions_onshore.geojson") if w.technology in ("onwind", "solar") - else RESOURCES + "regions_offshore.geojson" + else resources("regions_offshore.geojson") ), cutout=lambda w: "cutouts/" + CDIR - + config["renewable"][w.technology]["cutout"] + + 
config_provider("renewable", w.technology, "cutout")(w) + ".nc", output: - availability_matrix=RESOURCES + "availability_matrix_MD-UA_{technology}.nc", - availability_map=RESOURCES + "availability_matrix_MD-UA_{technology}.png", + availability_matrix=resources("availability_matrix_MD-UA_{technology}.nc"), + availability_map=resources("availability_matrix_MD-UA_{technology}.png"), log: - LOGS + "determine_availability_matrix_MD_UA_{technology}.log", + logs("determine_availability_matrix_MD_UA_{technology}.log"), threads: ATLITE_NPROCESSES resources: mem_mb=ATLITE_NPROCESSES * 5000, @@ -250,8 +250,7 @@ rule determine_availability_matrix_MD_UA: # Optional input when having Ukraine (UA) or Moldova (MD) in the countries list if {"UA", "MD"}.intersection(set(config["countries"])): opt = { - "availability_matrix_MD_UA": RESOURCES - + "availability_matrix_MD-UA_{technology}.nc" + "availability_matrix_MD_UA": resources("availability_matrix_MD-UA_{technology}.nc") } else: opt = {} @@ -259,7 +258,7 @@ else: rule build_renewable_profiles: params: - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config provider + snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, renewable=config_provider("renewable"), input: **opt, @@ -272,7 +271,7 @@ rule build_renewable_profiles: ), luisa=lambda w: ( "data/LUISA_basemap_020321_50m.tif" - if config["renewable"][w.technology].get("luisa") + if config_provider("renewable", w.technology, "luisa")(w) else [] ), gebco=ancient( @@ -340,7 +339,7 @@ rule build_hydro_profile: input: country_shapes=resources("country_shapes.geojson"), eia_hydro_generation="data/eia_hydro_annual_generation.csv", - cutout=f"cutouts/" + CDIR + config["renewable"]["hydro"]["cutout"] + ".nc", + cutout=f"cutouts/" + CDIR + config_provider("renewable", "hydro", "cutout") + ".nc", output: resources("profile_hydro.nc"), log: @@ -357,12 +356,12 @@ if config["lines"]["dynamic_line_rating"]["activate"]: 
rule build_line_rating: params: - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, + snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, input: base_network=resources("networks/base.nc"), cutout="cutouts/" + CDIR - + config["lines"]["dynamic_line_rating"]["cutout"] + + config_provider("lines", "dynamic_line_rating", "cutout") + ".nc", output: output=resources("networks/line_rating.nc"), @@ -391,19 +390,19 @@ rule add_electricity: input: **{ f"profile_{tech}": resources(f"profile_{tech}.nc") - for tech in config["electricity"]["renewable_carriers"] + for tech in config_provider("electricity", "renewable_carriers") }, **{ f"conventional_{carrier}_{attr}": fn for carrier, d in config.get("conventional", {None: {}}).items() - if carrier in config["electricity"]["conventional_carriers"] + if carrier in config_provider("electricity", "conventional_carriers") for attr, fn in d.items() if str(fn).startswith("data/") }, base_network=resources("networks/base.nc"), line_rating=( resources("networks/line_rating.nc") - if config["lines"]["dynamic_line_rating"]["activate"] + if config_provider("lines", "dynamic_line_rating", "activate") else resources("networks/base.nc") ), tech_costs=COSTS, @@ -414,7 +413,7 @@ rule add_electricity: unit_commitment="data/unit_commitment.csv", fuel_price=( resources("monthly_fuel_price.csv") - if config["conventional"]["dynamic_fuel_price"] + if config_provider("conventional", "dynamic_fuel_price") else [] ), load=resources("load.csv"), @@ -493,7 +492,7 @@ rule cluster_network: busmap=ancient(resources("busmap_elec_s{simpl}.csv")), custom_busmap=( "data/custom_busmap_elec_s{simpl}_{clusters}.csv" - if config["enable"].get("custom_busmap", False) + if config_provider("enable", "custom_busmap", default=False) else [] ), tech_costs=COSTS, @@ -542,10 +541,10 @@ rule add_extra_components: rule prepare_network: params: snapshots={ - "resolution": config["snapshots"].get("resolution", 
False), - "segmentation": config["snapshots"].get("segmentation", False), + "resolution": config_provider("snapshots", "resolution", default=False), + "segmentation": config_provider("snapshots", "segmentation", default=False), }, - # TODO: use config provider + links=config_provider("links"), lines=config_provider("lines"), co2base=config_provider("electricity", "co2base"), diff --git a/rules/build_sector.smk b/rules/build_sector.smk index aea16519..a26efaf4 100644 --- a/rules/build_sector.smk +++ b/rules/build_sector.smk @@ -7,7 +7,7 @@ rule build_population_layouts: input: nuts3_shapes=resources("nuts3_shapes.geojson"), urban_percent="data/urban_percent.csv", - cutout="cutouts/" + CDIR + config["atlite"]["default_cutout"] + ".nc", + cutout="cutouts/" + CDIR + config_provider("atlite", "default_cutout") + ".nc", output: pop_layout_total=resources("pop_layout_total.nc"), pop_layout_urban=resources("pop_layout_urban.nc"), @@ -31,7 +31,7 @@ rule build_clustered_population_layouts: pop_layout_urban=resources("pop_layout_urban.nc"), pop_layout_rural=resources("pop_layout_rural.nc"), regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), - cutout="cutouts/" + CDIR + config["atlite"]["default_cutout"] + ".nc", + cutout="cutouts/" + CDIR + config_provider("atlite", "default_cutout") + ".nc", output: clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), log: @@ -52,7 +52,7 @@ rule build_simplified_population_layouts: pop_layout_urban=resources("pop_layout_urban.nc"), pop_layout_rural=resources("pop_layout_rural.nc"), regions_onshore=resources("regions_onshore_elec_s{simpl}.geojson"), - cutout="cutouts/" + CDIR + config["atlite"]["default_cutout"] + ".nc", + cutout="cutouts/" + CDIR + config_provider("atlite", "default_cutout") + ".nc", output: clustered_pop_layout=resources("pop_layout_elec_s{simpl}.csv"), resources: @@ -126,11 +126,11 @@ rule cluster_gas_network: rule build_daily_heat_demand: params: - snapshots={k: 
config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config_provider + snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, input: pop_layout=resources("pop_layout_{scope}.nc"), regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), - cutout="cutouts/" + CDIR + config["atlite"]["default_cutout"] + ".nc", + cutout="cutouts/" + CDIR + config_provider("atlite", "default_cutout") + ".nc", output: heat_demand=resources("daily_heat_demand_{scope}_elec_s{simpl}_{clusters}.nc"), resources: @@ -148,19 +148,19 @@ rule build_daily_heat_demand: rule build_hourly_heat_demand: params: - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, + snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, input: heat_profile="data/heat_load_profile_BDEW.csv", - heat_demand=RESOURCES + "daily_heat_demand_{scope}_elec_s{simpl}_{clusters}.nc", + heat_demand=resources("daily_heat_demand_{scope}_elec_s{simpl}_{clusters}.nc"), output: - heat_demand=RESOURCES + "hourly_heat_demand_{scope}_elec_s{simpl}_{clusters}.nc", + heat_demand=resources("hourly_heat_demand_{scope}_elec_s{simpl}_{clusters}.nc"), resources: mem_mb=2000, threads: 8 log: - LOGS + "build_hourly_heat_demand_{scope}_{simpl}_{clusters}.loc", + logs("build_hourly_heat_demand_{scope}_{simpl}_{clusters}.loc"), benchmark: - BENCHMARKS + "build_hourly_heat_demand/{scope}_s{simpl}_{clusters}" + benchmarks("build_hourly_heat_demand/{scope}_s{simpl}_{clusters}") conda: "../envs/environment.yaml" script: @@ -169,11 +169,11 @@ rule build_hourly_heat_demand: rule build_temperature_profiles: params: - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config_provider + snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, input: pop_layout=resources("pop_layout_{scope}.nc"), 
regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), - cutout="cutouts/" + CDIR + config["atlite"]["default_cutout"] + ".nc", + cutout="cutouts/" + CDIR + config_provider("atlite", "default_cutout") + ".nc", output: temp_soil=resources("temp_soil_{scope}_elec_s{simpl}_{clusters}.nc"), temp_air=resources("temp_air_{scope}_elec_s{simpl}_{clusters}.nc"), @@ -221,12 +221,12 @@ rule build_cop_profiles: rule build_solar_thermal_profiles: params: - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO use config_provider + snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, # TODO use config_provider solar_thermal=config_provider("solar_thermal"), input: pop_layout=resources("pop_layout_{scope}.nc"), regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), - cutout="cutouts/" + CDIR + config["atlite"]["default_cutout"] + ".nc", + cutout="cutouts/" + CDIR + config_provider("atlite", "default_cutout") + ".nc", output: solar_thermal=resources("solar_thermal_{scope}_elec_s{simpl}_{clusters}.nc"), resources: @@ -711,7 +711,7 @@ rule build_shipping_demand: rule build_transport_demand: params: - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config_provider + snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, sector=config_provider("sector"), input: clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), @@ -740,18 +740,17 @@ rule build_transport_demand: rule build_district_heat_share: params: - sector=config["sector"], + sector=config_provider("sector"), input: - district_heat_share=RESOURCES + "district_heat_share.csv", - clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv", + district_heat_share=resources("district_heat_share.csv"), + clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), output: - 
district_heat_share=RESOURCES - + "district_heat_share_elec_s{simpl}_{clusters}_{planning_horizons}.csv", + district_heat_share=resources("district_heat_share_elec_s{simpl}_{clusters}_{planning_horizons}.csv"), threads: 1 resources: mem_mb=1000, log: - LOGS + "build_district_heat_share_s{simpl}_{clusters}_{planning_horizons}.log", + logs("build_district_heat_share_s{simpl}_{clusters}_{planning_horizons}.log"), conda: "../envs/environment.yaml" script: @@ -760,32 +759,25 @@ rule build_district_heat_share: rule build_existing_heating_distribution: params: - baseyear=config["scenario"]["planning_horizons"][0], - sector=config["sector"], - existing_capacities=config["existing_capacities"], + baseyear=config_provider("scenario", "planning_horizons", 0), + sector=config_provider("sector"), + existing_capacities=config_provider("existing_capacities"), input: existing_heating="data/existing_infrastructure/existing_heating_raw.csv", - clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv", - clustered_pop_energy_layout=RESOURCES - + "pop_weighted_energy_totals_s{simpl}_{clusters}.csv", - district_heat_share=RESOURCES - + "district_heat_share_elec_s{simpl}_{clusters}_{planning_horizons}.csv", + clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), + clustered_pop_energy_layout=resources("pop_weighted_energy_totals_s{simpl}_{clusters}.csv"), + district_heat_share=resources("district_heat_share_elec_s{simpl}_{clusters}_{planning_horizons}.csv"), output: - existing_heating_distribution=RESOURCES - + "existing_heating_distribution_elec_s{simpl}_{clusters}_{planning_horizons}.csv", + existing_heating_distribution=resources("existing_heating_distribution_elec_s{simpl}_{clusters}_{planning_horizons}.csv"), wildcard_constraints: - planning_horizons=config["scenario"]["planning_horizons"][0], #only applies to baseyear + planning_horizons=config_provider("scenario", "planning_horizons", 0), #only applies to baseyear threads: 1 resources: 
mem_mb=2000, log: - LOGS - + "build_existing_heating_distribution_elec_s{simpl}_{clusters}_{planning_horizons}.log", + logs("build_existing_heating_distribution_elec_s{simpl}_{clusters}_{planning_horizons}.log"), benchmark: - ( - BENCHMARKS - + "build_existing_heating_distribution/elec_s{simpl}_{clusters}_{planning_horizons}" - ) + benchmarks("build_existing_heating_distribution/elec_s{simpl}_{clusters}_{planning_horizons}") conda: "../envs/environment.yaml" script: @@ -831,16 +823,16 @@ rule prepare_sector_network: biomass_potentials=( resources( "biomass_potentials_s{simpl}_{clusters}_" - + "{}.csv".format(config["biomass"]["year"]) + + "{}.csv".format(config_provider("biomass", "year")) ) - if config["foresight"] == "overnight" + if config_provider("foresight") == "overnight" else resources( "biomass_potentials_s{simpl}_{clusters}_{planning_horizons}.csv" ) ), costs=( - "data/costs_{}.csv".format(config["costs"]["year"]) - if config["foresight"] == "overnight" + "data/costs_{}.csv".format(config_provider("costs", "year")) + if config_provider("foresight") == "overnight" else "data/costs_{planning_horizons}.csv" ), profile_offwind_ac=resources("profile_offwind-ac.nc"), @@ -873,17 +865,17 @@ rule prepare_sector_network: cop_air_urban=resources("cop_air_urban_elec_s{simpl}_{clusters}.nc"), solar_thermal_total=( resources("solar_thermal_total_elec_s{simpl}_{clusters}.nc") - if config["sector"]["solar_thermal"] + if config_provider("sector", "solar_thermal") else [] ), solar_thermal_urban=( resources("solar_thermal_urban_elec_s{simpl}_{clusters}.nc") - if config["sector"]["solar_thermal"] + if config_provider("sector", "solar_thermal") else [] ), solar_thermal_rural=( resources("solar_thermal_rural_elec_s{simpl}_{clusters}.nc") - if config["sector"]["solar_thermal"] + if config_provider("sector", "solar_thermal") else [] ), output: @@ -893,13 +885,9 @@ rule prepare_sector_network: resources: mem_mb=2000, log: - LOGS - + 
"prepare_sector_network_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log", + logs("prepare_sector_network_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log"), benchmark: - ( - BENCHMARKS - + "prepare_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" - ) + benchmarks("prepare_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}") conda: "../envs/environment.yaml" script: diff --git a/rules/collect.smk b/rules/collect.smk index 8a451d7a..9be12f25 100644 --- a/rules/collect.smk +++ b/rules/collect.smk @@ -13,12 +13,6 @@ localrules: solve_sector_networks, -rule all: - input: - expand(RESULTS + "graphs/costs.pdf", run=config["run"]["name"]), - default_target: True - - rule cluster_networks: input: expand( diff --git a/rules/common.smk b/rules/common.smk index b6c0f734..af991be7 100644 --- a/rules/common.smk +++ b/rules/common.smk @@ -78,8 +78,8 @@ def config_provider(*keys, default=None): def solver_threads(w): - solver_options = config["solving"]["solver_options"] - option_set = config["solving"]["solver"]["options"] + solver_options = config_provider("solving", "solver_options") + option_set = config_provider("solving", "solver", "options") threads = solver_options[option_set].get("threads", 4) return threads @@ -105,7 +105,7 @@ def memory(w): def input_custom_extra_functionality(w): - path = config["solving"]["options"].get("custom_extra_functionality", False) + path = config_provider("solving", "options", "custom_extra_functionality", default=False) if path: return os.path.join(os.path.dirname(workflow.snakefile), path) return [] @@ -129,12 +129,12 @@ def has_internet_access(url="www.zenodo.org") -> bool: def input_eurostat(w): # 2016 includes BA, 2017 does not - report_year = config["energy"]["eurostat_report_year"] + report_year = config_provider("energy", "eurostat_report_year") return 
f"data/bundle-sector/eurostat-energy_balances-june_{report_year}_edition" def solved_previous_horizon(wildcards): - planning_horizons = config["scenario"]["planning_horizons"] + planning_horizons = config_provider("scenario", "planning_horizons") i = planning_horizons.index(int(wildcards.planning_horizons)) planning_horizon_p = str(planning_horizons[i - 1]) return ( diff --git a/rules/postprocess.smk b/rules/postprocess.smk index 7e269688..98399cde 100644 --- a/rules/postprocess.smk +++ b/rules/postprocess.smk @@ -13,16 +13,15 @@ if config_provider("foresight") != "perfect": params: plotting=config_provider("plotting"), input: - network=RESOURCES + "networks/elec_s{simpl}_{clusters}.nc", - regions_onshore=RESOURCES - + "regions_onshore_elec_s{simpl}_{clusters}.geojson", + network=resources("networks/elec_s{simpl}_{clusters}.nc"), + regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), output: map=RESULTS + "maps/power-network-s{simpl}-{clusters}.pdf", threads: 1 resources: mem_mb=4000, benchmark: - BENCHMARKS + "plot_power_network_clustered/elec_s{simpl}_{clusters}" + benchmarks("plot_power_network_clustered/elec_s{simpl}_{clusters}") conda: "../envs/environment.yaml" script: @@ -34,7 +33,7 @@ if config_provider("foresight") != "perfect": input: network=RESULTS + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", - regions=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", + regions=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), output: map=RESULTS + "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf", @@ -42,15 +41,9 @@ if config_provider("foresight") != "perfect": resources: mem_mb=10000, log: - ( - LOGS - + "plot_power_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log" - ), + logs("plot_power_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log") benchmark: - ( - 
BENCHMARKS - + "plot_power_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" - ) + benchmarks("plot_power_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}") conda: "../envs/environment.yaml" script: @@ -63,7 +56,7 @@ if config_provider("foresight") != "perfect": input: network=RESULTS + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", - regions=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", + regions=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), output: map=RESULTS + "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-h2_network_{planning_horizons}.pdf", @@ -71,15 +64,9 @@ if config_provider("foresight") != "perfect": resources: mem_mb=10000, log: - ( - LOGS - + "plot_hydrogen_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log" - ), + logs("plot_hydrogen_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log"), benchmark: - ( - BENCHMARKS - + "plot_hydrogen_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" - ) + benchmarks("plot_hydrogen_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}") conda: "../envs/environment.yaml" script: @@ -91,7 +78,7 @@ if config_provider("foresight") != "perfect": input: network=RESULTS + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", - regions=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", + regions=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), output: map=RESULTS + "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-ch4_network_{planning_horizons}.pdf", @@ -99,15 +86,9 @@ if config_provider("foresight") != "perfect": resources: mem_mb=10000, log: - ( - LOGS - + "plot_gas_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log" - ), + 
logs("plot_gas_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log"), benchmark: - ( - BENCHMARKS - + "plot_gas_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" - ) + benchmarks("plot_gas_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}") conda: "../envs/environment.yaml" script: @@ -122,7 +103,7 @@ if config_provider("foresight") == "perfect": input: network=RESULTS + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc", - regions=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", + regions=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), output: **{ f"map_{year}": RESULTS @@ -134,8 +115,7 @@ if config_provider("foresight") == "perfect": resources: mem_mb=10000, benchmark: - BENCHMARKS - +"postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years_benchmark" + benchmarks("postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years_benchmark") conda: "../envs/environment.yaml" script: @@ -151,7 +131,7 @@ rule copy_config: resources: mem_mb=1000, benchmark: - BENCHMARKS + "copy_config" + benchmarks("copy_config") conda: "../envs/environment.yaml" script: @@ -162,7 +142,7 @@ rule make_summary: params: foresight=config_provider("foresight"), costs=config_provider("costs"), - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config_provider + snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, scenario=config_provider("scenario"), RDIR=RDIR, input: @@ -177,9 +157,9 @@ rule make_summary: run=config["run"]["name"], ), costs=( - "data/costs_{}.csv".format(config["costs"]["year"]) + "data/costs_{}.csv".format(config_provider("costs", "year")) if config_provider("foresight") == "overnight" - else "data/costs_{}.csv".format(config["scenario"]["planning_horizons"][0]) + else 
"data/costs_{}.csv".format(config_provider("scenario", "planning_horizons", 0)) ), ac_plot=expand( RESULTS + "maps/power-network-s{simpl}-{clusters}.pdf", @@ -195,7 +175,7 @@ rule make_summary: ( RESULTS + "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-h2_network_{planning_horizons}.pdf" - if config["sector"]["H2_network"] + if config_provider("sector", "H2_network") else [] ), **config["scenario"], @@ -205,7 +185,7 @@ rule make_summary: ( RESULTS + "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-ch4_network_{planning_horizons}.pdf" - if config["sector"]["gas_network"] + if config_provider("sector", "gas_network") else [] ), **config["scenario"], @@ -231,9 +211,9 @@ rule make_summary: resources: mem_mb=10000, log: - LOGS + "make_summary.log", + logs("make_summary.log"), benchmark: - BENCHMARKS + "make_summary" + benchmarks("make_summary") conda: "../envs/environment.yaml" script: @@ -263,9 +243,9 @@ rule plot_summary: resources: mem_mb=10000, log: - LOGS + "plot_summary.log", + logs("plot_summary.log"), benchmark: - BENCHMARKS + "plot_summary" + benchmarks("plot_summary") conda: "../envs/environment.yaml" script: diff --git a/rules/retrieve.smk b/rules/retrieve.smk index 46741830..a0647e57 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -50,7 +50,7 @@ if config["enable"].get("retrieve_irena"): onwind="data/existing_infrastructure/onwind_capacity_IRENA.csv", solar="data/existing_infrastructure/solar_capacity_IRENA.csv", log: - LOGS + "retrieve_irena.log", + logs("retrieve_irena.log"), resources: mem_mb=1000, retries: 2 @@ -86,7 +86,7 @@ if config["enable"]["retrieve"] and config["enable"].get("retrieve_cost_data", T input: HTTP.remote( "raw.githubusercontent.com/PyPSA/technology-data/{}/outputs/".format( - config["costs"]["version"] + config_provider("costs", "version") ) + "costs_{year}.csv", keep_local=True, diff --git a/rules/solve_electricity.smk b/rules/solve_electricity.smk index fc8e8cea..4ff94bf2 100644 --- 
a/rules/solve_electricity.smk +++ b/rules/solve_electricity.smk @@ -19,12 +19,11 @@ rule solve_network: network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", log: solver=normpath( - LOGS + "solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_solver.log" + logs("solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_solver.log") ), - python=LOGS - + "solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_python.log", + python=logs("solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_python.log"), benchmark: - BENCHMARKS + "solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}" + benchmarks("solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}") threads: solver_threads resources: mem_mb=memory, @@ -46,16 +45,11 @@ rule solve_operations_network: network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op.nc", log: solver=normpath( - LOGS - + "solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_solver.log" + logs("solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_solver.log") ), - python=LOGS - + "solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_python.log", + python=logs("solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_python.log"), benchmark: - ( - BENCHMARKS - + "solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}" - ) + benchmarks("solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}") threads: 4 resources: mem_mb=(lambda w: 10000 + 372 * int(w.clusters)), diff --git a/rules/solve_myopic.smk b/rules/solve_myopic.smk index 7035f1c1..260837b7 100644 --- a/rules/solve_myopic.smk +++ b/rules/solve_myopic.smk @@ -36,13 +36,9 @@ rule add_existing_baseyear: resources: mem_mb=2000, log: - LOGS - + "add_existing_baseyear_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log", + logs("add_existing_baseyear_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log"), benchmark: - ( - BENCHMARKS 
- + "add_existing_baseyear/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" - ) + benchmarks("add_existing_baseyear/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}") conda: "../envs/environment.yaml" script: @@ -56,16 +52,16 @@ rule add_brownfield: "sector", "H2_retrofit_capacity_per_CH4" ), threshold_capacity=config_provider("existing_capacities", " threshold_capacity"), - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, # TODO: use config_provider + snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, carriers=config_provider("electricity", "renewable_carriers"), input: **{ - f"profile_{tech}": RESOURCES + f"profile_{tech}.nc" - for tech in config["electricity"]["renewable_carriers"] + f"profile_{tech}": resources(f"profile_{tech}.nc") + for tech in config_provider("electricity", "renewable_carriers") if tech != "hydro" }, - simplify_busmap=RESOURCES + "busmap_elec_s{simpl}.csv", - cluster_busmap=RESOURCES + "busmap_elec_s{simpl}_{clusters}.csv", + simplify_busmap=resources("busmap_elec_s{simpl}.csv"), + cluster_busmap=resources("busmap_elec_s{simpl}_{clusters}.csv"), network=RESULTS + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", network_p=solved_previous_horizon, #solved network at previous time step @@ -79,13 +75,9 @@ rule add_brownfield: resources: mem_mb=10000, log: - LOGS - + "add_brownfield_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log", + logs("add_brownfield_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log"), benchmark: - ( - BENCHMARKS - + "add_brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" - ) + benchmarks("add_brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}") conda: "../envs/environment.yaml" script: @@ -115,19 +107,14 @@ rule solve_sector_network_myopic: shadow: "shallow" 
log: - solver=LOGS - + "elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_solver.log", - python=LOGS - + "elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_python.log", + solver=logs("elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_solver.log"), + python=logs("elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_python.log"), threads: solver_threads resources: mem_mb=config_provider("solving", "mem"), walltime=config_provider("solving", "walltime", default="12:00:00"), benchmark: - ( - BENCHMARKS - + "solve_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" - ) + benchmarks("solve_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}") conda: "../envs/environment.yaml" script: diff --git a/rules/solve_overnight.smk b/rules/solve_overnight.smk index 76621012..7811efe3 100644 --- a/rules/solve_overnight.smk +++ b/rules/solve_overnight.smk @@ -33,10 +33,7 @@ rule solve_sector_network: mem_mb=config_provider("solving", "mem"), walltime=config_provider("solving", "walltime", default="12:00:00"), benchmark: - ( - BENCHMARKS - + "solve_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" - ) + benchmarks("solve_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}") conda: "../envs/environment.yaml" script: diff --git a/rules/solve_perfect.smk b/rules/solve_perfect.smk index 9e164a16..85f87d9b 100644 --- a/rules/solve_perfect.smk +++ b/rules/solve_perfect.smk @@ -3,22 +3,21 @@ # SPDX-License-Identifier: MIT rule add_existing_baseyear: params: - baseyear=config["scenario"]["planning_horizons"][0], - sector=config["sector"], - existing_capacities=config["existing_capacities"], - costs=config["costs"], + baseyear=config_provider("scenario", "planning_horizons", 0), + sector=config_provider("sector"), + 
existing_capacities=config_provider("existing_capacities"), + costs=config_provider("costs"), input: network=RESULTS + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", - powerplants=RESOURCES + "powerplants.csv", - busmap_s=RESOURCES + "busmap_elec_s{simpl}.csv", - busmap=RESOURCES + "busmap_elec_s{simpl}_{clusters}.csv", - clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv", - costs="data/costs_{}.csv".format(config["scenario"]["planning_horizons"][0]), - cop_soil_total=RESOURCES + "cop_soil_total_elec_s{simpl}_{clusters}.nc", - cop_air_total=RESOURCES + "cop_air_total_elec_s{simpl}_{clusters}.nc", - existing_heating_distribution=RESOURCES - + "existing_heating_distribution_elec_s{simpl}_{clusters}_{planning_horizons}.csv", + powerplants=resources("powerplants.csv"), + busmap_s=resources("busmap_elec_s{simpl}.csv"), + busmap=resources("busmap_elec_s{simpl}_{clusters}.csv"), + clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), + costs="data/costs_{}.csv".format(config_provider("scenario", "planning_horizons", 0)), + cop_soil_total=resources("cop_soil_total_elec_s{simpl}_{clusters}.nc"), + cop_air_total=resources("cop_air_total_elec_s{simpl}_{clusters}.nc"), + existing_heating_distribution=resources("existing_heating_distribution_elec_s{simpl}_{clusters}_{planning_horizons}.csv"), existing_heating="data/existing_infrastructure/existing_heating_raw.csv", existing_solar="data/existing_infrastructure/solar_capacity_IRENA.csv", existing_onwind="data/existing_infrastructure/onwind_capacity_IRENA.csv", @@ -27,18 +26,14 @@ rule add_existing_baseyear: RESULTS + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", wildcard_constraints: - planning_horizons=config["scenario"]["planning_horizons"][0], #only applies to baseyear + planning_horizons=config_provider("scenario", "planning_horizons", 0), #only applies to baseyear threads: 1 
resources: mem_mb=2000, log: - LOGS - + "add_existing_baseyear_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log", + logs("add_existing_baseyear_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log"), benchmark: - ( - BENCHMARKS - + "add_existing_baseyear/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" - ) + benchmarks("add_existing_baseyear/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}") conda: "../envs/environment.yaml" script: @@ -51,13 +46,13 @@ rule prepare_perfect_foresight: f"network_{year}": RESULTS + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_" + f"{year}.nc" - for year in config["scenario"]["planning_horizons"][1:] + for year in config_provider("scenario", "planning_horizons")[1:] }, brownfield_network=lambda w: ( RESULTS + "prenetworks-brownfield/" + "elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_" - + "{}.nc".format(str(config["scenario"]["planning_horizons"][0])) + + "{}.nc".format(str(config_provider("scenario", "planning_horizons", 0))) ), output: RESULTS @@ -66,13 +61,9 @@ rule prepare_perfect_foresight: resources: mem_mb=10000, log: - LOGS - + "prepare_perfect_foresight{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}.log", + logs("prepare_perfect_foresight{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}.log"), benchmark: - ( - BENCHMARKS - + "prepare_perfect_foresight{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}" - ) + benchmarks("prepare_perfect_foresight{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}") conda: "../envs/environment.yaml" script: @@ -81,13 +72,11 @@ rule prepare_perfect_foresight: rule solve_sector_network_perfect: params: - solving=config["solving"], - foresight=config["foresight"], - sector=config["sector"], - planning_horizons=config["scenario"]["planning_horizons"], - co2_sequestration_potential=config["sector"].get( - "co2_sequestration_potential", 200 - ), + solving=config_provider("solving"), + 
foresight=config_provider("foresight"), + sector=config_provider("sector"), + planning_horizons=config_provider("scenario", "planning_horizons"), + co2_sequestration_potential=config_provider("sector", "co2_sequestration_potential", 200), custom_extra_functionality=input_custom_extra_functionality, input: network=RESULTS @@ -99,7 +88,7 @@ rule solve_sector_network_perfect: + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc", threads: solver_threads resources: - mem_mb=config["solving"]["mem"], + mem_mb=config_provider("solving", "mem"), shadow: "shallow" log: @@ -110,10 +99,7 @@ rule solve_sector_network_perfect: memory=RESULTS + "logs/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years_memory.log", benchmark: - ( - BENCHMARKS - + "solve_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years}" - ) + benchmarks("solve_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years}") conda: "../envs/environment.yaml" script: @@ -124,13 +110,13 @@ rule make_summary_perfect: input: **{ f"networks_{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}": RESULTS - + f"postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc" - for simpl in config["scenario"]["simpl"] - for clusters in config["scenario"]["clusters"] - for opts in config["scenario"]["opts"] - for sector_opts in config["scenario"]["sector_opts"] - for ll in config["scenario"]["ll"] - }, + + f"postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc" + for simpl in config_provider("scenario", "simpl") + for clusters in config_provider("scenario", "clusters") + for opts in config_provider("scenario", "opts") + for sector_opts in config_provider("scenario", "sector_opts") + for ll in config_provider("scenario", "ll") + }, costs="data/costs_2020.csv", output: nodal_costs=RESULTS + "csvs/nodal_costs.csv", @@ -153,9 +139,9 @@ rule 
make_summary_perfect: resources: mem_mb=10000, log: - LOGS + "make_summary_perfect.log", + logs("make_summary_perfect.log"), benchmark: - (BENCHMARKS + "make_summary_perfect") + benchmarks("make_summary_perfect") conda: "../envs/environment.yaml" script: diff --git a/rules/validate.smk b/rules/validate.smk index fefb6ba6..f8ebea5d 100644 --- a/rules/validate.smk +++ b/rules/validate.smk @@ -17,8 +17,8 @@ rule build_electricity_production: The data is used for validation of the optimization results. """ params: - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, - countries=config["countries"], + snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, + countries=config_provider("countries"), output: resources("historical_electricity_production.csv"), log: @@ -35,8 +35,8 @@ rule build_cross_border_flows: The data is used for validation of the optimization results. """ params: - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, - countries=config["countries"], + snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, + countries=config_provider("countries"), input: network=resources("networks/base.nc"), output: @@ -55,8 +55,8 @@ rule build_electricity_prices: The data is used for validation of the optimization results. 
""" params: - snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]}, - countries=config["countries"], + snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, + countries=config_provider("countries"), output: resources("historical_electricity_prices.csv"), log: @@ -85,7 +85,7 @@ rule plot_validation_electricity_production: rule plot_validation_cross_border_flows: params: - countries=config["countries"], + countries=config_provider("countries"), input: network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", cross_border_flows=resources("historical_cross_border_flows.csv"), From 4ff06046fc72b316a7fbc01ad8295d2546db322f Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sat, 10 Feb 2024 17:09:46 +0000 Subject: [PATCH 30/76] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- rules/build_electricity.smk | 40 +++++++++++++++++++++------- rules/build_sector.smk | 52 +++++++++++++++++++++++++++---------- rules/common.smk | 4 ++- rules/postprocess.smk | 40 +++++++++++++++++++++------- rules/solve_electricity.smk | 8 ++++-- rules/solve_myopic.smk | 32 +++++++++++++++++------ rules/solve_overnight.smk | 4 ++- rules/solve_perfect.smk | 46 +++++++++++++++++++++----------- rules/validate.smk | 12 ++++++--- 9 files changed, 175 insertions(+), 63 deletions(-) diff --git a/rules/build_electricity.smk b/rules/build_electricity.smk index 951d3331..bdae24d1 100644 --- a/rules/build_electricity.smk +++ b/rules/build_electricity.smk @@ -20,7 +20,9 @@ if config["enable"].get("prepare_links_p_nom", False): rule build_electricity_demand: params: - snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, + snapshots={ + k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + }, countries=config_provider("countries"), load=config_provider("load"), input: @@ 
-62,7 +64,9 @@ rule build_powerplants: rule base_network: params: countries=config_provider("countries"), - snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, + snapshots={ + k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + }, lines=config_provider("lines"), links=config_provider("links"), transformers=config_provider("transformers"), @@ -145,7 +149,10 @@ if config["enable"].get("build_cutout", False): rule build_cutout: params: - snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, + snapshots={ + k: config_provider("snapshots", k) + for k in ["start", "end", "inclusive"] + }, cutouts=config_provider("atlite", "cutouts"), input: regions_onshore=resources("regions_onshore.geojson"), @@ -170,7 +177,9 @@ if config["enable"].get("build_natura_raster", False): rule build_natura_raster: input: natura=ancient("data/bundle/natura/Natura2000_end2015.shp"), - cutouts=expand("cutouts/" + CDIR + "{cutouts}.nc", **config_provider("atlite")), + cutouts=expand( + "cutouts/" + CDIR + "{cutouts}.nc", **config_provider("atlite") + ), output: resources("natura.tiff"), resources: @@ -250,7 +259,9 @@ rule determine_availability_matrix_MD_UA: # Optional input when having Ukraine (UA) or Moldova (MD) in the countries list if {"UA", "MD"}.intersection(set(config["countries"])): opt = { - "availability_matrix_MD_UA": resources("availability_matrix_MD-UA_{technology}.nc") + "availability_matrix_MD_UA": resources( + "availability_matrix_MD-UA_{technology}.nc" + ) } else: opt = {} @@ -258,7 +269,9 @@ else: rule build_renewable_profiles: params: - snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, + snapshots={ + k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + }, renewable=config_provider("renewable"), input: **opt, @@ -339,7 +352,10 @@ rule build_hydro_profile: input: country_shapes=resources("country_shapes.geojson"), 
eia_hydro_generation="data/eia_hydro_annual_generation.csv", - cutout=f"cutouts/" + CDIR + config_provider("renewable", "hydro", "cutout") + ".nc", + cutout=f"cutouts/" + + CDIR + + config_provider("renewable", "hydro", "cutout") + + ".nc", output: resources("profile_hydro.nc"), log: @@ -356,7 +372,10 @@ if config["lines"]["dynamic_line_rating"]["activate"]: rule build_line_rating: params: - snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, + snapshots={ + k: config_provider("snapshots", k) + for k in ["start", "end", "inclusive"] + }, input: base_network=resources("networks/base.nc"), cutout="cutouts/" @@ -542,9 +561,10 @@ rule prepare_network: params: snapshots={ "resolution": config_provider("snapshots", "resolution", default=False), - "segmentation": config_provider("snapshots", "segmentation", default=False), + "segmentation": config_provider( + "snapshots", "segmentation", default=False + ), }, - links=config_provider("links"), lines=config_provider("lines"), co2base=config_provider("electricity", "co2base"), diff --git a/rules/build_sector.smk b/rules/build_sector.smk index a26efaf4..268113f9 100644 --- a/rules/build_sector.smk +++ b/rules/build_sector.smk @@ -126,7 +126,9 @@ rule cluster_gas_network: rule build_daily_heat_demand: params: - snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, + snapshots={ + k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + }, input: pop_layout=resources("pop_layout_{scope}.nc"), regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), @@ -148,7 +150,9 @@ rule build_daily_heat_demand: rule build_hourly_heat_demand: params: - snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, + snapshots={ + k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + }, input: heat_profile="data/heat_load_profile_BDEW.csv", 
heat_demand=resources("daily_heat_demand_{scope}_elec_s{simpl}_{clusters}.nc"), @@ -169,7 +173,9 @@ rule build_hourly_heat_demand: rule build_temperature_profiles: params: - snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, + snapshots={ + k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + }, input: pop_layout=resources("pop_layout_{scope}.nc"), regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), @@ -221,7 +227,9 @@ rule build_cop_profiles: rule build_solar_thermal_profiles: params: - snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, # TODO use config_provider + snapshots={ + k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + }, # TODO use config_provider solar_thermal=config_provider("solar_thermal"), input: pop_layout=resources("pop_layout_{scope}.nc"), @@ -711,7 +719,9 @@ rule build_shipping_demand: rule build_transport_demand: params: - snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, + snapshots={ + k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + }, sector=config_provider("sector"), input: clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), @@ -745,7 +755,9 @@ rule build_district_heat_share: district_heat_share=resources("district_heat_share.csv"), clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), output: - district_heat_share=resources("district_heat_share_elec_s{simpl}_{clusters}_{planning_horizons}.csv"), + district_heat_share=resources( + "district_heat_share_elec_s{simpl}_{clusters}_{planning_horizons}.csv" + ), threads: 1 resources: mem_mb=1000, @@ -765,19 +777,29 @@ rule build_existing_heating_distribution: input: existing_heating="data/existing_infrastructure/existing_heating_raw.csv", clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), - 
clustered_pop_energy_layout=resources("pop_weighted_energy_totals_s{simpl}_{clusters}.csv"), - district_heat_share=resources("district_heat_share_elec_s{simpl}_{clusters}_{planning_horizons}.csv"), + clustered_pop_energy_layout=resources( + "pop_weighted_energy_totals_s{simpl}_{clusters}.csv" + ), + district_heat_share=resources( + "district_heat_share_elec_s{simpl}_{clusters}_{planning_horizons}.csv" + ), output: - existing_heating_distribution=resources("existing_heating_distribution_elec_s{simpl}_{clusters}_{planning_horizons}.csv"), + existing_heating_distribution=resources( + "existing_heating_distribution_elec_s{simpl}_{clusters}_{planning_horizons}.csv" + ), wildcard_constraints: planning_horizons=config_provider("scenario", "planning_horizons", 0), #only applies to baseyear threads: 1 resources: mem_mb=2000, log: - logs("build_existing_heating_distribution_elec_s{simpl}_{clusters}_{planning_horizons}.log"), + logs( + "build_existing_heating_distribution_elec_s{simpl}_{clusters}_{planning_horizons}.log" + ), benchmark: - benchmarks("build_existing_heating_distribution/elec_s{simpl}_{clusters}_{planning_horizons}") + benchmarks( + "build_existing_heating_distribution/elec_s{simpl}_{clusters}_{planning_horizons}" + ) conda: "../envs/environment.yaml" script: @@ -885,9 +907,13 @@ rule prepare_sector_network: resources: mem_mb=2000, log: - logs("prepare_sector_network_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log"), + logs( + "prepare_sector_network_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log" + ), benchmark: - benchmarks("prepare_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}") + benchmarks( + "prepare_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" + ) conda: "../envs/environment.yaml" script: diff --git a/rules/common.smk b/rules/common.smk index af991be7..bf5e0894 100644 --- a/rules/common.smk +++ b/rules/common.smk @@ 
-105,7 +105,9 @@ def memory(w): def input_custom_extra_functionality(w): - path = config_provider("solving", "options", "custom_extra_functionality", default=False) + path = config_provider( + "solving", "options", "custom_extra_functionality", default=False + ) if path: return os.path.join(os.path.dirname(workflow.snakefile), path) return [] diff --git a/rules/postprocess.smk b/rules/postprocess.smk index 98399cde..cf0ef6cd 100644 --- a/rules/postprocess.smk +++ b/rules/postprocess.smk @@ -14,7 +14,9 @@ if config_provider("foresight") != "perfect": plotting=config_provider("plotting"), input: network=resources("networks/elec_s{simpl}_{clusters}.nc"), - regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), + regions_onshore=resources( + "regions_onshore_elec_s{simpl}_{clusters}.geojson" + ), output: map=RESULTS + "maps/power-network-s{simpl}-{clusters}.pdf", threads: 1 @@ -41,9 +43,13 @@ if config_provider("foresight") != "perfect": resources: mem_mb=10000, log: - logs("plot_power_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log") + logs( + "plot_power_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log" + ), benchmark: - benchmarks("plot_power_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}") + benchmarks( + "plot_power_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" + ) conda: "../envs/environment.yaml" script: @@ -64,9 +70,13 @@ if config_provider("foresight") != "perfect": resources: mem_mb=10000, log: - logs("plot_hydrogen_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log"), + logs( + "plot_hydrogen_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log" + ), benchmark: - benchmarks("plot_hydrogen_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}") + benchmarks( + 
"plot_hydrogen_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" + ) conda: "../envs/environment.yaml" script: @@ -86,9 +96,13 @@ if config_provider("foresight") != "perfect": resources: mem_mb=10000, log: - logs("plot_gas_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log"), + logs( + "plot_gas_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log" + ), benchmark: - benchmarks("plot_gas_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}") + benchmarks( + "plot_gas_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" + ) conda: "../envs/environment.yaml" script: @@ -115,7 +129,9 @@ if config_provider("foresight") == "perfect": resources: mem_mb=10000, benchmark: - benchmarks("postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years_benchmark") + benchmarks( + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years_benchmark" + ) conda: "../envs/environment.yaml" script: @@ -142,7 +158,9 @@ rule make_summary: params: foresight=config_provider("foresight"), costs=config_provider("costs"), - snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, + snapshots={ + k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + }, scenario=config_provider("scenario"), RDIR=RDIR, input: @@ -159,7 +177,9 @@ rule make_summary: costs=( "data/costs_{}.csv".format(config_provider("costs", "year")) if config_provider("foresight") == "overnight" - else "data/costs_{}.csv".format(config_provider("scenario", "planning_horizons", 0)) + else "data/costs_{}.csv".format( + config_provider("scenario", "planning_horizons", 0) + ) ), ac_plot=expand( RESULTS + "maps/power-network-s{simpl}-{clusters}.pdf", diff --git a/rules/solve_electricity.smk b/rules/solve_electricity.smk index 4ff94bf2..d3aa8d4c 100644 --- a/rules/solve_electricity.smk 
+++ b/rules/solve_electricity.smk @@ -45,9 +45,13 @@ rule solve_operations_network: network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op.nc", log: solver=normpath( - logs("solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_solver.log") + logs( + "solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_solver.log" + ) + ), + python=logs( + "solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_python.log" ), - python=logs("solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_python.log"), benchmark: benchmarks("solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}") threads: 4 diff --git a/rules/solve_myopic.smk b/rules/solve_myopic.smk index 260837b7..a6313cac 100644 --- a/rules/solve_myopic.smk +++ b/rules/solve_myopic.smk @@ -36,9 +36,13 @@ rule add_existing_baseyear: resources: mem_mb=2000, log: - logs("add_existing_baseyear_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log"), + logs( + "add_existing_baseyear_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log" + ), benchmark: - benchmarks("add_existing_baseyear/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}") + benchmarks( + "add_existing_baseyear/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" + ) conda: "../envs/environment.yaml" script: @@ -52,7 +56,9 @@ rule add_brownfield: "sector", "H2_retrofit_capacity_per_CH4" ), threshold_capacity=config_provider("existing_capacities", " threshold_capacity"), - snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, + snapshots={ + k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + }, carriers=config_provider("electricity", "renewable_carriers"), input: **{ @@ -75,9 +81,13 @@ rule add_brownfield: resources: mem_mb=10000, log: - 
logs("add_brownfield_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log"), + logs( + "add_brownfield_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log" + ), benchmark: - benchmarks("add_brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}") + benchmarks( + "add_brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" + ) conda: "../envs/environment.yaml" script: @@ -107,14 +117,20 @@ rule solve_sector_network_myopic: shadow: "shallow" log: - solver=logs("elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_solver.log"), - python=logs("elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_python.log"), + solver=logs( + "elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_solver.log" + ), + python=logs( + "elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_python.log" + ), threads: solver_threads resources: mem_mb=config_provider("solving", "mem"), walltime=config_provider("solving", "walltime", default="12:00:00"), benchmark: - benchmarks("solve_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}") + benchmarks( + "solve_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" + ) conda: "../envs/environment.yaml" script: diff --git a/rules/solve_overnight.smk b/rules/solve_overnight.smk index 7811efe3..64ad007c 100644 --- a/rules/solve_overnight.smk +++ b/rules/solve_overnight.smk @@ -33,7 +33,9 @@ rule solve_sector_network: mem_mb=config_provider("solving", "mem"), walltime=config_provider("solving", "walltime", default="12:00:00"), benchmark: - benchmarks("solve_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}") + benchmarks( + "solve_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" + ) conda: "../envs/environment.yaml" script: diff --git 
a/rules/solve_perfect.smk b/rules/solve_perfect.smk index 85f87d9b..ee16a201 100644 --- a/rules/solve_perfect.smk +++ b/rules/solve_perfect.smk @@ -14,10 +14,14 @@ rule add_existing_baseyear: busmap_s=resources("busmap_elec_s{simpl}.csv"), busmap=resources("busmap_elec_s{simpl}_{clusters}.csv"), clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), - costs="data/costs_{}.csv".format(config_provider("scenario", "planning_horizons", 0)), + costs="data/costs_{}.csv".format( + config_provider("scenario", "planning_horizons", 0) + ), cop_soil_total=resources("cop_soil_total_elec_s{simpl}_{clusters}.nc"), cop_air_total=resources("cop_air_total_elec_s{simpl}_{clusters}.nc"), - existing_heating_distribution=resources("existing_heating_distribution_elec_s{simpl}_{clusters}_{planning_horizons}.csv"), + existing_heating_distribution=resources( + "existing_heating_distribution_elec_s{simpl}_{clusters}_{planning_horizons}.csv" + ), existing_heating="data/existing_infrastructure/existing_heating_raw.csv", existing_solar="data/existing_infrastructure/solar_capacity_IRENA.csv", existing_onwind="data/existing_infrastructure/onwind_capacity_IRENA.csv", @@ -31,9 +35,13 @@ rule add_existing_baseyear: resources: mem_mb=2000, log: - logs("add_existing_baseyear_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log"), + logs( + "add_existing_baseyear_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log" + ), benchmark: - benchmarks("add_existing_baseyear/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}") + benchmarks( + "add_existing_baseyear/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" + ) conda: "../envs/environment.yaml" script: @@ -61,9 +69,13 @@ rule prepare_perfect_foresight: resources: mem_mb=10000, log: - logs("prepare_perfect_foresight{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}.log"), + logs( + 
"prepare_perfect_foresight{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}.log" + ), benchmark: - benchmarks("prepare_perfect_foresight{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}") + benchmarks( + "prepare_perfect_foresight{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}" + ) conda: "../envs/environment.yaml" script: @@ -76,7 +88,9 @@ rule solve_sector_network_perfect: foresight=config_provider("foresight"), sector=config_provider("sector"), planning_horizons=config_provider("scenario", "planning_horizons"), - co2_sequestration_potential=config_provider("sector", "co2_sequestration_potential", 200), + co2_sequestration_potential=config_provider( + "sector", "co2_sequestration_potential", 200 + ), custom_extra_functionality=input_custom_extra_functionality, input: network=RESULTS @@ -99,7 +113,9 @@ rule solve_sector_network_perfect: memory=RESULTS + "logs/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years_memory.log", benchmark: - benchmarks("solve_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years}") + benchmarks( + "solve_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years}" + ) conda: "../envs/environment.yaml" script: @@ -110,13 +126,13 @@ rule make_summary_perfect: input: **{ f"networks_{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}": RESULTS - + f"postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc" - for simpl in config_provider("scenario", "simpl") - for clusters in config_provider("scenario", "clusters") - for opts in config_provider("scenario", "opts") - for sector_opts in config_provider("scenario", "sector_opts") - for ll in config_provider("scenario", "ll") - }, + + f"postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc" + for simpl in config_provider("scenario", "simpl") + for clusters in config_provider("scenario", "clusters") + for opts in config_provider("scenario", "opts") + for 
sector_opts in config_provider("scenario", "sector_opts") + for ll in config_provider("scenario", "ll") + }, costs="data/costs_2020.csv", output: nodal_costs=RESULTS + "csvs/nodal_costs.csv", diff --git a/rules/validate.smk b/rules/validate.smk index f8ebea5d..3c42c5f0 100644 --- a/rules/validate.smk +++ b/rules/validate.smk @@ -17,7 +17,9 @@ rule build_electricity_production: The data is used for validation of the optimization results. """ params: - snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, + snapshots={ + k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + }, countries=config_provider("countries"), output: resources("historical_electricity_production.csv"), @@ -35,7 +37,9 @@ rule build_cross_border_flows: The data is used for validation of the optimization results. """ params: - snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, + snapshots={ + k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + }, countries=config_provider("countries"), input: network=resources("networks/base.nc"), @@ -55,7 +59,9 @@ rule build_electricity_prices: The data is used for validation of the optimization results. 
""" params: - snapshots={k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"]}, + snapshots={ + k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + }, countries=config_provider("countries"), output: resources("historical_electricity_prices.csv"), From 1a883debb5170e754f83ace5c5cf598453041019 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 12 Feb 2024 10:00:02 +0000 Subject: [PATCH 31/76] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- rules/build_sector.smk | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/rules/build_sector.smk b/rules/build_sector.smk index 268113f9..d7fbe638 100644 --- a/rules/build_sector.smk +++ b/rules/build_sector.smk @@ -229,7 +229,8 @@ rule build_solar_thermal_profiles: params: snapshots={ k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] - }, # TODO use config_provider + }, + # TODO use config_provider solar_thermal=config_provider("solar_thermal"), input: pop_layout=resources("pop_layout_{scope}.nc"), From bb7b65eebd9c1dce423b1ff3127ae9d7ce9d1264 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 12 Feb 2024 11:52:07 +0100 Subject: [PATCH 32/76] move create_scenarios.py to config directory --- .gitignore | 1 + {scripts => config}/create_scenarios.py | 0 doc/configtables/run.csv | 2 +- 3 files changed, 2 insertions(+), 1 deletion(-) rename {scripts => config}/create_scenarios.py (100%) diff --git a/.gitignore b/.gitignore index 467ecd95..f5f88861 100644 --- a/.gitignore +++ b/.gitignore @@ -25,6 +25,7 @@ doc/_build /scripts/old /scripts/create_scenarios.py +/config/create_scenarios.py config.yaml config/scenarios.yaml diff --git a/scripts/create_scenarios.py b/config/create_scenarios.py similarity index 100% rename from scripts/create_scenarios.py rename to config/create_scenarios.py diff --git a/doc/configtables/run.csv 
b/doc/configtables/run.csv index 925c2dea..75f29928 100644 --- a/doc/configtables/run.csv +++ b/doc/configtables/run.csv @@ -2,7 +2,7 @@ name,--,str/list,"Specify a name for your run. Results will be stored under this name. If ``scenario: enable`` is set to ``true``, the name must contain a subset of scenario names defined in ``scenario: file``." scenarios,,, -- enable,bool,"{true, false}","Switch to select whether workflow should generate scenarios based on ``file``." --- file,str,,"Path to the scenario yaml file. The scenario file contains config overrides for each scenario. In order to be taken account, ``run:scenarios`` has to be set to ``true`` and ``run:name`` has to be a subset of top level keys given in the scenario file. In order to automatically create a `scenario.yaml` file based on a combindation of settings, alter and use the ``create_scenarios.py`` script in ``scripts``." +-- file,str,,"Path to the scenario yaml file. The scenario file contains config overrides for each scenario. In order to be taken into account, ``run:scenarios`` has to be set to ``true`` and ``run:name`` has to be a subset of top level keys given in the scenario file. In order to automatically create a `scenario.yaml` file based on a combination of settings, alter and use the ``config/create_scenarios.py`` script in ``config``." disable_progrssbar,bool,"{true, false}","Switch to select whether progressbar should be disabled." shared_resources,bool/str/list,,"Switch to select whether resources should be shared across runs. If a string or list is passed, it is assumed to be wildcard(s) which indicates up to which set of wildcards the resource folder should be shared. If set to 'base', only resources before creating the elec.nc file are shared." shared_cutouts,bool,"{true, false}","Switch to select whether cutouts should be shared across runs." 
From 40b27b4107713e00029ca2c7c88c86eed1ad94e5 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 12 Feb 2024 11:53:20 +0100 Subject: [PATCH 33/76] use set_scenario_config everywhere --- scripts/add_brownfield.py | 5 +++-- scripts/add_existing_baseyear.py | 5 +++-- scripts/build_ammonia_production.py | 4 ++++ scripts/build_biomass_potentials.py | 5 +++++ scripts/build_clustered_population_layouts.py | 4 ++++ scripts/build_cop_profiles.py | 4 ++++ scripts/build_daily_heat_demand.py | 2 ++ scripts/build_district_heat_share.py | 3 +++ scripts/build_energy_totals.py | 5 +++-- scripts/build_existing_heating_distribution.py | 2 ++ scripts/build_gas_input_locations.py | 4 +++- scripts/build_gas_network.py | 4 +++- scripts/build_hourly_heat_demand.py | 3 ++- scripts/build_industrial_distribution_key.py | 6 ++++-- .../build_industrial_energy_demand_per_country_today.py | 2 ++ scripts/build_industrial_energy_demand_per_node.py | 3 +++ scripts/build_industrial_energy_demand_per_node_today.py | 3 +++ scripts/build_industrial_production_per_country.py | 6 +++--- .../build_industrial_production_per_country_tomorrow.py | 3 +++ scripts/build_industrial_production_per_node.py | 2 ++ scripts/build_industry_sector_ratios.py | 3 ++- scripts/build_population_layouts.py | 5 ++++- scripts/build_population_weighted_energy_totals.py | 3 +++ scripts/build_retro_cost.py | 2 ++ scripts/build_salt_cavern_potentials.py | 3 +++ scripts/build_sequestration_potentials.py | 4 ++++ scripts/build_shipping_demand.py | 2 ++ scripts/build_solar_thermal_profiles.py | 3 +++ scripts/build_temperature_profiles.py | 2 ++ scripts/build_transport_demand.py | 3 ++- scripts/cluster_gas_network.py | 5 +++-- scripts/determine_availability_matrix_MD_UA.py | 3 ++- scripts/make_summary.py | 4 +++- scripts/make_summary_perfect.py | 2 ++ scripts/plot_gas_network.py | 3 ++- scripts/plot_hydrogen_network.py | 3 ++- scripts/plot_power_network.py | 3 ++- scripts/plot_power_network_clustered.py | 3 ++- 
scripts/plot_power_network_perfect.py | 3 ++- scripts/plot_summary.py | 4 +++- scripts/prepare_perfect_foresight.py | 4 +++- scripts/prepare_sector_network.py | 4 ++-- scripts/retrieve_databundle.py | 7 +++---- scripts/retrieve_electricity_demand.py | 3 ++- scripts/retrieve_gas_infrastructure_data.py | 4 +++- scripts/retrieve_irena.py | 3 ++- scripts/solve_network.py | 1 + 47 files changed, 127 insertions(+), 37 deletions(-) diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index 3b77c437..b0727b17 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -12,7 +12,7 @@ import numpy as np import pandas as pd import pypsa import xarray as xr -from _helpers import update_config_with_sector_opts +from _helpers import update_config_with_sector_opts, configure_logging, set_scenario_config from add_existing_baseyear import add_build_year_to_new_assets from pypsa.clustering.spatial import normed_or_uniform @@ -210,7 +210,8 @@ if __name__ == "__main__": planning_horizons=2030, ) - logging.basicConfig(level=snakemake.config["logging"]["level"]) + configure_logging(snakemake) + set_scenario_config(snakemake) update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts) diff --git a/scripts/add_existing_baseyear.py b/scripts/add_existing_baseyear.py index c0d37a5b..25e35edd 100644 --- a/scripts/add_existing_baseyear.py +++ b/scripts/add_existing_baseyear.py @@ -15,7 +15,7 @@ import numpy as np import pandas as pd import pypsa import xarray as xr -from _helpers import update_config_with_sector_opts +from _helpers import update_config_with_sector_opts, set_scenario_config, configure_logging from add_electricity import sanitize_carriers from prepare_sector_network import cluster_heat_buses, define_spatial, prepare_costs @@ -552,7 +552,8 @@ if __name__ == "__main__": planning_horizons=2020, ) - logging.basicConfig(level=snakemake.config["logging"]["level"]) + configure_logging(snakemake) + set_scenario_config(snakemake) 
update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts) diff --git a/scripts/build_ammonia_production.py b/scripts/build_ammonia_production.py index 1bcdf9ae..47907cc9 100644 --- a/scripts/build_ammonia_production.py +++ b/scripts/build_ammonia_production.py @@ -9,6 +9,8 @@ Build historical annual ammonia production per country in ktonNH3/a. import country_converter as coco import pandas as pd +from _helpers import set_scenario_config + cc = coco.CountryConverter() @@ -18,6 +20,8 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_ammonia_production") + set_scenario_config(snakemake) + ammonia = pd.read_excel( snakemake.input.usgs, sheet_name="T12", diff --git a/scripts/build_biomass_potentials.py b/scripts/build_biomass_potentials.py index 6b5cb147..2fe6922c 100644 --- a/scripts/build_biomass_potentials.py +++ b/scripts/build_biomass_potentials.py @@ -16,6 +16,8 @@ import pandas as pd logger = logging.getLogger(__name__) AVAILABLE_BIOMASS_YEARS = [2010, 2020, 2030, 2040, 2050] +from _helpers import configure_logging, set_scenario_config + def build_nuts_population_data(year=2013): pop = pd.read_csv( @@ -220,6 +222,9 @@ if __name__ == "__main__": clusters="5", planning_horizons=2050, ) + + configure_logging(snakemake) + set_scenario_config(snakemake) overnight = snakemake.config["foresight"] == "overnight" params = snakemake.params.biomass diff --git a/scripts/build_clustered_population_layouts.py b/scripts/build_clustered_population_layouts.py index f1d386bd..4217baff 100644 --- a/scripts/build_clustered_population_layouts.py +++ b/scripts/build_clustered_population_layouts.py @@ -12,6 +12,8 @@ import geopandas as gpd import pandas as pd import xarray as xr +from _helpers import set_scenario_config + if __name__ == "__main__": if "snakemake" not in globals(): from _helpers import mock_snakemake @@ -22,6 +24,8 @@ if __name__ == "__main__": clusters=48, ) + set_scenario_config(snakemake) + cutout = 
atlite.Cutout(snakemake.input.cutout) clustered_regions = ( diff --git a/scripts/build_cop_profiles.py b/scripts/build_cop_profiles.py index 4b1d952e..ee68dd0a 100644 --- a/scripts/build_cop_profiles.py +++ b/scripts/build_cop_profiles.py @@ -15,6 +15,8 @@ https://doi.org/10.1039/C2EE22653G. import xarray as xr +from _helpers import set_scenario_config + def coefficient_of_performance(delta_T, source="air"): if source == "air": @@ -35,6 +37,8 @@ if __name__ == "__main__": clusters=48, ) + set_scenario_config(snakemake) + for area in ["total", "urban", "rural"]: for source in ["air", "soil"]: source_T = xr.open_dataarray(snakemake.input[f"temp_{source}_{area}"]) diff --git a/scripts/build_daily_heat_demand.py b/scripts/build_daily_heat_demand.py index e334b1b3..03ba8c2e 100644 --- a/scripts/build_daily_heat_demand.py +++ b/scripts/build_daily_heat_demand.py @@ -12,6 +12,7 @@ import numpy as np import pandas as pd import xarray as xr from dask.distributed import Client, LocalCluster +from _helpers import set_scenario_config if __name__ == "__main__": if "snakemake" not in globals(): @@ -23,6 +24,7 @@ if __name__ == "__main__": simpl="", clusters=48, ) + set_scenario_config(snakemake) nprocesses = int(snakemake.threads) cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1) diff --git a/scripts/build_district_heat_share.py b/scripts/build_district_heat_share.py index 86c42631..121e4663 100644 --- a/scripts/build_district_heat_share.py +++ b/scripts/build_district_heat_share.py @@ -10,6 +10,7 @@ import logging import pandas as pd from prepare_sector_network import get +from _helpers import configure_logging, set_scenario_config logger = logging.getLogger(__name__) @@ -24,6 +25,8 @@ if __name__ == "__main__": clusters=48, planning_horizons="2050", ) + configure_logging(snakemake) + set_scenario_config(snakemake) investment_year = int(snakemake.wildcards.planning_horizons[-4:]) diff --git a/scripts/build_energy_totals.py b/scripts/build_energy_totals.py index 
c67bb49d..f22ddc25 100644 --- a/scripts/build_energy_totals.py +++ b/scripts/build_energy_totals.py @@ -14,7 +14,7 @@ import country_converter as coco import geopandas as gpd import numpy as np import pandas as pd -from _helpers import mute_print +from _helpers import mute_print, configure_logging, set_scenario_config from tqdm import tqdm cc = coco.CountryConverter() @@ -743,7 +743,8 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_energy_totals") - logging.basicConfig(level=snakemake.config["logging"]["level"]) + configure_logging(snakemake) + set_scenario_config(snakemake) params = snakemake.params.energy diff --git a/scripts/build_existing_heating_distribution.py b/scripts/build_existing_heating_distribution.py index 78518597..eb2361c2 100644 --- a/scripts/build_existing_heating_distribution.py +++ b/scripts/build_existing_heating_distribution.py @@ -9,6 +9,7 @@ horizon. import country_converter as coco import numpy as np import pandas as pd +from _helpers import set_scenario_config cc = coco.CountryConverter() @@ -126,5 +127,6 @@ if __name__ == "__main__": clusters=48, planning_horizons=2050, ) + set_scenario_config(snakemake) build_existing_heating() diff --git a/scripts/build_gas_input_locations.py b/scripts/build_gas_input_locations.py index 081f74b9..5aba46e5 100644 --- a/scripts/build_gas_input_locations.py +++ b/scripts/build_gas_input_locations.py @@ -12,6 +12,7 @@ import logging import geopandas as gpd import pandas as pd from cluster_gas_network import load_bus_regions +from _helpers import configure_logging, set_scenario_config logger = logging.getLogger(__name__) @@ -134,7 +135,8 @@ if __name__ == "__main__": clusters="128", ) - logging.basicConfig(level=snakemake.config["logging"]["level"]) + configure_logging(snakemake) + set_scenario_config(snakemake) regions = load_bus_regions( snakemake.input.regions_onshore, snakemake.input.regions_offshore diff --git a/scripts/build_gas_network.py b/scripts/build_gas_network.py index 
13cd75ba..0febd43d 100644 --- a/scripts/build_gas_network.py +++ b/scripts/build_gas_network.py @@ -13,6 +13,7 @@ import geopandas as gpd import pandas as pd from pypsa.geo import haversine_pts from shapely.geometry import Point +from _helpers import configure_logging, set_scenario_config logger = logging.getLogger(__name__) @@ -143,7 +144,8 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_gas_network") - logging.basicConfig(level=snakemake.config["logging"]["level"]) + configure_logging(snakemake) + set_scenario_config(snakemake) gas_network = load_dataset(snakemake.input.gas_network) diff --git a/scripts/build_hourly_heat_demand.py b/scripts/build_hourly_heat_demand.py index c972da89..a3110e1f 100644 --- a/scripts/build_hourly_heat_demand.py +++ b/scripts/build_hourly_heat_demand.py @@ -10,7 +10,7 @@ from itertools import product import pandas as pd import xarray as xr -from _helpers import generate_periodic_profiles +from _helpers import generate_periodic_profiles, set_scenario_config if __name__ == "__main__": if "snakemake" not in globals(): @@ -22,6 +22,7 @@ if __name__ == "__main__": simpl="", clusters=48, ) + set_scenario_config(snakemake) snapshots = pd.date_range(freq="h", **snakemake.params.snapshots) diff --git a/scripts/build_industrial_distribution_key.py b/scripts/build_industrial_distribution_key.py index 9b234e29..29b7538e 100644 --- a/scripts/build_industrial_distribution_key.py +++ b/scripts/build_industrial_distribution_key.py @@ -14,6 +14,8 @@ import country_converter as coco import geopandas as gpd import pandas as pd +from _helpers import configure_logging, set_scenario_config + logger = logging.getLogger(__name__) cc = coco.CountryConverter() @@ -148,8 +150,8 @@ if __name__ == "__main__": simpl="", clusters=128, ) - - logging.basicConfig(level=snakemake.config["logging"]["level"]) + configure_logging(snakemake) + set_scenario_config(snakemake) countries = snakemake.params.countries diff --git 
a/scripts/build_industrial_energy_demand_per_country_today.py b/scripts/build_industrial_energy_demand_per_country_today.py index d1c672f1..99342b9e 100644 --- a/scripts/build_industrial_energy_demand_per_country_today.py +++ b/scripts/build_industrial_energy_demand_per_country_today.py @@ -12,6 +12,7 @@ from functools import partial import country_converter as coco import pandas as pd from tqdm import tqdm +from _helpers import set_scenario_config cc = coco.CountryConverter() @@ -175,6 +176,7 @@ if __name__ == "__main__": from _helpers import mock_snakemake snakemake = mock_snakemake("build_industrial_energy_demand_per_country_today") + set_scenario_config(snakemake) params = snakemake.params.industry year = params.get("reference_year", 2015) diff --git a/scripts/build_industrial_energy_demand_per_node.py b/scripts/build_industrial_energy_demand_per_node.py index 55c10c5d..fcc0abd9 100644 --- a/scripts/build_industrial_energy_demand_per_node.py +++ b/scripts/build_industrial_energy_demand_per_node.py @@ -8,6 +8,8 @@ Build industrial energy demand per model region. 
import pandas as pd +from _helpers import set_scenario_config + if __name__ == "__main__": if "snakemake" not in globals(): from _helpers import mock_snakemake @@ -18,6 +20,7 @@ if __name__ == "__main__": clusters=48, planning_horizons=2030, ) + set_scenario_config(snakemake) # import EU ratios df as csv fn = snakemake.input.industry_sector_ratios diff --git a/scripts/build_industrial_energy_demand_per_node_today.py b/scripts/build_industrial_energy_demand_per_node_today.py index d845e704..1b7aba83 100644 --- a/scripts/build_industrial_energy_demand_per_node_today.py +++ b/scripts/build_industrial_energy_demand_per_node_today.py @@ -11,6 +11,8 @@ from itertools import product import numpy as np import pandas as pd +from _helpers import set_scenario_config + # map JRC/our sectors to hotmaps sector, where mapping exist sector_mapping = { "Electric arc": "Iron and steel", @@ -75,5 +77,6 @@ if __name__ == "__main__": simpl="", clusters=48, ) + set_scenario_config(snakemake) build_nodal_industrial_energy_demand() diff --git a/scripts/build_industrial_production_per_country.py b/scripts/build_industrial_production_per_country.py index 0aea4f15..d9ec9afd 100644 --- a/scripts/build_industrial_production_per_country.py +++ b/scripts/build_industrial_production_per_country.py @@ -13,7 +13,7 @@ from functools import partial import country_converter as coco import numpy as np import pandas as pd -from _helpers import mute_print +from _helpers import mute_print, set_scenario_config, configure_logging from tqdm import tqdm logger = logging.getLogger(__name__) @@ -274,8 +274,8 @@ if __name__ == "__main__": from _helpers import mock_snakemake snakemake = mock_snakemake("build_industrial_production_per_country") - - logging.basicConfig(level=snakemake.config["logging"]["level"]) + configure_logging(snakemake) + set_scenario_config(snakemake) countries = snakemake.params.countries diff --git a/scripts/build_industrial_production_per_country_tomorrow.py 
b/scripts/build_industrial_production_per_country_tomorrow.py index ffed5195..835a2687 100644 --- a/scripts/build_industrial_production_per_country_tomorrow.py +++ b/scripts/build_industrial_production_per_country_tomorrow.py @@ -9,11 +9,14 @@ Build future industrial production per country. import pandas as pd from prepare_sector_network import get +from _helpers import set_scenario_config + if __name__ == "__main__": if "snakemake" not in globals(): from _helpers import mock_snakemake snakemake = mock_snakemake("build_industrial_production_per_country_tomorrow") + set_scenario_config(snakemake) params = snakemake.params.industry diff --git a/scripts/build_industrial_production_per_node.py b/scripts/build_industrial_production_per_node.py index 7b69948a..c84590b7 100644 --- a/scripts/build_industrial_production_per_node.py +++ b/scripts/build_industrial_production_per_node.py @@ -7,6 +7,7 @@ Build industrial production per model region. """ from itertools import product +from _helpers import set_scenario_config import pandas as pd @@ -72,5 +73,6 @@ if __name__ == "__main__": simpl="", clusters=48, ) + set_scenario_config(snakemake) build_nodal_industrial_production() diff --git a/scripts/build_industry_sector_ratios.py b/scripts/build_industry_sector_ratios.py index 45705002..6014feee 100644 --- a/scripts/build_industry_sector_ratios.py +++ b/scripts/build_industry_sector_ratios.py @@ -7,7 +7,7 @@ Build specific energy consumption by carrier and industries. 
""" import pandas as pd -from _helpers import mute_print +from _helpers import mute_print, set_scenario_config # GWh/ktoe OR MWh/toe toe_to_MWh = 11.630 @@ -1464,6 +1464,7 @@ if __name__ == "__main__": from _helpers import mock_snakemake snakemake = mock_snakemake("build_industry_sector_ratios") + set_scenario_config(snakemake) # TODO make params option year = 2015 diff --git a/scripts/build_population_layouts.py b/scripts/build_population_layouts.py index cb63c27e..db548140 100644 --- a/scripts/build_population_layouts.py +++ b/scripts/build_population_layouts.py @@ -14,6 +14,8 @@ import numpy as np import pandas as pd import xarray as xr +from _helpers import configure_logging, set_scenario_config + logger = logging.getLogger(__name__) if __name__ == "__main__": @@ -22,7 +24,8 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_population_layouts") - logging.basicConfig(level=snakemake.config["logging"]["level"]) + configure_logging(snakemake) + set_scenario_config(snakemake) cutout = atlite.Cutout(snakemake.input.cutout) diff --git a/scripts/build_population_weighted_energy_totals.py b/scripts/build_population_weighted_energy_totals.py index 879e3b9b..6ed32086 100644 --- a/scripts/build_population_weighted_energy_totals.py +++ b/scripts/build_population_weighted_energy_totals.py @@ -8,6 +8,8 @@ Distribute country-level energy demands by population. 
import pandas as pd +from _helpers import set_scenario_config + if __name__ == "__main__": if "snakemake" not in globals(): from _helpers import mock_snakemake @@ -17,6 +19,7 @@ if __name__ == "__main__": simpl="", clusters=48, ) + set_scenario_config(snakemake) pop_layout = pd.read_csv(snakemake.input.clustered_pop_layout, index_col=0) diff --git a/scripts/build_retro_cost.py b/scripts/build_retro_cost.py index 60d74afa..b6153a2d 100755 --- a/scripts/build_retro_cost.py +++ b/scripts/build_retro_cost.py @@ -68,6 +68,7 @@ The script has the following structure: """ import pandas as pd import xarray as xr +from _helpers import set_scenario_config # (i) --- FIXED PARAMETER / STANDARD VALUES ----------------------------------- @@ -1053,6 +1054,7 @@ if __name__ == "__main__": ll="v1.0", sector_opts="Co2L0-168H-T-H-B-I-solar3-dist1", ) + set_scenario_config(snakemake) # ******** config ********************************************************* diff --git a/scripts/build_salt_cavern_potentials.py b/scripts/build_salt_cavern_potentials.py index ed039772..08071b22 100644 --- a/scripts/build_salt_cavern_potentials.py +++ b/scripts/build_salt_cavern_potentials.py @@ -24,6 +24,7 @@ onshore (>50km from sea), offshore (Figure 7). import geopandas as gpd import pandas as pd +from _helpers import set_scenario_config def concat_gdf(gdf_list, crs="EPSG:4326"): @@ -77,6 +78,8 @@ if __name__ == "__main__": "build_salt_cavern_potentials", simpl="", clusters="37" ) + set_scenario_config(snakemake) + fn_onshore = snakemake.input.regions_onshore fn_offshore = snakemake.input.regions_offshore diff --git a/scripts/build_sequestration_potentials.py b/scripts/build_sequestration_potentials.py index f6ad3526..9e7678bb 100644 --- a/scripts/build_sequestration_potentials.py +++ b/scripts/build_sequestration_potentials.py @@ -11,6 +11,8 @@ database_en>`_. 
import geopandas as gpd import pandas as pd +from _helpers import set_scenario_config + def area(gdf): """ @@ -39,6 +41,8 @@ if __name__ == "__main__": "build_sequestration_potentials", simpl="", clusters="181" ) + set_scenario_config(snakemake) + cf = snakemake.params.sequestration_potential gdf = gpd.read_file(snakemake.input.sequestration_potential[0]) diff --git a/scripts/build_shipping_demand.py b/scripts/build_shipping_demand.py index 8000c66c..0690e33d 100644 --- a/scripts/build_shipping_demand.py +++ b/scripts/build_shipping_demand.py @@ -11,6 +11,7 @@ import json import geopandas as gpd import pandas as pd +from _helpers import set_scenario_config if __name__ == "__main__": if "snakemake" not in globals(): @@ -21,6 +22,7 @@ if __name__ == "__main__": simpl="", clusters=48, ) + set_scenario_config(snakemake) scope = gpd.read_file(snakemake.input.scope).geometry[0] regions = gpd.read_file(snakemake.input.regions).set_index("name") diff --git a/scripts/build_solar_thermal_profiles.py b/scripts/build_solar_thermal_profiles.py index ee6ed881..12826420 100644 --- a/scripts/build_solar_thermal_profiles.py +++ b/scripts/build_solar_thermal_profiles.py @@ -13,6 +13,8 @@ import pandas as pd import xarray as xr from dask.distributed import Client, LocalCluster +from _helpers import set_scenario_config + if __name__ == "__main__": if "snakemake" not in globals(): from _helpers import mock_snakemake @@ -22,6 +24,7 @@ if __name__ == "__main__": simpl="", clusters=48, ) + set_scenario_config(snakemake) nprocesses = int(snakemake.threads) cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1) diff --git a/scripts/build_temperature_profiles.py b/scripts/build_temperature_profiles.py index 02fa4a71..878ebc03 100644 --- a/scripts/build_temperature_profiles.py +++ b/scripts/build_temperature_profiles.py @@ -12,6 +12,7 @@ import numpy as np import pandas as pd import xarray as xr from dask.distributed import Client, LocalCluster +from _helpers import 
set_scenario_config if __name__ == "__main__": if "snakemake" not in globals(): @@ -22,6 +23,7 @@ if __name__ == "__main__": simpl="", clusters=48, ) + set_scenario_config(snakemake) nprocesses = int(snakemake.threads) cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1) diff --git a/scripts/build_transport_demand.py b/scripts/build_transport_demand.py index 33c8faae..671357dd 100644 --- a/scripts/build_transport_demand.py +++ b/scripts/build_transport_demand.py @@ -13,7 +13,7 @@ import logging import numpy as np import pandas as pd import xarray as xr -from _helpers import configure_logging, generate_periodic_profiles +from _helpers import configure_logging, generate_periodic_profiles, set_scenario_config logger = logging.getLogger(__name__) @@ -171,6 +171,7 @@ if __name__ == "__main__": clusters=48, ) configure_logging(snakemake) + set_scenario_config(snakemake) pop_layout = pd.read_csv(snakemake.input.clustered_pop_layout, index_col=0) diff --git a/scripts/cluster_gas_network.py b/scripts/cluster_gas_network.py index e709d772..567f4458 100755 --- a/scripts/cluster_gas_network.py +++ b/scripts/cluster_gas_network.py @@ -12,6 +12,7 @@ import geopandas as gpd import pandas as pd from pypsa.geo import haversine_pts from shapely import wkt +from _helpers import set_scenario_config, configure_logging logger = logging.getLogger(__name__) @@ -105,8 +106,8 @@ if __name__ == "__main__": from _helpers import mock_snakemake snakemake = mock_snakemake("cluster_gas_network", simpl="", clusters="37") - - logging.basicConfig(level=snakemake.config["logging"]["level"]) + configure_logging(snakemake) + set_scenario_config(snakemake) fn = snakemake.input.cleaned_gas_network df = pd.read_csv(fn, index_col=0) diff --git a/scripts/determine_availability_matrix_MD_UA.py b/scripts/determine_availability_matrix_MD_UA.py index efe9a712..84076383 100644 --- a/scripts/determine_availability_matrix_MD_UA.py +++ b/scripts/determine_availability_matrix_MD_UA.py @@ -15,7 +15,7 @@ 
import fiona import geopandas as gpd import matplotlib.pyplot as plt import numpy as np -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config from atlite.gis import shape_availability from rasterio.plot import show @@ -38,6 +38,7 @@ if __name__ == "__main__": "determine_availability_matrix_MD_UA", technology="solar" ) configure_logging(snakemake) + set_scenario_config(snakemake) nprocesses = None # snakemake.config["atlite"].get("nprocesses") noprogress = not snakemake.config["atlite"].get("show_progress", True) diff --git a/scripts/make_summary.py b/scripts/make_summary.py index 76d8099c..82ce2328 100644 --- a/scripts/make_summary.py +++ b/scripts/make_summary.py @@ -14,6 +14,7 @@ import numpy as np import pandas as pd import pypsa from prepare_sector_network import prepare_costs +from _helpers import set_scenario_config, configure_logging idx = pd.IndexSlice logger = logging.getLogger(__name__) @@ -673,7 +674,8 @@ if __name__ == "__main__": snakemake = mock_snakemake("make_summary") - logging.basicConfig(level=snakemake.config["logging"]["level"]) + configure_logging(snakemake) + set_scenario_config(snakemake) networks_dict = { (cluster, ll, opt + sector_opt, planning_horizon): "results/" diff --git a/scripts/make_summary_perfect.py b/scripts/make_summary_perfect.py index 064db454..7c2e8d30 100644 --- a/scripts/make_summary_perfect.py +++ b/scripts/make_summary_perfect.py @@ -19,6 +19,7 @@ from make_summary import assign_carriers, assign_locations from prepare_sector_network import prepare_costs from pypsa.descriptors import get_active_assets from six import iteritems +from _helpers import set_scenario_config idx = pd.IndexSlice @@ -722,6 +723,7 @@ if __name__ == "__main__": from _helpers import mock_snakemake snakemake = mock_snakemake("make_summary_perfect") + set_scenario_config(snakemake) run = snakemake.config["run"]["name"] if run != "": diff --git a/scripts/plot_gas_network.py b/scripts/plot_gas_network.py 
index e2953604..26186d51 100644 --- a/scripts/plot_gas_network.py +++ b/scripts/plot_gas_network.py @@ -13,7 +13,7 @@ import geopandas as gpd import matplotlib.pyplot as plt import pandas as pd import pypsa -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config from plot_power_network import assign_location, load_projection from pypsa.plot import add_legend_circles, add_legend_lines, add_legend_patches @@ -237,6 +237,7 @@ if __name__ == "__main__": ) configure_logging(snakemake) + set_scenario_config(snakemake) n = pypsa.Network(snakemake.input.network) diff --git a/scripts/plot_hydrogen_network.py b/scripts/plot_hydrogen_network.py index 95741170..4cb58557 100644 --- a/scripts/plot_hydrogen_network.py +++ b/scripts/plot_hydrogen_network.py @@ -13,7 +13,7 @@ import geopandas as gpd import matplotlib.pyplot as plt import pandas as pd import pypsa -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config from plot_power_network import assign_location, load_projection from pypsa.plot import add_legend_circles, add_legend_lines, add_legend_patches @@ -254,6 +254,7 @@ if __name__ == "__main__": ) configure_logging(snakemake) + set_scenario_config(snakemake) n = pypsa.Network(snakemake.input.network) diff --git a/scripts/plot_power_network.py b/scripts/plot_power_network.py index 0e13e497..6db53bcc 100644 --- a/scripts/plot_power_network.py +++ b/scripts/plot_power_network.py @@ -14,7 +14,7 @@ import geopandas as gpd import matplotlib.pyplot as plt import pandas as pd import pypsa -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config from plot_summary import preferred_order, rename_techs from pypsa.plot import add_legend_circles, add_legend_lines, add_legend_patches @@ -257,6 +257,7 @@ if __name__ == "__main__": ) configure_logging(snakemake) + set_scenario_config(snakemake) n = pypsa.Network(snakemake.input.network) diff --git 
a/scripts/plot_power_network_clustered.py b/scripts/plot_power_network_clustered.py index 8217ac2e..43746039 100644 --- a/scripts/plot_power_network_clustered.py +++ b/scripts/plot_power_network_clustered.py @@ -6,13 +6,13 @@ Plot clustered electricity transmission network. """ -import cartopy.crs as ccrs import geopandas as gpd import matplotlib.pyplot as plt import pypsa from matplotlib.lines import Line2D from plot_power_network import load_projection from pypsa.plot import add_legend_lines +from _helpers import set_scenario_config if __name__ == "__main__": if "snakemake" not in globals(): @@ -23,6 +23,7 @@ if __name__ == "__main__": clusters=128, configfiles=["../../config/config.test.yaml"], ) + set_scenario_config(snakemake) lw_factor = 2e3 diff --git a/scripts/plot_power_network_perfect.py b/scripts/plot_power_network_perfect.py index ff576d33..f7506a00 100644 --- a/scripts/plot_power_network_perfect.py +++ b/scripts/plot_power_network_perfect.py @@ -13,7 +13,7 @@ import geopandas as gpd import matplotlib.pyplot as plt import pandas as pd import pypsa -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config from plot_power_network import assign_location, load_projection, rename_techs_tyndp from plot_summary import preferred_order from pypsa.plot import add_legend_circles, add_legend_lines @@ -184,6 +184,7 @@ if __name__ == "__main__": ) configure_logging(snakemake) + set_scenario_config(snakemake) n = pypsa.Network(snakemake.input.network) diff --git a/scripts/plot_summary.py b/scripts/plot_summary.py index cfb32441..da077348 100644 --- a/scripts/plot_summary.py +++ b/scripts/plot_summary.py @@ -12,6 +12,7 @@ import matplotlib.gridspec as gridspec import matplotlib.pyplot as plt import pandas as pd from prepare_sector_network import co2_emissions_year +from _helpers import configure_logging, set_scenario_config logger = logging.getLogger(__name__) plt.style.use("ggplot") @@ -572,7 +573,8 @@ if __name__ == 
"__main__": snakemake = mock_snakemake("plot_summary") - logging.basicConfig(level=snakemake.config["logging"]["level"]) + configure_logging(snakemake) + set_scenario_config(snakemake) n_header = 4 diff --git a/scripts/prepare_perfect_foresight.py b/scripts/prepare_perfect_foresight.py index cf013577..de9932fd 100644 --- a/scripts/prepare_perfect_foresight.py +++ b/scripts/prepare_perfect_foresight.py @@ -12,7 +12,7 @@ import re import numpy as np import pandas as pd import pypsa -from _helpers import update_config_with_sector_opts +from _helpers import update_config_with_sector_opts, set_scenario_config, configure_logging from add_existing_baseyear import add_build_year_to_new_assets from pypsa.descriptors import expand_series from pypsa.io import import_components_from_dataframe @@ -514,6 +514,8 @@ if __name__ == "__main__": ll="v1.5", sector_opts="1p7-4380H-T-H-B-I-A-dist1", ) + configure_logging(snakemake) + set_scenario_config(snakemake) update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts) # parameters ----------------------------------------------------------- diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index b1161c19..bf9ed58f 100755 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -18,7 +18,7 @@ import numpy as np import pandas as pd import pypsa import xarray as xr -from _helpers import set_scenario_config, update_config_with_sector_opts +from _helpers import configure_logging, set_scenario_config, update_config_with_sector_opts from add_electricity import calculate_annuity, sanitize_carriers, sanitize_locations from build_energy_totals import build_co2_totals, build_eea_co2, build_eurostat_co2 from networkx.algorithms import complement @@ -3572,7 +3572,7 @@ if __name__ == "__main__": planning_horizons="2030", ) - logging.basicConfig(level=snakemake.config["logging"]["level"]) + configure_logging(snakemake) set_scenario_config(snakemake) 
update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts) diff --git a/scripts/retrieve_databundle.py b/scripts/retrieve_databundle.py index 25894063..b6a4d378 100644 --- a/scripts/retrieve_databundle.py +++ b/scripts/retrieve_databundle.py @@ -36,7 +36,7 @@ import logging import tarfile from pathlib import Path -from _helpers import configure_logging, progress_retrieve, validate_checksum +from _helpers import configure_logging, progress_retrieve, validate_checksum, set_scenario_config logger = logging.getLogger(__name__) @@ -49,9 +49,8 @@ if __name__ == "__main__": rootpath = ".." else: rootpath = "." - configure_logging( - snakemake - ) # TODO Make logging compatible with progressbar (see PR #102) + configure_logging(snakemake) + set_scenario_config(snakemake) if snakemake.config["tutorial"]: url = "https://zenodo.org/record/3517921/files/pypsa-eur-tutorial-data-bundle.tar.xz" diff --git a/scripts/retrieve_electricity_demand.py b/scripts/retrieve_electricity_demand.py index a8a44b68..94077fdf 100644 --- a/scripts/retrieve_electricity_demand.py +++ b/scripts/retrieve_electricity_demand.py @@ -12,7 +12,7 @@ import pandas as pd logger = logging.getLogger(__name__) -from _helpers import configure_logging +from _helpers import configure_logging, set_scenario_config if __name__ == "__main__": if "snakemake" not in globals(): @@ -23,6 +23,7 @@ if __name__ == "__main__": else: rootpath = "." 
configure_logging(snakemake) + set_scenario_config(snakemake) url = "https://data.open-power-system-data.org/time_series/{version}/time_series_60min_singleindex.csv" diff --git a/scripts/retrieve_gas_infrastructure_data.py b/scripts/retrieve_gas_infrastructure_data.py index d984b9fe..7cc1c270 100644 --- a/scripts/retrieve_gas_infrastructure_data.py +++ b/scripts/retrieve_gas_infrastructure_data.py @@ -11,7 +11,7 @@ import logging import zipfile from pathlib import Path -from _helpers import progress_retrieve, validate_checksum +from _helpers import progress_retrieve, validate_checksum, set_scenario_config, configure_logging logger = logging.getLogger(__name__) @@ -24,6 +24,8 @@ if __name__ == "__main__": rootpath = ".." else: rootpath = "." + configure_logging(snakemake) + set_scenario_config(snakemake) url = "https://zenodo.org/record/4767098/files/IGGIELGN.zip" diff --git a/scripts/retrieve_irena.py b/scripts/retrieve_irena.py index 7b123475..dbd11129 100644 --- a/scripts/retrieve_irena.py +++ b/scripts/retrieve_irena.py @@ -26,7 +26,7 @@ This rule downloads the existing capacities from `IRENASTAT Date: Mon, 12 Feb 2024 11:53:37 +0100 Subject: [PATCH 34/76] do not use config_provider outside rule definitions --- rules/postprocess.smk | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rules/postprocess.smk b/rules/postprocess.smk index cf0ef6cd..38e5f7d9 100644 --- a/rules/postprocess.smk +++ b/rules/postprocess.smk @@ -7,7 +7,7 @@ localrules: copy_config, -if config_provider("foresight") != "perfect": +if config["foresight"] != "perfect": rule plot_power_network_clustered: params: @@ -109,7 +109,7 @@ if config_provider("foresight") != "perfect": "../scripts/plot_gas_network.py" -if config_provider("foresight") == "perfect": +if config["foresight"] == "perfect": rule plot_power_network_perfect: params: From e0b6ebd174a9f5f55bb462c8650e0754a1121a9d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 12 Feb 2024 10:54:13 +0000 Subject: [PATCH 35/76] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/add_brownfield.py | 6 +++++- scripts/add_existing_baseyear.py | 6 +++++- scripts/build_ammonia_production.py | 1 - scripts/build_biomass_potentials.py | 2 +- scripts/build_clustered_population_layouts.py | 1 - scripts/build_cop_profiles.py | 1 - scripts/build_daily_heat_demand.py | 2 +- scripts/build_district_heat_share.py | 2 +- scripts/build_energy_totals.py | 2 +- scripts/build_gas_input_locations.py | 2 +- scripts/build_gas_network.py | 2 +- scripts/build_industrial_distribution_key.py | 1 - .../build_industrial_energy_demand_per_country_today.py | 2 +- scripts/build_industrial_energy_demand_per_node.py | 1 - scripts/build_industrial_energy_demand_per_node_today.py | 1 - scripts/build_industrial_production_per_country.py | 2 +- .../build_industrial_production_per_country_tomorrow.py | 3 +-- scripts/build_industrial_production_per_node.py | 2 +- scripts/build_population_layouts.py | 1 - scripts/build_population_weighted_energy_totals.py | 1 - scripts/build_sequestration_potentials.py | 1 - scripts/build_solar_thermal_profiles.py | 3 +-- scripts/build_temperature_profiles.py | 2 +- scripts/cluster_gas_network.py | 2 +- scripts/make_summary.py | 2 +- scripts/make_summary_perfect.py | 2 +- scripts/plot_power_network_clustered.py | 2 +- scripts/plot_summary.py | 2 +- scripts/prepare_perfect_foresight.py | 6 +++++- scripts/prepare_sector_network.py | 6 +++++- scripts/retrieve_databundle.py | 7 ++++++- scripts/retrieve_gas_infrastructure_data.py | 7 ++++++- 32 files changed, 49 insertions(+), 34 deletions(-) diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index b0727b17..d45742dd 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -12,7 +12,11 @@ import numpy as np import pandas as pd import pypsa 
import xarray as xr -from _helpers import update_config_with_sector_opts, configure_logging, set_scenario_config +from _helpers import ( + configure_logging, + set_scenario_config, + update_config_with_sector_opts, +) from add_existing_baseyear import add_build_year_to_new_assets from pypsa.clustering.spatial import normed_or_uniform diff --git a/scripts/add_existing_baseyear.py b/scripts/add_existing_baseyear.py index 25e35edd..780460a2 100644 --- a/scripts/add_existing_baseyear.py +++ b/scripts/add_existing_baseyear.py @@ -15,7 +15,11 @@ import numpy as np import pandas as pd import pypsa import xarray as xr -from _helpers import update_config_with_sector_opts, set_scenario_config, configure_logging +from _helpers import ( + configure_logging, + set_scenario_config, + update_config_with_sector_opts, +) from add_electricity import sanitize_carriers from prepare_sector_network import cluster_heat_buses, define_spatial, prepare_costs diff --git a/scripts/build_ammonia_production.py b/scripts/build_ammonia_production.py index 47907cc9..a9cd907a 100644 --- a/scripts/build_ammonia_production.py +++ b/scripts/build_ammonia_production.py @@ -8,7 +8,6 @@ Build historical annual ammonia production per country in ktonNH3/a. 
import country_converter as coco import pandas as pd - from _helpers import set_scenario_config cc = coco.CountryConverter() diff --git a/scripts/build_biomass_potentials.py b/scripts/build_biomass_potentials.py index 2fe6922c..6291b03e 100644 --- a/scripts/build_biomass_potentials.py +++ b/scripts/build_biomass_potentials.py @@ -222,7 +222,7 @@ if __name__ == "__main__": clusters="5", planning_horizons=2050, ) - + configure_logging(snakemake) set_scenario_config(snakemake) diff --git a/scripts/build_clustered_population_layouts.py b/scripts/build_clustered_population_layouts.py index 4217baff..c923abf9 100644 --- a/scripts/build_clustered_population_layouts.py +++ b/scripts/build_clustered_population_layouts.py @@ -11,7 +11,6 @@ import atlite import geopandas as gpd import pandas as pd import xarray as xr - from _helpers import set_scenario_config if __name__ == "__main__": diff --git a/scripts/build_cop_profiles.py b/scripts/build_cop_profiles.py index ee68dd0a..54eac3a3 100644 --- a/scripts/build_cop_profiles.py +++ b/scripts/build_cop_profiles.py @@ -14,7 +14,6 @@ https://doi.org/10.1039/C2EE22653G. """ import xarray as xr - from _helpers import set_scenario_config diff --git a/scripts/build_daily_heat_demand.py b/scripts/build_daily_heat_demand.py index 03ba8c2e..4f1a3303 100644 --- a/scripts/build_daily_heat_demand.py +++ b/scripts/build_daily_heat_demand.py @@ -11,8 +11,8 @@ import geopandas as gpd import numpy as np import pandas as pd import xarray as xr -from dask.distributed import Client, LocalCluster from _helpers import set_scenario_config +from dask.distributed import Client, LocalCluster if __name__ == "__main__": if "snakemake" not in globals(): diff --git a/scripts/build_district_heat_share.py b/scripts/build_district_heat_share.py index 121e4663..46ada2f6 100644 --- a/scripts/build_district_heat_share.py +++ b/scripts/build_district_heat_share.py @@ -9,8 +9,8 @@ Build district heat shares at each node, depending on investment year. 
import logging import pandas as pd -from prepare_sector_network import get from _helpers import configure_logging, set_scenario_config +from prepare_sector_network import get logger = logging.getLogger(__name__) diff --git a/scripts/build_energy_totals.py b/scripts/build_energy_totals.py index f22ddc25..2b4ebd19 100644 --- a/scripts/build_energy_totals.py +++ b/scripts/build_energy_totals.py @@ -14,7 +14,7 @@ import country_converter as coco import geopandas as gpd import numpy as np import pandas as pd -from _helpers import mute_print, configure_logging, set_scenario_config +from _helpers import configure_logging, mute_print, set_scenario_config from tqdm import tqdm cc = coco.CountryConverter() diff --git a/scripts/build_gas_input_locations.py b/scripts/build_gas_input_locations.py index 5aba46e5..b0aadce8 100644 --- a/scripts/build_gas_input_locations.py +++ b/scripts/build_gas_input_locations.py @@ -11,8 +11,8 @@ import logging import geopandas as gpd import pandas as pd -from cluster_gas_network import load_bus_regions from _helpers import configure_logging, set_scenario_config +from cluster_gas_network import load_bus_regions logger = logging.getLogger(__name__) diff --git a/scripts/build_gas_network.py b/scripts/build_gas_network.py index 0febd43d..52235cd1 100644 --- a/scripts/build_gas_network.py +++ b/scripts/build_gas_network.py @@ -11,9 +11,9 @@ import logging import geopandas as gpd import pandas as pd +from _helpers import configure_logging, set_scenario_config from pypsa.geo import haversine_pts from shapely.geometry import Point -from _helpers import configure_logging, set_scenario_config logger = logging.getLogger(__name__) diff --git a/scripts/build_industrial_distribution_key.py b/scripts/build_industrial_distribution_key.py index 29b7538e..90687b33 100644 --- a/scripts/build_industrial_distribution_key.py +++ b/scripts/build_industrial_distribution_key.py @@ -13,7 +13,6 @@ from itertools import product import country_converter as coco import 
geopandas as gpd import pandas as pd - from _helpers import configure_logging, set_scenario_config logger = logging.getLogger(__name__) diff --git a/scripts/build_industrial_energy_demand_per_country_today.py b/scripts/build_industrial_energy_demand_per_country_today.py index 99342b9e..1ccae6c2 100644 --- a/scripts/build_industrial_energy_demand_per_country_today.py +++ b/scripts/build_industrial_energy_demand_per_country_today.py @@ -11,8 +11,8 @@ from functools import partial import country_converter as coco import pandas as pd -from tqdm import tqdm from _helpers import set_scenario_config +from tqdm import tqdm cc = coco.CountryConverter() diff --git a/scripts/build_industrial_energy_demand_per_node.py b/scripts/build_industrial_energy_demand_per_node.py index fcc0abd9..42df4250 100644 --- a/scripts/build_industrial_energy_demand_per_node.py +++ b/scripts/build_industrial_energy_demand_per_node.py @@ -7,7 +7,6 @@ Build industrial energy demand per model region. """ import pandas as pd - from _helpers import set_scenario_config if __name__ == "__main__": diff --git a/scripts/build_industrial_energy_demand_per_node_today.py b/scripts/build_industrial_energy_demand_per_node_today.py index 1b7aba83..ce8d971f 100644 --- a/scripts/build_industrial_energy_demand_per_node_today.py +++ b/scripts/build_industrial_energy_demand_per_node_today.py @@ -10,7 +10,6 @@ from itertools import product import numpy as np import pandas as pd - from _helpers import set_scenario_config # map JRC/our sectors to hotmaps sector, where mapping exist diff --git a/scripts/build_industrial_production_per_country.py b/scripts/build_industrial_production_per_country.py index d9ec9afd..44cb0752 100644 --- a/scripts/build_industrial_production_per_country.py +++ b/scripts/build_industrial_production_per_country.py @@ -13,7 +13,7 @@ from functools import partial import country_converter as coco import numpy as np import pandas as pd -from _helpers import mute_print, set_scenario_config, 
configure_logging +from _helpers import configure_logging, mute_print, set_scenario_config from tqdm import tqdm logger = logging.getLogger(__name__) diff --git a/scripts/build_industrial_production_per_country_tomorrow.py b/scripts/build_industrial_production_per_country_tomorrow.py index 835a2687..67557b17 100644 --- a/scripts/build_industrial_production_per_country_tomorrow.py +++ b/scripts/build_industrial_production_per_country_tomorrow.py @@ -7,9 +7,8 @@ Build future industrial production per country. """ import pandas as pd -from prepare_sector_network import get - from _helpers import set_scenario_config +from prepare_sector_network import get if __name__ == "__main__": if "snakemake" not in globals(): diff --git a/scripts/build_industrial_production_per_node.py b/scripts/build_industrial_production_per_node.py index c84590b7..7e074967 100644 --- a/scripts/build_industrial_production_per_node.py +++ b/scripts/build_industrial_production_per_node.py @@ -7,9 +7,9 @@ Build industrial production per model region. """ from itertools import product -from _helpers import set_scenario_config import pandas as pd +from _helpers import set_scenario_config # map JRC/our sectors to hotmaps sector, where mapping exist sector_mapping = { diff --git a/scripts/build_population_layouts.py b/scripts/build_population_layouts.py index db548140..bab2e9a4 100644 --- a/scripts/build_population_layouts.py +++ b/scripts/build_population_layouts.py @@ -13,7 +13,6 @@ import geopandas as gpd import numpy as np import pandas as pd import xarray as xr - from _helpers import configure_logging, set_scenario_config logger = logging.getLogger(__name__) diff --git a/scripts/build_population_weighted_energy_totals.py b/scripts/build_population_weighted_energy_totals.py index 6ed32086..60d66e56 100644 --- a/scripts/build_population_weighted_energy_totals.py +++ b/scripts/build_population_weighted_energy_totals.py @@ -7,7 +7,6 @@ Distribute country-level energy demands by population. 
""" import pandas as pd - from _helpers import set_scenario_config if __name__ == "__main__": diff --git a/scripts/build_sequestration_potentials.py b/scripts/build_sequestration_potentials.py index 9e7678bb..ac1f22e2 100644 --- a/scripts/build_sequestration_potentials.py +++ b/scripts/build_sequestration_potentials.py @@ -10,7 +10,6 @@ database_en>`_. import geopandas as gpd import pandas as pd - from _helpers import set_scenario_config diff --git a/scripts/build_solar_thermal_profiles.py b/scripts/build_solar_thermal_profiles.py index 12826420..eb4c6f8f 100644 --- a/scripts/build_solar_thermal_profiles.py +++ b/scripts/build_solar_thermal_profiles.py @@ -11,9 +11,8 @@ import geopandas as gpd import numpy as np import pandas as pd import xarray as xr -from dask.distributed import Client, LocalCluster - from _helpers import set_scenario_config +from dask.distributed import Client, LocalCluster if __name__ == "__main__": if "snakemake" not in globals(): diff --git a/scripts/build_temperature_profiles.py b/scripts/build_temperature_profiles.py index 878ebc03..eb005d3d 100644 --- a/scripts/build_temperature_profiles.py +++ b/scripts/build_temperature_profiles.py @@ -11,8 +11,8 @@ import geopandas as gpd import numpy as np import pandas as pd import xarray as xr -from dask.distributed import Client, LocalCluster from _helpers import set_scenario_config +from dask.distributed import Client, LocalCluster if __name__ == "__main__": if "snakemake" not in globals(): diff --git a/scripts/cluster_gas_network.py b/scripts/cluster_gas_network.py index 567f4458..1292cdc3 100755 --- a/scripts/cluster_gas_network.py +++ b/scripts/cluster_gas_network.py @@ -10,9 +10,9 @@ import logging import geopandas as gpd import pandas as pd +from _helpers import configure_logging, set_scenario_config from pypsa.geo import haversine_pts from shapely import wkt -from _helpers import set_scenario_config, configure_logging logger = logging.getLogger(__name__) diff --git a/scripts/make_summary.py 
b/scripts/make_summary.py index 82ce2328..2dac717a 100644 --- a/scripts/make_summary.py +++ b/scripts/make_summary.py @@ -13,8 +13,8 @@ import sys import numpy as np import pandas as pd import pypsa +from _helpers import configure_logging, set_scenario_config from prepare_sector_network import prepare_costs -from _helpers import set_scenario_config, configure_logging idx = pd.IndexSlice logger = logging.getLogger(__name__) diff --git a/scripts/make_summary_perfect.py b/scripts/make_summary_perfect.py index 7c2e8d30..4a0b1556 100644 --- a/scripts/make_summary_perfect.py +++ b/scripts/make_summary_perfect.py @@ -12,6 +12,7 @@ other metrics. import numpy as np import pandas as pd import pypsa +from _helpers import set_scenario_config from make_summary import calculate_cfs # noqa: F401 from make_summary import calculate_nodal_cfs # noqa: F401 from make_summary import calculate_nodal_costs # noqa: F401 @@ -19,7 +20,6 @@ from make_summary import assign_carriers, assign_locations from prepare_sector_network import prepare_costs from pypsa.descriptors import get_active_assets from six import iteritems -from _helpers import set_scenario_config idx = pd.IndexSlice diff --git a/scripts/plot_power_network_clustered.py b/scripts/plot_power_network_clustered.py index 43746039..0c3dc635 100644 --- a/scripts/plot_power_network_clustered.py +++ b/scripts/plot_power_network_clustered.py @@ -9,10 +9,10 @@ Plot clustered electricity transmission network. 
import geopandas as gpd import matplotlib.pyplot as plt import pypsa +from _helpers import set_scenario_config from matplotlib.lines import Line2D from plot_power_network import load_projection from pypsa.plot import add_legend_lines -from _helpers import set_scenario_config if __name__ == "__main__": if "snakemake" not in globals(): diff --git a/scripts/plot_summary.py b/scripts/plot_summary.py index da077348..addb87ef 100644 --- a/scripts/plot_summary.py +++ b/scripts/plot_summary.py @@ -11,8 +11,8 @@ import logging import matplotlib.gridspec as gridspec import matplotlib.pyplot as plt import pandas as pd -from prepare_sector_network import co2_emissions_year from _helpers import configure_logging, set_scenario_config +from prepare_sector_network import co2_emissions_year logger = logging.getLogger(__name__) plt.style.use("ggplot") diff --git a/scripts/prepare_perfect_foresight.py b/scripts/prepare_perfect_foresight.py index de9932fd..97e5eeeb 100644 --- a/scripts/prepare_perfect_foresight.py +++ b/scripts/prepare_perfect_foresight.py @@ -12,7 +12,11 @@ import re import numpy as np import pandas as pd import pypsa -from _helpers import update_config_with_sector_opts, set_scenario_config, configure_logging +from _helpers import ( + configure_logging, + set_scenario_config, + update_config_with_sector_opts, +) from add_existing_baseyear import add_build_year_to_new_assets from pypsa.descriptors import expand_series from pypsa.io import import_components_from_dataframe diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index bf9ed58f..90df5c93 100755 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -18,7 +18,11 @@ import numpy as np import pandas as pd import pypsa import xarray as xr -from _helpers import configure_logging, set_scenario_config, update_config_with_sector_opts +from _helpers import ( + configure_logging, + set_scenario_config, + update_config_with_sector_opts, +) from add_electricity 
import calculate_annuity, sanitize_carriers, sanitize_locations from build_energy_totals import build_co2_totals, build_eea_co2, build_eurostat_co2 from networkx.algorithms import complement diff --git a/scripts/retrieve_databundle.py b/scripts/retrieve_databundle.py index b6a4d378..1d217a43 100644 --- a/scripts/retrieve_databundle.py +++ b/scripts/retrieve_databundle.py @@ -36,7 +36,12 @@ import logging import tarfile from pathlib import Path -from _helpers import configure_logging, progress_retrieve, validate_checksum, set_scenario_config +from _helpers import ( + configure_logging, + progress_retrieve, + set_scenario_config, + validate_checksum, +) logger = logging.getLogger(__name__) diff --git a/scripts/retrieve_gas_infrastructure_data.py b/scripts/retrieve_gas_infrastructure_data.py index 7cc1c270..64bd6dc2 100644 --- a/scripts/retrieve_gas_infrastructure_data.py +++ b/scripts/retrieve_gas_infrastructure_data.py @@ -11,7 +11,12 @@ import logging import zipfile from pathlib import Path -from _helpers import progress_retrieve, validate_checksum, set_scenario_config, configure_logging +from _helpers import ( + configure_logging, + progress_retrieve, + set_scenario_config, + validate_checksum, +) logger = logging.getLogger(__name__) From f024412301beed2f6a36c9df74b52aaeb13969f5 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 12 Feb 2024 16:49:15 +0100 Subject: [PATCH 36/76] simplify test cases --- config/scenarios.yaml | 38 +++++++------------ config/test/config.scenarios.electricity.yaml | 34 +---------------- config/test/scenarios.electricity.yaml | 3 -- 3 files changed, 16 insertions(+), 59 deletions(-) diff --git a/config/scenarios.yaml b/config/scenarios.yaml index c493311f..0eba9d75 100644 --- a/config/scenarios.yaml +++ b/config/scenarios.yaml @@ -11,28 +11,18 @@ # electricity: # renewable_carriers: [wind, solar] # override the list of renewable carriers +normal: + electricity: + renewable_carriers: + - solar + - onwind + - offwind-ac + - 
offwind-dc + - hydro -network2013: - snapshots: - start: "2013-01-01" - end: "2014-01-01" - inclusive: 'left' - - -network2019: - snapshots: - start: "2019-01-01" - end: "2020-01-01" - inclusive: 'left' - - renewable: - onwind: - cutout: europe-2019-era5 - offwind-ac: - cutout: europe-2019-era5 - offwind-dc: - cutout: europe-2019-era5 - solar: - cutout: europe-2019-era5 - hydro: - cutout: europe-2019-era5 +no-offwind: + electricity: + renewable_carriers: + - solar + - onwind + - hydro diff --git a/config/test/config.scenarios.electricity.yaml b/config/test/config.scenarios.electricity.yaml index 185dcda4..dde138ed 100644 --- a/config/test/config.scenarios.electricity.yaml +++ b/config/test/config.scenarios.electricity.yaml @@ -28,16 +28,10 @@ snapshots: end: "2013-03-08" electricity: - co2limit: 100.e+6 - extendable_carriers: Generator: [OCGT] - StorageUnit: [battery] - Store: [H2] - Link: [H2 pipeline] - - renewable_carriers: [solar, onwind, offwind-ac, offwind-dc] - + StorageUnit: [battery, H2] + Store: [] atlite: default_cutout: be-03-2013-era5 @@ -60,31 +54,7 @@ renewable: solar: cutout: be-03-2013-era5 - -clustering: - exclude_carriers: ["OCGT", "offwind-ac", "coal"] - -lines: - dynamic_line_rating: - activate: true - cutout: be-03-2013-era5 - max_line_rating: 1.3 - - solving: solver: name: glpk options: "glpk-default" - - -plotting: - map: - boundaries: - eu_node_location: - x: -5.5 - y: 46. 
- costs_max: 1000 - costs_threshold: 0.0000001 - energy_max: - energy_min: - energy_threshold: 0.000001 diff --git a/config/test/scenarios.electricity.yaml b/config/test/scenarios.electricity.yaml index e9893479..962cc91e 100644 --- a/config/test/scenarios.electricity.yaml +++ b/config/test/scenarios.electricity.yaml @@ -6,9 +6,6 @@ test-elec-no-offshore-wind: electricity: renewable_carriers: [solar, onwind] - test-elec-no-onshore-wind: electricity: - extendable_carriers: - Generator: [OCGT] renewable_carriers: [solar, offwind-ac, offwind-dc] From c83db4e84e17a94755ab3fa914553047c1e97365 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 12 Feb 2024 16:54:33 +0100 Subject: [PATCH 37/76] add retrieve_cost_data.py script to handle config_provider and move to resources since dependent on config setting --- Snakefile | 2 +- doc/configtables/sector-opts.csv | 2 +- doc/costs.rst | 4 +-- rules/build_sector.smk | 8 +++--- rules/postprocess.smk | 8 +++--- rules/retrieve.smk | 16 ++++-------- rules/solve_myopic.smk | 6 ++--- rules/solve_perfect.smk | 12 +++------ scripts/retrieve_cost_data.py | 42 ++++++++++++++++++++++++++++++++ 9 files changed, 65 insertions(+), 35 deletions(-) create mode 100644 scripts/retrieve_cost_data.py diff --git a/Snakefile b/Snakefile index 7df61162..4acba0c8 100644 --- a/Snakefile +++ b/Snakefile @@ -22,7 +22,7 @@ if not exists(conf_file) and exists(conf_default_file): configfile: "config/config.yaml" -COSTS = f"data/costs_{config['costs']['year']}.csv" +COSTS = f"resources/costs_{config['costs']['year']}.csv" ATLITE_NPROCESSES = config["atlite"].get("nprocesses", 4) run = config["run"] diff --git a/doc/configtables/sector-opts.csv b/doc/configtables/sector-opts.csv index ea39c3b0..fc9e8c10 100644 --- a/doc/configtables/sector-opts.csv +++ b/doc/configtables/sector-opts.csv @@ -7,5 +7,5 @@ Trigger, Description, Definition, Status ``B``,Add biomass,,In active use ``I``,Add industry sector,,In active use ``A``,Add agriculture sector,,In 
active use -``dist``+``n``,Add distribution grid with investment costs of ``n`` times costs in ``data/costs_{cost_year}.csv``,,In active use +``dist``+``n``,Add distribution grid with investment costs of ``n`` times costs in ``resources/costs_{cost_year}.csv``,,In active use ``seq``+``n``,Sets the CO2 sequestration potential to ``n`` Mt CO2 per year,,In active use diff --git a/doc/costs.rst b/doc/costs.rst index 5ddbb360..9a06fd16 100644 --- a/doc/costs.rst +++ b/doc/costs.rst @@ -9,7 +9,7 @@ Techno-Economic Assumptions The database of cost assumptions is retrieved from the repository `PyPSA/technology-data `_ and then -saved to a file ``data/costs_{year}.csv``. The ``config/config.yaml`` provides options +saved to a file ``resources/costs_{year}.csv``. The ``config/config.yaml`` provides options to choose a reference year and use a specific version of the repository. .. literalinclude:: ../config/config.default.yaml @@ -50,7 +50,7 @@ Modifying Assumptions Some cost assumptions (e.g. marginal cost and capital cost) can be directly set in the ``config/config.yaml`` (cf. Section :ref:`costs_cf` in :ref:`config`). To change cost assumptions in more detail, make a copy of -``data/costs_{year}.csv`` and reference the new cost file in the ``Snakefile``: +``resources/costs_{year}.csv`` and reference the new cost file in the ``Snakefile``: .. 
literalinclude:: ../Snakefile :start-at: COSTS diff --git a/rules/build_sector.smk b/rules/build_sector.smk index d7fbe638..62b69337 100644 --- a/rules/build_sector.smk +++ b/rules/build_sector.smk @@ -853,10 +853,10 @@ rule prepare_sector_network: "biomass_potentials_s{simpl}_{clusters}_{planning_horizons}.csv" ) ), - costs=( - "data/costs_{}.csv".format(config_provider("costs", "year")) - if config_provider("foresight") == "overnight" - else "data/costs_{planning_horizons}.csv" + costs=lambda w: ( + "resources/costs_{}.csv".format(config_provider("costs", "year")) + if config_provider("foresight")(w) == "overnight" + else "resources/costs_{planning_horizons}.csv" ), profile_offwind_ac=resources("profile_offwind-ac.nc"), profile_offwind_dc=resources("profile_offwind-dc.nc"), diff --git a/rules/postprocess.smk b/rules/postprocess.smk index 38e5f7d9..542c8d29 100644 --- a/rules/postprocess.smk +++ b/rules/postprocess.smk @@ -174,10 +174,10 @@ rule make_summary: **config["scenario"], run=config["run"]["name"], ), - costs=( - "data/costs_{}.csv".format(config_provider("costs", "year")) - if config_provider("foresight") == "overnight" - else "data/costs_{}.csv".format( + costs=lambda w: ( + "resources/costs_{}.csv".format(config_provider("costs", "year")) + if config_provider("foresight")(w) == "overnight" + else "resources/costs_{}.csv".format( config_provider("scenario", "planning_horizons", 0) ) ), diff --git a/rules/retrieve.smk b/rules/retrieve.smk index c9fd91aa..1b2513fb 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -83,23 +83,17 @@ if config["enable"]["retrieve"] and config["enable"].get("retrieve_cutout", True if config["enable"]["retrieve"] and config["enable"].get("retrieve_cost_data", True): rule retrieve_cost_data: - input: - HTTP.remote( - "raw.githubusercontent.com/PyPSA/technology-data/{}/outputs/".format( - config_provider("costs", "version") - ) - + "costs_{year}.csv", - keep_local=True, - ), + params: + version=lambda w: 
config_provider("costs", "version")(w), output: - "data/costs_{year}.csv", + resources("costs_{year}.csv"), log: "logs/retrieve_cost_data_{year}.log", resources: mem_mb=1000, retries: 2 - run: - move(input[0], output[0]) + script: + "../scripts/retrieve_cost_data.py" if config["enable"]["retrieve"] and config["enable"].get( diff --git a/rules/solve_myopic.smk b/rules/solve_myopic.smk index a6313cac..bea6b6cc 100644 --- a/rules/solve_myopic.smk +++ b/rules/solve_myopic.smk @@ -16,7 +16,7 @@ rule add_existing_baseyear: busmap_s=resources("busmap_elec_s{simpl}.csv"), busmap=resources("busmap_elec_s{simpl}_{clusters}.csv"), clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), - costs=lambda w: "data/costs_{}.csv".format( + costs=lambda w: "resources/costs_{}.csv".format( config_provider("scenario", "planning_horizons", 0)(w) ), cop_soil_total=resources("cop_soil_total_elec_s{simpl}_{clusters}.nc"), @@ -71,7 +71,7 @@ rule add_brownfield: network=RESULTS + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", network_p=solved_previous_horizon, #solved network at previous time step - costs="data/costs_{planning_horizons}.csv", + costs="resources/costs_{planning_horizons}.csv", cop_soil_total=resources("cop_soil_total_elec_s{simpl}_{clusters}.nc"), cop_air_total=resources("cop_air_total_elec_s{simpl}_{clusters}.nc"), output: @@ -109,7 +109,7 @@ rule solve_sector_network_myopic: input: network=RESULTS + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", - costs="data/costs_{planning_horizons}.csv", + costs="resources/costs_{planning_horizons}.csv", config=RESULTS + "config.yaml", output: RESULTS diff --git a/rules/solve_perfect.smk b/rules/solve_perfect.smk index ee16a201..d1a5f745 100644 --- a/rules/solve_perfect.smk +++ b/rules/solve_perfect.smk @@ -14,7 +14,7 @@ rule add_existing_baseyear: busmap_s=resources("busmap_elec_s{simpl}.csv"), 
busmap=resources("busmap_elec_s{simpl}_{clusters}.csv"), clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), - costs="data/costs_{}.csv".format( + costs="resources/costs_{}.csv".format( config_provider("scenario", "planning_horizons", 0) ), cop_soil_total=resources("cop_soil_total_elec_s{simpl}_{clusters}.nc"), @@ -95,7 +95,7 @@ rule solve_sector_network_perfect: input: network=RESULTS + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc", - costs="data/costs_2030.csv", + costs="resources/costs_2030.csv", config=RESULTS + "config.yaml", output: RESULTS @@ -127,13 +127,7 @@ rule make_summary_perfect: **{ f"networks_{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}": RESULTS + f"postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc" - for simpl in config_provider("scenario", "simpl") - for clusters in config_provider("scenario", "clusters") - for opts in config_provider("scenario", "opts") - for sector_opts in config_provider("scenario", "sector_opts") - for ll in config_provider("scenario", "ll") - }, - costs="data/costs_2020.csv", + costs="resources/costs_2020.csv", output: nodal_costs=RESULTS + "csvs/nodal_costs.csv", nodal_capacities=RESULTS + "csvs/nodal_capacities.csv", diff --git a/scripts/retrieve_cost_data.py b/scripts/retrieve_cost_data.py new file mode 100644 index 00000000..ceae8bf9 --- /dev/null +++ b/scripts/retrieve_cost_data.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# SPDX-FileCopyrightText: : 2024 The PyPSA-Eur Authors +# +# SPDX-License-Identifier: MIT +""" +Retrieve cost data from ``technology-data``. 
+""" + +import logging +from pathlib import Path + +from _helpers import configure_logging, progress_retrieve, set_scenario_config + +logger = logging.getLogger(__name__) + +if __name__ == "__main__": + if "snakemake" not in globals(): + from _helpers import mock_snakemake + + snakemake = mock_snakemake("retrieve_cost_data", year=2030) + rootpath = ".." + else: + rootpath = "." + configure_logging(snakemake) + set_scenario_config(snakemake) + + version = snakemake.params.version + baseurl = f"https://raw.githubusercontent.com/PyPSA/technology-data/{version}/outputs/" + filepath = Path(snakemake.output[0]) + url = baseurl + filepath.name + + print(url) + + to_fn = Path(rootpath) / filepath + + print(to_fn) + + logger.info(f"Downloading technology data from '{url}'.") + disable_progress = snakemake.config["run"].get("disable_progressbar", False) + progress_retrieve(url, to_fn, disable=disable_progress) + + logger.info(f"Technology data available at at {to_fn}") From c3feb0e283d19c7d7c963ed7cb479bb064fe7db5 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 12 Feb 2024 16:56:00 +0100 Subject: [PATCH 38/76] handle complicated config-dependent inputs using unpack() with input functions --- Snakefile | 2 + doc/configtables/run.csv | 2 +- rules/build_electricity.smk | 71 +++++---- rules/build_sector.smk | 277 +++++++++++++++++++----------------- rules/common.smk | 8 +- rules/postprocess.smk | 20 +-- rules/solve_myopic.smk | 19 ++- rules/solve_perfect.smk | 31 ++-- rules/validate.smk | 15 +- 9 files changed, 247 insertions(+), 198 deletions(-) diff --git a/Snakefile b/Snakefile index 4acba0c8..826521c1 100644 --- a/Snakefile +++ b/Snakefile @@ -8,6 +8,8 @@ from pathlib import Path import yaml from snakemake.remote.HTTP import RemoteProvider as HTTPRemoteProvider from snakemake.utils import min_version + +# TODO: check if this works with mock_snakemake from scripts._helpers import path_provider min_version("7.7") diff --git a/doc/configtables/run.csv 
b/doc/configtables/run.csv index 75f29928..e2a81e0b 100644 --- a/doc/configtables/run.csv +++ b/doc/configtables/run.csv @@ -2,7 +2,7 @@ name,--,str/list,"Specify a name for your run. Results will be stored under this name. If ``scenario: enable`` is set to ``true``, the name must contain a subset of scenario names defined in ``scenario: file``." scenarios,,, -- enable,bool,"{true, false}","Switch to select whether workflow should generate scenarios based on ``file``." --- file,str,,"Path to the scenario yaml file. The scenario file contains config overrides for each scenario. In order to be taken account, ``run:scenarios`` has to be set to ``true`` and ``run:name`` has to be a subset of top level keys given in the scenario file. In order to automatically create a `scenario.yaml` file based on a combindation of settings, alter and use the ``config/create_scenarios.py`` script in ``scripts``." +-- file,str,,"Path to the scenario yaml file. The scenario file contains config overrides for each scenario. In order to be taken account, ``run: scenarios`` has to be set to ``true`` and ``run: name`` has to be a subset of top level keys given in the scenario file. In order to automatically create a `scenario.yaml` file based on a combination of settings, alter and use the ``config/create_scenarios.py`` script in the ``config`` directory." disable_progrssbar,bool,"{true, false}","Switch to select whether progressbar should be disabled." shared_resources,bool/str/list,,"Switch to select whether resources should be shared across runs. If a string or list is passed, it is assumed to be wildcard(s) which indicates up to which set of wildcards the resource folder should be shared. If set to 'base', only resources before creating the elec.nc file are shared." shared_cutouts,bool,"{true, false}","Switch to select whether cutouts should be shared across runs." 
diff --git a/rules/build_electricity.smk b/rules/build_electricity.smk index 4c83e0b0..8876c68e 100644 --- a/rules/build_electricity.smk +++ b/rules/build_electricity.smk @@ -20,8 +20,9 @@ if config["enable"].get("prepare_links_p_nom", False): rule build_electricity_demand: params: - snapshots={ - k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + snapshots=lambda w: { + k: config_provider("snapshots", k)(w) + for k in ["start", "end", "inclusive"] }, countries=config_provider("countries"), load=config_provider("load"), @@ -64,8 +65,9 @@ rule build_powerplants: rule base_network: params: countries=config_provider("countries"), - snapshots={ - k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + snapshots=lambda w: { + k: config_provider("snapshots", k)(w) + for k in ["start", "end", "inclusive"] }, lines=config_provider("lines"), links=config_provider("links"), @@ -177,8 +179,8 @@ if config["enable"].get("build_natura_raster", False): rule build_natura_raster: input: natura=ancient("data/bundle/natura/Natura2000_end2015.shp"), - cutouts=expand( - "cutouts/" + CDIR + "{cutouts}.nc", **config_provider("atlite") + cutouts=lambda w: expand( + "cutouts/" + CDIR + "{cutouts}.nc", **config_provider("atlite")(w) ), output: resources("natura.tiff"), @@ -195,11 +197,11 @@ if config["enable"].get("build_natura_raster", False): rule build_ship_raster: input: ship_density="data/shipdensity_global.zip", - cutouts=expand( + cutouts=lambda w: expand( "cutouts/" + CDIR + "{cutout}.nc", cutout=[ - config_provider("renewable", k, "cutout") - for k in config_provider("electricity", "renewable_carriers") + config_provider("renewable", k, "cutout")(w) + for k in config_provider("electricity", "renewable_carriers")(w) ], ), output: @@ -269,8 +271,9 @@ else: rule build_renewable_profiles: params: - snapshots={ - k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + snapshots=lambda w: { + k: config_provider("snapshots", 
k)(w) + for k in ["start", "end", "inclusive"] }, renewable=config_provider("renewable"), input: @@ -352,9 +355,9 @@ rule build_hydro_profile: input: country_shapes=resources("country_shapes.geojson"), eia_hydro_generation="data/eia_hydro_annual_generation.csv", - cutout=f"cutouts/" + cutout=lambda w: f"cutouts/" + CDIR - + config_provider("renewable", "hydro", "cutout") + + config_provider("renewable", "hydro", "cutout")(w) + ".nc", output: resources("profile_hydro.nc"), @@ -397,6 +400,23 @@ if config["lines"]["dynamic_line_rating"]["activate"]: "../scripts/build_line_rating.py" +def input_profile_tech(w): + return { + f"profile_{tech}": resources(f"profile_{tech}.nc") + for tech in config_provider("electricity", "renewable_carriers")(w) + } + + +def input_conventional(w): + return { + f"conventional_{carrier}_{attr}": fn + for carrier, d in config_provider("conventional", default={None: {}})(w).items() + if carrier in config_provider("electricity", "conventional_carriers")(w) + for attr, fn in d.items() + if str(fn).startswith("data/") + } + + rule add_electricity: params: length_factor=config_provider("lines", "length_factor"), @@ -407,21 +427,12 @@ rule add_electricity: conventional=config_provider("conventional"), costs=config_provider("costs"), input: - **{ - f"profile_{tech}": resources(f"profile_{tech}.nc") - for tech in config_provider("electricity", "renewable_carriers") - }, - **{ - f"conventional_{carrier}_{attr}": fn - for carrier, d in config.get("conventional", {None: {}}).items() - if carrier in config_provider("electricity", "conventional_carriers") - for attr, fn in d.items() - if str(fn).startswith("data/") - }, + unpack(input_profile_tech), + unpack(input_conventional), base_network=resources("networks/base.nc"), - line_rating=( + line_rating=lambda w: ( resources("networks/line_rating.nc") - if config_provider("lines", "dynamic_line_rating", "activate") + if config_provider("lines", "dynamic_line_rating", "activate")(w) else 
resources("networks/base.nc") ), tech_costs=COSTS, @@ -430,9 +441,9 @@ rule add_electricity: hydro_capacities=ancient("data/bundle/hydro_capacities.csv"), geth_hydro_capacities="data/geth2015_hydro_capacities.csv", unit_commitment="data/unit_commitment.csv", - fuel_price=( + fuel_price=lambda w: ( resources("monthly_fuel_price.csv") - if config_provider("conventional", "dynamic_fuel_price") + if config_provider("conventional", "dynamic_fuel_price")(w) else [] ), load=resources("electricity_demand.csv"), @@ -509,9 +520,9 @@ rule cluster_network: regions_onshore=resources("regions_onshore_elec_s{simpl}.geojson"), regions_offshore=resources("regions_offshore_elec_s{simpl}.geojson"), busmap=ancient(resources("busmap_elec_s{simpl}.csv")), - custom_busmap=( + custom_busmap=lambda w: ( "data/custom_busmap_elec_s{simpl}_{clusters}.csv" - if config_provider("enable", "custom_busmap", default=False) + if config_provider("enable", "custom_busmap", default=False)(w) else [] ), tech_costs=COSTS, diff --git a/rules/build_sector.smk b/rules/build_sector.smk index 62b69337..cfd7191e 100644 --- a/rules/build_sector.smk +++ b/rules/build_sector.smk @@ -7,7 +7,10 @@ rule build_population_layouts: input: nuts3_shapes=resources("nuts3_shapes.geojson"), urban_percent="data/urban_percent.csv", - cutout="cutouts/" + CDIR + config_provider("atlite", "default_cutout") + ".nc", + cutout=lambda w: "cutouts/" + + CDIR + + config_provider("atlite", "default_cutout")(w) + + ".nc", output: pop_layout_total=resources("pop_layout_total.nc"), pop_layout_urban=resources("pop_layout_urban.nc"), @@ -31,7 +34,10 @@ rule build_clustered_population_layouts: pop_layout_urban=resources("pop_layout_urban.nc"), pop_layout_rural=resources("pop_layout_rural.nc"), regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), - cutout="cutouts/" + CDIR + config_provider("atlite", "default_cutout") + ".nc", + cutout=lambda w: "cutouts/" + + CDIR + + config_provider("atlite", "default_cutout")(w) + 
+ ".nc", output: clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), log: @@ -52,7 +58,10 @@ rule build_simplified_population_layouts: pop_layout_urban=resources("pop_layout_urban.nc"), pop_layout_rural=resources("pop_layout_rural.nc"), regions_onshore=resources("regions_onshore_elec_s{simpl}.geojson"), - cutout="cutouts/" + CDIR + config_provider("atlite", "default_cutout") + ".nc", + cutout=lambda w: "cutouts/" + + CDIR + + config_provider("atlite", "default_cutout")(w) + + ".nc", output: clustered_pop_layout=resources("pop_layout_elec_s{simpl}.csv"), resources: @@ -126,13 +135,17 @@ rule cluster_gas_network: rule build_daily_heat_demand: params: - snapshots={ - k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + snapshots=lambda w: { + k: config_provider("snapshots", k)(w) + for k in ["start", "end", "inclusive"] }, input: pop_layout=resources("pop_layout_{scope}.nc"), regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), - cutout="cutouts/" + CDIR + config_provider("atlite", "default_cutout") + ".nc", + cutout=lambda w: "cutouts/" + + CDIR + + config_provider("atlite", "default_cutout")(w) + + ".nc", output: heat_demand=resources("daily_heat_demand_{scope}_elec_s{simpl}_{clusters}.nc"), resources: @@ -150,8 +163,9 @@ rule build_daily_heat_demand: rule build_hourly_heat_demand: params: - snapshots={ - k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + snapshots=lambda w: { + k: config_provider("snapshots", k)(w) + for k in ["start", "end", "inclusive"] }, input: heat_profile="data/heat_load_profile_BDEW.csv", @@ -173,13 +187,17 @@ rule build_hourly_heat_demand: rule build_temperature_profiles: params: - snapshots={ - k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + snapshots=lambda w: { + k: config_provider("snapshots", k)(w) + for k in ["start", "end", "inclusive"] }, input: pop_layout=resources("pop_layout_{scope}.nc"), 
regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), - cutout="cutouts/" + CDIR + config_provider("atlite", "default_cutout") + ".nc", + cutout=lambda w: "cutouts/" + + CDIR + + config_provider("atlite", "default_cutout")(w) + + ".nc", output: temp_soil=resources("temp_soil_{scope}_elec_s{simpl}_{clusters}.nc"), temp_air=resources("temp_air_{scope}_elec_s{simpl}_{clusters}.nc"), @@ -227,15 +245,18 @@ rule build_cop_profiles: rule build_solar_thermal_profiles: params: - snapshots={ - k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + snapshots=lambda w: { + k: config_provider("snapshots", k)(w) + for k in ["start", "end", "inclusive"] }, - # TODO use config_provider solar_thermal=config_provider("solar_thermal"), input: pop_layout=resources("pop_layout_{scope}.nc"), regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), - cutout="cutouts/" + CDIR + config_provider("atlite", "default_cutout") + ".nc", + cutout=lambda w: "cutouts/" + + CDIR + + config_provider("atlite", "default_cutout")(w) + + ".nc", output: solar_thermal=resources("solar_thermal_{scope}_elec_s{simpl}_{clusters}.nc"), resources: @@ -314,76 +335,54 @@ rule build_biomass_potentials: "../scripts/build_biomass_potentials.py" -if config["sector"]["biomass_transport"] or config["sector"]["biomass_spatial"]: - - rule build_biomass_transport_costs: - input: - transport_cost_data=HTTP.remote( - "publications.jrc.ec.europa.eu/repository/bitstream/JRC98626/biomass potentials in europe_web rev.pdf", - keep_local=True, - ), - output: - biomass_transport_costs=resources("biomass_transport_costs.csv"), - threads: 1 - resources: - mem_mb=1000, - log: - logs("build_biomass_transport_costs.log"), - benchmark: - benchmarks("build_biomass_transport_costs") - conda: - "../envs/environment.yaml" - script: - "../scripts/build_biomass_transport_costs.py" - - build_biomass_transport_costs_output = rules.build_biomass_transport_costs.output +rule 
build_biomass_transport_costs: + input: + transport_cost_data=HTTP.remote( + "publications.jrc.ec.europa.eu/repository/bitstream/JRC98626/biomass potentials in europe_web rev.pdf", + keep_local=True, + ), + output: + biomass_transport_costs=resources("biomass_transport_costs.csv"), + threads: 1 + resources: + mem_mb=1000, + log: + logs("build_biomass_transport_costs.log"), + benchmark: + benchmarks("build_biomass_transport_costs") + conda: + "../envs/environment.yaml" + script: + "../scripts/build_biomass_transport_costs.py" -if not (config["sector"]["biomass_transport"] or config["sector"]["biomass_spatial"]): - # this is effecively an `else` statement which is however not liked by snakefmt - build_biomass_transport_costs_output = {} - - -if config["sector"]["regional_co2_sequestration_potential"]["enable"]: - - rule build_sequestration_potentials: - params: - sequestration_potential=config_provider( - "sector", "regional_co2_sequestration_potential" - ), - input: - sequestration_potential=HTTP.remote( - "https://raw.githubusercontent.com/ericzhou571/Co2Storage/main/resources/complete_map_2020_unit_Mt.geojson", - keep_local=True, - ), - regions_onshore=resources( - "regions_onshore_elec_s{simpl}_{clusters}.geojson" - ), - regions_offshore=resources( - "regions_offshore_elec_s{simpl}_{clusters}.geojson" - ), - output: - sequestration_potential=resources( - "co2_sequestration_potential_elec_s{simpl}_{clusters}.csv" - ), - threads: 1 - resources: - mem_mb=4000, - log: - logs("build_sequestration_potentials_s{simpl}_{clusters}.log"), - benchmark: - benchmarks("build_sequestration_potentials_s{simpl}_{clusters}") - conda: - "../envs/environment.yaml" - script: - "../scripts/build_sequestration_potentials.py" - - build_sequestration_potentials_output = rules.build_sequestration_potentials.output - - -if not config["sector"]["regional_co2_sequestration_potential"]["enable"]: - # this is effecively an `else` statement which is however not liked by snakefmt - 
build_sequestration_potentials_output = {} +rule build_sequestration_potentials: + params: + sequestration_potential=config_provider( + "sector", "regional_co2_sequestration_potential" + ), + input: + sequestration_potential=HTTP.remote( + "https://raw.githubusercontent.com/ericzhou571/Co2Storage/main/resources/complete_map_2020_unit_Mt.geojson", + keep_local=True, + ), + regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), + regions_offshore=resources("regions_offshore_elec_s{simpl}_{clusters}.geojson"), + output: + sequestration_potential=resources( + "co2_sequestration_potential_elec_s{simpl}_{clusters}.csv" + ), + threads: 1 + resources: + mem_mb=4000, + log: + logs("build_sequestration_potentials_s{simpl}_{clusters}.log"), + benchmark: + benchmarks("build_sequestration_potentials_s{simpl}_{clusters}") + conda: + "../envs/environment.yaml" + script: + "../scripts/build_sequestration_potentials.py" rule build_salt_cavern_potentials: @@ -643,43 +642,34 @@ rule build_industrial_energy_demand_per_node_today: "../scripts/build_industrial_energy_demand_per_node_today.py" -if config["sector"]["retrofitting"]["retro_endogen"]: - - rule build_retro_cost: - params: - retrofitting=config_provider("sector", "retrofitting"), - countries=config_provider("countries"), - input: - building_stock="data/retro/data_building_stock.csv", - data_tabula="data/bundle-sector/retro/tabula-calculator-calcsetbuilding.csv", - air_temperature=resources("temp_air_total_elec_s{simpl}_{clusters}.nc"), - u_values_PL="data/retro/u_values_poland.csv", - tax_w="data/retro/electricity_taxes_eu.csv", - construction_index="data/retro/comparative_level_investment.csv", - floor_area_missing="data/retro/floor_area_missing.csv", - clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), - cost_germany="data/retro/retro_cost_germany.csv", - window_assumptions="data/retro/window_assumptions.csv", - output: - 
retro_cost=resources("retro_cost_elec_s{simpl}_{clusters}.csv"), - floor_area=resources("floor_area_elec_s{simpl}_{clusters}.csv"), - resources: - mem_mb=1000, - log: - logs("build_retro_cost_s{simpl}_{clusters}.log"), - benchmark: - benchmarks("build_retro_cost/s{simpl}_{clusters}") - conda: - "../envs/environment.yaml" - script: - "../scripts/build_retro_cost.py" - - build_retro_cost_output = rules.build_retro_cost.output - - -if not config["sector"]["retrofitting"]["retro_endogen"]: - # this is effecively an `else` statement which is however not liked by snakefmt - build_retro_cost_output = {} +rule build_retro_cost: + params: + retrofitting=config_provider("sector", "retrofitting"), + countries=config_provider("countries"), + input: + building_stock="data/retro/data_building_stock.csv", + data_tabula="data/bundle-sector/retro/tabula-calculator-calcsetbuilding.csv", + air_temperature=resources("temp_air_total_elec_s{simpl}_{clusters}.nc"), + u_values_PL="data/retro/u_values_poland.csv", + tax_w="data/retro/electricity_taxes_eu.csv", + construction_index="data/retro/comparative_level_investment.csv", + floor_area_missing="data/retro/floor_area_missing.csv", + clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), + cost_germany="data/retro/retro_cost_germany.csv", + window_assumptions="data/retro/window_assumptions.csv", + output: + retro_cost=resources("retro_cost_elec_s{simpl}_{clusters}.csv"), + floor_area=resources("floor_area_elec_s{simpl}_{clusters}.csv"), + resources: + mem_mb=1000, + log: + logs("build_retro_cost_s{simpl}_{clusters}.log"), + benchmark: + benchmarks("build_retro_cost/s{simpl}_{clusters}") + conda: + "../envs/environment.yaml" + script: + "../scripts/build_retro_cost.py" rule build_population_weighted_energy_totals: @@ -720,8 +710,9 @@ rule build_shipping_demand: rule build_transport_demand: params: - snapshots={ - k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + snapshots=lambda w: { + k: 
config_provider("snapshots", k)(w) + for k in ["start", "end", "inclusive"] }, sector=config_provider("sector"), input: @@ -825,11 +816,31 @@ rule prepare_sector_network: eurostat_report_year=config_provider("energy", "eurostat_report_year"), RDIR=RDIR, input: - **build_retro_cost_output, - **build_biomass_transport_costs_output, **rules.cluster_gas_network.output, **rules.build_gas_input_locations.output, - **build_sequestration_potentials_output, + retro_cost=lambda w: ( + resources("retro_cost_elec_s{simpl}_{clusters}.csv") + if config_provider("sector", "retrofitting", "retro_endogen")(w) + else [] + ), + floor_area=lambda w: ( + resources("floor_area_elec_s{simpl}_{clusters}.csv") + if config_provider("sector", "retrofitting", "retro_endogen")(w) + else [] + ), + biomass_transport_costs=lambda w: ( + resources("biomass_transport_costs.csv") + if config_provider("sector", "biomass_transport")(w) + or config_provider("sector", "biomass_spatial")(w) + else [] + ), + sequestration_potential=lambda w: ( + resources("co2_sequestration_potential_elec_s{simpl}_{clusters}.csv") + if config_provider( + "sector", "regional_co2_sequestration_potential", "enable" + )(w) + else [] + ), network=resources("networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"), energy_totals_name=resources("energy_totals.csv"), eurostat=input_eurostat, @@ -843,12 +854,12 @@ rule prepare_sector_network: dsm_profile=resources("dsm_profile_s{simpl}_{clusters}.csv"), co2_totals_name=resources("co2_totals.csv"), co2="data/bundle-sector/eea/UNFCCC_v23.csv", - biomass_potentials=( + biomass_potentials=lambda w: ( resources( "biomass_potentials_s{simpl}_{clusters}_" + "{}.csv".format(config_provider("biomass", "year")) ) - if config_provider("foresight") == "overnight" + if config_provider("foresight")(w) == "overnight" else resources( "biomass_potentials_s{simpl}_{clusters}_{planning_horizons}.csv" ) @@ -886,19 +897,19 @@ rule prepare_sector_network: 
cop_air_total=resources("cop_air_total_elec_s{simpl}_{clusters}.nc"), cop_air_rural=resources("cop_air_rural_elec_s{simpl}_{clusters}.nc"), cop_air_urban=resources("cop_air_urban_elec_s{simpl}_{clusters}.nc"), - solar_thermal_total=( + solar_thermal_total=lambda w: ( resources("solar_thermal_total_elec_s{simpl}_{clusters}.nc") - if config_provider("sector", "solar_thermal") + if config_provider("sector", "solar_thermal")(w) else [] ), - solar_thermal_urban=( + solar_thermal_urban=lambda w: ( resources("solar_thermal_urban_elec_s{simpl}_{clusters}.nc") - if config_provider("sector", "solar_thermal") + if config_provider("sector", "solar_thermal")(w) else [] ), - solar_thermal_rural=( + solar_thermal_rural=lambda w: ( resources("solar_thermal_rural_elec_s{simpl}_{clusters}.nc") - if config_provider("sector", "solar_thermal") + if config_provider("sector", "solar_thermal")(w) else [] ), output: diff --git a/rules/common.smk b/rules/common.smk index bf5e0894..c352a3a9 100644 --- a/rules/common.smk +++ b/rules/common.smk @@ -78,8 +78,8 @@ def config_provider(*keys, default=None): def solver_threads(w): - solver_options = config_provider("solving", "solver_options") - option_set = config_provider("solving", "solver", "options") + solver_options = config_provider("solving", "solver_options")(w) + option_set = config_provider("solving", "solver", "options")(w) threads = solver_options[option_set].get("threads", 4) return threads @@ -107,7 +107,7 @@ def memory(w): def input_custom_extra_functionality(w): path = config_provider( "solving", "options", "custom_extra_functionality", default=False - ) + )(w) if path: return os.path.join(os.path.dirname(workflow.snakefile), path) return [] @@ -131,7 +131,7 @@ def has_internet_access(url="www.zenodo.org") -> bool: def input_eurostat(w): # 2016 includes BA, 2017 does not - report_year = config_provider("energy", "eurostat_report_year") + report_year = config_provider("energy", "eurostat_report_year")(w) return 
f"data/bundle-sector/eurostat-energy_balances-june_{report_year}_edition" diff --git a/rules/postprocess.smk b/rules/postprocess.smk index 542c8d29..79634bf6 100644 --- a/rules/postprocess.smk +++ b/rules/postprocess.smk @@ -111,6 +111,14 @@ if config["foresight"] != "perfect": if config["foresight"] == "perfect": + def output_map_year(w): + return { + f"map_{year}": RESULTS + + "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_" + + f"{year}.pdf" + for year in config_provider("scenario", "planning_horizons")(w) + } + rule plot_power_network_perfect: params: plotting=config_provider("plotting"), @@ -119,12 +127,7 @@ if config["foresight"] == "perfect": + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc", regions=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), output: - **{ - f"map_{year}": RESULTS - + "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_" - + f"{year}.pdf" - for year in config_provider("scenario", "planning_horizons") - }, + unpack(output_map_year), threads: 2 resources: mem_mb=10000, @@ -158,8 +161,9 @@ rule make_summary: params: foresight=config_provider("foresight"), costs=config_provider("costs"), - snapshots={ - k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + snapshots=lambda w: { + k: config_provider("snapshots", k)(w) + for k in ["start", "end", "inclusive"] }, scenario=config_provider("scenario"), RDIR=RDIR, diff --git a/rules/solve_myopic.smk b/rules/solve_myopic.smk index bea6b6cc..a52f17c0 100644 --- a/rules/solve_myopic.smk +++ b/rules/solve_myopic.smk @@ -49,6 +49,14 @@ rule add_existing_baseyear: "../scripts/add_existing_baseyear.py" +def input_profile_tech_brownfield(w): + return { + f"profile_{tech}": resources(f"profile_{tech}.nc") + for tech in config_provider("electricity", "renewable_carriers")(w) + if tech != "hydro" + } + + rule add_brownfield: params: H2_retrofit=config_provider("sector", "H2_retrofit"), @@ 
-56,16 +64,13 @@ rule add_brownfield: "sector", "H2_retrofit_capacity_per_CH4" ), threshold_capacity=config_provider("existing_capacities", " threshold_capacity"), - snapshots={ - k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + snapshots=lambda w: { + k: config_provider("snapshots", k)(w) + for k in ["start", "end", "inclusive"] }, carriers=config_provider("electricity", "renewable_carriers"), input: - **{ - f"profile_{tech}": resources(f"profile_{tech}.nc") - for tech in config_provider("electricity", "renewable_carriers") - if tech != "hydro" - }, + unpack(input_profile_tech_brownfield), simplify_busmap=resources("busmap_elec_s{simpl}.csv"), cluster_busmap=resources("busmap_elec_s{simpl}_{clusters}.csv"), network=RESULTS diff --git a/rules/solve_perfect.smk b/rules/solve_perfect.smk index d1a5f745..0d94ad9c 100644 --- a/rules/solve_perfect.smk +++ b/rules/solve_perfect.smk @@ -48,14 +48,17 @@ rule add_existing_baseyear: "../scripts/add_existing_baseyear.py" +def input_network_year(w): + return { + f"network_{year}": RESULTS + + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{year}.nc" + for year in config_provider("scenario", "planning_horizons")(w)[1:] + } + + rule prepare_perfect_foresight: input: - **{ - f"network_{year}": RESULTS - + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_" - + f"{year}.nc" - for year in config_provider("scenario", "planning_horizons")[1:] - }, + unpack(input_network_year), brownfield_network=lambda w: ( RESULTS + "prenetworks-brownfield/" @@ -122,11 +125,21 @@ rule solve_sector_network_perfect: "../scripts/solve_network.py" +def input_networks_make_summary_perfect(w): + return { + f"networks_{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}": RESULTS + + f"postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc" + for simpl in config_provider("scenario", "simpl")(w) + for clusters in config_provider("scenario", "clusters")(w) + for opts in 
config_provider("scenario", "opts")(w) + for sector_opts in config_provider("scenario", "sector_opts")(w) + for ll in config_provider("scenario", "ll")(w) + } + + rule make_summary_perfect: input: - **{ - f"networks_{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}": RESULTS - + f"postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc" + unpack(input_networks_make_summary_perfect), costs="resources/costs_2020.csv", output: nodal_costs=RESULTS + "csvs/nodal_costs.csv", diff --git a/rules/validate.smk b/rules/validate.smk index 3c42c5f0..66949335 100644 --- a/rules/validate.smk +++ b/rules/validate.smk @@ -17,8 +17,9 @@ rule build_electricity_production: The data is used for validation of the optimization results. """ params: - snapshots={ - k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + snapshots=lambda w: { + k: config_provider("snapshots", k)(w) + for k in ["start", "end", "inclusive"] }, countries=config_provider("countries"), output: @@ -37,8 +38,9 @@ rule build_cross_border_flows: The data is used for validation of the optimization results. """ params: - snapshots={ - k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + snapshots=lambda w: { + k: config_provider("snapshots", k)(w) + for k in ["start", "end", "inclusive"] }, countries=config_provider("countries"), input: @@ -59,8 +61,9 @@ rule build_electricity_prices: The data is used for validation of the optimization results. 
""" params: - snapshots={ - k: config_provider("snapshots", k) for k in ["start", "end", "inclusive"] + snapshots=lambda w: { + k: config_provider("snapshots", k)(w) + for k in ["start", "end", "inclusive"] }, countries=config_provider("countries"), output: From 78b184ad0f457b4ba3ef9dd44306802e9c22f670 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 12 Feb 2024 15:57:45 +0000 Subject: [PATCH 39/76] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/retrieve_cost_data.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/retrieve_cost_data.py b/scripts/retrieve_cost_data.py index ceae8bf9..eb1ef041 100644 --- a/scripts/retrieve_cost_data.py +++ b/scripts/retrieve_cost_data.py @@ -25,7 +25,9 @@ if __name__ == "__main__": set_scenario_config(snakemake) version = snakemake.params.version - baseurl = f"https://raw.githubusercontent.com/PyPSA/technology-data/{version}/outputs/" + baseurl = ( + f"https://raw.githubusercontent.com/PyPSA/technology-data/{version}/outputs/" + ) filepath = Path(snakemake.output[0]) url = baseurl + filepath.name From 16e42c8fe61fdb9441773dcaf35e82765a106d31 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Fri, 16 Feb 2024 11:17:00 +0100 Subject: [PATCH 40/76] further progress and bugfixes --- .gitignore | 2 +- Snakefile | 20 +++++++++----------- config/config.default.yaml | 2 +- rules/build_electricity.smk | 34 +++++++++++++++++----------------- rules/build_sector.smk | 6 +++--- rules/postprocess.smk | 25 ++++++++----------------- rules/retrieve.smk | 6 +++++- rules/solve_electricity.smk | 1 - rules/solve_myopic.smk | 8 ++++---- rules/solve_perfect.smk | 8 ++++---- scripts/_helpers.py | 11 ++++++----- 11 files changed, 58 insertions(+), 65 deletions(-) diff --git a/.gitignore b/.gitignore index f5f88861..5dafb3ce 100644 --- a/.gitignore +++ b/.gitignore @@ -27,7 +27,7 @@ doc/_build 
/scripts/create_scenarios.py /config/create_scenarios.py -config.yaml +config/config.yaml config/scenarios.yaml diff --git a/Snakefile b/Snakefile index 2b4fd64e..2c342efc 100644 --- a/Snakefile +++ b/Snakefile @@ -20,9 +20,6 @@ configfile: "config/config.default.yaml" configfile: "config/config.yaml" -COSTS = f"resources/costs_{config['costs']['year']}.csv" -ATLITE_NPROCESSES = config["atlite"].get("nprocesses", 4) - run = config["run"] scenarios = run.get("scenarios", {}) if run["name"]: @@ -39,16 +36,17 @@ benchmarks = path_provider("benchmarks/", RDIR, run["shared_resources"]) resources = path_provider("resources/", RDIR, run["shared_resources"]) CDIR = "" if run["shared_cutouts"] else RDIR -LOGS = "logs/" + RDIR -BENCHMARKS = "benchmarks/" + RDIR -if not (shared_resources := run.get("shared_resources")): - RESOURCES = "resources/" + RDIR -elif isinstance(shared_resources, str): - RESOURCES = "resources/" + shared_resources + "/" -else: - RESOURCES = "resources/" RESULTS = "results/" + RDIR +# TODO: this needs to be aligned with new scenario management +# if not (shared_resources := run.get("shared_resources")): +# RESOURCES = "resources/" + RDIR +# elif isinstance(shared_resources, str): +# RESOURCES = "resources/" + shared_resources + "/" +# else: +# RESOURCES = "resources/" + + localrules: purge, diff --git a/config/config.default.yaml b/config/config.default.yaml index ccd3baf8..9e169a5d 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -25,7 +25,7 @@ run: enable: false file: config/scenarios.yaml disable_progressbar: false - shared_resources: false + shared_resources: false # TODO: splitting resources by wildcard does not work well, neither does true, only base works well shared_cutouts: true # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#foresight diff --git a/rules/build_electricity.smk b/rules/build_electricity.smk index 8876c68e..e18ae5dc 100644 --- a/rules/build_electricity.smk +++ 
b/rules/build_electricity.smk @@ -162,12 +162,12 @@ if config["enable"].get("build_cutout", False): output: protected("cutouts/" + CDIR + "{cutout}.nc"), log: - "logs/" + CDIR + "build_cutout/{cutout}.log", + logs(CDIR + "build_cutout/{cutout}.log"), benchmark: "benchmarks/" + CDIR + "build_cutout_{cutout}" - threads: ATLITE_NPROCESSES + threads: config["atlite"].get("nprocesses", 4) resources: - mem_mb=ATLITE_NPROCESSES * 1000, + mem_mb=config["atlite"].get("nprocesses", 4) * 1000, conda: "../envs/environment.yaml" script: @@ -249,9 +249,9 @@ rule determine_availability_matrix_MD_UA: availability_map=resources("availability_matrix_MD-UA_{technology}.png"), log: logs("determine_availability_matrix_MD_UA_{technology}.log"), - threads: ATLITE_NPROCESSES + threads: config["atlite"].get("nprocesses", 4) resources: - mem_mb=ATLITE_NPROCESSES * 5000, + mem_mb=config["atlite"].get("nprocesses", 4) * 5000, conda: "../envs/environment.yaml" script: @@ -319,9 +319,9 @@ rule build_renewable_profiles: logs("build_renewable_profile_{technology}.log"), benchmark: benchmarks("build_renewable_profiles_{technology}") - threads: ATLITE_NPROCESSES + threads: config["atlite"].get("nprocesses", 4) resources: - mem_mb=ATLITE_NPROCESSES * 5000, + mem_mb=config["atlite"].get("nprocesses", 4) * 5000, wildcard_constraints: technology="(?!hydro).*", # Any technology other than hydro conda: @@ -391,9 +391,9 @@ if config["lines"]["dynamic_line_rating"]["activate"]: logs("build_line_rating.log"), benchmark: benchmarks("build_line_rating") - threads: ATLITE_NPROCESSES + threads: config["atlite"].get("nprocesses", 4) resources: - mem_mb=ATLITE_NPROCESSES * 1000, + mem_mb=config["atlite"].get("nprocesses", 4) * 1000, conda: "../envs/environment.yaml" script: @@ -435,7 +435,7 @@ rule add_electricity: if config_provider("lines", "dynamic_line_rating", "activate")(w) else resources("networks/base.nc") ), - tech_costs=COSTS, + tech_costs=resources(f"costs_{config['costs']['year']}.csv"), 
regions=resources("regions_onshore.geojson"), powerplants=resources("powerplants.csv"), hydro_capacities=ancient("data/bundle/hydro_capacities.csv"), @@ -478,7 +478,7 @@ rule simplify_network: costs=config_provider("costs"), input: network=resources("networks/elec.nc"), - tech_costs=COSTS, + tech_costs=resources(f"costs_{config['costs']['year']}.csv"), regions_onshore=resources("regions_onshore.geojson"), regions_offshore=resources("regions_offshore.geojson"), output: @@ -525,7 +525,7 @@ rule cluster_network: if config_provider("enable", "custom_busmap", default=False)(w) else [] ), - tech_costs=COSTS, + tech_costs=resources(f"costs_{config['costs']['year']}.csv"), output: network=resources("networks/elec_s{simpl}_{clusters}.nc"), regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), @@ -552,7 +552,7 @@ rule add_extra_components: costs=config_provider("costs"), input: network=resources("networks/elec_s{simpl}_{clusters}.nc"), - tech_costs=COSTS, + tech_costs=resources(f"costs_{config['costs']['year']}.csv"), output: resources("networks/elec_s{simpl}_{clusters}_ec.nc"), log: @@ -570,11 +570,11 @@ rule add_extra_components: rule prepare_network: params: - snapshots={ - "resolution": config_provider("snapshots", "resolution", default=False), + snapshots=lambda w: { + "resolution": config_provider("snapshots", "resolution", default=False)(w), "segmentation": config_provider( "snapshots", "segmentation", default=False - ), + )(w), }, links=config_provider("links"), lines=config_provider("lines"), @@ -588,7 +588,7 @@ rule prepare_network: autarky=config_provider("electricity", "autarky", default={}), input: resources("networks/elec_s{simpl}_{clusters}_ec.nc"), - tech_costs=COSTS, + tech_costs=resources(f"costs_{config['costs']['year']}.csv"), co2_price=lambda w: resources("co2_price.csv") if "Ept" in w.opts else [], output: resources("networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"), diff --git a/rules/build_sector.smk 
b/rules/build_sector.smk index cfd7191e..ba56564e 100644 --- a/rules/build_sector.smk +++ b/rules/build_sector.smk @@ -857,7 +857,7 @@ rule prepare_sector_network: biomass_potentials=lambda w: ( resources( "biomass_potentials_s{simpl}_{clusters}_" - + "{}.csv".format(config_provider("biomass", "year")) + + "{}.csv".format(config_provider("biomass", "year")(w)) ) if config_provider("foresight")(w) == "overnight" else resources( @@ -865,9 +865,9 @@ rule prepare_sector_network: ) ), costs=lambda w: ( - "resources/costs_{}.csv".format(config_provider("costs", "year")) + resources("costs_{}.csv".format(config_provider("costs", "year")(w))) if config_provider("foresight")(w) == "overnight" - else "resources/costs_{planning_horizons}.csv" + else resources("costs_{planning_horizons}.csv") ), profile_offwind_ac=resources("profile_offwind-ac.nc"), profile_offwind_dc=resources("profile_offwind-dc.nc"), diff --git a/rules/postprocess.smk b/rules/postprocess.smk index 79634bf6..6302bb46 100644 --- a/rules/postprocess.smk +++ b/rules/postprocess.smk @@ -18,7 +18,7 @@ if config["foresight"] != "perfect": "regions_onshore_elec_s{simpl}_{clusters}.geojson" ), output: - map=RESULTS + "maps/power-network-s{simpl}-{clusters}.pdf", + map=resources("maps/power-network-s{simpl}-{clusters}.pdf"), threads: 1 resources: mem_mb=4000, @@ -149,8 +149,6 @@ rule copy_config: threads: 1 resources: mem_mb=1000, - benchmark: - benchmarks("copy_config") conda: "../envs/environment.yaml" script: @@ -168,10 +166,6 @@ rule make_summary: scenario=config_provider("scenario"), RDIR=RDIR, input: - expand( - RESULTS + "maps/power-network-s{simpl}-{clusters}.pdf", - **config["scenario"], - ), networks=expand( RESULTS + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", @@ -179,15 +173,16 @@ rule make_summary: run=config["run"]["name"], ), costs=lambda w: ( - "resources/costs_{}.csv".format(config_provider("costs", "year")) + 
resources("costs_{}.csv".format(config_provider("costs", "year")(w))) if config_provider("foresight")(w) == "overnight" - else "resources/costs_{}.csv".format( + else resources("costs_{}.csv".format( config_provider("scenario", "planning_horizons", 0) - ) + )) ), ac_plot=expand( - RESULTS + "maps/power-network-s{simpl}-{clusters}.pdf", + resources("maps/power-network-s{simpl}-{clusters}.pdf"), **config["scenario"], + run=config["run"]["name"], ), costs_plot=expand( RESULTS @@ -235,9 +230,7 @@ rule make_summary: resources: mem_mb=10000, log: - logs("make_summary.log"), - benchmark: - benchmarks("make_summary") + RESULTS + "logs/make_summary.log", conda: "../envs/environment.yaml" script: @@ -267,9 +260,7 @@ rule plot_summary: resources: mem_mb=10000, log: - logs("plot_summary.log"), - benchmark: - benchmarks("plot_summary") + RESULTS + "logs/plot_summary.log", conda: "../envs/environment.yaml" script: diff --git a/rules/retrieve.smk b/rules/retrieve.smk index 1b2513fb..9e9ff1f0 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -88,10 +88,12 @@ if config["enable"]["retrieve"] and config["enable"].get("retrieve_cost_data", T output: resources("costs_{year}.csv"), log: - "logs/retrieve_cost_data_{year}.log", + logs("retrieve_cost_data_{year}.log"), resources: mem_mb=1000, retries: 2 + conda: + "../envs/retrieve.yaml" script: "../scripts/retrieve_cost_data.py" @@ -191,6 +193,8 @@ if config["enable"]["retrieve"]: resources: mem_mb=5000, retries: 2 + conda: + "../envs/retrieve.yaml" script: "../scripts/retrieve_electricity_demand.py" diff --git a/rules/solve_electricity.smk b/rules/solve_electricity.smk index d3aa8d4c..b6a7902e 100644 --- a/rules/solve_electricity.smk +++ b/rules/solve_electricity.smk @@ -2,7 +2,6 @@ # # SPDX-License-Identifier: MIT - rule solve_network: params: solving=config_provider("solving"), diff --git a/rules/solve_myopic.smk b/rules/solve_myopic.smk index a52f17c0..8caf5201 100644 --- a/rules/solve_myopic.smk +++ b/rules/solve_myopic.smk 
@@ -16,9 +16,9 @@ rule add_existing_baseyear: busmap_s=resources("busmap_elec_s{simpl}.csv"), busmap=resources("busmap_elec_s{simpl}_{clusters}.csv"), clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), - costs=lambda w: "resources/costs_{}.csv".format( + costs=lambda w: resources("costs_{}.csv".format( config_provider("scenario", "planning_horizons", 0)(w) - ), + )), cop_soil_total=resources("cop_soil_total_elec_s{simpl}_{clusters}.nc"), cop_air_total=resources("cop_air_total_elec_s{simpl}_{clusters}.nc"), existing_heating_distribution=resources( @@ -76,7 +76,7 @@ rule add_brownfield: network=RESULTS + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", network_p=solved_previous_horizon, #solved network at previous time step - costs="resources/costs_{planning_horizons}.csv", + costs=resources("costs_{planning_horizons}.csv"), cop_soil_total=resources("cop_soil_total_elec_s{simpl}_{clusters}.nc"), cop_air_total=resources("cop_air_total_elec_s{simpl}_{clusters}.nc"), output: @@ -114,7 +114,7 @@ rule solve_sector_network_myopic: input: network=RESULTS + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", - costs="resources/costs_{planning_horizons}.csv", + costs=resources("costs_{planning_horizons}.csv"), config=RESULTS + "config.yaml", output: RESULTS diff --git a/rules/solve_perfect.smk b/rules/solve_perfect.smk index 0d94ad9c..af5e884c 100644 --- a/rules/solve_perfect.smk +++ b/rules/solve_perfect.smk @@ -14,9 +14,9 @@ rule add_existing_baseyear: busmap_s=resources("busmap_elec_s{simpl}.csv"), busmap=resources("busmap_elec_s{simpl}_{clusters}.csv"), clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), - costs="resources/costs_{}.csv".format( + costs=resources("costs_{}.csv".format( config_provider("scenario", "planning_horizons", 0) - ), + )), cop_soil_total=resources("cop_soil_total_elec_s{simpl}_{clusters}.nc"), 
cop_air_total=resources("cop_air_total_elec_s{simpl}_{clusters}.nc"), existing_heating_distribution=resources( @@ -98,7 +98,7 @@ rule solve_sector_network_perfect: input: network=RESULTS + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc", - costs="resources/costs_2030.csv", + costs=resources("costs_2030.csv"), config=RESULTS + "config.yaml", output: RESULTS @@ -140,7 +140,7 @@ def input_networks_make_summary_perfect(w): rule make_summary_perfect: input: unpack(input_networks_make_summary_perfect), - costs="resources/costs_2020.csv", + costs=resources("costs_2020.csv"), output: nodal_costs=RESULTS + "csvs/nodal_costs.csv", nodal_capacities=RESULTS + "csvs/nodal_capacities.csv", diff --git a/scripts/_helpers.py b/scripts/_helpers.py index 1aa90168..c33d39f6 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -52,15 +52,16 @@ def get_run_path(fn, dir, rdir, shared_resources): Notes ----- - Special case for "base" allows no wildcards other than - "technology" and excludes filenames starting with "networks/elec" or + Special case for "base" allows no wildcards other than "technology", "year" + and "scope" and excludes filenames starting with "networks/elec" or "add_electricity". 
""" pattern = r"\{([^{}]+)\}" - existing_wildcards = list(re.findall(pattern, fn)) + existing_wildcards = set(re.findall(pattern, fn)) if shared_resources == "base": # special case for shared "base" resources - no_relevant_wildcards = not len(set(existing_wildcards) - {"technology"}) + irrelevant_wildcards = {"technology", "year", "scope"} + no_relevant_wildcards = not len(existing_wildcards - irrelevant_wildcards) no_elec_rule = not fn.startswith("networks/elec") and not fn.startswith( "add_electricity" ) @@ -68,7 +69,7 @@ def get_run_path(fn, dir, rdir, shared_resources): elif isinstance(shared_resources, (str, list)): if isinstance(shared_resources, str): shared_resources = [shared_resources] - is_shared = set(existing_wildcards).issubset(shared_resources) + is_shared = (existing_wildcards).issubset(shared_resources) else: is_shared = shared_resources From dd2416a59e16560fbecd6eed383aa60a1c730632 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sat, 17 Feb 2024 11:57:16 +0100 Subject: [PATCH 41/76] handle all {opts} and {sector_opts} wildcard values in config --- config/config.default.yaml | 17 ++- doc/configtables/adjustments.csv | 8 + doc/configtables/clustering.csv | 3 + doc/configtables/sector.csv | 6 + doc/configtables/snapshots.csv | 2 - doc/configuration.rst | 15 ++ rules/build_electricity.smk | 16 +- rules/build_sector.smk | 13 +- rules/postprocess.smk | 5 +- rules/solve_myopic.smk | 2 +- rules/solve_perfect.smk | 2 + rules/validate.smk | 6 +- scripts/_helpers.py | 181 +++++++++++++++++++++- scripts/add_brownfield.py | 4 +- scripts/add_existing_baseyear.py | 7 +- scripts/plot_summary.py | 9 +- scripts/prepare_network.py | 109 +++++--------- scripts/prepare_perfect_foresight.py | 48 ++---- scripts/prepare_sector_network.py | 218 ++++++++------------------- scripts/solve_network.py | 49 ++---- scripts/solve_operations_network.py | 10 +- 21 files changed, 385 insertions(+), 345 deletions(-) create mode 100644 doc/configtables/adjustments.csv diff --git 
a/config/config.default.yaml b/config/config.default.yaml index 91f38b91..1b207f1e 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -59,9 +59,6 @@ snapshots: start: "2013-01-01" end: "2014-01-01" inclusive: 'left' - resolution: false - segmentation: false - #representative: false # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#enable enable: @@ -366,6 +363,11 @@ existing_capacities: # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#sector sector: + transport: true + heating: true + biomass: true + industry: true + agriculture: true district_heating: potential: 0.6 progress: @@ -531,6 +533,7 @@ sector: use_methanation_waste_heat: true use_fuel_cell_waste_heat: true use_electrolysis_waste_heat: true + electricity_transmission_grid: true electricity_distribution_grid: true electricity_distribution_grid_cost_factor: 1.0 electricity_grid_connection: true @@ -712,6 +715,14 @@ clustering: committable: any ramp_limit_up: max ramp_limit_down: max + temporal: + resolution_elec: false + resolution_sector: false + +# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#adjustments +adjustments: + electricity: false + sector: false # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#solving solving: diff --git a/doc/configtables/adjustments.csv b/doc/configtables/adjustments.csv new file mode 100644 index 00000000..52617352 --- /dev/null +++ b/doc/configtables/adjustments.csv @@ -0,0 +1,8 @@ +,Unit,Values,Description +adjustments,,, +-- electricity,bool or dict,,"Parameter adjustments for capital cost, marginal cost, and maximum capacities of carriers. Applied in :mod:`prepare_network.`" +-- -- {attr},,,"Attribute can be ``e_nom_opt``, ``p_nom_opt``, ``marginal_cost`` or ``capital_cost``" +-- -- -- {carrier},float,per-unit,"Any carrier of the network to which parameter adjustment factor should be applied." 
+-- sector,bool or dict,,"Parameter adjustments for capital cost, marginal cost, and maximum capacities of carriers. Applied in :mod:`prepare_sector_network.`" +-- -- {attr},,,"Attribute can be ``e_nom_opt``, ``p_nom_opt``, ``marginal_cost`` or ``capital_cost``" +-- -- -- {carrier},float,per-unit,"Any carrier of the network to which parameter adjustment factor should be applied." diff --git a/doc/configtables/clustering.csv b/doc/configtables/clustering.csv index e831ca84..65411738 100644 --- a/doc/configtables/clustering.csv +++ b/doc/configtables/clustering.csv @@ -17,3 +17,6 @@ aggregation_strategies,,, -- -- {key},str,"{key} can be any of the component of the generator (str). It’s value can be any that can be converted to pandas.Series using getattr(). For example one of {min, max, sum}.","Aggregates the component according to the given strategy. For example, if sum, then all values within each cluster are summed to represent the new generator." -- buses,,, -- -- {key},str,"{key} can be any of the component of the bus (str). It’s value can be any that can be converted to pandas.Series using getattr(). For example one of {min, max, sum}.","Aggregates the component according to the given strategy. For example, if sum, then all values within each cluster are summed to represent the new bus." +temporal,,,Options for temporal resolution +-- resolution_elec,--,"{false,``nH``; i.e. ``2H``-``6H``}","Resample the time-resolution by averaging over every ``n`` snapshots in :mod:`prepare_network`. **Warning:** This option should currently only be used with electricity-only networks, not for sector-coupled networks." +-- resolution_sector,--,"{false,``nH``; i.e. ``2H``-``6H``}","Resample the time-resolution by averaging over every ``n`` snapshots in :mod:`prepare_sector_network`." 
diff --git a/doc/configtables/sector.csv b/doc/configtables/sector.csv index d8cc3288..1f8bb030 100644 --- a/doc/configtables/sector.csv +++ b/doc/configtables/sector.csv @@ -1,4 +1,9 @@ ,Unit,Values,Description +transport,--,"{true, false}",Flag to include transport sector. +heating,--,"{true, false}",Flag to include heating sector. +biomass,--,"{true, false}",Flag to include biomass sector. +industry,--,"{true, false}",Flag to include industry sector. +agriculture,--,"{true, false}",Flag to include agriculture sector. district_heating,--,,`prepare_sector_network.py `_ -- potential,--,float,maximum fraction of urban demand which can be supplied by district heating -- progress,--,Dictionary with planning horizons as keys., Increase of today's district heating demand to potential maximum district heating share. Progress = 0 means today's district heating share. Progress = 1 means maximum fraction of urban demand is supplied by district heating @@ -109,6 +114,7 @@ min_part_load _methanolisation,per unit of p_nom ,float,The minimum unit dispatc use_fischer_tropsch _waste_heat,--,"{true, false}",Add option for using waste heat of Fischer Tropsch in district heating networks use_fuel_cell_waste_heat,--,"{true, false}",Add option for using waste heat of fuel cells in district heating networks use_electrolysis_waste _heat,--,"{true, false}",Add option for using waste heat of electrolysis in district heating networks +electricity_transmission _grid,--,"{true, false}",Switch for enabling/disabling the electricity transmission grid. electricity_distribution _grid,--,"{true, false}",Add a simplified representation of the exchange capacity between transmission and distribution grid level through a link. 
electricity_distribution _grid_cost_factor,,,Multiplies the investment cost of the electricity distribution grid ,,, diff --git a/doc/configtables/snapshots.csv b/doc/configtables/snapshots.csv index 0226a9aa..4be0439b 100644 --- a/doc/configtables/snapshots.csv +++ b/doc/configtables/snapshots.csv @@ -2,5 +2,3 @@ start,--,str or datetime-like; e.g. YYYY-MM-DD,Left bound of date range end,--,str or datetime-like; e.g. YYYY-MM-DD,Right bound of date range inclusive,--,"One of {'neither', 'both', ‘left’, ‘right’}","Make the time interval closed to the ``left``, ``right``, or both sides ``both`` or neither side ``None``." -resolution ,--,"{false,``nH``; i.e. ``2H``-``6H``}","Resample the time-resolution by averaging over every ``n`` snapshots in :mod:`prepare_network`. **Warning:** This option should currently only be used with electricity-only networks, not for sector-coupled networks." -segmentation,--,"{false,``n``; e.g. ``4380``}","Apply time series segmentation with `tsam `_ package to ``n`` adjacent snapshots of varying lengths based on capacity factors of varying renewables, hydro inflow and load in :mod:`prepare_network`. **Warning:** This option should currently only be used with electricity-only networks, not for sector-coupled networks." diff --git a/doc/configuration.rst b/doc/configuration.rst index f65aa4c2..e8b0426a 100644 --- a/doc/configuration.rst +++ b/doc/configuration.rst @@ -561,6 +561,21 @@ The list of available biomass is given by the category in `ENSPRESO_BIOMASS 0: - return True, float(m[0]) + return True, float(m[0].replace("p", ".").replace("m", "-")) else: return True, None return False, None @@ -379,13 +380,177 @@ def parse(infix): return {infix.pop(0): parse(infix)} -def update_config_with_sector_opts(config, sector_opts): - from snakemake.utils import update_config +def update_config_from_wildcards(config, w): + """ + Parses configuration settings from wildcards and updates the config. 
- for o in sector_opts.split("-"): - if o.startswith("CF+"): - infix = o.split("+")[1:] - update_config(config, parse(infix)) + - TODO: Should be run inside config_provider function. + """ + + if w.get("opts"): + opts = w.opts.split("-") + + if nhours := get_opt(opts, r"^\d+(h|seg)$"): + config["clustering"]["temporal"]["resolution_elec"] = nhours + + co2l_enable, co2l_value = find_opt(opts, "Co2L") + if co2l_enable: + config["electricity"]["co2limit_enable"] = True + if co2l_value is not None: + config["electricity"]["co2limit"] = ( + co2l_value * config["electricity"]["co2base"] + ) + + gasl_enable, gasl_value = find_opt(opts, "CH4L") + if gasl_enable: + config["electricity"]["gaslimit_enable"] = True + if gasl_value is not None: + config["electricity"]["gaslimit"] = gasl_value * 1e6 + + if "Ept" in opts: + config["costs"]["emission_prices"]["co2_monthly_prices"] = True + + ep_enable, ep_value = find_opt(opts, "Ep") + if ep_enable: + config["costs"]["emission_prices"]["enable"] = True + if ep_value is not None: + config["costs"]["emission_prices"]["co2"] = ep_value + + if "ATK" in opts: + config["autarky"]["enable"] = True + if "ATKc" in opts: + config["autarky"]["by_country"] = True + + attr_lookup = { + "p": "p_nom_max", + "e": "e_nom_max", + "c": "capital_cost", + "m": "marginal_cost", + } + for o in opts: + flags = ["+e", "+p", "+m", "+c"] + if all(flag not in o for flag in flags): + continue + carrier, attr_factor = o.split("+") + attr = attr_lookup[attr_factor[0]] + factor = float(attr_factor[1:]) + if not isinstance(config["adjustments"]["electricity"], dict): + config["adjustments"]["electricity"] = dict() + update_config( + config["adjustments"]["electricity"], {attr: {carrier: factor}} + ) + + if w.get("sector_opts"): + opts = w.sector_opts.split("-") + + if "T" in opts: + config["sector"]["transport"] = True + + if "H" in opts: + config["sector"]["heating"] = True + + if "B" in opts: + config["sector"]["biomass"] = True + + if "I" in opts: + 
config["sector"]["industry"] = True + + if "A" in opts: + config["sector"]["agriculture"] = True + + if "CCL" in opts: + config["solving"]["constraints"]["CCL"] = True + + eq_value = get_opt(opts, r"^EQ+\d*\.?\d+(c|)") + for o in opts: + if eq_value is not None: + config["solving"]["constraints"]["EQ"] = eq_value + elif "EQ" in o: + config["solving"]["constraints"]["EQ"] = True + break + + if "BAU" in opts: + config["solving"]["constraints"]["BAU"] = True + + if "SAFE" in opts: + config["solving"]["constraints"]["SAFE"] = True + + if nhours := get_opt(opts, r"^\d+(h|sn|seg)$"): + config["clustering"]["temporal"]["resolution_sector"] = nhours + + if "decentral" in opts: + config["sector"]["electricity_transmission_grid"] = False + + if "noH2network" in opts: + config["sector"]["H2_network"] = False + + if "nowasteheat" in opts: + config["sector"]["use_fischer_tropsch_waste_heat"] = False + config["sector"]["use_methanolisation_waste_heat"] = False + config["sector"]["use_haber_bosch_waste_heat"] = False + config["sector"]["use_methanation_waste_heat"] = False + config["sector"]["use_fuel_cell_waste_heat"] = False + config["sector"]["use_electrolysis_waste_heat"] = False + + if "nodistrict" in opts: + config["sector"]["district_heating"]["progress"] = 0.0 + + dg_enable, dg_factor = find_opt(opts, "dist") + if dg_enable: + config["sector"]["electricity_distribution_grid"] = True + if dg_factor is not None: + config["sector"][ + "electricity_distribution_grid_cost_factor" + ] = dg_factor + + if "biomasstransport" in opts: + config["sector"]["biomass_transport"] = True + + _, maxext = find_opt(opts, "linemaxext") + if maxext is not None: + config["lines"]["max_extension"] = maxext * 1e3 + config["links"]["max_extension"] = maxext * 1e3 + + _, co2l_value = find_opt(opts, "Co2L") + if co2l_value is not None: + config["co2_budget"] = float(co2l_value) + + if co2_distribution := get_opt(opts, r"^(cb)\d+(\.\d+)?(ex|be)$"): + config["co2_budget"] = co2_distribution + + if 
co2_budget := get_opt(opts, r"^(cb)\d+(\.\d+)?$"): + config["co2_budget"] = float(co2_budget[2:]) + + attr_lookup = { + "p": "p_nom_max", + "e": "e_nom_max", + "c": "capital_cost", + "m": "marginal_cost", + } + for o in opts: + flags = ["+e", "+p", "+m", "+c"] + if all(flag not in o for flag in flags): + continue + carrier, attr_factor = o.split("+") + attr = attr_lookup[attr_factor[0]] + factor = float(attr_factor[1:]) + if not isinstance(config["adjustments"]["sector"], dict): + config["adjustments"]["sector"] = dict() + update_config(config["adjustments"]["sector"], {attr: {carrier: factor}}) + + _, sdr_value = find_opt(opts, "sdr") + if sdr_value is not None: + config["costs"]["social_discountrate"] = sdr_value / 100 + + _, seq_limit = find_opt(opts, "seq") + if seq_limit is not None: + config["sector"]["co2_sequestration_potential"] = seq_limit + + # any config option can be represented in wildcard + for o in opts: + if o.startswith("CF+"): + infix = o.split("+")[1:] + update_config(config, parse(infix)) def get_checksum_from_zenodo(file_url): diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index 3b77c437..329bde4c 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -12,7 +12,7 @@ import numpy as np import pandas as pd import pypsa import xarray as xr -from _helpers import update_config_with_sector_opts +from _helpers import update_config_from_wildcards from add_existing_baseyear import add_build_year_to_new_assets from pypsa.clustering.spatial import normed_or_uniform @@ -212,7 +212,7 @@ if __name__ == "__main__": logging.basicConfig(level=snakemake.config["logging"]["level"]) - update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts) + update_config_from_wildcards(snakemake.config, snakemake.wildcards) logger.info(f"Preparing brownfield from the file {snakemake.input.network_p}") diff --git a/scripts/add_existing_baseyear.py b/scripts/add_existing_baseyear.py index c0d37a5b..02711f59 100644 
--- a/scripts/add_existing_baseyear.py +++ b/scripts/add_existing_baseyear.py @@ -15,7 +15,7 @@ import numpy as np import pandas as pd import pypsa import xarray as xr -from _helpers import update_config_with_sector_opts +from _helpers import update_config_from_wildcards from add_electricity import sanitize_carriers from prepare_sector_network import cluster_heat_buses, define_spatial, prepare_costs @@ -554,10 +554,9 @@ if __name__ == "__main__": logging.basicConfig(level=snakemake.config["logging"]["level"]) - update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts) + update_config_from_wildcards(snakemake.config, snakemake.wildcards) options = snakemake.params.sector - opts = snakemake.wildcards.sector_opts.split("-") baseyear = snakemake.params.baseyear @@ -580,7 +579,7 @@ if __name__ == "__main__": n, grouping_years_power, costs, baseyear ) - if "H" in opts: + if options["heating"]: time_dep_hp_cop = options["time_dep_hp_cop"] ashp_cop = ( xr.open_dataarray(snakemake.input.cop_air_total) diff --git a/scripts/plot_summary.py b/scripts/plot_summary.py index cfb32441..05ffa2d4 100644 --- a/scripts/plot_summary.py +++ b/scripts/plot_summary.py @@ -582,7 +582,8 @@ if __name__ == "__main__": plot_balances() - for sector_opts in snakemake.params.sector_opts: - opts = sector_opts.split("-") - if any("cb" in o for o in opts) or snakemake.config["foresight"] == "perfect": - plot_carbon_budget_distribution(snakemake.input.eurostat) + if ( + snakemake.params["co2_budget"].startswith("cb") + or snakemake.params["foresight"] == "perfect" + ): + plot_carbon_budget_distribution(snakemake.input.eurostat) diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index e358c05e..a9216d51 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -62,7 +62,7 @@ import logging import numpy as np import pandas as pd import pypsa -from _helpers import configure_logging, find_opt, get_opt +from _helpers import configure_logging, 
update_config_from_wildcards from add_electricity import load_costs, update_transmission_costs from pypsa.descriptors import expand_series @@ -71,6 +71,28 @@ idx = pd.IndexSlice logger = logging.getLogger(__name__) +def maybe_adjust_costs_and_potentials(n, adjustments): + if not adjustments: + return + + for attr, carrier_factor in adjustments.items(): + for carrier, factor in carrier_factor.items(): + # beware if factor is 0 and p_nom_max is np.inf, 0*np.inf is nan + if carrier == "AC": # lines do not have carrier + n.lines[attr] *= factor + continue + comps = { + "p_nom_max": {"Generator", "Link", "StorageUnit"}, + "e_nom_max": {"Store"}, + "capital_cost": {"Generator", "Link", "StorageUnit", "Store"}, + "marginal_cost": {"Generator", "Link", "StorageUnit", "Store"}, + } + for c in n.iterate_components(comps[attr]): + sel = c.df.index[c.df.carrier == carrier] + c.df.loc[sel, attr] *= factor + logger.info(f"changing {attr} for {carrier} by factor {factor}") + + def add_co2limit(n, co2limit, Nyears=1.0): n.add( "GlobalConstraint", @@ -278,11 +300,10 @@ if __name__ == "__main__": from _helpers import mock_snakemake snakemake = mock_snakemake( - "prepare_network", simpl="", clusters="37", ll="v1.0", opts="Ept" + "prepare_network", simpl="", clusters="37", ll="v1.0", opts="Co2L-4H" ) configure_logging(snakemake) - - opts = snakemake.wildcards.opts.split("-") + update_config_from_wildcards(snakemake.config, snakemake.wildcards) n = pypsa.Network(snakemake.input[0]) Nyears = n.snapshot_weightings.objective.sum() / 8760.0 @@ -296,81 +317,32 @@ if __name__ == "__main__": set_line_s_max_pu(n, snakemake.params.lines["s_max_pu"]) # temporal averaging - nhours_config = snakemake.params.snapshots.get("resolution", False) - nhours_wildcard = get_opt(opts, r"^\d+h$") - nhours = nhours_wildcard or nhours_config - if nhours: + if nhours := snakemake.params.time_resolution: n = average_every_nhours(n, nhours) # segments with package tsam - time_seg_config = 
snakemake.params.snapshots.get("segmentation", False) - time_seg_wildcard = get_opt(opts, r"^\d+seg$") - time_seg = time_seg_wildcard or time_seg_config - if time_seg: + if time_seg := snakemake.params.time_resolution: solver_name = snakemake.config["solving"]["solver"]["name"] n = apply_time_segmentation(n, time_seg.replace("seg", ""), solver_name) - Co2L_config = snakemake.params.co2limit_enable - Co2L_wildcard, co2limit_wildcard = find_opt(opts, "Co2L") - if Co2L_wildcard or Co2L_config: - if co2limit_wildcard is not None: - co2limit = co2limit_wildcard * snakemake.params.co2base - add_co2limit(n, co2limit, Nyears) - logger.info("Setting CO2 limit according to wildcard value.") - else: - add_co2limit(n, snakemake.params.co2limit, Nyears) - logger.info("Setting CO2 limit according to config value.") + if snakemake.params.co2limit_enable: + add_co2limit(n, snakemake.params.co2limit, Nyears) - CH4L_config = snakemake.params.gaslimit_enable - CH4L_wildcard, gaslimit_wildcard = find_opt(opts, "CH4L") - if CH4L_wildcard or CH4L_config: - if gaslimit_wildcard is not None: - gaslimit = gaslimit_wildcard * 1e6 - add_gaslimit(n, gaslimit, Nyears) - logger.info("Setting gas usage limit according to wildcard value.") - else: - add_gaslimit(n, snakemake.params.gaslimit, Nyears) - logger.info("Setting gas usage limit according to config value.") + if snakemake.params.gaslimit_enable: + add_gaslimit(n, snakemake.params.gaslimit, Nyears) - for o in opts: - if "+" not in o: - continue - oo = o.split("+") - suptechs = map(lambda c: c.split("-", 2)[0], n.carriers.index) - if oo[0].startswith(tuple(suptechs)): - carrier = oo[0] - # handles only p_nom_max as stores and lines have no potentials - attr_lookup = {"p": "p_nom_max", "c": "capital_cost", "m": "marginal_cost"} - attr = attr_lookup[oo[1][0]] - factor = float(oo[1][1:]) - if carrier == "AC": # lines do not have carrier - n.lines[attr] *= factor - else: - comps = {"Generator", "Link", "StorageUnit", "Store"} - for c in 
n.iterate_components(comps): - sel = c.df.carrier.str.contains(carrier) - c.df.loc[sel, attr] *= factor + maybe_adjust_costs_and_potentials(n, snakemake.params["adjustments"]) emission_prices = snakemake.params.costs["emission_prices"] - Ept_config = emission_prices.get("co2_monthly_prices", False) - Ept_wildcard = "Ept" in opts - Ep_config = emission_prices.get("enable", False) - Ep_wildcard, co2_wildcard = find_opt(opts, "Ep") - - if Ept_wildcard or Ept_config: + if emission_prices["co2_monthly_prices"]: logger.info( "Setting time dependent emission prices according spot market price" ) add_dynamic_emission_prices(n) - elif Ep_wildcard or Ep_config: - if co2_wildcard is not None: - logger.info("Setting CO2 prices according to wildcard value.") - add_emission_prices(n, dict(co2=co2_wildcard)) - else: - logger.info("Setting CO2 prices according to config value.") - add_emission_prices( - n, dict(co2=snakemake.params.costs["emission_prices"]["co2"]) - ) + elif emission_prices["enable"]: + add_emission_prices( + n, dict(co2=snakemake.params.costs["emission_prices"]["co2"]) + ) ll_type, factor = snakemake.wildcards.ll[0], snakemake.wildcards.ll[1:] set_transmission_limit(n, ll_type, factor, costs, Nyears) @@ -383,11 +355,8 @@ if __name__ == "__main__": p_nom_max_ext=snakemake.params.links.get("max_extension", np.inf), ) - autarky_config = snakemake.params.autarky - if "ATK" in opts or autarky_config.get("enable", False): - only_crossborder = False - if "ATKc" in opts or autarky_config.get("by_country", False): - only_crossborder = True + if snakemake.params.autarky["enable"]: + only_crossborder = snakemake.params.autarky["by_country"] enforce_autarky(n, only_crossborder=only_crossborder) n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) diff --git a/scripts/prepare_perfect_foresight.py b/scripts/prepare_perfect_foresight.py index cf013577..54b2ecc0 100644 --- a/scripts/prepare_perfect_foresight.py +++ b/scripts/prepare_perfect_foresight.py 
@@ -12,7 +12,7 @@ import re import numpy as np import pandas as pd import pypsa -from _helpers import update_config_with_sector_opts +from _helpers import update_config_from_wildcards from add_existing_baseyear import add_build_year_to_new_assets from pypsa.descriptors import expand_series from pypsa.io import import_components_from_dataframe @@ -304,17 +304,14 @@ def set_all_phase_outs(n): n.mremove("Link", remove_i) -def set_carbon_constraints(n, opts): +def set_carbon_constraints(n): """ Add global constraints for carbon emissions. """ - budget = None - for o in opts: - # other budgets - m = re.match(r"^\d+p\d$", o, re.IGNORECASE) - if m is not None: - budget = snakemake.config["co2_budget"][m.group(0)] * 1e9 - if budget is not None: + budget = snakemake.config["co2_budget"] + if budget and isinstance(budget, float): + budget *= 1e9 # convert to t CO2 + logger.info(f"add carbon budget of {budget}") n.add( "GlobalConstraint", @@ -341,7 +338,7 @@ def set_carbon_constraints(n, opts): ) # set minimum CO2 emission constraint to avoid too fast reduction - if "co2min" in opts: + if "co2min" in snakemake.wildcards.sector_opts.split("-"): emissions_1990 = 4.53693 emissions_2019 = 3.344096 target_2030 = 0.45 * emissions_1990 @@ -487,21 +484,6 @@ def apply_time_segmentation_perfect( return n -def set_temporal_aggregation_SEG(n, opts, solver_name): - """ - Aggregate network temporally with tsam. 
- """ - for o in opts: - # segments with package tsam - m = re.match(r"^(\d+)seg$", o, re.IGNORECASE) - if m is not None: - segments = int(m[1]) - logger.info(f"Use temporal segmentation with {segments} segments") - n = apply_time_segmentation_perfect(n, segments, solver_name=solver_name) - break - return n - - if __name__ == "__main__": if "snakemake" not in globals(): from _helpers import mock_snakemake @@ -515,14 +497,10 @@ if __name__ == "__main__": sector_opts="1p7-4380H-T-H-B-I-A-dist1", ) - update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts) + update_config_from_wildcards(snakemake.config, snakemake.wildcards) # parameters ----------------------------------------------------------- years = snakemake.config["scenario"]["planning_horizons"] - opts = snakemake.wildcards.sector_opts.split("-") - social_discountrate = snakemake.config["costs"]["social_discountrate"] - for o in opts: - if "sdr" in o: - social_discountrate = float(o.replace("sdr", "")) / 100 + social_discountrate = snakemake.params.costs["social_discountrate"] logger.info( f"Concat networks of investment period {years} with social discount rate of {social_discountrate * 100}%" @@ -532,9 +510,10 @@ if __name__ == "__main__": n = concat_networks(years) # temporal aggregate - opts = snakemake.wildcards.sector_opts.split("-") solver_name = snakemake.config["solving"]["solver"]["name"] - n = set_temporal_aggregation_SEG(n, opts, solver_name) + segments = snakemake.params["clustering"]["temporal"]["resolution_sector"] + if isinstance(segments, (int, float)): + n = apply_time_segmentation_perfect(n, segments, solver_name=solver_name) # adjust global constraints lv limit if the same for all years n = adjust_lvlimit(n) @@ -550,8 +529,7 @@ if __name__ == "__main__": add_H2_boilers(n) # set carbon constraints - opts = snakemake.wildcards.sector_opts.split("-") - n = set_carbon_constraints(n, opts) + n = set_carbon_constraints(n) # export network 
n.export_to_netcdf(snakemake.output[0]) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 5d5e271b..d9cce254 100755 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -9,7 +9,6 @@ technologies for the buildings, transport and industry sectors. import logging import os -import re from itertools import product from types import SimpleNamespace @@ -18,11 +17,12 @@ import numpy as np import pandas as pd import pypsa import xarray as xr -from _helpers import update_config_with_sector_opts +from _helpers import configure_logging, update_config_from_wildcards from add_electricity import calculate_annuity, sanitize_carriers, sanitize_locations from build_energy_totals import build_co2_totals, build_eea_co2, build_eurostat_co2 from networkx.algorithms import complement from networkx.algorithms.connectivity.edge_augmentation import k_edge_augmentation +from prepare_network import maybe_adjust_costs_and_potentials from pypsa.geo import haversine_pts from pypsa.io import import_components_from_dataframe from scipy.stats import beta @@ -190,13 +190,13 @@ def define_spatial(nodes, options): spatial = SimpleNamespace() -def emission_sectors_from_opts(opts): +def determine_emission_sectors(options): sectors = ["electricity"] - if "T" in opts: + if options["transport"]: sectors += ["rail non-elec", "road non-elec"] - if "H" in opts: + if options["heating"]: sectors += ["residential non-elec", "services non-elec"] - if "I" in opts: + if options["industry"]: sectors += [ "industrial non-elec", "industrial processes", @@ -205,7 +205,7 @@ def emission_sectors_from_opts(opts): "domestic navigation", "international navigation", ] - if "A" in opts: + if options["agriculture"]: sectors += ["agriculture"] return sectors @@ -219,7 +219,7 @@ def get(item, investment_year=None): def co2_emissions_year( - countries, input_eurostat, opts, emissions_scope, report_year, input_co2, year + countries, input_eurostat, options, 
emissions_scope, report_year, input_co2, year ): """ Calculate CO2 emissions in one specific year (e.g. 1990 or 2018). @@ -237,7 +237,7 @@ def co2_emissions_year( co2_totals = build_co2_totals(countries, eea_co2, eurostat_co2) - sectors = emission_sectors_from_opts(opts) + sectors = determine_emission_sectors(options) co2_emissions = co2_totals.loc[countries, sectors].sum().sum() @@ -248,11 +248,12 @@ def co2_emissions_year( # TODO: move to own rule with sector-opts wildcard? -def build_carbon_budget(o, input_eurostat, fn, emissions_scope, report_year): +def build_carbon_budget( + o, input_eurostat, fn, emissions_scope, report_year, input_co2, options +): """ Distribute carbon budget following beta or exponential transition path. """ - # opts? if "be" in o: # beta decay @@ -268,7 +269,7 @@ def build_carbon_budget(o, input_eurostat, fn, emissions_scope, report_year): e_1990 = co2_emissions_year( countries, input_eurostat, - opts, + options, emissions_scope, report_year, input_co2, @@ -279,7 +280,7 @@ def build_carbon_budget(o, input_eurostat, fn, emissions_scope, report_year): e_0 = co2_emissions_year( countries, input_eurostat, - opts, + options, emissions_scope, report_year, input_co2, @@ -756,12 +757,12 @@ def add_dac(n, costs): ) -def add_co2limit(n, nyears=1.0, limit=0.0): +def add_co2limit(n, options, nyears=1.0, limit=0.0): logger.info(f"Adding CO2 budget limit as per unit of 1990 levels of {limit}") countries = snakemake.params.countries - sectors = emission_sectors_from_opts(opts) + sectors = determine_emission_sectors(options) # convert Mt to tCO2 co2_totals = 1e6 * pd.read_csv(snakemake.input.co2_totals_name, index_col=0) @@ -2000,13 +2001,6 @@ def add_heat(n, costs): if options["retrofitting"]["retro_endogen"]: logger.info("Add retrofitting endogenously") - # resample heat demand temporal 'heat_demand_r' depending on in config - # specified temporal resolution, to not overestimate retrofitting - hours = list(filter(re.compile(r"^\d+h$", 
re.IGNORECASE).search, opts)) - if len(hours) == 0: - hours = [n.snapshots[1] - n.snapshots[0]] - heat_demand_r = heat_demand.resample(hours[0]).mean() - # retrofitting data 'retro_data' with 'costs' [EUR/m^2] and heat # demand 'dE' [per unit of original heat demand] for each country and # different retrofitting strengths [additional insulation thickness in m] @@ -2024,12 +2018,12 @@ def add_heat(n, costs): # share of space heat demand 'w_space' of total heat demand w_space = {} for sector in sectors: - w_space[sector] = heat_demand_r[sector + " space"] / ( - heat_demand_r[sector + " space"] + heat_demand_r[sector + " water"] + w_space[sector] = heat_demand[sector + " space"] / ( + heat_demand[sector + " space"] + heat_demand[sector + " water"] ) w_space["tot"] = ( - heat_demand_r["services space"] + heat_demand_r["residential space"] - ) / heat_demand_r.T.groupby(level=[1]).sum().T + heat_demand["services space"] + heat_demand["residential space"] + ) / heat_demand.T.groupby(level=[1]).sum().T for name in n.loads[ n.loads.carrier.isin([x + " heat" for x in heat_systems]) @@ -2059,7 +2053,7 @@ def add_heat(n, costs): pop_layout.loc[node].fraction * floor_area.loc[ct, "value"] * 10**6 ).loc[sec] * f # total heat demand at node [MWh] - demand = n.loads_t.p_set[name].resample(hours[0]).mean() + demand = n.loads_t.p_set[name] # space heat demand at node [MWh] space_heat_demand = demand * w_space[sec][node] @@ -3292,52 +3286,6 @@ def remove_h2_network(n): n.stores.drop("EU H2 Store", inplace=True) -def maybe_adjust_costs_and_potentials(n, opts): - for o in opts: - flags = ["+e", "+p", "+m", "+c"] - if all(flag not in o for flag in flags): - continue - oo = o.split("+") - carrier_list = np.hstack( - ( - n.generators.carrier.unique(), - n.links.carrier.unique(), - n.stores.carrier.unique(), - n.storage_units.carrier.unique(), - ) - ) - suptechs = map(lambda c: c.split("-", 2)[0], carrier_list) - if oo[0].startswith(tuple(suptechs)): - carrier = oo[0] - attr_lookup = { - 
"p": "p_nom_max", - "e": "e_nom_max", - "c": "capital_cost", - "m": "marginal_cost", - } - attr = attr_lookup[oo[1][0]] - factor = float(oo[1][1:]) - # beware if factor is 0 and p_nom_max is np.inf, 0*np.inf is nan - if carrier == "AC": # lines do not have carrier - n.lines[attr] *= factor - else: - if attr == "p_nom_max": - comps = {"Generator", "Link", "StorageUnit"} - elif attr == "e_nom_max": - comps = {"Store"} - else: - comps = {"Generator", "Link", "StorageUnit", "Store"} - for c in n.iterate_components(comps): - if carrier == "solar": - sel = c.df.carrier.str.contains( - carrier - ) & ~c.df.carrier.str.contains("solar rooftop") - else: - sel = c.df.carrier.str.contains(carrier) - c.df.loc[sel, attr] *= factor - logger.info(f"changing {attr} for {carrier} by factor {factor}") - - def limit_individual_line_extension(n, maxext): logger.info(f"Limiting new HVAC and HVDC extensions to {maxext} MW") n.lines["s_nom_max"] = n.lines["s_nom"] + maxext @@ -3507,31 +3455,31 @@ def apply_time_segmentation( return n -def set_temporal_aggregation(n, opts, solver_name): +def set_temporal_aggregation(n, resolution, solver_name): """ Aggregate network temporally. 
""" - for o in opts: - # temporal averaging - m = re.match(r"^\d+h$", o, re.IGNORECASE) - if m is not None: - n = average_every_nhours(n, m.group(0)) - break - # representative snapshots - m = re.match(r"(^\d+)sn$", o, re.IGNORECASE) - if m is not None: - sn = int(m[1]) - logger.info(f"Use every {sn} snapshot as representative") - n.set_snapshots(n.snapshots[::sn]) - n.snapshot_weightings *= sn - break - # segments with package tsam - m = re.match(r"^(\d+)seg$", o, re.IGNORECASE) - if m is not None: - segments = int(m[1]) - logger.info(f"Use temporal segmentation with {segments} segments") - n = apply_time_segmentation(n, segments, solver_name=solver_name) - break + if not resolution: + return n + + # representative snapshots + if "sn" in resolution.lower(): + sn = int(resolution[:-2]) + logger.info("Use every %s snapshot as representative", sn) + n.set_snapshots(n.snapshots[::sn]) + n.snapshot_weightings *= sn + + # segments with package tsam + elif "seg" in resolution.lower(): + segments = int(resolution[:-3]) + logger.info("Use temporal segmentation with %s segments", segments) + n = apply_time_segmentation(n, segments, solver_name=solver_name) + + # temporal averaging + elif "h" in resolution.lower(): + logger.info("Aggregate to frequency %s", resolution) + n = average_every_nhours(n, resolution) + return n @@ -3600,14 +3548,12 @@ if __name__ == "__main__": planning_horizons="2030", ) - logging.basicConfig(level=snakemake.config["logging"]["level"]) + configure_logging(snakemake) - update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts) + update_config_from_wildcards(snakemake.config, snakemake.wildcards) options = snakemake.params.sector - opts = snakemake.wildcards.sector_opts.split("-") - investment_year = int(snakemake.wildcards.planning_horizons[-4:]) n = pypsa.Network(snakemake.input.network) @@ -3645,56 +3591,34 @@ if __name__ == "__main__": add_storage_and_grids(n, costs) - # TODO merge with opts cost adjustment below - for o 
in opts: - if o[:4] == "dist": - options["electricity_distribution_grid"] = True - options["electricity_distribution_grid_cost_factor"] = float( - o[4:].replace("p", ".").replace("m", "-") - ) - if o == "biomasstransport": - options["biomass_transport"] = True - - if "nodistrict" in opts: - options["district_heating"]["progress"] = 0.0 - - if "nowasteheat" in opts: - logger.info("Disabling waste heat.") - options["use_fischer_tropsch_waste_heat"] = False - options["use_methanolisation_waste_heat"] = False - options["use_haber_bosch_waste_heat"] = False - options["use_methanation_waste_heat"] = False - options["use_fuel_cell_waste_heat"] = False - options["use_electrolysis_waste_heat"] = False - - if "T" in opts: + if options["transport"]: add_land_transport(n, costs) - if "H" in opts: + if options["heating"]: add_heat(n, costs) - if "B" in opts: + if options["biomass"]: add_biomass(n, costs) if options["ammonia"]: add_ammonia(n, costs) - if "I" in opts: + if options["industry"]: add_industry(n, costs) - if "H" in opts: + if options["heating"]: add_waste_heat(n) - if "A" in opts: # requires H and I + if options["agriculture"]: # requires H and I add_agriculture(n, costs) if options["dac"]: add_dac(n, costs) - if "decentral" in opts: + if not options["electricity_transmission_grid"]: decentral(n) - if "noH2network" in opts: + if not options["H2_network"]: remove_h2_network(n) if options["co2network"]: @@ -3704,51 +3628,39 @@ if __name__ == "__main__": add_allam(n, costs) solver_name = snakemake.config["solving"]["solver"]["name"] - n = set_temporal_aggregation(n, opts, solver_name) + resolution = snakemake.params.time_resolution + n = set_temporal_aggregation(n, resolution, solver_name) - limit_type = "config" - limit = get(snakemake.params.co2_budget, investment_year) - for o in opts: - if "cb" not in o: - continue - limit_type = "carbon budget" + co2_budget = snakemake.params.co2_budget + if isinstance(co2_budget, str) and co2_budget.startswith("cb"): fn = 
"results/" + snakemake.params.RDIR + "/csvs/carbon_budget_distribution.csv" if not os.path.exists(fn): emissions_scope = snakemake.params.emissions_scope report_year = snakemake.params.eurostat_report_year input_co2 = snakemake.input.co2 build_carbon_budget( - o, + co2_budget, snakemake.input.eurostat, fn, emissions_scope, report_year, input_co2, + options, ) co2_cap = pd.read_csv(fn, index_col=0).squeeze() limit = co2_cap.loc[investment_year] - break - for o in opts: - if "Co2L" not in o: - continue - limit_type = "wildcard" - limit = o[o.find("Co2L") + 4 :] - limit = float(limit.replace("p", ".").replace("m", "-")) - break - logger.info(f"Add CO2 limit from {limit_type}") - add_co2limit(n, nyears, limit) + else: + limit = get(co2_budget, investment_year) + add_co2limit(n, options, nyears, limit) - for o in opts: - if not o[:10] == "linemaxext": - continue - maxext = float(o[10:]) * 1e3 + maxext = snakemake.params["lines"]["max_extension"] + if maxext is not None: limit_individual_line_extension(n, maxext) - break if options["electricity_distribution_grid"]: insert_electricity_distribution_grid(n, costs) - maybe_adjust_costs_and_potentials(n, opts) + maybe_adjust_costs_and_potentials(n, snakemake.params["adjustments"]) if options["gas_distribution_grid"]: insert_gas_distribution_costs(n, costs) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 551222c0..278171bc 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -37,7 +37,7 @@ import pandas as pd import pypsa import xarray as xr from _benchmark import memory_logger -from _helpers import configure_logging, get_opt, update_config_with_sector_opts +from _helpers import configure_logging, update_config_from_wildcards from pypsa.descriptors import get_activity_mask from pypsa.descriptors import get_switchable_as_dense as get_as_dense @@ -178,16 +178,10 @@ def _add_land_use_constraint_m(n, planning_horizons, config): n.generators.p_nom_max.clip(lower=0, inplace=True) -def 
add_co2_sequestration_limit(n, config, limit=200): +def add_co2_sequestration_limit(n, limit=200): """ Add a global constraint on the amount of Mt CO2 that can be sequestered. """ - limit = limit * 1e6 - for o in opts: - if "seq" not in o: - continue - limit = float(o[o.find("seq") + 3 :]) * 1e6 - break if not n.investment_periods.empty: periods = n.investment_periods @@ -200,7 +194,7 @@ def add_co2_sequestration_limit(n, config, limit=200): "GlobalConstraint", names, sense=">=", - constant=-limit, + constant=-limit * 1e6, type="operational_limit", carrier_attribute="co2 sequestered", investment_period=periods, @@ -260,7 +254,7 @@ def add_carbon_budget_constraint(n, snapshots): n.model.add_constraints(lhs <= rhs, name=f"GlobalConstraint-{name}") -def add_max_growth(n, config): +def add_max_growth(n): """ Add maximum growth rates for different carriers. """ @@ -393,11 +387,11 @@ def prepare_network( if foresight == "perfect": n = add_land_use_constraint_perfect(n) if snakemake.params["sector"]["limit_max_growth"]["enable"]: - n = add_max_growth(n, config) + n = add_max_growth(n) if n.stores.carrier.eq("co2 sequestered").any(): limit = co2_sequestration_potential - add_co2_sequestration_limit(n, config, limit=limit) + add_co2_sequestration_limit(n, limit=limit) return n @@ -831,30 +825,20 @@ def extra_functionality(n, snapshots): location to add them. The arguments ``opts`` and ``snakemake.config`` are expected to be attached to the network. 
""" - opts = n.opts config = n.config constraints = config["solving"].get("constraints", {}) - if ( - "BAU" in opts or constraints.get("BAU", False) - ) and n.generators.p_nom_extendable.any(): + if constraints["BAU"] and n.generators.p_nom_extendable.any(): add_BAU_constraints(n, config) - if ( - "SAFE" in opts or constraints.get("SAFE", False) - ) and n.generators.p_nom_extendable.any(): + if constraints["SAFE"] and n.generators.p_nom_extendable.any(): add_SAFE_constraints(n, config) - if ( - "CCL" in opts or constraints.get("CCL", False) - ) and n.generators.p_nom_extendable.any(): + if constraints["CCL"] and n.generators.p_nom_extendable.any(): add_CCL_constraints(n, config) reserve = config["electricity"].get("operational_reserve", {}) if reserve.get("activate"): add_operational_reserve_margin(n, snapshots, config) - EQ_config = constraints.get("EQ", False) - EQ_wildcard = get_opt(opts, r"^EQ+[0-9]*\.?[0-9]+(c|)") - EQ_o = EQ_wildcard or EQ_config - if EQ_o: + if EQ_o := constraints["EQ"]: add_EQ_constraints(n, EQ_o.replace("EQ", "")) add_battery_constraints(n) @@ -877,7 +861,7 @@ def extra_functionality(n, snapshots): custom_extra_functionality(n, snapshots, snakemake) -def solve_network(n, config, solving, opts="", **kwargs): +def solve_network(n, config, solving, **kwargs): set_of_options = solving["solver"]["options"] cf_solving = solving["options"] @@ -905,7 +889,6 @@ def solve_network(n, config, solving, opts="", **kwargs): # add to network for extra_functionality n.config = config - n.opts = opts if rolling_horizon: kwargs["horizon"] = cf_solving.get("horizon", 365) @@ -950,15 +933,8 @@ if __name__ == "__main__": planning_horizons="2030", ) configure_logging(snakemake) - if "sector_opts" in snakemake.wildcards.keys(): - update_config_with_sector_opts( - snakemake.config, snakemake.wildcards.sector_opts - ) + update_config_from_wildcards(snakemake.config, snakemake.wildcards) - opts = snakemake.wildcards.opts - if "sector_opts" in 
snakemake.wildcards.keys(): - opts += "-" + snakemake.wildcards.sector_opts - opts = [o for o in opts.split("-") if o != ""] solve_opts = snakemake.params.solving["options"] np.random.seed(solve_opts.get("seed", 123)) @@ -981,7 +957,6 @@ if __name__ == "__main__": n, config=snakemake.config, solving=snakemake.params.solving, - opts=opts, log_fn=snakemake.log.solver, ) diff --git a/scripts/solve_operations_network.py b/scripts/solve_operations_network.py index dca49d02..ec0b3cc1 100644 --- a/scripts/solve_operations_network.py +++ b/scripts/solve_operations_network.py @@ -12,7 +12,7 @@ import logging import numpy as np import pypsa -from _helpers import configure_logging, update_config_with_sector_opts +from _helpers import configure_logging, update_config_from_wildcards from solve_network import prepare_network, solve_network logger = logging.getLogger(__name__) @@ -34,10 +34,8 @@ if __name__ == "__main__": ) configure_logging(snakemake) - update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts) + update_config_from_wildcards(snakemake.config, snakemake.wildcards) - opts = f"{snakemake.wildcards.opts}-{snakemake.wildcards.sector_opts}".split("-") - opts = [o for o in opts if o != ""] solve_opts = snakemake.params.options np.random.seed(solve_opts.get("seed", 123)) @@ -46,9 +44,7 @@ if __name__ == "__main__": n.optimize.fix_optimal_capacities() n = prepare_network(n, solve_opts, config=snakemake.config) - n = solve_network( - n, config=snakemake.config, opts=opts, log_fn=snakemake.log.solver - ) + n = solve_network(n, config=snakemake.config, log_fn=snakemake.log.solver) n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) n.export_to_netcdf(snakemake.output[0]) From 89d0fa24b69f9e3ab572a65adbc73ca00f894160 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sat, 17 Feb 2024 17:38:46 +0100 Subject: [PATCH 42/76] further adjustments to latest master branch commits --- rules/build_electricity.smk | 75 
++++++++++++++++++------------------- rules/build_sector.smk | 27 +++++++------ rules/postprocess.smk | 8 ++-- rules/solve_electricity.smk | 1 + rules/solve_perfect.smk | 4 +- 5 files changed, 61 insertions(+), 54 deletions(-) diff --git a/rules/build_electricity.smk b/rules/build_electricity.smk index 5cf3c099..8a2148fc 100644 --- a/rules/build_electricity.smk +++ b/rules/build_electricity.smk @@ -250,14 +250,15 @@ rule determine_availability_matrix_MD_UA: # Optional input when having Ukraine (UA) or Moldova (MD) in the countries list -if {"UA", "MD"}.intersection(set(config["countries"])): - opt = { - "availability_matrix_MD_UA": resources( - "availability_matrix_MD-UA_{technology}.nc" - ) - } -else: - opt = {} +def input_ua_md_availability_matrix(w): + countries = set(config_provider("countries")(w)) + if {"UA", "MD"}.intersection(countries): + return { + "availability_matrix_MD_UA": resources( + "availability_matrix_MD-UA_{technology}.nc" + ) + } + return {} rule build_renewable_profiles: @@ -265,7 +266,7 @@ rule build_renewable_profiles: snapshots=config_provider("snapshots"), renewable=config_provider("renewable"), input: - **opt, + unpack(input_ua_md_availability_matrix), base_network=resources("networks/base.nc"), corine=ancient("data/bundle/corine/g250_clc06_V18_5.tif"), natura=lambda w: ( @@ -359,30 +360,28 @@ rule build_hydro_profile: "../scripts/build_hydro_profile.py" -if config["lines"]["dynamic_line_rating"]["activate"]: - - rule build_line_rating: - params: - snapshots=config_provider("snapshots"), - input: - base_network=resources("networks/base.nc"), - cutout="cutouts/" - + CDIR - + config_provider("lines", "dynamic_line_rating", "cutout") - + ".nc", - output: - output=resources("networks/line_rating.nc"), - log: - logs("build_line_rating.log"), - benchmark: - benchmarks("build_line_rating") - threads: config["atlite"].get("nprocesses", 4) - resources: - mem_mb=config["atlite"].get("nprocesses", 4) * 1000, - conda: - "../envs/environment.yaml" - 
script: - "../scripts/build_line_rating.py" +rule build_line_rating: + params: + snapshots=config_provider("snapshots"), + input: + base_network=resources("networks/base.nc"), + cutout="cutouts/" + + CDIR + + config_provider("lines", "dynamic_line_rating", "cutout") + + ".nc", + output: + output=resources("networks/line_rating.nc"), + log: + logs("build_line_rating.log"), + benchmark: + benchmarks("build_line_rating") + threads: config["atlite"].get("nprocesses", 4) + resources: + mem_mb=config["atlite"].get("nprocesses", 4) * 1000, + conda: + "../envs/environment.yaml" + script: + "../scripts/build_line_rating.py" def input_profile_tech(w): @@ -420,7 +419,7 @@ rule add_electricity: if config_provider("lines", "dynamic_line_rating", "activate")(w) else resources("networks/base.nc") ), - tech_costs=resources(f"costs_{config['costs']['year']}.csv"), + tech_costs=resources(f"costs_{config_provider('costs', 'year')(w)}.csv"), regions=resources("regions_onshore.geojson"), powerplants=resources("powerplants.csv"), hydro_capacities=ancient("data/bundle/hydro_capacities.csv"), @@ -463,7 +462,7 @@ rule simplify_network: costs=config_provider("costs"), input: network=resources("networks/elec.nc"), - tech_costs=resources(f"costs_{config['costs']['year']}.csv"), + tech_costs=resources(f"costs_{config_provider('costs', 'year')(w)}.csv"), regions_onshore=resources("regions_onshore.geojson"), regions_offshore=resources("regions_offshore.geojson"), output: @@ -510,7 +509,7 @@ rule cluster_network: if config_provider("enable", "custom_busmap", default=False)(w) else [] ), - tech_costs=resources(f"costs_{config['costs']['year']}.csv"), + tech_costs=resources(f"costs_{config_provider('costs', 'year')(w)}.csv"), output: network=resources("networks/elec_s{simpl}_{clusters}.nc"), regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), @@ -537,7 +536,7 @@ rule add_extra_components: costs=config_provider("costs"), input: 
network=resources("networks/elec_s{simpl}_{clusters}.nc"), - tech_costs=resources(f"costs_{config['costs']['year']}.csv"), + tech_costs=resources(f"costs_{config_provider('costs', 'year')(w)}.csv"), output: resources("networks/elec_s{simpl}_{clusters}_ec.nc"), log: @@ -569,7 +568,7 @@ rule prepare_network: autarky=config_provider("electricity", "autarky", default={}), input: resources("networks/elec_s{simpl}_{clusters}_ec.nc"), - tech_costs=resources(f"costs_{config['costs']['year']}.csv"), + tech_costs=resources(f"costs_{config_provider('costs', 'year')(w)}.csv"), co2_price=lambda w: resources("co2_price.csv") if "Ept" in w.opts else [], output: resources("networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"), diff --git a/rules/build_sector.smk b/rules/build_sector.smk index 1bbb626d..0c755834 100644 --- a/rules/build_sector.smk +++ b/rules/build_sector.smk @@ -435,23 +435,26 @@ rule build_industry_sector_ratios: rule build_industry_sector_ratios_intermediate: params: - industry=config["industry"], + industry=config_provider("industry"), input: - industry_sector_ratios=RESOURCES + "industry_sector_ratios.csv", - industrial_energy_demand_per_country_today=RESOURCES - + "industrial_energy_demand_per_country_today.csv", - industrial_production_per_country=RESOURCES - + "industrial_production_per_country.csv", + industry_sector_ratios=resources("industry_sector_ratios.csv"), + industrial_energy_demand_per_country_today=resources( + "industrial_energy_demand_per_country_today.csv" + ), + industrial_production_per_country=resources( + "industrial_production_per_country.csv" + ), output: - industry_sector_ratios=RESOURCES - + "industry_sector_ratios_{planning_horizons}.csv", + industry_sector_ratios=resources( + "industry_sector_ratios_{planning_horizons}.csv" + ), threads: 1 resources: mem_mb=1000, log: - LOGS + "build_industry_sector_ratios_{planning_horizons}.log", + logs("build_industry_sector_ratios_{planning_horizons}.log"), benchmark: - BENCHMARKS + 
"build_industry_sector_ratios_{planning_horizons}" + benchmarks("build_industry_sector_ratios_{planning_horizons}") conda: "../envs/environment.yaml" script: @@ -571,7 +574,9 @@ rule build_industrial_production_per_node: rule build_industrial_energy_demand_per_node: input: - industry_sector_ratios=resources("industry_sector_ratios_{planning_horizons}.csv"), + industry_sector_ratios=resources( + "industry_sector_ratios_{planning_horizons}.csv" + ), industrial_production_per_node=resources( "industrial_production_elec_s{simpl}_{clusters}_{planning_horizons}.csv" ), diff --git a/rules/postprocess.smk b/rules/postprocess.smk index e274c502..dfcf9654 100644 --- a/rules/postprocess.smk +++ b/rules/postprocess.smk @@ -172,9 +172,11 @@ rule make_summary: costs=lambda w: ( resources("costs_{}.csv".format(config_provider("costs", "year")(w))) if config_provider("foresight")(w) == "overnight" - else resources("costs_{}.csv".format( - config_provider("scenario", "planning_horizons", 0) - )) + else resources( + "costs_{}.csv".format( + config_provider("scenario", "planning_horizons", 0)(w) + ) + ) ), ac_plot=expand( resources("maps/power-network-s{simpl}-{clusters}.pdf"), diff --git a/rules/solve_electricity.smk b/rules/solve_electricity.smk index b6a7902e..d3aa8d4c 100644 --- a/rules/solve_electricity.smk +++ b/rules/solve_electricity.smk @@ -2,6 +2,7 @@ # # SPDX-License-Identifier: MIT + rule solve_network: params: solving=config_provider("solving"), diff --git a/rules/solve_perfect.smk b/rules/solve_perfect.smk index f9c5112a..723fd6a7 100644 --- a/rules/solve_perfect.smk +++ b/rules/solve_perfect.smk @@ -58,14 +58,14 @@ def input_network_year(w): rule prepare_perfect_foresight: params: - costs=config["costs"], + costs=config_provider("costs"), input: unpack(input_network_year), brownfield_network=lambda w: ( RESULTS + "prenetworks-brownfield/" + "elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_" - + "{}.nc".format(str(config_provider("scenario", "planning_horizons", 
0))) + + "{}.nc".format(str(config_provider("scenario", "planning_horizons", 0)(w))) ), output: RESULTS From b8d57a6566c410e871fe4ca97096e75dc060fb0e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sat, 17 Feb 2024 16:39:10 +0000 Subject: [PATCH 43/76] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- Snakefile | 1 - rules/solve_myopic.smk | 8 +++++--- rules/solve_perfect.smk | 10 ++++++---- scripts/prepare_network.py | 6 +++++- 4 files changed, 16 insertions(+), 9 deletions(-) diff --git a/Snakefile b/Snakefile index 2c342efc..a59025f9 100644 --- a/Snakefile +++ b/Snakefile @@ -47,7 +47,6 @@ RESULTS = "results/" + RDIR # RESOURCES = "resources/" - localrules: purge, diff --git a/rules/solve_myopic.smk b/rules/solve_myopic.smk index 03cbb705..8574bbb8 100644 --- a/rules/solve_myopic.smk +++ b/rules/solve_myopic.smk @@ -16,9 +16,11 @@ rule add_existing_baseyear: busmap_s=resources("busmap_elec_s{simpl}.csv"), busmap=resources("busmap_elec_s{simpl}_{clusters}.csv"), clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), - costs=lambda w: resources("costs_{}.csv".format( - config_provider("scenario", "planning_horizons", 0)(w) - )), + costs=lambda w: resources( + "costs_{}.csv".format( + config_provider("scenario", "planning_horizons", 0)(w) + ) + ), cop_soil_total=resources("cop_soil_total_elec_s{simpl}_{clusters}.nc"), cop_air_total=resources("cop_air_total_elec_s{simpl}_{clusters}.nc"), existing_heating_distribution=resources( diff --git a/rules/solve_perfect.smk b/rules/solve_perfect.smk index 723fd6a7..bba71b4d 100644 --- a/rules/solve_perfect.smk +++ b/rules/solve_perfect.smk @@ -14,9 +14,9 @@ rule add_existing_baseyear: busmap_s=resources("busmap_elec_s{simpl}.csv"), busmap=resources("busmap_elec_s{simpl}_{clusters}.csv"), clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), - 
costs=resources("costs_{}.csv".format( - config_provider("scenario", "planning_horizons", 0) - )), + costs=resources( + "costs_{}.csv".format(config_provider("scenario", "planning_horizons", 0)) + ), cop_soil_total=resources("cop_soil_total_elec_s{simpl}_{clusters}.nc"), cop_air_total=resources("cop_air_total_elec_s{simpl}_{clusters}.nc"), existing_heating_distribution=resources( @@ -65,7 +65,9 @@ rule prepare_perfect_foresight: RESULTS + "prenetworks-brownfield/" + "elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_" - + "{}.nc".format(str(config_provider("scenario", "planning_horizons", 0)(w))) + + "{}.nc".format( + str(config_provider("scenario", "planning_horizons", 0)(w)) + ) ), output: RESULTS diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index e1b42243..6b782d44 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -62,7 +62,11 @@ import logging import numpy as np import pandas as pd import pypsa -from _helpers import configure_logging, update_config_from_wildcards, set_scenario_config +from _helpers import ( + configure_logging, + set_scenario_config, + update_config_from_wildcards, +) from add_electricity import load_costs, update_transmission_costs from pypsa.descriptors import expand_series From 709b831e06862214c1f7709406dbee91e8007310 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sat, 17 Feb 2024 17:51:26 +0100 Subject: [PATCH 44/76] further adjustments where config_provider needs to be called --- Snakefile | 1 - rules/solve_perfect.smk | 2 +- scripts/plot_summary.py | 6 +++--- scripts/prepare_network.py | 6 +++++- 4 files changed, 9 insertions(+), 6 deletions(-) diff --git a/Snakefile b/Snakefile index 2c342efc..a59025f9 100644 --- a/Snakefile +++ b/Snakefile @@ -47,7 +47,6 @@ RESULTS = "results/" + RDIR # RESOURCES = "resources/" - localrules: purge, diff --git a/rules/solve_perfect.smk b/rules/solve_perfect.smk index 723fd6a7..3bb006c5 100644 --- a/rules/solve_perfect.smk +++ 
b/rules/solve_perfect.smk @@ -15,7 +15,7 @@ rule add_existing_baseyear: busmap=resources("busmap_elec_s{simpl}_{clusters}.csv"), clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), costs=resources("costs_{}.csv".format( - config_provider("scenario", "planning_horizons", 0) + config_provider("scenario", "planning_horizons", 0)(w) )), cop_soil_total=resources("cop_soil_total_elec_s{simpl}_{clusters}.nc"), cop_air_total=resources("cop_air_total_elec_s{simpl}_{clusters}.nc"), diff --git a/scripts/plot_summary.py b/scripts/plot_summary.py index f0d80176..4119a811 100644 --- a/scripts/plot_summary.py +++ b/scripts/plot_summary.py @@ -584,8 +584,8 @@ if __name__ == "__main__": plot_balances() + co2_budget = snakemake.params["co2_budget"] if ( - snakemake.params["co2_budget"].startswith("cb") - or snakemake.params["foresight"] == "perfect" - ): + isinstance(co2_budget, str) and co2_budget.startswith("cb") + ) or snakemake.params["foresight"] == "perfect": plot_carbon_budget_distribution(snakemake.input.eurostat) diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index e1b42243..6b782d44 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -62,7 +62,11 @@ import logging import numpy as np import pandas as pd import pypsa -from _helpers import configure_logging, update_config_from_wildcards, set_scenario_config +from _helpers import ( + configure_logging, + set_scenario_config, + update_config_from_wildcards, +) from add_electricity import load_costs, update_transmission_costs from pypsa.descriptors import expand_series From de244414df2f36298ed5f3b36adab2b967940b1b Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sat, 17 Feb 2024 16:56:01 +0000 Subject: [PATCH 45/76] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- rules/solve_perfect.smk | 8 +++++--- 1 file changed, 5 insertions(+), 3 
deletions(-) diff --git a/rules/solve_perfect.smk b/rules/solve_perfect.smk index a76dfb39..639bcae8 100644 --- a/rules/solve_perfect.smk +++ b/rules/solve_perfect.smk @@ -14,9 +14,11 @@ rule add_existing_baseyear: busmap_s=resources("busmap_elec_s{simpl}.csv"), busmap=resources("busmap_elec_s{simpl}_{clusters}.csv"), clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), - costs=lambda w: resources("costs_{}.csv".format( - config_provider("scenario", "planning_horizons", 0)(w) - )), + costs=lambda w: resources( + "costs_{}.csv".format( + config_provider("scenario", "planning_horizons", 0)(w) + ) + ), cop_soil_total=resources("cop_soil_total_elec_s{simpl}_{clusters}.nc"), cop_air_total=resources("cop_air_total_elec_s{simpl}_{clusters}.nc"), existing_heating_distribution=resources( From db7c98395023e67097aecbabae0de3972a5e176b Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sat, 17 Feb 2024 18:14:18 +0100 Subject: [PATCH 46/76] mock_snakemake: remove dangerous empty list as default argument --- Snakefile | 9 --------- scripts/_helpers.py | 8 +++++--- 2 files changed, 5 insertions(+), 12 deletions(-) diff --git a/Snakefile b/Snakefile index a59025f9..0bb1b665 100644 --- a/Snakefile +++ b/Snakefile @@ -30,7 +30,6 @@ if run["name"]: else: RDIR = "" -# for possibly shared resources logs = path_provider("logs/", RDIR, run["shared_resources"]) benchmarks = path_provider("benchmarks/", RDIR, run["shared_resources"]) resources = path_provider("resources/", RDIR, run["shared_resources"]) @@ -38,14 +37,6 @@ resources = path_provider("resources/", RDIR, run["shared_resources"]) CDIR = "" if run["shared_cutouts"] else RDIR RESULTS = "results/" + RDIR -# TODO: this needs to be aligned with new scenario management -# if not (shared_resources := run.get("shared_resources")): -# RESOURCES = "resources/" + RDIR -# elif isinstance(shared_resources, str): -# RESOURCES = "resources/" + shared_resources + "/" -# else: -# RESOURCES = "resources/" - localrules:
purge, diff --git a/scripts/_helpers.py b/scripts/_helpers.py index f1c659ec..06e68325 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -321,7 +321,7 @@ def progress_retrieve(url, file, disable=False): def mock_snakemake( rulename, root_dir=None, - configfiles=[], + configfiles=None, submodule_dir="workflow/submodules/pypsa-eur", **wildcards, ): @@ -375,7 +375,9 @@ def mock_snakemake( if os.path.exists(p): snakefile = p break - if isinstance(configfiles, str): + if configfiles is None: + configfiles = [] + elif isinstance(configfiles, str): configfiles = [configfiles] workflow = sm.Workflow( @@ -397,7 +399,7 @@ def mock_snakemake( def make_accessable(*ios): for io in ios: - for i in range(len(io)): + for i, _ in enumerate(io): io[i] = os.path.abspath(io[i]) make_accessable(job.input, job.output, job.log) From 84c3852c2ea0a3c55d9df5f652afd8a3a5cb32d7 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sat, 17 Feb 2024 18:15:43 +0100 Subject: [PATCH 47/76] _helpers: remove resource folder separation by wildcard This was not working reliably as the number of input and output wildcards could vary, which the function did not acknowledge --- scripts/_helpers.py | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/scripts/_helpers.py b/scripts/_helpers.py index 06e68325..98e5aaae 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -39,10 +39,9 @@ def get_run_path(fn, dir, rdir, shared_resources): The base directory. rdir : str Relative directory for non-shared resources. - shared_resources : str, list, or bool + shared_resources : str or bool Specifies which resources should be shared. - - If string or list, assumed to be superset of wildcards for sharing. - - If "base", special handling for shared "base" resources. + - If string is "base", special handling for shared "base" resources (see notes). - If boolean, directly specifies if the resource is shared. 
Returns @@ -54,24 +53,23 @@ def get_run_path(fn, dir, rdir, shared_resources): ----- Special case for "base" allows no wildcards other than "technology", "year" and "scope" and excludes filenames starting with "networks/elec" or - "add_electricity". + "add_electricity". All other resources are shared. """ - pattern = r"\{([^{}]+)\}" - existing_wildcards = set(re.findall(pattern, fn)) if shared_resources == "base": - # special case for shared "base" resources + pattern = r"\{([^{}]+)\}" + existing_wildcards = set(re.findall(pattern, fn)) irrelevant_wildcards = {"technology", "year", "scope"} - no_relevant_wildcards = not len(existing_wildcards - irrelevant_wildcards) + no_relevant_wildcards = not (existing_wildcards - irrelevant_wildcards) no_elec_rule = not fn.startswith("networks/elec") and not fn.startswith( "add_electricity" ) is_shared = no_relevant_wildcards and no_elec_rule - elif isinstance(shared_resources, (str, list)): - if isinstance(shared_resources, str): - shared_resources = [shared_resources] - is_shared = (existing_wildcards).issubset(shared_resources) - else: + elif isinstance(shared_resources, bool): is_shared = shared_resources + else: + raise ValueError( + "shared_resources must be a boolean or 'base' for special handling." + ) if is_shared: return f"{dir}{fn}" From 95853bb59d492cba345d9faeda902949c4c2a48a Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sat, 17 Feb 2024 18:27:59 +0100 Subject: [PATCH 48/76] _helpers: allow specifying dedicated shared_resources folder --- doc/configtables/run.csv | 2 +- scripts/_helpers.py | 6 +++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/doc/configtables/run.csv b/doc/configtables/run.csv index e2a81e0b..f619d8bf 100644 --- a/doc/configtables/run.csv +++ b/doc/configtables/run.csv @@ -4,5 +4,5 @@ scenarios,,, -- enable,bool,"{true, false}","Switch to select whether workflow should generate scenarios based on ``file``." -- file,str,,"Path to the scenario yaml file. 
The scenario file contains config overrides for each scenario. In order to be taken account, ``run: scenarios`` has to be set to ``true`` and ``run: name`` has to be a subset of top level keys given in the scenario file. In order to automatically create a `scenario.yaml` file based on a combination of settings, alter and use the ``config/create_scenarios.py`` script in the ``config`` directory." disable_progrssbar,bool,"{true, false}","Switch to select whether progressbar should be disabled." -shared_resources,bool/str/list,,"Switch to select whether resources should be shared across runs. If a string or list is passed, it is assumed to be wildcard(s) which indicates up to which set of wildcards the resource folder should be shared. If set to 'base', only resources before creating the elec.nc file are shared." +shared_resources,bool/str,,"Switch to select whether resources should be shared across runs. If a string is passed, this is used as a subdirectory name for shared resources. If set to 'base', only resources before creating the elec.nc file are shared." shared_cutouts,bool,"{true, false}","Switch to select whether cutouts should be shared across runs." diff --git a/scripts/_helpers.py b/scripts/_helpers.py index 98e5aaae..b2b1b341 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -42,6 +42,7 @@ def get_run_path(fn, dir, rdir, shared_resources): shared_resources : str or bool Specifies which resources should be shared. - If string is "base", special handling for shared "base" resources (see notes). + - If random string other than "base", this folder is used instead of the `rdir` keyword. - If boolean, directly specifies if the resource is shared. 
Returns @@ -59,11 +60,14 @@ def get_run_path(fn, dir, rdir, shared_resources): pattern = r"\{([^{}]+)\}" existing_wildcards = set(re.findall(pattern, fn)) irrelevant_wildcards = {"technology", "year", "scope"} - no_relevant_wildcards = not (existing_wildcards - irrelevant_wildcards) + no_relevant_wildcards = not existing_wildcards - irrelevant_wildcards no_elec_rule = not fn.startswith("networks/elec") and not fn.startswith( "add_electricity" ) is_shared = no_relevant_wildcards and no_elec_rule + elif isinstance(shared_resources, str): + rdir = shared_resources + "/" + is_shared = True elif isinstance(shared_resources, bool): is_shared = shared_resources else: From e898781fda9fa45970a9c3df301a9cf5a5a4112e Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sat, 17 Feb 2024 18:35:26 +0100 Subject: [PATCH 49/76] use wildcard input functions where applicable --- rules/build_electricity.smk | 18 +++++++++--------- rules/retrieve.smk | 2 +- scripts/_helpers.py | 2 +- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/rules/build_electricity.smk b/rules/build_electricity.smk index 8a2148fc..f0338492 100644 --- a/rules/build_electricity.smk +++ b/rules/build_electricity.smk @@ -276,7 +276,7 @@ rule build_renewable_profiles: ), luisa=lambda w: ( "data/LUISA_basemap_020321_50m.tif" - if config_provider("renewable", w.technology, "luisa")(w) + if config_provider("renewable", w.technology, "luisa") else [] ), gebco=ancient( @@ -365,9 +365,9 @@ rule build_line_rating: snapshots=config_provider("snapshots"), input: base_network=resources("networks/base.nc"), - cutout="cutouts/" + cutout=lambda w: "cutouts/" + CDIR - + config_provider("lines", "dynamic_line_rating", "cutout") + + config_provider("lines", "dynamic_line_rating", "cutout")(w) + ".nc", output: output=resources("networks/line_rating.nc"), @@ -419,7 +419,7 @@ rule add_electricity: if config_provider("lines", "dynamic_line_rating", "activate")(w) else resources("networks/base.nc") ), - 
tech_costs=resources(f"costs_{config_provider('costs', 'year')(w)}.csv"), + tech_costs=lambda w: resources(f"costs_{config_provider('costs', 'year')(w)}.csv"), regions=resources("regions_onshore.geojson"), powerplants=resources("powerplants.csv"), hydro_capacities=ancient("data/bundle/hydro_capacities.csv"), @@ -462,7 +462,7 @@ rule simplify_network: costs=config_provider("costs"), input: network=resources("networks/elec.nc"), - tech_costs=resources(f"costs_{config_provider('costs', 'year')(w)}.csv"), + tech_costs=lambda w: resources(f"costs_{config_provider('costs', 'year')(w)}.csv"), regions_onshore=resources("regions_onshore.geojson"), regions_offshore=resources("regions_offshore.geojson"), output: @@ -509,7 +509,7 @@ rule cluster_network: if config_provider("enable", "custom_busmap", default=False)(w) else [] ), - tech_costs=resources(f"costs_{config_provider('costs', 'year')(w)}.csv"), + tech_costs=lambda w: resources(f"costs_{config_provider('costs', 'year')(w)}.csv"), output: network=resources("networks/elec_s{simpl}_{clusters}.nc"), regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), @@ -536,7 +536,7 @@ rule add_extra_components: costs=config_provider("costs"), input: network=resources("networks/elec_s{simpl}_{clusters}.nc"), - tech_costs=resources(f"costs_{config_provider('costs', 'year')(w)}.csv"), + tech_costs=lambda w: resources(f"costs_{config_provider('costs', 'year')(w)}.csv"), output: resources("networks/elec_s{simpl}_{clusters}_ec.nc"), log: @@ -564,11 +564,11 @@ rule prepare_network: gaslimit=config_provider("electricity", "gaslimit"), max_hours=config_provider("electricity", "max_hours"), costs=config_provider("costs"), - adjustments=config_provider("adjustments", "electricity", + adjustments=config_provider("adjustments", "electricity"), autarky=config_provider("electricity", "autarky", default={}), input: resources("networks/elec_s{simpl}_{clusters}_ec.nc"), - tech_costs=resources(f"costs_{config_provider('costs', 
'year')(w)}.csv"), + tech_costs=lambda w: resources(f"costs_{config_provider('costs', 'year')(w)}.csv"), co2_price=lambda w: resources("co2_price.csv") if "Ept" in w.opts else [], output: resources("networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"), diff --git a/rules/retrieve.smk b/rules/retrieve.smk index 9e9ff1f0..d6fcbb26 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -84,7 +84,7 @@ if config["enable"]["retrieve"] and config["enable"].get("retrieve_cost_data", T rule retrieve_cost_data: params: - version=lambda w: config_provider("costs", "version")(w), + version=config_provider("costs", "version"), output: resources("costs_{year}.csv"), log: diff --git a/scripts/_helpers.py b/scripts/_helpers.py index b2b1b341..5fb6a540 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -72,7 +72,7 @@ def get_run_path(fn, dir, rdir, shared_resources): is_shared = shared_resources else: raise ValueError( - "shared_resources must be a boolean or 'base' for special handling." + "shared_resources must be a boolean, str, or 'base' for special handling." 
) if is_shared: From cc3d7a4cbb29d6ec1f3f70cfcaabfe8a053fd2ba Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sat, 17 Feb 2024 17:35:52 +0000 Subject: [PATCH 50/76] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- rules/build_electricity.smk | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/rules/build_electricity.smk b/rules/build_electricity.smk index f0338492..43be60fc 100644 --- a/rules/build_electricity.smk +++ b/rules/build_electricity.smk @@ -419,7 +419,9 @@ rule add_electricity: if config_provider("lines", "dynamic_line_rating", "activate")(w) else resources("networks/base.nc") ), - tech_costs=lambda w: resources(f"costs_{config_provider('costs', 'year')(w)}.csv"), + tech_costs=lambda w: resources( + f"costs_{config_provider('costs', 'year')(w)}.csv" + ), regions=resources("regions_onshore.geojson"), powerplants=resources("powerplants.csv"), hydro_capacities=ancient("data/bundle/hydro_capacities.csv"), @@ -462,7 +464,9 @@ rule simplify_network: costs=config_provider("costs"), input: network=resources("networks/elec.nc"), - tech_costs=lambda w: resources(f"costs_{config_provider('costs', 'year')(w)}.csv"), + tech_costs=lambda w: resources( + f"costs_{config_provider('costs', 'year')(w)}.csv" + ), regions_onshore=resources("regions_onshore.geojson"), regions_offshore=resources("regions_offshore.geojson"), output: @@ -509,7 +513,9 @@ rule cluster_network: if config_provider("enable", "custom_busmap", default=False)(w) else [] ), - tech_costs=lambda w: resources(f"costs_{config_provider('costs', 'year')(w)}.csv"), + tech_costs=lambda w: resources( + f"costs_{config_provider('costs', 'year')(w)}.csv" + ), output: network=resources("networks/elec_s{simpl}_{clusters}.nc"), regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), @@ -536,7 +542,9 @@ rule add_extra_components: 
costs=config_provider("costs"), input: network=resources("networks/elec_s{simpl}_{clusters}.nc"), - tech_costs=lambda w: resources(f"costs_{config_provider('costs', 'year')(w)}.csv"), + tech_costs=lambda w: resources( + f"costs_{config_provider('costs', 'year')(w)}.csv" + ), output: resources("networks/elec_s{simpl}_{clusters}_ec.nc"), log: @@ -568,7 +576,9 @@ rule prepare_network: autarky=config_provider("electricity", "autarky", default={}), input: resources("networks/elec_s{simpl}_{clusters}_ec.nc"), - tech_costs=lambda w: resources(f"costs_{config_provider('costs', 'year')(w)}.csv"), + tech_costs=lambda w: resources( + f"costs_{config_provider('costs', 'year')(w)}.csv" + ), co2_price=lambda w: resources("co2_price.csv") if "Ept" in w.opts else [], output: resources("networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"), From 16793d1335f92b4b578e0f956aeac54add31c86d Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sat, 17 Feb 2024 20:42:37 +0100 Subject: [PATCH 51/76] Snakefile: don't use {run} wildcard if just specifying a run name --- Snakefile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Snakefile b/Snakefile index 0bb1b665..fb610c59 100644 --- a/Snakefile +++ b/Snakefile @@ -26,7 +26,9 @@ if run["name"]: if scenarios.get("enable"): fn = Path(scenarios["file"]) scenarios = yaml.safe_load(fn.read_text()) - RDIR = "{run}/" + RDIR = "{run}/" + else: + RDIR = run["name"] + "/" else: RDIR = "" From d335275e0fa32db9bea091a2b4966398dbfbbe50 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sat, 17 Feb 2024 20:43:11 +0100 Subject: [PATCH 52/76] build_electricity: add missing wildcard call --- rules/build_electricity.smk | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rules/build_electricity.smk b/rules/build_electricity.smk index 43be60fc..8e62259f 100644 --- a/rules/build_electricity.smk +++ b/rules/build_electricity.smk @@ -276,7 +276,7 @@ rule build_renewable_profiles: ), luisa=lambda w: ( 
"data/LUISA_basemap_020321_50m.tif" - if config_provider("renewable", w.technology, "luisa") + if config_provider("renewable", w.technology, "luisa")(w) else [] ), gebco=ancient( From 388ea658471c011e685069b1c36139a694b35c07 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sat, 17 Feb 2024 20:44:22 +0100 Subject: [PATCH 53/76] common.smk: use update_config to simplify merge_configs --- rules/common.smk | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/rules/common.smk b/rules/common.smk index 27f82364..a39c2d8d 100644 --- a/rules/common.smk +++ b/rules/common.smk @@ -10,7 +10,7 @@ import os, sys, glob path = workflow.source_path("../scripts/_helpers.py") sys.path.insert(0, os.path.dirname(path)) -from _helpers import validate_checksum +from snakemake.utils import update_config def get_config(config, keys, default=None): @@ -29,11 +29,7 @@ def get_config(config, keys, default=None): def merge_configs(base_config, scenario_config): """Merge base config with a specific scenario without modifying the original.""" merged = copy.deepcopy(base_config) - for key, value in scenario_config.items(): - if key in merged and isinstance(merged[key], dict): - merged[key] = merge_configs(merged[key], value) - else: - merged[key] = value + update_config(merged, scenario_config) return merged From 9f643ea429e8158e1220b5464ec8c9915eb42bf8 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sat, 17 Feb 2024 20:45:23 +0100 Subject: [PATCH 54/76] _helpers: add inplace keyword argument to update_config_from_wildcards --- rules/common.smk | 12 ++++++++++-- scripts/_helpers.py | 11 ++++++++--- 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/rules/common.smk b/rules/common.smk index a39c2d8d..ca339cec 100644 --- a/rules/common.smk +++ b/rules/common.smk @@ -10,6 +10,7 @@ import os, sys, glob path = workflow.source_path("../scripts/_helpers.py") sys.path.insert(0, os.path.dirname(path)) +from _helpers import validate_checksum, 
update_config_from_wildcards from snakemake.utils import update_config @@ -41,7 +42,10 @@ def scenario_config(scenario_name): def static_getter(wildcards, keys, default): """Getter function for static config values.""" - return get_config(config, keys, default) + config_with_wildcards = update_config_from_wildcards( + config, wildcards, inplace=False + ) + return get_config(config_with_wildcards, keys, default) def dynamic_getter(wildcards, keys, default): @@ -53,7 +57,11 @@ def dynamic_getter(wildcards, keys, default): raise ValueError( f"Scenario {scenario_name} not found in file {config['run']['scenario']['file']}." ) - return get_config(scenario_config(scenario_name), keys, default) + config_with_scenario = scenario_config(scenario_name) + config_with_wildcards = update_config_from_wildcards( + config_with_scenario, wildcards, inplace=False + ) + return get_config(config_with_wildcards, keys, default) def config_provider(*keys, default=None): diff --git a/scripts/_helpers.py b/scripts/_helpers.py index 5fb6a540..ed60fb5b 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -8,6 +8,7 @@ import hashlib import logging import os import re +import copy import urllib from functools import partial from pathlib import Path @@ -469,13 +470,14 @@ def parse(infix): return {infix.pop(0): parse(infix)} -def update_config_from_wildcards(config, w): +def update_config_from_wildcards(config, w, inplace=True): """ Parses configuration settings from wildcards and updates the config. - - - TODO: Should be run inside config_provider function. 
""" + if not inplace: + config = copy.deepcopy(config) + if w.get("opts"): opts = w.opts.split("-") @@ -641,6 +643,9 @@ def update_config_from_wildcards(config, w): infix = o.split("+")[1:] update_config(config, parse(infix)) + if not inplace: + return config + def get_checksum_from_zenodo(file_url): parts = file_url.split("/") From 68f2e086c5c0f896de630d0327670d55ab4148e5 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sat, 17 Feb 2024 20:48:17 +0100 Subject: [PATCH 55/76] postprocess.smk: add missing wildcard call --- rules/postprocess.smk | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/rules/postprocess.smk b/rules/postprocess.smk index dfcf9654..fe5a8c62 100644 --- a/rules/postprocess.smk +++ b/rules/postprocess.smk @@ -189,21 +189,21 @@ rule make_summary: **config["scenario"], run=config["run"]["name"], ), - h2_plot=expand( + h2_plot=lambda w: expand( ( RESULTS + "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-h2_network_{planning_horizons}.pdf" - if config_provider("sector", "H2_network") + if config_provider("sector", "H2_network")(w) else [] ), **config["scenario"], run=config["run"]["name"], ), - ch4_plot=expand( + ch4_plot=lambda w: expand( ( RESULTS + "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-ch4_network_{planning_horizons}.pdf" - if config_provider("sector", "gas_network") + if config_provider("sector", "gas_network")(w) else [] ), **config["scenario"], From b543bf37c99bd757319869211cccf22b89b22154 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sat, 17 Feb 2024 20:48:46 +0100 Subject: [PATCH 56/76] retrieve.smk: do not protect gas network data --- rules/retrieve.smk | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/rules/retrieve.smk b/rules/retrieve.smk index d6fcbb26..766e7cf9 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -169,9 +169,7 @@ if config["enable"]["retrieve"]: rule retrieve_gas_infrastructure_data: output: - protected( - 
expand("data/gas_network/scigrid-gas/data/{files}", files=datafiles) - ), + expand("data/gas_network/scigrid-gas/data/{files}", files=datafiles), log: "logs/retrieve_gas_infrastructure_data.log", retries: 2 From 945180f153055036bcc67f633f39042299c76aa8 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sat, 17 Feb 2024 23:36:46 +0100 Subject: [PATCH 57/76] allow_missing in collection rules, fixes for myopic, wildcard constraints without function --- config/config.default.yaml | 2 +- rules/build_sector.smk | 12 ++++---- rules/common.smk | 7 +++-- rules/postprocess.smk | 45 +++++++++++++--------------- rules/solve_electricity.smk | 23 +++++++------- rules/solve_myopic.smk | 40 +++++++++++++------------ rules/solve_overnight.smk | 5 ++-- rules/solve_perfect.smk | 13 ++++---- scripts/base_network.py | 2 +- scripts/build_electricity_demand.py | 2 +- scripts/prepare_perfect_foresight.py | 2 +- 11 files changed, 79 insertions(+), 74 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index 403b7269..1e7c87ce 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -25,7 +25,7 @@ run: enable: false file: config/scenarios.yaml disable_progressbar: false - shared_resources: false # TODO: splitting resources by wildcard does not work well, neither does true, only base works well + shared_resources: false shared_cutouts: true # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#foresight diff --git a/rules/build_sector.smk b/rules/build_sector.smk index 0c755834..9e8bcb90 100644 --- a/rules/build_sector.smk +++ b/rules/build_sector.smk @@ -791,8 +791,6 @@ rule build_existing_heating_distribution: existing_heating_distribution=resources( "existing_heating_distribution_elec_s{simpl}_{clusters}_{planning_horizons}.csv" ), - wildcard_constraints: - planning_horizons=config_provider("scenario", "planning_horizons", 0), #only applies to baseyear threads: 1 resources: mem_mb=2000, @@ -934,12 +932,12 @@ rule 
prepare_sector_network: resources: mem_mb=2000, log: - logs( - "prepare_sector_network_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log" - ), + RESULTS + + "logs/prepare_sector_network_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log", benchmark: - benchmarks( - "prepare_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" + ( + RESULTS + + "benchmarks/prepare_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" ) conda: "../envs/environment.yaml" diff --git a/rules/common.smk b/rules/common.smk index ca339cec..9c41b57b 100644 --- a/rules/common.smk +++ b/rules/common.smk @@ -136,10 +136,11 @@ def input_eurostat(w): return f"data/bundle-sector/eurostat-energy_balances-june_{report_year}_edition" -def solved_previous_horizon(wildcards): - planning_horizons = config_provider("scenario", "planning_horizons") - i = planning_horizons.index(int(wildcards.planning_horizons)) +def solved_previous_horizon(w): + planning_horizons = config_provider("scenario", "planning_horizons")(w) + i = planning_horizons.index(int(w.planning_horizons)) planning_horizon_p = str(planning_horizons[i - 1]) + return ( RESULTS + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_" diff --git a/rules/postprocess.smk b/rules/postprocess.smk index fe5a8c62..dc08699f 100644 --- a/rules/postprocess.smk +++ b/rules/postprocess.smk @@ -43,12 +43,12 @@ if config["foresight"] != "perfect": resources: mem_mb=10000, log: - logs( - "plot_power_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log" - ), + RESULTS + + "logs/plot_power_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log", benchmark: - benchmarks( - "plot_power_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" + ( + RESULTS + + 
"benchmarks/plot_power_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" ) conda: "../envs/environment.yaml" @@ -70,12 +70,12 @@ if config["foresight"] != "perfect": resources: mem_mb=10000, log: - logs( - "plot_hydrogen_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log" - ), + RESULTS + + "logs/plot_hydrogen_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log", benchmark: - benchmarks( - "plot_hydrogen_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" + ( + RESULTS + + "benchmarks/plot_hydrogen_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" ) conda: "../envs/environment.yaml" @@ -96,12 +96,12 @@ if config["foresight"] != "perfect": resources: mem_mb=10000, log: - logs( - "plot_gas_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log" - ), + RESULTS + + "logs/plot_gas_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log", benchmark: - benchmarks( - "plot_gas_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" + ( + RESULTS + + "benchmarks/plot_gas_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" ) conda: "../envs/environment.yaml" @@ -131,10 +131,6 @@ if config["foresight"] == "perfect": threads: 2 resources: mem_mb=10000, - benchmark: - benchmarks( - "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years_benchmark" - ) conda: "../envs/environment.yaml" script: @@ -167,7 +163,7 @@ rule make_summary: RESULTS + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", **config["scenario"], - run=config["run"]["name"], + allow_missing=True, ), costs=lambda w: ( resources("costs_{}.csv".format(config_provider("costs", "year")(w))) @@ -181,13 +177,13 @@ ac_plot=expand( 
resources("maps/power-network-s{simpl}-{clusters}.pdf"), **config["scenario"], - run=config["run"]["name"], + allow_missing=True, ), costs_plot=expand( RESULTS + "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf", **config["scenario"], - run=config["run"]["name"], + allow_missing=True, ), h2_plot=lambda w: expand( ( @@ -197,7 +193,7 @@ rule make_summary: else [] ), **config["scenario"], - run=config["run"]["name"], + allow_missing=True, ), ch4_plot=lambda w: expand( ( @@ -207,7 +203,7 @@ rule make_summary: else [] ), **config["scenario"], - run=config["run"]["name"], + allow_missing=True, ), output: nodal_costs=RESULTS + "csvs/nodal_costs.csv", @@ -245,6 +241,7 @@ rule plot_summary: plotting=config_provider("plotting"), foresight=config_provider("foresight"), co2_budget=config_provider("co2_budget"), + sector=config_provider("sector"), RDIR=RDIR, input: costs=RESULTS + "csvs/costs.csv", diff --git a/rules/solve_electricity.smk b/rules/solve_electricity.smk index d3aa8d4c..6ddbc415 100644 --- a/rules/solve_electricity.smk +++ b/rules/solve_electricity.smk @@ -19,11 +19,13 @@ rule solve_network: network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", log: solver=normpath( - logs("solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_solver.log") + RESULTS + + "logs/solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_solver.log" ), - python=logs("solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_python.log"), + python=RESULTS + + "logs/solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_python.log", benchmark: - benchmarks("solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}") + RESULTS + "benchmarks/solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}" threads: solver_threads resources: mem_mb=memory, @@ -45,15 +47,16 @@ rule solve_operations_network: network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op.nc", log: solver=normpath( - logs( - 
"solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_solver.log" - ) - ), - python=logs( - "solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_python.log" + RESULTS + + "logs/solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_solver.log" ), + python=RESULTS + + "logs/solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_python.log", benchmark: - benchmarks("solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}") + ( + RESULTS + + "benchmarks/solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}" + ) threads: 4 resources: mem_mb=(lambda w: 10000 + 372 * int(w.clusters)), diff --git a/rules/solve_myopic.smk b/rules/solve_myopic.smk index 8574bbb8..5eb30233 100644 --- a/rules/solve_myopic.smk +++ b/rules/solve_myopic.smk @@ -33,17 +33,20 @@ rule add_existing_baseyear: RESULTS + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", wildcard_constraints: - planning_horizons=config_provider("scenario", "planning_horizons", 0), #only applies to baseyear + # TODO: The first planning_horizon needs to be aligned across scenarios + # snakemake does not support passing functions to wildcard_constraints + # reference: https://github.com/snakemake/snakemake/issues/2703 + planning_horizons=config["scenario"]["planning_horizons"][0], #only applies to baseyear threads: 1 resources: mem_mb=2000, log: - logs( - "add_existing_baseyear_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log" - ), + RESULTS + + "logs/add_existing_baseyear_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log", benchmark: - benchmarks( - "add_existing_baseyear/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" + ( + RESULTS + + "benchmarks/add_existing_baseyear/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" ) conda: "../envs/environment.yaml" @@ -85,12 +88,12 @@ rule 
add_brownfield: resources: mem_mb=10000, log: - logs( - "add_brownfield_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log" - ), + RESULTS + + "logs/add_brownfield_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log", benchmark: - benchmarks( - "add_brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" + ( + RESULTS + + "benchmarks/add_brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" ) conda: "../envs/environment.yaml" @@ -121,19 +124,18 @@ rule solve_sector_network_myopic: shadow: "shallow" log: - solver=logs( - "elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_solver.log" - ), - python=logs( - "elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_python.log" - ), + solver=RESULTS + + "logs/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_solver.log", + python=RESULTS + + "logs/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_python.log", threads: solver_threads resources: mem_mb=config_provider("solving", "mem"), walltime=config_provider("solving", "walltime", default="12:00:00"), benchmark: - benchmarks( - "solve_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" + ( + RESULTS + + "benchmarks/solve_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" ) conda: "../envs/environment.yaml" diff --git a/rules/solve_overnight.smk b/rules/solve_overnight.smk index 64ad007c..69e2f364 100644 --- a/rules/solve_overnight.smk +++ b/rules/solve_overnight.smk @@ -33,8 +33,9 @@ rule solve_sector_network: mem_mb=config_provider("solving", "mem"), walltime=config_provider("solving", "walltime", default="12:00:00"), benchmark: - benchmarks( - "solve_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" + ( + RESULTS + + 
"benchmarks/solve_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}" ) conda: "../envs/environment.yaml" diff --git a/rules/solve_perfect.smk b/rules/solve_perfect.smk index 639bcae8..d3badcb0 100644 --- a/rules/solve_perfect.smk +++ b/rules/solve_perfect.smk @@ -32,7 +32,7 @@ rule add_existing_baseyear: RESULTS + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", wildcard_constraints: - planning_horizons=config_provider("scenario", "planning_horizons", 0), #only applies to baseyear + planning_horizons=config["scenario"]["planning_horizons"][0], #only applies to baseyear threads: 1 resources: mem_mb=2000, @@ -53,7 +53,8 @@ rule add_existing_baseyear: def input_network_year(w): return { f"network_{year}": RESULTS - + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{year}.nc" + + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}" + + f"_{year}.nc" for year in config_provider("scenario", "planning_horizons")(w)[1:] } @@ -61,6 +62,7 @@ def input_network_year(w): rule prepare_perfect_foresight: params: costs=config_provider("costs"), + time_resolution=config_provider("clustering", "temporal", "sector"), input: unpack(input_network_year), brownfield_network=lambda w: ( @@ -98,7 +100,7 @@ rule solve_sector_network_perfect: sector=config_provider("sector"), planning_horizons=config_provider("scenario", "planning_horizons"), co2_sequestration_potential=config_provider( - "sector", "co2_sequestration_potential", 200 + "sector", "co2_sequestration_potential", default=200 ), custom_extra_functionality=input_custom_extra_functionality, input: @@ -122,8 +124,9 @@ rule solve_sector_network_perfect: memory=RESULTS + "logs/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years_memory.log", benchmark: - benchmarks( - "solve_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years}" + ( + RESULTS + + 
"benchmarks/solve_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years" ) conda: "../envs/environment.yaml" diff --git a/scripts/base_network.py b/scripts/base_network.py index d83e0588..515ae481 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -767,7 +767,7 @@ if __name__ == "__main__": if "snakemake" not in globals(): from _helpers import mock_snakemake - snakemake = mock_snakemake("base_network", run="network2019") + snakemake = mock_snakemake("base_network") configure_logging(snakemake) set_scenario_config(snakemake) diff --git a/scripts/build_electricity_demand.py b/scripts/build_electricity_demand.py index 4415c835..47befb8a 100755 --- a/scripts/build_electricity_demand.py +++ b/scripts/build_electricity_demand.py @@ -258,7 +258,7 @@ if __name__ == "__main__": if "snakemake" not in globals(): from _helpers import mock_snakemake - snakemake = mock_snakemake("build_electricity_demand", run="network2019") + snakemake = mock_snakemake("build_electricity_demand") configure_logging(snakemake) set_scenario_config(snakemake) diff --git a/scripts/prepare_perfect_foresight.py b/scripts/prepare_perfect_foresight.py index 2f2ff608..7f312a8f 100644 --- a/scripts/prepare_perfect_foresight.py +++ b/scripts/prepare_perfect_foresight.py @@ -517,7 +517,7 @@ if __name__ == "__main__": # temporal aggregate solver_name = snakemake.config["solving"]["solver"]["name"] - segments = snakemake.params["clustering"]["temporal"]["resolution_sector"] + segments = snakemake.params.time_resolution if isinstance(segments, (int, float)): n = apply_time_segmentation_perfect(n, segments, solver_name=solver_name) From 770d3526520ec1f3e814d7b694344d9d17888989 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sat, 17 Feb 2024 23:37:31 +0100 Subject: [PATCH 58/76] handle that time_resolution can be float --- scripts/prepare_network.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/scripts/prepare_network.py 
b/scripts/prepare_network.py index 6b782d44..72f4963a 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -291,7 +291,7 @@ def set_line_nom_max( n.lines["s_nom_max"] = n.lines["s_nom"] + s_nom_max_ext if np.isfinite(p_nom_max_ext) and p_nom_max_ext > 0: - logger.info(f"Limiting line extensions to {p_nom_max_ext} MW") + logger.info(f"Limiting link extensions to {p_nom_max_ext} MW") hvdc = n.links.index[n.links.carrier == "DC"] n.links.loc[hvdc, "p_nom_max"] = n.links.loc[hvdc, "p_nom"] + p_nom_max_ext @@ -322,13 +322,16 @@ if __name__ == "__main__": set_line_s_max_pu(n, snakemake.params.lines["s_max_pu"]) # temporal averaging - if nhours := snakemake.params.time_resolution: - n = average_every_nhours(n, nhours) + time_resolution = snakemake.params.time_resolution + is_string = isinstance(time_resolution, str) + if is_string and time_resolution.lower().endswith("h"): + n = average_every_nhours(n, time_resolution) # segments with package tsam - if time_seg := snakemake.params.time_resolution: + if is_string and time_resolution.lower().endswith("seg"): solver_name = snakemake.config["solving"]["solver"]["name"] - n = apply_time_segmentation(n, time_seg.replace("seg", ""), solver_name) + segments = int(time_resolution.replace("seg", "")) + n = apply_time_segmentation(n, segments, solver_name) if snakemake.params.co2limit_enable: add_co2limit(n, snakemake.params.co2limit, Nyears) From cf7018c8c44e56d0dfcfb295e5a50d1c5aeaf578 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sat, 17 Feb 2024 23:38:00 +0100 Subject: [PATCH 59/76] plot_summary: remove leftover references to opts wildcard --- scripts/plot_summary.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/scripts/plot_summary.py b/scripts/plot_summary.py index 4119a811..82222620 100644 --- a/scripts/plot_summary.py +++ b/scripts/plot_summary.py @@ -428,13 +428,13 @@ def historical_emissions(countries): ) emissions = co2_totals.loc["electricity"] - if "T" in 
opts: + if options["transport"]: emissions += co2_totals.loc[[i + " non-elec" for i in ["rail", "road"]]].sum() - if "H" in opts: + if options["heating"]: emissions += co2_totals.loc[ [i + " non-elec" for i in ["residential", "services"]] ].sum() - if "I" in opts: + if options["industry"]: emissions += co2_totals.loc[ [ "industrial non-elec", @@ -448,7 +448,7 @@ def historical_emissions(countries): return emissions -def plot_carbon_budget_distribution(input_eurostat): +def plot_carbon_budget_distribution(input_eurostat, options): """ Plot historical carbon emissions in the EU and decarbonization path. """ @@ -470,7 +470,7 @@ def plot_carbon_budget_distribution(input_eurostat): e_1990 = co2_emissions_year( countries, input_eurostat, - opts, + options, emissions_scope, report_year, input_co2, @@ -588,4 +588,5 @@ if __name__ == "__main__": if ( isinstance(co2_budget, str) and co2_budget.startswith("cb") ) or snakemake.params["foresight"] == "perfect": - plot_carbon_budget_distribution(snakemake.input.eurostat) + options = snakemake.params.sector + plot_carbon_budget_distribution(snakemake.input.eurostat, options) From df0eb38a68a0fe1d93eab2d57ac7a8dd19cbf16d Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sat, 17 Feb 2024 23:38:59 +0100 Subject: [PATCH 60/76] _helpers: generalise regular expression in find_opt --- scripts/_helpers.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/_helpers.py b/scripts/_helpers.py index ed60fb5b..d30f205c 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -4,11 +4,11 @@ # SPDX-License-Identifier: MIT import contextlib +import copy import hashlib import logging import os import re -import copy import urllib from functools import partial from pathlib import Path @@ -117,9 +117,9 @@ def find_opt(opts, expr): """ for o in opts: if expr in o: - m = re.findall("^m?\d*(\.|p)?\d+$", o) + m = re.findall(r"m?\d+(?:[\.p]\d+)?", o) if len(m) > 0: - return True, float(m[0].replace("p", 
".").replace("m", "-")) + return True, float(m[-1].replace("p", ".").replace("m", "-")) else: return True, None return False, None From 20e37c232fc156aa998954a8b76eecfaaec5c338 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sun, 18 Feb 2024 09:55:45 +0100 Subject: [PATCH 61/76] copy a scenarios.yaml from scenarios.template.yaml so it is not git tracked --- Snakefile | 13 +++++++++---- config/{scenarios.yaml => scenarios.template.yaml} | 0 2 files changed, 9 insertions(+), 4 deletions(-) rename config/{scenarios.yaml => scenarios.template.yaml} (100%) diff --git a/Snakefile b/Snakefile index 1c8e1323..4f35dd74 100644 --- a/Snakefile +++ b/Snakefile @@ -16,10 +16,15 @@ from scripts._helpers import path_provider min_version("7.7") HTTP = HTTPRemoteProvider() -conf_file = os.path.join(workflow.current_basedir, "config/config.yaml") -conf_default_file = os.path.join(workflow.current_basedir, "config/config.default.yaml") -if not exists(conf_file) and exists(conf_default_file): - copyfile(conf_default_file, conf_file) +default_files = { + "config/config.default.yaml": "config/config.yaml", + "config/scenarios.template.yaml": "config/scenarios.yaml", +} +for template, target in default_files.items(): + target = os.path.join(workflow.current_basedir, target) + template = os.path.join(workflow.current_basedir, template) + if not exists(target) and exists(template): + copyfile(template, target) configfile: "config/config.default.yaml" configfile: "config/config.yaml" diff --git a/config/scenarios.yaml b/config/scenarios.template.yaml similarity index 100% rename from config/scenarios.yaml rename to config/scenarios.template.yaml From 831f0779b3f0f333dc4527d473dc725c3d1c3bed Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sun, 18 Feb 2024 08:57:24 +0000 Subject: [PATCH 62/76] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- Snakefile | 1 + 1 file 
changed, 1 insertion(+) diff --git a/Snakefile b/Snakefile index 4f35dd74..2ab6bdc7 100644 --- a/Snakefile +++ b/Snakefile @@ -26,6 +26,7 @@ for template, target in default_files.items(): if not exists(target) and exists(template): copyfile(template, target) + configfile: "config/config.default.yaml" configfile: "config/config.yaml" From 67acbbda8a015d981f09f9a8b7f61b103da028ae Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sun, 18 Feb 2024 10:24:26 +0100 Subject: [PATCH 63/76] run: name: all will collect all keys from scenarios.yaml if run: scenarios: enable: true --- Snakefile | 16 ++++++++-------- doc/configtables/run.csv | 4 ++-- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/Snakefile b/Snakefile index 4f35dd74..1f02b877 100644 --- a/Snakefile +++ b/Snakefile @@ -10,7 +10,6 @@ import yaml from snakemake.remote.HTTP import RemoteProvider as HTTPRemoteProvider from snakemake.utils import min_version -# TODO: check if this works with mock_snakemake from scripts._helpers import path_provider min_version("7.7") @@ -32,13 +31,14 @@ configfile: "config/config.yaml" run = config["run"] scenarios = run.get("scenarios", {}) -if run["name"]: - if scenarios.get("enable"): - fn = Path(scenarios["file"]) - scenarios = yaml.safe_load(fn.read_text()) - RDIR = "{run}/" - else: - RDIR = run["name"] + "/" +if run["name"] and scenarios.get("enable"): + fn = Path(scenarios["file"]) + scenarios = yaml.safe_load(fn.read_text()) + RDIR = "{run}/" + if run["name"] == "all": + config["run"]["name"] = list(scenarios.keys()) +elif run["name"]: + RDIR = run["name"] + "/" else: RDIR = "" diff --git a/doc/configtables/run.csv b/doc/configtables/run.csv index f619d8bf..2835a324 100644 --- a/doc/configtables/run.csv +++ b/doc/configtables/run.csv @@ -1,8 +1,8 @@ ,Unit,Values,Description -name,--,str/list,"Specify a name for your run. Results will be stored under this name. 
If ``scenario: enable`` is set to ``true``, the name must contain a subset of scenario names defined in ``scenario: file``." +name,--,str/list,"Specify a name for your run. Results will be stored under this name. If ``scenario: enable:`` is set to ``true``, the name must contain a subset of scenario names defined in ``scenario: file:``. If the name is 'all', all defined scenarios will be run." scenarios,,, -- enable,bool,"{true, false}","Switch to select whether workflow should generate scenarios based on ``file``." -- file,str,,"Path to the scenario yaml file. The scenario file contains config overrides for each scenario. In order to be taken account, ``run: scenarios`` has to be set to ``true`` and ``run: name`` has to be a subset of top level keys given in the scenario file. In order to automatically create a `scenario.yaml` file based on a combination of settings, alter and use the ``config/create_scenarios.py`` script in the ``config`` directory." -disable_progrssbar,bool,"{true, false}","Switch to select whether progressbar should be disabled." +disable_progressbar,bool,"{true, false}","Switch to select whether progressbar should be disabled." shared_resources,bool/str,,"Switch to select whether resources should be shared across runs. If a string is passed, this is used as a subdirectory name for shared resources. If set to 'base', only resources before creating the elec.nc file are shared." shared_cutouts,bool,"{true, false}","Switch to select whether cutouts should be shared across runs." 
From 83f1b5e41d0030083ea5b84d7f305549035cc752 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sun, 18 Feb 2024 10:40:44 +0100 Subject: [PATCH 64/76] Add rule create_scenarios that runs config/create_scenarios.py on request --- Snakefile | 7 +++++++ config/create_scenarios.py | 16 ++++++++++------ 2 files changed, 17 insertions(+), 6 deletions(-) diff --git a/Snakefile b/Snakefile index 1f02b877..344b0934 100644 --- a/Snakefile +++ b/Snakefile @@ -93,6 +93,13 @@ rule all: default_target: True +rule create_scenarios: + output: + config["run"]["scenarios"]["file"], + script: + "config/create_scenarios.py" + + rule purge: run: import builtins diff --git a/config/create_scenarios.py b/config/create_scenarios.py index 40a3c331..4c407058 100644 --- a/config/create_scenarios.py +++ b/config/create_scenarios.py @@ -1,11 +1,15 @@ # -*- coding: utf-8 -*- -# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors +# SPDX-FileCopyrightText: : 2023-2024 The PyPSA-Eur Authors # # SPDX-License-Identifier: MIT # This script helps to generate a scenarios.yaml file for PyPSA-Eur. # You can modify the template to your needs and define all possible combinations of config values that should be considered. +if "snakemake" in globals(): + filename = snakemake.output[0] +else: + filename = "../config/scenarios.yaml" import itertools @@ -14,23 +18,23 @@ import itertools template = """ scenario{scenario_number}: config_section: - config_value: {config_value} - + config_key: {config_value} config_section2: config_key2: {config_value2} """ # Define all possible combinations of config values. # This must define all config values that are used in the template. 
-config_values = dict(config_values=["true", "false"], config_values2=[1, 2, 3, 4, 5]) +config_values = dict( + config_value=["true", "false"], + config_value2=[1, 2, 3, 4] +) combinations = [ dict(zip(config_values.keys(), values)) for values in itertools.product(*config_values.values()) ] -# write the scenarios to a file -filename = "../config/scenarios.yaml" with open(filename, "w") as f: for i, config in enumerate(combinations): f.write(template.format(scenario_number=i, **config)) From 1fb90da743e54a096f2c193b615cdbc8f7a059d5 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sun, 18 Feb 2024 09:42:20 +0000 Subject: [PATCH 65/76] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- config/create_scenarios.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/config/create_scenarios.py b/config/create_scenarios.py index 4c407058..cccc29bc 100644 --- a/config/create_scenarios.py +++ b/config/create_scenarios.py @@ -25,10 +25,7 @@ scenario{scenario_number}: # Define all possible combinations of config values. # This must define all config values that are used in the template. 
-config_values = dict( - config_value=["true", "false"], - config_value2=[1, 2, 3, 4] -) +config_values = dict(config_value=["true", "false"], config_value2=[1, 2, 3, 4]) combinations = [ dict(zip(config_values.keys(), values)) From 8e04b7a1573f305b5065a6a83b605608e9f73f3c Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sun, 18 Feb 2024 11:03:37 +0100 Subject: [PATCH 66/76] test.sh: add scenarios dry-run to test cases --- Snakefile | 2 ++ ...config.scenarios.electricity.yaml => config.scenarios.yaml} | 2 +- config/test/{scenarios.electricity.yaml => scenarios.yaml} | 0 test.sh | 3 ++- 4 files changed, 5 insertions(+), 2 deletions(-) rename config/test/{config.scenarios.electricity.yaml => config.scenarios.yaml} (95%) rename config/test/{scenarios.electricity.yaml => scenarios.yaml} (100%) diff --git a/Snakefile b/Snakefile index 7487b3d5..78a28373 100644 --- a/Snakefile +++ b/Snakefile @@ -97,6 +97,8 @@ rule all: rule create_scenarios: output: config["run"]["scenarios"]["file"], + conda: + "envs/retrieve.yaml" script: "config/create_scenarios.py" diff --git a/config/test/config.scenarios.electricity.yaml b/config/test/config.scenarios.yaml similarity index 95% rename from config/test/config.scenarios.electricity.yaml rename to config/test/config.scenarios.yaml index dde138ed..8ecbb91b 100644 --- a/config/test/config.scenarios.electricity.yaml +++ b/config/test/config.scenarios.yaml @@ -10,7 +10,7 @@ run: - test-elec-no-onshore-wind scenarios: enable: true - file: "config/test/scenarios.electricity.yaml" + file: "config/test/scenarios.yaml" disable_progressbar: true shared_resources: base shared_cutouts: true diff --git a/config/test/scenarios.electricity.yaml b/config/test/scenarios.yaml similarity index 100% rename from config/test/scenarios.electricity.yaml rename to config/test/scenarios.yaml diff --git a/test.sh b/test.sh index a40276b8..c36ce5cd 100755 --- a/test.sh +++ b/test.sh @@ -5,4 +5,5 @@ snakemake -call solve_elec_networks --configfile 
config/test/config.electricity.yaml --rerun-triggers=mtime && \ snakemake -call all --configfile config/test/config.overnight.yaml --rerun-triggers=mtime && \ snakemake -call all --configfile config/test/config.myopic.yaml --rerun-triggers=mtime && \ -snakemake -call all --configfile config/test/config.perfect.yaml --rerun-triggers=mtime +snakemake -call all --configfile config/test/config.perfect.yaml --rerun-triggers=mtime && \ +snakemake -call all --configfile config/test/config.scenarios.yaml --rerun-triggers=mtime -n && \ From 38c90155f1b7b3661335b9c722925e6e2c0d8c3e Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sun, 18 Feb 2024 11:48:55 +0100 Subject: [PATCH 67/76] add release notes --- doc/release_notes.rst | 71 +++++++++++++++++++++++++++++++++++++++---- rules/validate.smk | 5 +-- 2 files changed, 66 insertions(+), 10 deletions(-) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index c216fd35..0e96f013 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -10,12 +10,71 @@ Release Notes Upcoming Release ================ -* PyPSA-EUR now supports the simultaneous execution of multiple scenarios. For - this purpose, a scenarios.yaml file has been introduced which contains - customizable scenario names with corresponding configuration overrides. To - enable it, set the ``run: scenarios:`` key to ``True`` and define the scenario - names to run under ``run: name:`` in the configuration file. The latter must - be a subset of toplevel keys in the scenario file. +* Added new scenario management that supports the simultaneous execution of + multiple scenarios with a single ``snakemake`` call. For this purpose, a + ``scenarios.yaml`` file is introduced which contains customizable scenario + names with configuration overrides. To enable it, set the ``run: scenarios: + true`` and define the list of scenario names to run under ``run: name:`` in + the configuration file. The latter must be a subset of toplevel keys in the + scenario file. 
+ + - To get started, a scenarios template file ``config/scenarios.template.yaml`` + is included in the repository, which is copied to ``config/scenarios.yaml`` + on first use. + + - The scenario file can be changed via ``run: scenarios: file:``. + + - If scenario management is activated with ``run: scenarios: enable: true``, a + new wildcard ``{run}`` is introduced. This means that the configuration + settings may depend on the new ``{run}`` wildcard. Therefore, a new + ``config_provider()`` function is used in the ``Snakefile`` and ``.smk`` + files, which takes wildcard values into account. The calls to the ``config`` + object have been reduced in ``.smk`` files since there is no awareness of + wildcard values outside rule definitions. + + - The scenario files can also be programmatically created using the template + script ``config/create_scenarios.py``. This script can be run with + ``snakemake -j1 create_scenarios`` and creates the scenarios file referenced + under ``run: scenarios: file:``. + + - The setting ``run: name: all`` will run all scenarios in + ``config/scenarios.yaml``. Otherwise, it will run those passed as list in + ``run: name:`` as long as ``run: scenarios: enable: true``. + + - The setting ``run: shared_resources:`` indicates via a boolean whether the + resources should be encapsulated by the ``run: name:``. The special setting + ``run: shared_resources: base`` shares resources until ``add_electricity`` + that do not contain wildcards other than ``{"technology", "year", + "scope"}``. + + - Added new configuration options for all ``{opts}`` and ``{sector_opts}`` + wildcard values to create a unique configuration file (``config.yaml``) per + PyPSA network file. This is done with the help of a new function + ``update_config_from_wildcards()`` which parses configuration settings from + wildcards and updates the ``snakemake.config`` object. 
These updated + configuration settings are used in the scripts rather than directly parsed + values from ``snakemake.wildcards``. + + - The cost data was moved from ``data/costs_{year}.csv`` to + ``resources/costs_{year}.csv`` since it depends on configuration settings. + The ``retrieve_cost_data`` rule was changed to call a Python script. + + - Moved time clustering settings to ``clustering: temporal:`` from + ``snapshots:`` so that the latter is only used to define the + ``pandas.DatetimeIndex`` which simplifies the scenario management. + + - Collection rules get a new wildcard ``run=config["run"]["name"]`` so they + can collect outputs across different scenarios. + + - **Warning:** One caveat remains for the scenario management with myopic or + perfect foresight pathway optimisation. The first investment period must be + shared across all scenarios. The reason is that the ``wildcard_constraints`` + defined for the rule ``add_existing_baseyear`` do not accept wildcard-aware + input functions (cf. + `snakemake#2703 <https://github.com/snakemake/snakemake/issues/2703>`_). + +* The outputs of the rule ``retrieve_gas_infrastructure_data`` are no longer + marked as ``protected()`` as the download size is small. * Improved representation of industry transition pathways. A new script was added to interpolate industry sector ratios from today's status quo to future diff --git a/rules/validate.smk b/rules/validate.smk index ec2d87cb..60a6ca50 100644 --- a/rules/validate.smk +++ b/rules/validate.smk @@ -17,10 +17,7 @@ rule build_electricity_production: The data is used for validation of the optimization results. 
""" params: - snapshots=lambda w: { - k: config_provider("snapshots", k)(w) - for k in ["start", "end", "inclusive"] - }, + snapshots=config_provider("snapshots"), countries=config_provider("countries"), output: resources("historical_electricity_production.csv"), From d6e710e74ae0dde3ff16c158ec5d4b1bfe990854 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sun, 18 Feb 2024 11:51:46 +0100 Subject: [PATCH 68/76] test.sh: patch end-of-file --- test.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test.sh b/test.sh index c36ce5cd..d6007750 100755 --- a/test.sh +++ b/test.sh @@ -6,4 +6,4 @@ snakemake -call solve_elec_networks --configfile config/test/config.electricity. snakemake -call all --configfile config/test/config.overnight.yaml --rerun-triggers=mtime && \ snakemake -call all --configfile config/test/config.myopic.yaml --rerun-triggers=mtime && \ snakemake -call all --configfile config/test/config.perfect.yaml --rerun-triggers=mtime && \ -snakemake -call all --configfile config/test/config.scenarios.yaml --rerun-triggers=mtime -n && \ +snakemake -call all --configfile config/test/config.scenarios.yaml --rerun-triggers=mtime -n From 479bc707277b09182409ae5f4ec54879384eb5e4 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 19 Feb 2024 19:36:18 +0000 Subject: [PATCH 69/76] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/psf/black-pre-commit-mirror: 24.1.1 → 24.2.0](https://github.com/psf/black-pre-commit-mirror/compare/24.1.1...24.2.0) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5c41e781..28d0278a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -51,7 +51,7 @@ repos: # Formatting with "black" coding style - repo: 
https://github.com/psf/black-pre-commit-mirror - rev: 24.1.1 + rev: 24.2.0 hooks: # Format Python files - id: black From e68743ffa6601bcbdf02025372841211e58bac2e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 19 Feb 2024 19:36:37 +0000 Subject: [PATCH 70/76] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- envs/environment.fixed.yaml | 878 ++++++++++++++++++------------------ 1 file changed, 439 insertions(+), 439 deletions(-) diff --git a/envs/environment.fixed.yaml b/envs/environment.fixed.yaml index cde0b801..8bbd70bf 100644 --- a/envs/environment.fixed.yaml +++ b/envs/environment.fixed.yaml @@ -4,443 +4,443 @@ name: pypsa-eur channels: - - bioconda - - http://conda.anaconda.org/gurobi - - conda-forge - - defaults +- bioconda +- http://conda.anaconda.org/gurobi +- conda-forge +- defaults dependencies: - - _libgcc_mutex=0.1 - - _openmp_mutex=4.5 - - affine=2.4.0 - - alsa-lib=1.2.10 - - ampl-mp=3.1.0 - - amply=0.1.6 - - appdirs=1.4.4 - - asttokens=2.4.1 - - atk-1.0=2.38.0 - - atlite=0.2.12 - - attr=2.5.1 - - attrs=23.2.0 - - aws-c-auth=0.7.15 - - aws-c-cal=0.6.9 - - aws-c-common=0.9.12 - - aws-c-compression=0.2.17 - - aws-c-event-stream=0.4.1 - - aws-c-http=0.8.0 - - aws-c-io=0.14.3 - - aws-c-mqtt=0.10.1 - - aws-c-s3=0.5.0 - - aws-c-sdkutils=0.1.14 - - aws-checksums=0.1.17 - - aws-crt-cpp=0.26.1 - - aws-sdk-cpp=1.11.242 - - azure-core-cpp=1.10.3 - - azure-storage-blobs-cpp=12.10.0 - - azure-storage-common-cpp=12.5.0 - - beautifulsoup4=4.12.3 - - blosc=1.21.5 - - bokeh=3.3.4 - - bottleneck=1.3.7 - - branca=0.7.1 - - brotli=1.1.0 - - brotli-bin=1.1.0 - - brotli-python=1.1.0 - - bzip2=1.0.8 - - c-ares=1.26.0 - - c-blosc2=2.13.2 - - ca-certificates=2024.2.2 - - cairo=1.18.0 - - cartopy=0.22.0 - - cdsapi=0.6.1 - - certifi=2024.2.2 - - cffi=1.16.0 - - cfgv=3.3.1 - - cfitsio=4.3.1 - - cftime=1.6.3 - - charset-normalizer=3.3.2 - - click=8.1.7 - - 
click-plugins=1.1.1 - - cligj=0.7.2 - - cloudpickle=3.0.0 - - coin-or-cbc=2.10.10 - - coin-or-cgl=0.60.7 - - coin-or-clp=1.17.8 - - coin-or-osi=0.108.8 - - coin-or-utils=2.11.9 - - coincbc=2.10.10 - - colorama=0.4.6 - - configargparse=1.7 - - connection_pool=0.0.3 - - contourpy=1.2.0 - - country_converter=1.2 - - cppad=20240000.2 - - cycler=0.12.1 - - cytoolz=0.12.3 - - dask=2024.2.0 - - dask-core=2024.2.0 - - datrie=0.8.2 - - dbus=1.13.6 - - decorator=5.1.1 - - deprecation=2.1.0 - - descartes=1.1.0 - - distlib=0.3.8 - - distributed=2024.2.0 - - distro=1.9.0 - - docutils=0.20.1 - - dpath=2.1.6 - - entsoe-py=0.6.6 - - et_xmlfile=1.1.0 - - exceptiongroup=1.2.0 - - executing=2.0.1 - - expat=2.5.0 - - filelock=3.13.1 - - fiona=1.9.5 - - folium=0.15.1 - - font-ttf-dejavu-sans-mono=2.37 - - font-ttf-inconsolata=3.000 - - font-ttf-source-code-pro=2.038 - - font-ttf-ubuntu=0.83 - - fontconfig=2.14.2 - - fonts-conda-ecosystem=1 - - fonts-conda-forge=1 - - fonttools=4.49.0 - - freetype=2.12.1 - - freexl=2.0.0 - - fribidi=1.0.10 - - fsspec=2024.2.0 - - gdal=3.8.4 - - gdk-pixbuf=2.42.10 - - geographiclib=1.52 - - geojson-rewind=1.1.0 - - geopandas=0.14.3 - - geopandas-base=0.14.3 - - geopy=2.4.1 - - geos=3.12.1 - - geotiff=1.7.1 - - gettext=0.21.1 - - gflags=2.2.2 - - giflib=5.2.1 - - gitdb=4.0.11 - - gitpython=3.1.42 - - glib=2.78.4 - - glib-tools=2.78.4 - - glog=0.6.0 - - glpk=5.0 - - gmp=6.3.0 - - graphite2=1.3.13 - - graphviz=9.0.0 - - gst-plugins-base=1.22.9 - - gstreamer=1.22.9 - - gtk2=2.24.33 - - gts=0.7.6 - - harfbuzz=8.3.0 - - hdf4=4.2.15 - - hdf5=1.14.3 - - humanfriendly=10.0 - - icu=73.2 - - identify=2.5.35 - - idna=3.6 - - importlib-metadata=7.0.1 - - importlib_metadata=7.0.1 - - importlib_resources=6.1.1 - - iniconfig=2.0.0 - - ipopt=3.14.14 - - ipython=8.21.0 - - jedi=0.19.1 - - jinja2=3.1.3 - - joblib=1.3.2 - - json-c=0.17 - - jsonschema=4.21.1 - - jsonschema-specifications=2023.12.1 - - jupyter_core=5.7.1 - - kealib=1.5.3 - - keyutils=1.6.1 - - 
kiwisolver=1.4.5 - - krb5=1.21.2 - - lame=3.100 - - lcms2=2.16 - - ld_impl_linux-64=2.40 - - lerc=4.0.0 - - libabseil=20230802.1 - - libaec=1.1.2 - - libarchive=3.7.2 - - libarrow=15.0.0 - - libarrow-acero=15.0.0 - - libarrow-dataset=15.0.0 - - libarrow-flight=15.0.0 - - libarrow-flight-sql=15.0.0 - - libarrow-gandiva=15.0.0 - - libarrow-substrait=15.0.0 - - libblas=3.9.0 - - libboost-headers=1.84.0 - - libbrotlicommon=1.1.0 - - libbrotlidec=1.1.0 - - libbrotlienc=1.1.0 - - libcap=2.69 - - libcblas=3.9.0 - - libclang=15.0.7 - - libclang13=15.0.7 - - libcrc32c=1.1.2 - - libcups=2.3.3 - - libcurl=8.5.0 - - libdeflate=1.19 - - libedit=3.1.20191231 - - libev=4.33 - - libevent=2.1.12 - - libexpat=2.5.0 - - libffi=3.4.2 - - libflac=1.4.3 - - libgcc-ng=13.2.0 - - libgcrypt=1.10.3 - - libgd=2.3.3 - - libgdal=3.8.4 - - libgfortran-ng=13.2.0 - - libgfortran5=13.2.0 - - libglib=2.78.4 - - libgomp=13.2.0 - - libgoogle-cloud=2.12.0 - - libgpg-error=1.47 - - libgrpc=1.60.1 - - libhwloc=2.9.3 - - libiconv=1.17 - - libjpeg-turbo=3.0.0 - - libkml=1.3.0 - - liblapack=3.9.0 - - liblapacke=3.9.0 - - libllvm15=15.0.7 - - libnetcdf=4.9.2 - - libnghttp2=1.58.0 - - libnl=3.9.0 - - libnsl=2.0.1 - - libnuma=2.0.16 - - libogg=1.3.4 - - libopenblas=0.3.26 - - libopus=1.3.1 - - libparquet=15.0.0 - - libpng=1.6.42 - - libpq=16.2 - - libprotobuf=4.25.1 - - libre2-11=2023.06.02 - - librsvg=2.56.3 - - librttopo=1.1.0 - - libscotch=7.0.4 - - libsndfile=1.2.2 - - libspatialindex=1.9.3 - - libspatialite=5.1.0 - - libspral=2023.09.07 - - libsqlite=3.45.1 - - libssh2=1.11.0 - - libstdcxx-ng=13.2.0 - - libsystemd0=255 - - libthrift=0.19.0 - - libtiff=4.6.0 - - libutf8proc=2.8.0 - - libuuid=2.38.1 - - libvorbis=1.3.7 - - libwebp=1.3.2 - - libwebp-base=1.3.2 - - libxcb=1.15 - - libxcrypt=4.4.36 - - libxkbcommon=1.6.0 - - libxml2=2.12.5 - - libxslt=1.1.39 - - libzip=1.10.1 - - libzlib=1.2.13 - - linopy=0.3.4 - - locket=1.0.0 - - lxml=5.1.0 - - lz4=4.3.3 - - lz4-c=1.9.4 - - lzo=2.10 - - mapclassify=2.6.1 - 
- markupsafe=2.1.5 - - matplotlib=3.8.3 - - matplotlib-base=3.8.3 - - matplotlib-inline=0.1.6 - - memory_profiler=0.61.0 - - metis=5.1.0 - - minizip=4.0.4 - - mpg123=1.32.4 - - msgpack-python=1.0.7 - - mumps-include=5.6.2 - - mumps-seq=5.6.2 - - munkres=1.1.4 - - mysql-common=8.0.33 - - mysql-libs=8.0.33 - - nbformat=5.9.2 - - ncurses=6.4 - - netcdf4=1.6.5 - - networkx=3.2.1 - - nodeenv=1.8.0 - - nomkl=1.0 - - nspr=4.35 - - nss=3.98 - - numexpr=2.9.0 - - numpy=1.26.4 - - openjdk=21.0.2 - - openjpeg=2.5.0 - - openpyxl=3.1.2 - - openssl=3.2.1 - - orc=1.9.2 - - packaging=23.2 - - pandas=2.2.0 - - pango=1.50.14 - - parso=0.8.3 - - partd=1.4.1 - - patsy=0.5.6 - - pcre2=10.42 - - pexpect=4.9.0 - - pickleshare=0.7.5 - - pillow=10.2.0 - - pip=24.0 - - pixman=0.43.2 - - pkgutil-resolve-name=1.3.10 - - plac=1.4.2 - - platformdirs=4.2.0 - - pluggy=1.4.0 - - ply=3.11 - - poppler=24.02.0 - - poppler-data=0.4.12 - - postgresql=16.2 - - powerplantmatching=0.5.11 - - pre-commit=3.6.2 - - progressbar2=4.3.2 - - proj=9.3.1 - - prompt-toolkit=3.0.42 - - psutil=5.9.8 - - pthread-stubs=0.4 - - ptyprocess=0.7.0 - - pulp=2.7.0 - - pulseaudio-client=16.1 - - pure_eval=0.2.2 - - py-cpuinfo=9.0.0 - - pyarrow=15.0.0 - - pyarrow-hotfix=0.6 - - pycountry=22.3.5 - - pycparser=2.21 - - pygments=2.17.2 - - pyomo=6.6.1 - - pyparsing=3.1.1 - - pyproj=3.6.1 - - pypsa=0.27.0 - - pyqt=5.15.9 - - pyqt5-sip=12.12.2 - - pyscipopt=4.4.0 - - pyshp=2.3.1 - - pysocks=1.7.1 - - pytables=3.9.2 - - pytest=8.0.0 - - python=3.11.8 - - python-dateutil=2.8.2 - - python-fastjsonschema=2.19.1 - - python-tzdata=2024.1 - - python-utils=3.8.2 - - python_abi=3.11 - - pytz=2024.1 - - pyxlsb=1.0.10 - - pyyaml=6.0.1 - - qt-main=5.15.8 - - rasterio=1.3.9 - - rdma-core=50.0 - - re2=2023.06.02 - - readline=8.2 - - referencing=0.33.0 - - requests=2.31.0 - - reretry=0.11.8 - - rioxarray=0.15.1 - - rpds-py=0.18.0 - - rtree=1.2.0 - - s2n=1.4.3 - - scikit-learn=1.4.1.post1 - - scip=8.1.0 - - scipy=1.12.0 - - scotch=7.0.4 - - 
seaborn=0.13.2 - - seaborn-base=0.13.2 - - setuptools=69.1.0 - - setuptools-scm=8.0.4 - - setuptools_scm=8.0.4 - - shapely=2.0.2 - - sip=6.7.12 - - six=1.16.0 - - smart_open=6.4.0 - - smmap=5.0.0 - - snakemake-minimal=7.32.4 - - snappy=1.1.10 - - snuggs=1.4.7 - - sortedcontainers=2.4.0 - - soupsieve=2.5 - - sqlite=3.45.1 - - stack_data=0.6.2 - - statsmodels=0.14.1 - - stopit=1.1.2 - - tabula-py=2.7.0 - - tabulate=0.9.0 - - tbb=2021.11.0 - - tblib=3.0.0 - - threadpoolctl=3.3.0 - - throttler=1.2.2 - - tiledb=2.20.0 - - tk=8.6.13 - - toml=0.10.2 - - tomli=2.0.1 - - toolz=0.12.1 - - toposort=1.10 - - tornado=6.3.3 - - tqdm=4.66.2 - - traitlets=5.14.1 - - typing-extensions=4.9.0 - - typing_extensions=4.9.0 - - tzcode=2024a - - tzdata=2024a - - ucx=1.15.0 - - ukkonen=1.0.1 - - unidecode=1.3.8 - - unixodbc=2.3.12 - - uriparser=0.9.7 - - urllib3=2.2.1 - - validators=0.22.0 - - virtualenv=20.25.0 - - wcwidth=0.2.13 - - wheel=0.42.0 - - wrapt=1.16.0 - - xarray=2024.2.0 - - xcb-util=0.4.0 - - xcb-util-image=0.4.0 - - xcb-util-keysyms=0.4.0 - - xcb-util-renderutil=0.3.9 - - xcb-util-wm=0.4.1 - - xerces-c=3.2.5 - - xkeyboard-config=2.41 - - xlrd=2.0.1 - - xorg-fixesproto=5.0 - - xorg-inputproto=2.3.2 - - xorg-kbproto=1.0.7 - - xorg-libice=1.1.1 - - xorg-libsm=1.2.4 - - xorg-libx11=1.8.7 - - xorg-libxau=1.0.11 - - xorg-libxdmcp=1.1.3 - - xorg-libxext=1.3.4 - - xorg-libxfixes=5.0.3 - - xorg-libxi=1.7.10 - - xorg-libxrender=0.9.11 - - xorg-libxt=1.3.0 - - xorg-libxtst=1.2.3 - - xorg-recordproto=1.14.2 - - xorg-renderproto=0.11.1 - - xorg-xextproto=7.3.0 - - xorg-xf86vidmodeproto=2.3.1 - - xorg-xproto=7.0.31 - - xyzservices=2023.10.1 - - xz=5.2.6 - - yaml=0.2.5 - - yte=1.5.4 - - zict=3.0.0 - - zipp=3.17.0 - - zlib=1.2.13 - - zlib-ng=2.0.7 - - zstd=1.5.5 - - pip: - - highspy==1.5.3 - - tsam==2.3.1 +- _libgcc_mutex=0.1 +- _openmp_mutex=4.5 +- affine=2.4.0 +- alsa-lib=1.2.10 +- ampl-mp=3.1.0 +- amply=0.1.6 +- appdirs=1.4.4 +- asttokens=2.4.1 +- atk-1.0=2.38.0 +- atlite=0.2.12 +- 
attr=2.5.1 +- attrs=23.2.0 +- aws-c-auth=0.7.15 +- aws-c-cal=0.6.9 +- aws-c-common=0.9.12 +- aws-c-compression=0.2.17 +- aws-c-event-stream=0.4.1 +- aws-c-http=0.8.0 +- aws-c-io=0.14.3 +- aws-c-mqtt=0.10.1 +- aws-c-s3=0.5.0 +- aws-c-sdkutils=0.1.14 +- aws-checksums=0.1.17 +- aws-crt-cpp=0.26.1 +- aws-sdk-cpp=1.11.242 +- azure-core-cpp=1.10.3 +- azure-storage-blobs-cpp=12.10.0 +- azure-storage-common-cpp=12.5.0 +- beautifulsoup4=4.12.3 +- blosc=1.21.5 +- bokeh=3.3.4 +- bottleneck=1.3.7 +- branca=0.7.1 +- brotli=1.1.0 +- brotli-bin=1.1.0 +- brotli-python=1.1.0 +- bzip2=1.0.8 +- c-ares=1.26.0 +- c-blosc2=2.13.2 +- ca-certificates=2024.2.2 +- cairo=1.18.0 +- cartopy=0.22.0 +- cdsapi=0.6.1 +- certifi=2024.2.2 +- cffi=1.16.0 +- cfgv=3.3.1 +- cfitsio=4.3.1 +- cftime=1.6.3 +- charset-normalizer=3.3.2 +- click=8.1.7 +- click-plugins=1.1.1 +- cligj=0.7.2 +- cloudpickle=3.0.0 +- coin-or-cbc=2.10.10 +- coin-or-cgl=0.60.7 +- coin-or-clp=1.17.8 +- coin-or-osi=0.108.8 +- coin-or-utils=2.11.9 +- coincbc=2.10.10 +- colorama=0.4.6 +- configargparse=1.7 +- connection_pool=0.0.3 +- contourpy=1.2.0 +- country_converter=1.2 +- cppad=20240000.2 +- cycler=0.12.1 +- cytoolz=0.12.3 +- dask=2024.2.0 +- dask-core=2024.2.0 +- datrie=0.8.2 +- dbus=1.13.6 +- decorator=5.1.1 +- deprecation=2.1.0 +- descartes=1.1.0 +- distlib=0.3.8 +- distributed=2024.2.0 +- distro=1.9.0 +- docutils=0.20.1 +- dpath=2.1.6 +- entsoe-py=0.6.6 +- et_xmlfile=1.1.0 +- exceptiongroup=1.2.0 +- executing=2.0.1 +- expat=2.5.0 +- filelock=3.13.1 +- fiona=1.9.5 +- folium=0.15.1 +- font-ttf-dejavu-sans-mono=2.37 +- font-ttf-inconsolata=3.000 +- font-ttf-source-code-pro=2.038 +- font-ttf-ubuntu=0.83 +- fontconfig=2.14.2 +- fonts-conda-ecosystem=1 +- fonts-conda-forge=1 +- fonttools=4.49.0 +- freetype=2.12.1 +- freexl=2.0.0 +- fribidi=1.0.10 +- fsspec=2024.2.0 +- gdal=3.8.4 +- gdk-pixbuf=2.42.10 +- geographiclib=1.52 +- geojson-rewind=1.1.0 +- geopandas=0.14.3 +- geopandas-base=0.14.3 +- geopy=2.4.1 +- geos=3.12.1 +- 
geotiff=1.7.1 +- gettext=0.21.1 +- gflags=2.2.2 +- giflib=5.2.1 +- gitdb=4.0.11 +- gitpython=3.1.42 +- glib=2.78.4 +- glib-tools=2.78.4 +- glog=0.6.0 +- glpk=5.0 +- gmp=6.3.0 +- graphite2=1.3.13 +- graphviz=9.0.0 +- gst-plugins-base=1.22.9 +- gstreamer=1.22.9 +- gtk2=2.24.33 +- gts=0.7.6 +- harfbuzz=8.3.0 +- hdf4=4.2.15 +- hdf5=1.14.3 +- humanfriendly=10.0 +- icu=73.2 +- identify=2.5.35 +- idna=3.6 +- importlib-metadata=7.0.1 +- importlib_metadata=7.0.1 +- importlib_resources=6.1.1 +- iniconfig=2.0.0 +- ipopt=3.14.14 +- ipython=8.21.0 +- jedi=0.19.1 +- jinja2=3.1.3 +- joblib=1.3.2 +- json-c=0.17 +- jsonschema=4.21.1 +- jsonschema-specifications=2023.12.1 +- jupyter_core=5.7.1 +- kealib=1.5.3 +- keyutils=1.6.1 +- kiwisolver=1.4.5 +- krb5=1.21.2 +- lame=3.100 +- lcms2=2.16 +- ld_impl_linux-64=2.40 +- lerc=4.0.0 +- libabseil=20230802.1 +- libaec=1.1.2 +- libarchive=3.7.2 +- libarrow=15.0.0 +- libarrow-acero=15.0.0 +- libarrow-dataset=15.0.0 +- libarrow-flight=15.0.0 +- libarrow-flight-sql=15.0.0 +- libarrow-gandiva=15.0.0 +- libarrow-substrait=15.0.0 +- libblas=3.9.0 +- libboost-headers=1.84.0 +- libbrotlicommon=1.1.0 +- libbrotlidec=1.1.0 +- libbrotlienc=1.1.0 +- libcap=2.69 +- libcblas=3.9.0 +- libclang=15.0.7 +- libclang13=15.0.7 +- libcrc32c=1.1.2 +- libcups=2.3.3 +- libcurl=8.5.0 +- libdeflate=1.19 +- libedit=3.1.20191231 +- libev=4.33 +- libevent=2.1.12 +- libexpat=2.5.0 +- libffi=3.4.2 +- libflac=1.4.3 +- libgcc-ng=13.2.0 +- libgcrypt=1.10.3 +- libgd=2.3.3 +- libgdal=3.8.4 +- libgfortran-ng=13.2.0 +- libgfortran5=13.2.0 +- libglib=2.78.4 +- libgomp=13.2.0 +- libgoogle-cloud=2.12.0 +- libgpg-error=1.47 +- libgrpc=1.60.1 +- libhwloc=2.9.3 +- libiconv=1.17 +- libjpeg-turbo=3.0.0 +- libkml=1.3.0 +- liblapack=3.9.0 +- liblapacke=3.9.0 +- libllvm15=15.0.7 +- libnetcdf=4.9.2 +- libnghttp2=1.58.0 +- libnl=3.9.0 +- libnsl=2.0.1 +- libnuma=2.0.16 +- libogg=1.3.4 +- libopenblas=0.3.26 +- libopus=1.3.1 +- libparquet=15.0.0 +- libpng=1.6.42 +- libpq=16.2 +- 
libprotobuf=4.25.1 +- libre2-11=2023.06.02 +- librsvg=2.56.3 +- librttopo=1.1.0 +- libscotch=7.0.4 +- libsndfile=1.2.2 +- libspatialindex=1.9.3 +- libspatialite=5.1.0 +- libspral=2023.09.07 +- libsqlite=3.45.1 +- libssh2=1.11.0 +- libstdcxx-ng=13.2.0 +- libsystemd0=255 +- libthrift=0.19.0 +- libtiff=4.6.0 +- libutf8proc=2.8.0 +- libuuid=2.38.1 +- libvorbis=1.3.7 +- libwebp=1.3.2 +- libwebp-base=1.3.2 +- libxcb=1.15 +- libxcrypt=4.4.36 +- libxkbcommon=1.6.0 +- libxml2=2.12.5 +- libxslt=1.1.39 +- libzip=1.10.1 +- libzlib=1.2.13 +- linopy=0.3.4 +- locket=1.0.0 +- lxml=5.1.0 +- lz4=4.3.3 +- lz4-c=1.9.4 +- lzo=2.10 +- mapclassify=2.6.1 +- markupsafe=2.1.5 +- matplotlib=3.8.3 +- matplotlib-base=3.8.3 +- matplotlib-inline=0.1.6 +- memory_profiler=0.61.0 +- metis=5.1.0 +- minizip=4.0.4 +- mpg123=1.32.4 +- msgpack-python=1.0.7 +- mumps-include=5.6.2 +- mumps-seq=5.6.2 +- munkres=1.1.4 +- mysql-common=8.0.33 +- mysql-libs=8.0.33 +- nbformat=5.9.2 +- ncurses=6.4 +- netcdf4=1.6.5 +- networkx=3.2.1 +- nodeenv=1.8.0 +- nomkl=1.0 +- nspr=4.35 +- nss=3.98 +- numexpr=2.9.0 +- numpy=1.26.4 +- openjdk=21.0.2 +- openjpeg=2.5.0 +- openpyxl=3.1.2 +- openssl=3.2.1 +- orc=1.9.2 +- packaging=23.2 +- pandas=2.2.0 +- pango=1.50.14 +- parso=0.8.3 +- partd=1.4.1 +- patsy=0.5.6 +- pcre2=10.42 +- pexpect=4.9.0 +- pickleshare=0.7.5 +- pillow=10.2.0 +- pip=24.0 +- pixman=0.43.2 +- pkgutil-resolve-name=1.3.10 +- plac=1.4.2 +- platformdirs=4.2.0 +- pluggy=1.4.0 +- ply=3.11 +- poppler=24.02.0 +- poppler-data=0.4.12 +- postgresql=16.2 +- powerplantmatching=0.5.11 +- pre-commit=3.6.2 +- progressbar2=4.3.2 +- proj=9.3.1 +- prompt-toolkit=3.0.42 +- psutil=5.9.8 +- pthread-stubs=0.4 +- ptyprocess=0.7.0 +- pulp=2.7.0 +- pulseaudio-client=16.1 +- pure_eval=0.2.2 +- py-cpuinfo=9.0.0 +- pyarrow=15.0.0 +- pyarrow-hotfix=0.6 +- pycountry=22.3.5 +- pycparser=2.21 +- pygments=2.17.2 +- pyomo=6.6.1 +- pyparsing=3.1.1 +- pyproj=3.6.1 +- pypsa=0.27.0 +- pyqt=5.15.9 +- pyqt5-sip=12.12.2 +- pyscipopt=4.4.0 +- 
pyshp=2.3.1 +- pysocks=1.7.1 +- pytables=3.9.2 +- pytest=8.0.0 +- python=3.11.8 +- python-dateutil=2.8.2 +- python-fastjsonschema=2.19.1 +- python-tzdata=2024.1 +- python-utils=3.8.2 +- python_abi=3.11 +- pytz=2024.1 +- pyxlsb=1.0.10 +- pyyaml=6.0.1 +- qt-main=5.15.8 +- rasterio=1.3.9 +- rdma-core=50.0 +- re2=2023.06.02 +- readline=8.2 +- referencing=0.33.0 +- requests=2.31.0 +- reretry=0.11.8 +- rioxarray=0.15.1 +- rpds-py=0.18.0 +- rtree=1.2.0 +- s2n=1.4.3 +- scikit-learn=1.4.1.post1 +- scip=8.1.0 +- scipy=1.12.0 +- scotch=7.0.4 +- seaborn=0.13.2 +- seaborn-base=0.13.2 +- setuptools=69.1.0 +- setuptools-scm=8.0.4 +- setuptools_scm=8.0.4 +- shapely=2.0.2 +- sip=6.7.12 +- six=1.16.0 +- smart_open=6.4.0 +- smmap=5.0.0 +- snakemake-minimal=7.32.4 +- snappy=1.1.10 +- snuggs=1.4.7 +- sortedcontainers=2.4.0 +- soupsieve=2.5 +- sqlite=3.45.1 +- stack_data=0.6.2 +- statsmodels=0.14.1 +- stopit=1.1.2 +- tabula-py=2.7.0 +- tabulate=0.9.0 +- tbb=2021.11.0 +- tblib=3.0.0 +- threadpoolctl=3.3.0 +- throttler=1.2.2 +- tiledb=2.20.0 +- tk=8.6.13 +- toml=0.10.2 +- tomli=2.0.1 +- toolz=0.12.1 +- toposort=1.10 +- tornado=6.3.3 +- tqdm=4.66.2 +- traitlets=5.14.1 +- typing-extensions=4.9.0 +- typing_extensions=4.9.0 +- tzcode=2024a +- tzdata=2024a +- ucx=1.15.0 +- ukkonen=1.0.1 +- unidecode=1.3.8 +- unixodbc=2.3.12 +- uriparser=0.9.7 +- urllib3=2.2.1 +- validators=0.22.0 +- virtualenv=20.25.0 +- wcwidth=0.2.13 +- wheel=0.42.0 +- wrapt=1.16.0 +- xarray=2024.2.0 +- xcb-util=0.4.0 +- xcb-util-image=0.4.0 +- xcb-util-keysyms=0.4.0 +- xcb-util-renderutil=0.3.9 +- xcb-util-wm=0.4.1 +- xerces-c=3.2.5 +- xkeyboard-config=2.41 +- xlrd=2.0.1 +- xorg-fixesproto=5.0 +- xorg-inputproto=2.3.2 +- xorg-kbproto=1.0.7 +- xorg-libice=1.1.1 +- xorg-libsm=1.2.4 +- xorg-libx11=1.8.7 +- xorg-libxau=1.0.11 +- xorg-libxdmcp=1.1.3 +- xorg-libxext=1.3.4 +- xorg-libxfixes=5.0.3 +- xorg-libxi=1.7.10 +- xorg-libxrender=0.9.11 +- xorg-libxt=1.3.0 +- xorg-libxtst=1.2.3 +- xorg-recordproto=1.14.2 +- 
xorg-renderproto=0.11.1 +- xorg-xextproto=7.3.0 +- xorg-xf86vidmodeproto=2.3.1 +- xorg-xproto=7.0.31 +- xyzservices=2023.10.1 +- xz=5.2.6 +- yaml=0.2.5 +- yte=1.5.4 +- zict=3.0.0 +- zipp=3.17.0 +- zlib=1.2.13 +- zlib-ng=2.0.7 +- zstd=1.5.5 +- pip: + - highspy==1.5.3 + - tsam==2.3.1 From 4f91c6c43d706b49a2749f924ea7b89cdfa7bdd3 Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Fri, 9 Feb 2024 19:03:39 +0100 Subject: [PATCH 71/76] bugfix: make sure coal demand is there with regional demand --- scripts/prepare_sector_network.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index b1351089..810257c0 100755 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3059,6 +3059,9 @@ def add_industry(n, costs): + mwh_coal_per_mwh_coke * industrial_demand["coke"] ) / nhours + p_set.rename(lambda x: x + " coal for industry", + inplace=True) + if not options["regional_coal_demand"]: p_set = p_set.sum() From 3466027482b61d3c71a65144cbb07b00f49aea97 Mon Sep 17 00:00:00 2001 From: lumbric Date: Mon, 19 Feb 2024 18:58:23 +0100 Subject: [PATCH 72/76] Fix broken link to mamba installation guide in docs --- doc/installation.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/installation.rst b/doc/installation.rst index b07fd290..fbabfd15 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -31,7 +31,7 @@ Install Python Dependencies PyPSA-Eur relies on a set of other Python packages to function. We recommend using the package manager `mamba `_ to install them and manage your environments. -For instructions for your operating system follow the ``mamba`` `installation guide `_. +For instructions for your operating system follow the ``mamba`` `installation guide `_. You can also use ``conda`` equivalently. The package requirements are curated in the `envs/environment.yaml `_ file. 
From ac2322cd16eff8f9d056f7021329d9e6b40b324d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 19 Feb 2024 17:59:11 +0000 Subject: [PATCH 73/76] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- envs/environment.fixed.yaml | 878 ++++++++++++++++++------------------ 1 file changed, 439 insertions(+), 439 deletions(-) diff --git a/envs/environment.fixed.yaml b/envs/environment.fixed.yaml index cde0b801..8bbd70bf 100644 --- a/envs/environment.fixed.yaml +++ b/envs/environment.fixed.yaml @@ -4,443 +4,443 @@ name: pypsa-eur channels: - - bioconda - - http://conda.anaconda.org/gurobi - - conda-forge - - defaults +- bioconda +- http://conda.anaconda.org/gurobi +- conda-forge +- defaults dependencies: - - _libgcc_mutex=0.1 - - _openmp_mutex=4.5 - - affine=2.4.0 - - alsa-lib=1.2.10 - - ampl-mp=3.1.0 - - amply=0.1.6 - - appdirs=1.4.4 - - asttokens=2.4.1 - - atk-1.0=2.38.0 - - atlite=0.2.12 - - attr=2.5.1 - - attrs=23.2.0 - - aws-c-auth=0.7.15 - - aws-c-cal=0.6.9 - - aws-c-common=0.9.12 - - aws-c-compression=0.2.17 - - aws-c-event-stream=0.4.1 - - aws-c-http=0.8.0 - - aws-c-io=0.14.3 - - aws-c-mqtt=0.10.1 - - aws-c-s3=0.5.0 - - aws-c-sdkutils=0.1.14 - - aws-checksums=0.1.17 - - aws-crt-cpp=0.26.1 - - aws-sdk-cpp=1.11.242 - - azure-core-cpp=1.10.3 - - azure-storage-blobs-cpp=12.10.0 - - azure-storage-common-cpp=12.5.0 - - beautifulsoup4=4.12.3 - - blosc=1.21.5 - - bokeh=3.3.4 - - bottleneck=1.3.7 - - branca=0.7.1 - - brotli=1.1.0 - - brotli-bin=1.1.0 - - brotli-python=1.1.0 - - bzip2=1.0.8 - - c-ares=1.26.0 - - c-blosc2=2.13.2 - - ca-certificates=2024.2.2 - - cairo=1.18.0 - - cartopy=0.22.0 - - cdsapi=0.6.1 - - certifi=2024.2.2 - - cffi=1.16.0 - - cfgv=3.3.1 - - cfitsio=4.3.1 - - cftime=1.6.3 - - charset-normalizer=3.3.2 - - click=8.1.7 - - click-plugins=1.1.1 - - cligj=0.7.2 - - cloudpickle=3.0.0 - - coin-or-cbc=2.10.10 - - coin-or-cgl=0.60.7 - - 
coin-or-clp=1.17.8 - - coin-or-osi=0.108.8 - - coin-or-utils=2.11.9 - - coincbc=2.10.10 - - colorama=0.4.6 - - configargparse=1.7 - - connection_pool=0.0.3 - - contourpy=1.2.0 - - country_converter=1.2 - - cppad=20240000.2 - - cycler=0.12.1 - - cytoolz=0.12.3 - - dask=2024.2.0 - - dask-core=2024.2.0 - - datrie=0.8.2 - - dbus=1.13.6 - - decorator=5.1.1 - - deprecation=2.1.0 - - descartes=1.1.0 - - distlib=0.3.8 - - distributed=2024.2.0 - - distro=1.9.0 - - docutils=0.20.1 - - dpath=2.1.6 - - entsoe-py=0.6.6 - - et_xmlfile=1.1.0 - - exceptiongroup=1.2.0 - - executing=2.0.1 - - expat=2.5.0 - - filelock=3.13.1 - - fiona=1.9.5 - - folium=0.15.1 - - font-ttf-dejavu-sans-mono=2.37 - - font-ttf-inconsolata=3.000 - - font-ttf-source-code-pro=2.038 - - font-ttf-ubuntu=0.83 - - fontconfig=2.14.2 - - fonts-conda-ecosystem=1 - - fonts-conda-forge=1 - - fonttools=4.49.0 - - freetype=2.12.1 - - freexl=2.0.0 - - fribidi=1.0.10 - - fsspec=2024.2.0 - - gdal=3.8.4 - - gdk-pixbuf=2.42.10 - - geographiclib=1.52 - - geojson-rewind=1.1.0 - - geopandas=0.14.3 - - geopandas-base=0.14.3 - - geopy=2.4.1 - - geos=3.12.1 - - geotiff=1.7.1 - - gettext=0.21.1 - - gflags=2.2.2 - - giflib=5.2.1 - - gitdb=4.0.11 - - gitpython=3.1.42 - - glib=2.78.4 - - glib-tools=2.78.4 - - glog=0.6.0 - - glpk=5.0 - - gmp=6.3.0 - - graphite2=1.3.13 - - graphviz=9.0.0 - - gst-plugins-base=1.22.9 - - gstreamer=1.22.9 - - gtk2=2.24.33 - - gts=0.7.6 - - harfbuzz=8.3.0 - - hdf4=4.2.15 - - hdf5=1.14.3 - - humanfriendly=10.0 - - icu=73.2 - - identify=2.5.35 - - idna=3.6 - - importlib-metadata=7.0.1 - - importlib_metadata=7.0.1 - - importlib_resources=6.1.1 - - iniconfig=2.0.0 - - ipopt=3.14.14 - - ipython=8.21.0 - - jedi=0.19.1 - - jinja2=3.1.3 - - joblib=1.3.2 - - json-c=0.17 - - jsonschema=4.21.1 - - jsonschema-specifications=2023.12.1 - - jupyter_core=5.7.1 - - kealib=1.5.3 - - keyutils=1.6.1 - - kiwisolver=1.4.5 - - krb5=1.21.2 - - lame=3.100 - - lcms2=2.16 - - ld_impl_linux-64=2.40 - - lerc=4.0.0 - - 
libabseil=20230802.1 - - libaec=1.1.2 - - libarchive=3.7.2 - - libarrow=15.0.0 - - libarrow-acero=15.0.0 - - libarrow-dataset=15.0.0 - - libarrow-flight=15.0.0 - - libarrow-flight-sql=15.0.0 - - libarrow-gandiva=15.0.0 - - libarrow-substrait=15.0.0 - - libblas=3.9.0 - - libboost-headers=1.84.0 - - libbrotlicommon=1.1.0 - - libbrotlidec=1.1.0 - - libbrotlienc=1.1.0 - - libcap=2.69 - - libcblas=3.9.0 - - libclang=15.0.7 - - libclang13=15.0.7 - - libcrc32c=1.1.2 - - libcups=2.3.3 - - libcurl=8.5.0 - - libdeflate=1.19 - - libedit=3.1.20191231 - - libev=4.33 - - libevent=2.1.12 - - libexpat=2.5.0 - - libffi=3.4.2 - - libflac=1.4.3 - - libgcc-ng=13.2.0 - - libgcrypt=1.10.3 - - libgd=2.3.3 - - libgdal=3.8.4 - - libgfortran-ng=13.2.0 - - libgfortran5=13.2.0 - - libglib=2.78.4 - - libgomp=13.2.0 - - libgoogle-cloud=2.12.0 - - libgpg-error=1.47 - - libgrpc=1.60.1 - - libhwloc=2.9.3 - - libiconv=1.17 - - libjpeg-turbo=3.0.0 - - libkml=1.3.0 - - liblapack=3.9.0 - - liblapacke=3.9.0 - - libllvm15=15.0.7 - - libnetcdf=4.9.2 - - libnghttp2=1.58.0 - - libnl=3.9.0 - - libnsl=2.0.1 - - libnuma=2.0.16 - - libogg=1.3.4 - - libopenblas=0.3.26 - - libopus=1.3.1 - - libparquet=15.0.0 - - libpng=1.6.42 - - libpq=16.2 - - libprotobuf=4.25.1 - - libre2-11=2023.06.02 - - librsvg=2.56.3 - - librttopo=1.1.0 - - libscotch=7.0.4 - - libsndfile=1.2.2 - - libspatialindex=1.9.3 - - libspatialite=5.1.0 - - libspral=2023.09.07 - - libsqlite=3.45.1 - - libssh2=1.11.0 - - libstdcxx-ng=13.2.0 - - libsystemd0=255 - - libthrift=0.19.0 - - libtiff=4.6.0 - - libutf8proc=2.8.0 - - libuuid=2.38.1 - - libvorbis=1.3.7 - - libwebp=1.3.2 - - libwebp-base=1.3.2 - - libxcb=1.15 - - libxcrypt=4.4.36 - - libxkbcommon=1.6.0 - - libxml2=2.12.5 - - libxslt=1.1.39 - - libzip=1.10.1 - - libzlib=1.2.13 - - linopy=0.3.4 - - locket=1.0.0 - - lxml=5.1.0 - - lz4=4.3.3 - - lz4-c=1.9.4 - - lzo=2.10 - - mapclassify=2.6.1 - - markupsafe=2.1.5 - - matplotlib=3.8.3 - - matplotlib-base=3.8.3 - - matplotlib-inline=0.1.6 - - 
memory_profiler=0.61.0 - - metis=5.1.0 - - minizip=4.0.4 - - mpg123=1.32.4 - - msgpack-python=1.0.7 - - mumps-include=5.6.2 - - mumps-seq=5.6.2 - - munkres=1.1.4 - - mysql-common=8.0.33 - - mysql-libs=8.0.33 - - nbformat=5.9.2 - - ncurses=6.4 - - netcdf4=1.6.5 - - networkx=3.2.1 - - nodeenv=1.8.0 - - nomkl=1.0 - - nspr=4.35 - - nss=3.98 - - numexpr=2.9.0 - - numpy=1.26.4 - - openjdk=21.0.2 - - openjpeg=2.5.0 - - openpyxl=3.1.2 - - openssl=3.2.1 - - orc=1.9.2 - - packaging=23.2 - - pandas=2.2.0 - - pango=1.50.14 - - parso=0.8.3 - - partd=1.4.1 - - patsy=0.5.6 - - pcre2=10.42 - - pexpect=4.9.0 - - pickleshare=0.7.5 - - pillow=10.2.0 - - pip=24.0 - - pixman=0.43.2 - - pkgutil-resolve-name=1.3.10 - - plac=1.4.2 - - platformdirs=4.2.0 - - pluggy=1.4.0 - - ply=3.11 - - poppler=24.02.0 - - poppler-data=0.4.12 - - postgresql=16.2 - - powerplantmatching=0.5.11 - - pre-commit=3.6.2 - - progressbar2=4.3.2 - - proj=9.3.1 - - prompt-toolkit=3.0.42 - - psutil=5.9.8 - - pthread-stubs=0.4 - - ptyprocess=0.7.0 - - pulp=2.7.0 - - pulseaudio-client=16.1 - - pure_eval=0.2.2 - - py-cpuinfo=9.0.0 - - pyarrow=15.0.0 - - pyarrow-hotfix=0.6 - - pycountry=22.3.5 - - pycparser=2.21 - - pygments=2.17.2 - - pyomo=6.6.1 - - pyparsing=3.1.1 - - pyproj=3.6.1 - - pypsa=0.27.0 - - pyqt=5.15.9 - - pyqt5-sip=12.12.2 - - pyscipopt=4.4.0 - - pyshp=2.3.1 - - pysocks=1.7.1 - - pytables=3.9.2 - - pytest=8.0.0 - - python=3.11.8 - - python-dateutil=2.8.2 - - python-fastjsonschema=2.19.1 - - python-tzdata=2024.1 - - python-utils=3.8.2 - - python_abi=3.11 - - pytz=2024.1 - - pyxlsb=1.0.10 - - pyyaml=6.0.1 - - qt-main=5.15.8 - - rasterio=1.3.9 - - rdma-core=50.0 - - re2=2023.06.02 - - readline=8.2 - - referencing=0.33.0 - - requests=2.31.0 - - reretry=0.11.8 - - rioxarray=0.15.1 - - rpds-py=0.18.0 - - rtree=1.2.0 - - s2n=1.4.3 - - scikit-learn=1.4.1.post1 - - scip=8.1.0 - - scipy=1.12.0 - - scotch=7.0.4 - - seaborn=0.13.2 - - seaborn-base=0.13.2 - - setuptools=69.1.0 - - setuptools-scm=8.0.4 - - 
setuptools_scm=8.0.4 - - shapely=2.0.2 - - sip=6.7.12 - - six=1.16.0 - - smart_open=6.4.0 - - smmap=5.0.0 - - snakemake-minimal=7.32.4 - - snappy=1.1.10 - - snuggs=1.4.7 - - sortedcontainers=2.4.0 - - soupsieve=2.5 - - sqlite=3.45.1 - - stack_data=0.6.2 - - statsmodels=0.14.1 - - stopit=1.1.2 - - tabula-py=2.7.0 - - tabulate=0.9.0 - - tbb=2021.11.0 - - tblib=3.0.0 - - threadpoolctl=3.3.0 - - throttler=1.2.2 - - tiledb=2.20.0 - - tk=8.6.13 - - toml=0.10.2 - - tomli=2.0.1 - - toolz=0.12.1 - - toposort=1.10 - - tornado=6.3.3 - - tqdm=4.66.2 - - traitlets=5.14.1 - - typing-extensions=4.9.0 - - typing_extensions=4.9.0 - - tzcode=2024a - - tzdata=2024a - - ucx=1.15.0 - - ukkonen=1.0.1 - - unidecode=1.3.8 - - unixodbc=2.3.12 - - uriparser=0.9.7 - - urllib3=2.2.1 - - validators=0.22.0 - - virtualenv=20.25.0 - - wcwidth=0.2.13 - - wheel=0.42.0 - - wrapt=1.16.0 - - xarray=2024.2.0 - - xcb-util=0.4.0 - - xcb-util-image=0.4.0 - - xcb-util-keysyms=0.4.0 - - xcb-util-renderutil=0.3.9 - - xcb-util-wm=0.4.1 - - xerces-c=3.2.5 - - xkeyboard-config=2.41 - - xlrd=2.0.1 - - xorg-fixesproto=5.0 - - xorg-inputproto=2.3.2 - - xorg-kbproto=1.0.7 - - xorg-libice=1.1.1 - - xorg-libsm=1.2.4 - - xorg-libx11=1.8.7 - - xorg-libxau=1.0.11 - - xorg-libxdmcp=1.1.3 - - xorg-libxext=1.3.4 - - xorg-libxfixes=5.0.3 - - xorg-libxi=1.7.10 - - xorg-libxrender=0.9.11 - - xorg-libxt=1.3.0 - - xorg-libxtst=1.2.3 - - xorg-recordproto=1.14.2 - - xorg-renderproto=0.11.1 - - xorg-xextproto=7.3.0 - - xorg-xf86vidmodeproto=2.3.1 - - xorg-xproto=7.0.31 - - xyzservices=2023.10.1 - - xz=5.2.6 - - yaml=0.2.5 - - yte=1.5.4 - - zict=3.0.0 - - zipp=3.17.0 - - zlib=1.2.13 - - zlib-ng=2.0.7 - - zstd=1.5.5 - - pip: - - highspy==1.5.3 - - tsam==2.3.1 +- _libgcc_mutex=0.1 +- _openmp_mutex=4.5 +- affine=2.4.0 +- alsa-lib=1.2.10 +- ampl-mp=3.1.0 +- amply=0.1.6 +- appdirs=1.4.4 +- asttokens=2.4.1 +- atk-1.0=2.38.0 +- atlite=0.2.12 +- attr=2.5.1 +- attrs=23.2.0 +- aws-c-auth=0.7.15 +- aws-c-cal=0.6.9 +- aws-c-common=0.9.12 +- 
aws-c-compression=0.2.17 +- aws-c-event-stream=0.4.1 +- aws-c-http=0.8.0 +- aws-c-io=0.14.3 +- aws-c-mqtt=0.10.1 +- aws-c-s3=0.5.0 +- aws-c-sdkutils=0.1.14 +- aws-checksums=0.1.17 +- aws-crt-cpp=0.26.1 +- aws-sdk-cpp=1.11.242 +- azure-core-cpp=1.10.3 +- azure-storage-blobs-cpp=12.10.0 +- azure-storage-common-cpp=12.5.0 +- beautifulsoup4=4.12.3 +- blosc=1.21.5 +- bokeh=3.3.4 +- bottleneck=1.3.7 +- branca=0.7.1 +- brotli=1.1.0 +- brotli-bin=1.1.0 +- brotli-python=1.1.0 +- bzip2=1.0.8 +- c-ares=1.26.0 +- c-blosc2=2.13.2 +- ca-certificates=2024.2.2 +- cairo=1.18.0 +- cartopy=0.22.0 +- cdsapi=0.6.1 +- certifi=2024.2.2 +- cffi=1.16.0 +- cfgv=3.3.1 +- cfitsio=4.3.1 +- cftime=1.6.3 +- charset-normalizer=3.3.2 +- click=8.1.7 +- click-plugins=1.1.1 +- cligj=0.7.2 +- cloudpickle=3.0.0 +- coin-or-cbc=2.10.10 +- coin-or-cgl=0.60.7 +- coin-or-clp=1.17.8 +- coin-or-osi=0.108.8 +- coin-or-utils=2.11.9 +- coincbc=2.10.10 +- colorama=0.4.6 +- configargparse=1.7 +- connection_pool=0.0.3 +- contourpy=1.2.0 +- country_converter=1.2 +- cppad=20240000.2 +- cycler=0.12.1 +- cytoolz=0.12.3 +- dask=2024.2.0 +- dask-core=2024.2.0 +- datrie=0.8.2 +- dbus=1.13.6 +- decorator=5.1.1 +- deprecation=2.1.0 +- descartes=1.1.0 +- distlib=0.3.8 +- distributed=2024.2.0 +- distro=1.9.0 +- docutils=0.20.1 +- dpath=2.1.6 +- entsoe-py=0.6.6 +- et_xmlfile=1.1.0 +- exceptiongroup=1.2.0 +- executing=2.0.1 +- expat=2.5.0 +- filelock=3.13.1 +- fiona=1.9.5 +- folium=0.15.1 +- font-ttf-dejavu-sans-mono=2.37 +- font-ttf-inconsolata=3.000 +- font-ttf-source-code-pro=2.038 +- font-ttf-ubuntu=0.83 +- fontconfig=2.14.2 +- fonts-conda-ecosystem=1 +- fonts-conda-forge=1 +- fonttools=4.49.0 +- freetype=2.12.1 +- freexl=2.0.0 +- fribidi=1.0.10 +- fsspec=2024.2.0 +- gdal=3.8.4 +- gdk-pixbuf=2.42.10 +- geographiclib=1.52 +- geojson-rewind=1.1.0 +- geopandas=0.14.3 +- geopandas-base=0.14.3 +- geopy=2.4.1 +- geos=3.12.1 +- geotiff=1.7.1 +- gettext=0.21.1 +- gflags=2.2.2 +- giflib=5.2.1 +- gitdb=4.0.11 +- gitpython=3.1.42 +- 
glib=2.78.4 +- glib-tools=2.78.4 +- glog=0.6.0 +- glpk=5.0 +- gmp=6.3.0 +- graphite2=1.3.13 +- graphviz=9.0.0 +- gst-plugins-base=1.22.9 +- gstreamer=1.22.9 +- gtk2=2.24.33 +- gts=0.7.6 +- harfbuzz=8.3.0 +- hdf4=4.2.15 +- hdf5=1.14.3 +- humanfriendly=10.0 +- icu=73.2 +- identify=2.5.35 +- idna=3.6 +- importlib-metadata=7.0.1 +- importlib_metadata=7.0.1 +- importlib_resources=6.1.1 +- iniconfig=2.0.0 +- ipopt=3.14.14 +- ipython=8.21.0 +- jedi=0.19.1 +- jinja2=3.1.3 +- joblib=1.3.2 +- json-c=0.17 +- jsonschema=4.21.1 +- jsonschema-specifications=2023.12.1 +- jupyter_core=5.7.1 +- kealib=1.5.3 +- keyutils=1.6.1 +- kiwisolver=1.4.5 +- krb5=1.21.2 +- lame=3.100 +- lcms2=2.16 +- ld_impl_linux-64=2.40 +- lerc=4.0.0 +- libabseil=20230802.1 +- libaec=1.1.2 +- libarchive=3.7.2 +- libarrow=15.0.0 +- libarrow-acero=15.0.0 +- libarrow-dataset=15.0.0 +- libarrow-flight=15.0.0 +- libarrow-flight-sql=15.0.0 +- libarrow-gandiva=15.0.0 +- libarrow-substrait=15.0.0 +- libblas=3.9.0 +- libboost-headers=1.84.0 +- libbrotlicommon=1.1.0 +- libbrotlidec=1.1.0 +- libbrotlienc=1.1.0 +- libcap=2.69 +- libcblas=3.9.0 +- libclang=15.0.7 +- libclang13=15.0.7 +- libcrc32c=1.1.2 +- libcups=2.3.3 +- libcurl=8.5.0 +- libdeflate=1.19 +- libedit=3.1.20191231 +- libev=4.33 +- libevent=2.1.12 +- libexpat=2.5.0 +- libffi=3.4.2 +- libflac=1.4.3 +- libgcc-ng=13.2.0 +- libgcrypt=1.10.3 +- libgd=2.3.3 +- libgdal=3.8.4 +- libgfortran-ng=13.2.0 +- libgfortran5=13.2.0 +- libglib=2.78.4 +- libgomp=13.2.0 +- libgoogle-cloud=2.12.0 +- libgpg-error=1.47 +- libgrpc=1.60.1 +- libhwloc=2.9.3 +- libiconv=1.17 +- libjpeg-turbo=3.0.0 +- libkml=1.3.0 +- liblapack=3.9.0 +- liblapacke=3.9.0 +- libllvm15=15.0.7 +- libnetcdf=4.9.2 +- libnghttp2=1.58.0 +- libnl=3.9.0 +- libnsl=2.0.1 +- libnuma=2.0.16 +- libogg=1.3.4 +- libopenblas=0.3.26 +- libopus=1.3.1 +- libparquet=15.0.0 +- libpng=1.6.42 +- libpq=16.2 +- libprotobuf=4.25.1 +- libre2-11=2023.06.02 +- librsvg=2.56.3 +- librttopo=1.1.0 +- libscotch=7.0.4 +- libsndfile=1.2.2 
+- libspatialindex=1.9.3 +- libspatialite=5.1.0 +- libspral=2023.09.07 +- libsqlite=3.45.1 +- libssh2=1.11.0 +- libstdcxx-ng=13.2.0 +- libsystemd0=255 +- libthrift=0.19.0 +- libtiff=4.6.0 +- libutf8proc=2.8.0 +- libuuid=2.38.1 +- libvorbis=1.3.7 +- libwebp=1.3.2 +- libwebp-base=1.3.2 +- libxcb=1.15 +- libxcrypt=4.4.36 +- libxkbcommon=1.6.0 +- libxml2=2.12.5 +- libxslt=1.1.39 +- libzip=1.10.1 +- libzlib=1.2.13 +- linopy=0.3.4 +- locket=1.0.0 +- lxml=5.1.0 +- lz4=4.3.3 +- lz4-c=1.9.4 +- lzo=2.10 +- mapclassify=2.6.1 +- markupsafe=2.1.5 +- matplotlib=3.8.3 +- matplotlib-base=3.8.3 +- matplotlib-inline=0.1.6 +- memory_profiler=0.61.0 +- metis=5.1.0 +- minizip=4.0.4 +- mpg123=1.32.4 +- msgpack-python=1.0.7 +- mumps-include=5.6.2 +- mumps-seq=5.6.2 +- munkres=1.1.4 +- mysql-common=8.0.33 +- mysql-libs=8.0.33 +- nbformat=5.9.2 +- ncurses=6.4 +- netcdf4=1.6.5 +- networkx=3.2.1 +- nodeenv=1.8.0 +- nomkl=1.0 +- nspr=4.35 +- nss=3.98 +- numexpr=2.9.0 +- numpy=1.26.4 +- openjdk=21.0.2 +- openjpeg=2.5.0 +- openpyxl=3.1.2 +- openssl=3.2.1 +- orc=1.9.2 +- packaging=23.2 +- pandas=2.2.0 +- pango=1.50.14 +- parso=0.8.3 +- partd=1.4.1 +- patsy=0.5.6 +- pcre2=10.42 +- pexpect=4.9.0 +- pickleshare=0.7.5 +- pillow=10.2.0 +- pip=24.0 +- pixman=0.43.2 +- pkgutil-resolve-name=1.3.10 +- plac=1.4.2 +- platformdirs=4.2.0 +- pluggy=1.4.0 +- ply=3.11 +- poppler=24.02.0 +- poppler-data=0.4.12 +- postgresql=16.2 +- powerplantmatching=0.5.11 +- pre-commit=3.6.2 +- progressbar2=4.3.2 +- proj=9.3.1 +- prompt-toolkit=3.0.42 +- psutil=5.9.8 +- pthread-stubs=0.4 +- ptyprocess=0.7.0 +- pulp=2.7.0 +- pulseaudio-client=16.1 +- pure_eval=0.2.2 +- py-cpuinfo=9.0.0 +- pyarrow=15.0.0 +- pyarrow-hotfix=0.6 +- pycountry=22.3.5 +- pycparser=2.21 +- pygments=2.17.2 +- pyomo=6.6.1 +- pyparsing=3.1.1 +- pyproj=3.6.1 +- pypsa=0.27.0 +- pyqt=5.15.9 +- pyqt5-sip=12.12.2 +- pyscipopt=4.4.0 +- pyshp=2.3.1 +- pysocks=1.7.1 +- pytables=3.9.2 +- pytest=8.0.0 +- python=3.11.8 +- python-dateutil=2.8.2 +- 
python-fastjsonschema=2.19.1 +- python-tzdata=2024.1 +- python-utils=3.8.2 +- python_abi=3.11 +- pytz=2024.1 +- pyxlsb=1.0.10 +- pyyaml=6.0.1 +- qt-main=5.15.8 +- rasterio=1.3.9 +- rdma-core=50.0 +- re2=2023.06.02 +- readline=8.2 +- referencing=0.33.0 +- requests=2.31.0 +- reretry=0.11.8 +- rioxarray=0.15.1 +- rpds-py=0.18.0 +- rtree=1.2.0 +- s2n=1.4.3 +- scikit-learn=1.4.1.post1 +- scip=8.1.0 +- scipy=1.12.0 +- scotch=7.0.4 +- seaborn=0.13.2 +- seaborn-base=0.13.2 +- setuptools=69.1.0 +- setuptools-scm=8.0.4 +- setuptools_scm=8.0.4 +- shapely=2.0.2 +- sip=6.7.12 +- six=1.16.0 +- smart_open=6.4.0 +- smmap=5.0.0 +- snakemake-minimal=7.32.4 +- snappy=1.1.10 +- snuggs=1.4.7 +- sortedcontainers=2.4.0 +- soupsieve=2.5 +- sqlite=3.45.1 +- stack_data=0.6.2 +- statsmodels=0.14.1 +- stopit=1.1.2 +- tabula-py=2.7.0 +- tabulate=0.9.0 +- tbb=2021.11.0 +- tblib=3.0.0 +- threadpoolctl=3.3.0 +- throttler=1.2.2 +- tiledb=2.20.0 +- tk=8.6.13 +- toml=0.10.2 +- tomli=2.0.1 +- toolz=0.12.1 +- toposort=1.10 +- tornado=6.3.3 +- tqdm=4.66.2 +- traitlets=5.14.1 +- typing-extensions=4.9.0 +- typing_extensions=4.9.0 +- tzcode=2024a +- tzdata=2024a +- ucx=1.15.0 +- ukkonen=1.0.1 +- unidecode=1.3.8 +- unixodbc=2.3.12 +- uriparser=0.9.7 +- urllib3=2.2.1 +- validators=0.22.0 +- virtualenv=20.25.0 +- wcwidth=0.2.13 +- wheel=0.42.0 +- wrapt=1.16.0 +- xarray=2024.2.0 +- xcb-util=0.4.0 +- xcb-util-image=0.4.0 +- xcb-util-keysyms=0.4.0 +- xcb-util-renderutil=0.3.9 +- xcb-util-wm=0.4.1 +- xerces-c=3.2.5 +- xkeyboard-config=2.41 +- xlrd=2.0.1 +- xorg-fixesproto=5.0 +- xorg-inputproto=2.3.2 +- xorg-kbproto=1.0.7 +- xorg-libice=1.1.1 +- xorg-libsm=1.2.4 +- xorg-libx11=1.8.7 +- xorg-libxau=1.0.11 +- xorg-libxdmcp=1.1.3 +- xorg-libxext=1.3.4 +- xorg-libxfixes=5.0.3 +- xorg-libxi=1.7.10 +- xorg-libxrender=0.9.11 +- xorg-libxt=1.3.0 +- xorg-libxtst=1.2.3 +- xorg-recordproto=1.14.2 +- xorg-renderproto=0.11.1 +- xorg-xextproto=7.3.0 +- xorg-xf86vidmodeproto=2.3.1 +- xorg-xproto=7.0.31 +- xyzservices=2023.10.1 
+- xz=5.2.6 +- yaml=0.2.5 +- yte=1.5.4 +- zict=3.0.0 +- zipp=3.17.0 +- zlib=1.2.13 +- zlib-ng=2.0.7 +- zstd=1.5.5 +- pip: + - highspy==1.5.3 + - tsam==2.3.1 From d713e3c52b2e77b30816a3b829faee3fc3ebd381 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 20 Feb 2024 09:03:54 +0000 Subject: [PATCH 74/76] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/prepare_sector_network.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 810257c0..f5e6604b 100755 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3059,8 +3059,7 @@ def add_industry(n, costs): + mwh_coal_per_mwh_coke * industrial_demand["coke"] ) / nhours - p_set.rename(lambda x: x + " coal for industry", - inplace=True) + p_set.rename(lambda x: x + " coal for industry", inplace=True) if not options["regional_coal_demand"]: p_set = p_set.sum() From 26b202f46399efe53a10b5a1342e9059ffe8c867 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 20 Feb 2024 12:34:19 +0100 Subject: [PATCH 75/76] prepare_sector: automatically interpolate in config get() function --- doc/release_notes.rst | 8 +++++--- scripts/prepare_sector_network.py | 27 ++++++++++++++++++++++++++- 2 files changed, 31 insertions(+), 4 deletions(-) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index bc1cd2c6..15814036 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -7,9 +7,11 @@ Release Notes ########################################## -.. Upcoming Release -.. ================ -.. +Upcoming Release +================ + +* Linearly interpolate missing investment periods in year-dependent + configuration options. 
PyPSA-Eur 0.10.0 (19th February 2024) ===================================== diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index f5e6604b..3438e3d8 100755 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -215,7 +215,32 @@ def get(item, investment_year=None): """ Check whether item depends on investment year. """ - return item[investment_year] if isinstance(item, dict) else item + if not isinstance(item, dict): + return item + elif investment_year in item.keys(): + return item[investment_year] + else: + logger.warning( + f"Investment key {investment_year} not found in dictionary {item}." + ) + keys = sorted(item.keys()) + if investment_year < keys[0]: + logger.warning(f"Lower than minimum key. Taking minimum key {keys[0]}") + return item[keys[0]] + elif investment_year > keys[-1]: + logger.warning(f"Higher than maximum key. Taking maximum key {keys[0]}") + return item[keys[-1]] + else: + logger.warning( + "Interpolate linearly between the next lower and next higher year." 
+ ) + lower_key = max(k for k in keys if k < investment_year) + higher_key = min(k for k in keys if k > investment_year) + lower = item[lower_key] + higher = item[higher_key] + return lower + (higher - lower) * (investment_year - lower_key) / ( + higher_key - lower_key + ) def co2_emissions_year( From 66d078e4074986800c61e1df66d33b804a04782c Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 20 Feb 2024 14:24:23 +0100 Subject: [PATCH 76/76] reorder release notes --- Snakefile | 1 - doc/release_notes.rst | 10 +++++----- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/Snakefile b/Snakefile index b3d2735c..78a28373 100644 --- a/Snakefile +++ b/Snakefile @@ -26,7 +26,6 @@ for template, target in default_files.items(): copyfile(template, target) - configfile: "config/config.default.yaml" configfile: "config/config.yaml" diff --git a/doc/release_notes.rst b/doc/release_notes.rst index fee8186b..e60f4f95 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -13,11 +13,6 @@ Upcoming Release * Linearly interpolate missing investment periods in year-dependent configuration options. -PyPSA-Eur 0.10.0 (19th February 2024) -===================================== - -**New Features** - * Added new scenario management that supports the simultaneous execution of multiple scenarios with a single ``snakemake`` call. For this purpose, a ``scenarios.yaml`` file is introduced which contains customizable scenario @@ -84,6 +79,11 @@ PyPSA-Eur 0.10.0 (19th February 2024) * The outputs of the rule ``retrieve_gas_infrastructure_data`` no longer marked as ``protected()`` as the download size is small. +PyPSA-Eur 0.10.0 (19th February 2024) +===================================== + +**New Features** + * Improved representation of industry transition pathways. A new script was added to interpolate industry sector ratios from today's status quo to future systems (i.e. specific emissions and demands for energy and feedstocks). For