introduce scenario-management

Fabian 2023-08-15 15:02:41 +02:00
parent e7836246ce
commit e28ae59375
47 changed files with 419 additions and 204 deletions


@ -83,6 +83,7 @@ jobs:
snakemake -call solve_elec_networks --configfile config/test/config.electricity.yaml --rerun-triggers=mtime snakemake -call solve_elec_networks --configfile config/test/config.electricity.yaml --rerun-triggers=mtime
snakemake -call all --configfile config/test/config.overnight.yaml --rerun-triggers=mtime snakemake -call all --configfile config/test/config.overnight.yaml --rerun-triggers=mtime
snakemake -call all --configfile config/test/config.myopic.yaml --rerun-triggers=mtime snakemake -call all --configfile config/test/config.myopic.yaml --rerun-triggers=mtime
snakemake -call all --configfile config/test/config.electricity.scenario.yaml
- name: Upload artifacts - name: Upload artifacts
uses: actions/upload-artifact@v3 uses: actions/upload-artifact@v3

.gitignore (vendored): 17 changes

@ -19,10 +19,15 @@ gurobi.log
/notebooks /notebooks
/data /data
/cutouts /cutouts
/tmp
doc/_build doc/_build
/scripts/old
/scripts/create_scenarios.py
config.yaml config.yaml
config/scenario.yaml
dconf dconf
/data/links_p_nom.csv /data/links_p_nom.csv
@ -51,25 +56,15 @@ publications.jrc.ec.europa.eu/
*.nc *.nc
*~ *~
/scripts/old
*.pyc *.pyc
/cutouts
/tmp
/pypsa
*.xlsx *.xlsx
config.yaml
doc/_build
*.xls *.xls
*.geojson *.geojson
*.ipynb *.ipynb
data/costs_*
merger-todos.md merger-todos.md


@ -4,14 +4,13 @@
from os.path import normpath, exists from os.path import normpath, exists
from shutil import copyfile, move, rmtree from shutil import copyfile, move, rmtree
from pathlib import Path
import yaml
from snakemake.remote.HTTP import RemoteProvider as HTTPRemoteProvider from snakemake.remote.HTTP import RemoteProvider as HTTPRemoteProvider
HTTP = HTTPRemoteProvider()
from snakemake.utils import min_version from snakemake.utils import min_version
min_version("7.7") min_version("7.7")
HTTP = HTTPRemoteProvider()
if not exists("config/config.yaml"): if not exists("config/config.yaml"):
@ -24,8 +23,16 @@ configfile: "config/config.yaml"
COSTS = f"data/costs_{config['costs']['year']}.csv" COSTS = f"data/costs_{config['costs']['year']}.csv"
ATLITE_NPROCESSES = config["atlite"].get("nprocesses", 4) ATLITE_NPROCESSES = config["atlite"].get("nprocesses", 4)
run = config.get("run", {}) run = config["run"]
RDIR = run["name"] + "/" if run.get("name") else "" if run.get("scenarios", False):
if run["shared_resources"]:
raise ValueError("Cannot use shared resources with scenarios")
scenarios = yaml.safe_load(Path(config["scenariofile"]).read_text())
RDIR = "{run}/"
elif run["name"]:
RDIR = run["name"] + "/"
else:
RDIR = ""
CDIR = RDIR if not run.get("shared_cutouts") else "" CDIR = RDIR if not run.get("shared_cutouts") else ""
LOGS = "logs/" + RDIR LOGS = "logs/" + RDIR


@ -5,6 +5,7 @@
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#top-level-configuration # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#top-level-configuration
version: 0.8.1 version: 0.8.1
tutorial: false tutorial: false
scenariofile: config/scenarios.yaml
logging: logging:
level: INFO level: INFO
@ -21,6 +22,7 @@ remote:
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run
run: run:
name: "" name: ""
scenarios: false
disable_progressbar: false disable_progressbar: false
shared_resources: false shared_resources: false
shared_cutouts: true shared_cutouts: true


@ -1,31 +0,0 @@
# -*- coding: utf-8 -*-
# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: MIT
import itertools
# Insert your config values that should be altered in the template.
template = """
scenario{scenario_number}:
    sector:
        carbon_: {config_value}
    config_section2:
        config_key2: {config_value2}
"""

# Define all possible combinations of config values.
# This must define all config values that are used in the template.
config_values = dict(config_value=["true", "false"], config_value2=[1, 2, 3, 4, 5])

combinations = [
    dict(zip(config_values.keys(), values))
    for values in itertools.product(*config_values.values())
]

# Write the scenarios to a file.
filename = "scenarios.yaml"
with open(filename, "w") as f:
    for i, config in enumerate(combinations):
        f.write(template.format(scenario_number=i, **config))
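A quick way to sanity-check the generated file (a hypothetical snippet, assuming the generator above has been run from the same directory):

import yaml

with open("scenarios.yaml") as f:
    scenarios = yaml.safe_load(f)

print(list(scenarios))         # e.g. ['scenario0', 'scenario1', ...]
print(scenarios["scenario0"])  # nested dict of config overrides for the first combination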


@ -0,0 +1,12 @@
# -*- coding: utf-8 -*-
# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: MIT
# This file is used to define the scenarios that are run by Snakemake.
# Each entry on the first level is a scenario. Each scenario can contain
# configuration overrides with respect to the config/config.yaml settings.
#
# Example
#
# custom-scenario: # name of the scenario
# electricity:
# renewable_carriers: [wind, solar] # override the list of renewable carriers
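A minimal sketch of how such a file is consumed, mirroring the yaml.safe_load call added to the Snakefile above; the scenario name shown in the output comment is the hypothetical one from the example:

from pathlib import Path

import yaml

# the shipped file contains only comments, so safe_load returns None
scenarios = yaml.safe_load(Path("config/scenarios.yaml").read_text()) or {}

# each top-level key names a scenario; its value holds config overrides, e.g.
# {"custom-scenario": {"electricity": {"renewable_carriers": ["wind", "solar"]}}}
for name, overrides in scenarios.items():
    print(name, overrides)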


@ -0,0 +1,89 @@
# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: CC0-1.0
tutorial: true
scenariofile: "config/test/scenarios.electricity.yaml"
run:
name:
- test-elec-no-offshore-wind
- test-elec-no-onshore-wind
scenarios: true
disable_progressbar: true
shared_resources: false # cannot be true if scenarios is true
shared_cutouts: true
scenario:
clusters:
- 5
opts:
- Co2L-24H
countries: ['BE']
snapshots:
start: "2013-03-01"
end: "2013-03-08"
electricity:
co2limit: 100.e+6
extendable_carriers:
Generator: [OCGT]
StorageUnit: [battery]
Store: [H2]
Link: [H2 pipeline]
renewable_carriers: [solar, onwind, offwind-ac, offwind-dc]
atlite:
default_cutout: be-03-2013-era5
cutouts:
be-03-2013-era5:
module: era5
x: [4., 15.]
y: [46., 56.]
time: ["2013-03-01", "2013-03-08"]
renewable:
onwind:
cutout: be-03-2013-era5
offwind-ac:
cutout: be-03-2013-era5
max_depth: false
offwind-dc:
cutout: be-03-2013-era5
max_depth: false
solar:
cutout: be-03-2013-era5
clustering:
exclude_carriers: ["OCGT", "offwind-ac", "coal"]
lines:
dynamic_line_rating:
activate: true
cutout: be-03-2013-era5
max_line_rating: 1.3
solving:
solver:
name: glpk
options: "glpk-default"
plotting:
map:
boundaries:
eu_node_location:
x: -5.5
y: 46.
costs_max: 1000
costs_threshold: 0.0000001
energy_max:
energy_min:
energy_threshold: 0.000001


@ -0,0 +1,14 @@
# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: CC0-1.0
test-elec-no-offshore-wind:
electricity:
renewable_carriers: [solar, onwind]
test-elec-no-onshore-wind:
electricity:
extendable_carriers:
Generator: [OCGT]
renewable_carriers: [solar, offwind-ac, offwind-dc]


@ -1,5 +1,6 @@
,Unit,Values,Description ,Unit,Values,Description
name,--,"any string","Specify a name for your run. Results will be stored under this name." name,--,str/list,"Specify a name for your run. Results will be stored under this name. If ``scenarios`` is set to ``true``, the name must contain a subset of scenario names defined in ``scenariofile``."
scenarios,--,bool,"{true, false}","Switch to select whether workflow should generate scenarios based on ``scenariofile``."
disable_progrssbar,bool,"{true, false}","Switch to select whether progressbar should be disabled." disable_progrssbar,bool,"{true, false}","Switch to select whether progressbar should be disabled."
shared_resources,bool,"{true, false}","Switch to select whether resources should be shared across runs." shared_resources,bool,"{true, false}","Switch to select whether resources should be shared across runs."
shared_cutouts,bool,"{true, false}","Switch to select whether cutouts should be shared across runs." shared_cutouts,bool,"{true, false}","Switch to select whether cutouts should be shared across runs."
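A sketch of the constraint described above (illustrative only, not code from the repository): with ``run: scenarios: true``, every entry of ``run: name`` must be a top-level key of the scenario file.

from pathlib import Path

import yaml

config = yaml.safe_load(Path("config/config.yaml").read_text())
run = config["run"]

if run.get("scenarios", False):
    scenarios = yaml.safe_load(Path(config["scenariofile"]).read_text()) or {}
    names = run["name"] if isinstance(run["name"], list) else [run["name"]]
    missing = set(names) - set(scenarios)
    assert not missing, f"run:name entries missing from scenariofile: {missing}"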



@ -1,6 +1,7 @@
,Unit,Values,Description ,Unit,Values,Description
version,--,0.x.x,Version of PyPSA-Eur. Descriptive only. version,--,0.x.x,Version of PyPSA-Eur. Descriptive only.
tutorial,bool,"{true, false}",Switch to retrieve the tutorial data set instead of the full data set. tutorial,bool,"{true, false}",Switch to retrieve the tutorial data set instead of the full data set.
scenariofile,str,,"Path to the scenario YAML file. The scenario file contains config overrides for each scenario. In order to be taken into account, ``run:scenarios`` has to be set to ``true`` and ``run:name`` has to be a subset of the top-level keys given in the scenario file. To automatically create a ``scenarios.yaml`` file based on a combination of settings, alter and use the ``create_scenarios.py`` script in ``scripts``."
logging,,, logging,,,
-- level,--,"Any of {'INFO', 'WARNING', 'ERROR'}","Restrict console outputs to all infos, warning or errors only" -- level,--,"Any of {'INFO', 'WARNING', 'ERROR'}","Restrict console outputs to all infos, warning or errors only"
-- format,--,,Custom format for log messages. See `LogRecord <https://docs.python.org/3/library/logging.html#logging.LogRecord>`_ attributes. -- format,--,,Custom format for log messages. See `LogRecord <https://docs.python.org/3/library/logging.html#logging.LogRecord>`_ attributes.



@ -20,9 +20,9 @@ if config["enable"].get("prepare_links_p_nom", False):
rule build_electricity_demand: rule build_electricity_demand:
params: params:
snapshots=config["snapshots"], snapshots=config_provider("snapshots"),
countries=config["countries"], countries=config_provider("countries"),
load=config["load"], load=config_provider("load"),
input: input:
ancient("data/load_raw.csv"), ancient("data/load_raw.csv"),
output: output:
@ -39,9 +39,9 @@ rule build_electricity_demand:
rule build_powerplants: rule build_powerplants:
params: params:
powerplants_filter=config["electricity"]["powerplants_filter"], powerplants_filter=config_provider("electricity", "powerplants_filter"),
custom_powerplants=config["electricity"]["custom_powerplants"], custom_powerplants=config_provider("electricity", "custom_powerplants"),
countries=config["countries"], countries=config_provider("countries"),
input: input:
base_network=RESOURCES + "networks/base.nc", base_network=RESOURCES + "networks/base.nc",
custom_powerplants="data/custom_powerplants.csv", custom_powerplants="data/custom_powerplants.csv",
@ -60,11 +60,11 @@ rule build_powerplants:
rule base_network: rule base_network:
params: params:
countries=config["countries"], countries=config_provider("countries"),
snapshots=config["snapshots"], snapshots=config_provider("snapshots"),
lines=config["lines"], lines=config_provider("lines"),
links=config["links"], links=config_provider("links"),
transformers=config["transformers"], transformers=config_provider("transformers"),
input: input:
eg_buses="data/entsoegridkit/buses.csv", eg_buses="data/entsoegridkit/buses.csv",
eg_lines="data/entsoegridkit/lines.csv", eg_lines="data/entsoegridkit/lines.csv",
@ -94,7 +94,7 @@ rule base_network:
rule build_shapes: rule build_shapes:
params: params:
countries=config["countries"], countries=config_provider("countries"),
input: input:
naturalearth=ancient("data/bundle/naturalearth/ne_10m_admin_0_countries.shp"), naturalearth=ancient("data/bundle/naturalearth/ne_10m_admin_0_countries.shp"),
eez=ancient("data/bundle/eez/World_EEZ_v8_2014.shp"), eez=ancient("data/bundle/eez/World_EEZ_v8_2014.shp"),
@ -121,7 +121,7 @@ rule build_shapes:
rule build_bus_regions: rule build_bus_regions:
params: params:
countries=config["countries"], countries=config_provider("countries"),
input: input:
country_shapes=RESOURCES + "country_shapes.geojson", country_shapes=RESOURCES + "country_shapes.geojson",
offshore_shapes=RESOURCES + "offshore_shapes.geojson", offshore_shapes=RESOURCES + "offshore_shapes.geojson",
@ -144,8 +144,8 @@ if config["enable"].get("build_cutout", False):
rule build_cutout: rule build_cutout:
params: params:
snapshots=config["snapshots"], snapshots=config_provider("snapshots"),
cutouts=config["atlite"]["cutouts"], cutouts=config_provider("atlite", "cutouts"),
input: input:
regions_onshore=RESOURCES + "regions_onshore.geojson", regions_onshore=RESOURCES + "regions_onshore.geojson",
regions_offshore=RESOURCES + "regions_offshore.geojson", regions_offshore=RESOURCES + "regions_offshore.geojson",
@ -208,7 +208,7 @@ rule build_ship_raster:
rule build_renewable_profiles: rule build_renewable_profiles:
params: params:
renewable=config["renewable"], renewable=config_provider("renewable"),
input: input:
base_network=RESOURCES + "networks/base.nc", base_network=RESOURCES + "networks/base.nc",
corine=ancient("data/bundle/corine/g250_clc06_V18_5.tif"), corine=ancient("data/bundle/corine/g250_clc06_V18_5.tif"),
@ -277,8 +277,8 @@ rule build_monthly_prices:
rule build_hydro_profile: rule build_hydro_profile:
params: params:
hydro=config["renewable"]["hydro"], hydro=config_provider("renewable", "hydro"),
countries=config["countries"], countries=config_provider("countries"),
input: input:
country_shapes=RESOURCES + "country_shapes.geojson", country_shapes=RESOURCES + "country_shapes.geojson",
eia_hydro_generation="data/eia_hydro_annual_generation.csv", eia_hydro_generation="data/eia_hydro_annual_generation.csv",
@ -321,13 +321,13 @@ if config["lines"]["dynamic_line_rating"]["activate"]:
rule add_electricity: rule add_electricity:
params: params:
length_factor=config["lines"]["length_factor"], length_factor=config_provider("lines", "length_factor"),
scaling_factor=config["load"]["scaling_factor"], scaling_factor=config_provider("load", "scaling_factor"),
countries=config["countries"], countries=config_provider("countries"),
renewable=config["renewable"], renewable=config_provider("renewable"),
electricity=config["electricity"], electricity=config_provider("electricity"),
conventional=config["conventional"], conventional=config_provider("conventional"),
costs=config["costs"], costs=config_provider("costs"),
input: input:
**{ **{
f"profile_{tech}": RESOURCES + f"profile_{tech}.nc" f"profile_{tech}": RESOURCES + f"profile_{tech}.nc"
@ -370,14 +370,16 @@ rule add_electricity:
rule simplify_network: rule simplify_network:
params: params:
simplify_network=config["clustering"]["simplify_network"], simplify_network=config_provider("clustering", "simplify_network"),
aggregation_strategies=config["clustering"].get("aggregation_strategies", {}), aggregation_strategies=config_provider(
focus_weights=config.get("focus_weights", None), "clustering", "aggregation_strategies", default={}
renewable_carriers=config["electricity"]["renewable_carriers"], ),
max_hours=config["electricity"]["max_hours"], focus_weights=config_provider("focus_weights", default=None),
length_factor=config["lines"]["length_factor"], renewable_carriers=config_provider("electricity", "renewable_carriers"),
p_max_pu=config["links"].get("p_max_pu", 1.0), max_hours=config_provider("electricity", "max_hours"),
costs=config["costs"], length_factor=config_provider("lines", "length_factor"),
p_max_pu=config_provider("links", "p_max_pu", default=1.0),
costs=config_provider("costs"),
input: input:
network=RESOURCES + "networks/elec.nc", network=RESOURCES + "networks/elec.nc",
tech_costs=COSTS, tech_costs=COSTS,
@ -404,15 +406,19 @@ rule simplify_network:
rule cluster_network: rule cluster_network:
params: params:
cluster_network=config["clustering"]["cluster_network"], cluster_network=config_provider("clustering", "cluster_network"),
aggregation_strategies=config["clustering"].get("aggregation_strategies", {}), aggregation_strategies=config_provider(
custom_busmap=config["enable"].get("custom_busmap", False), "clustering", "aggregation_strategies", default={}
focus_weights=config.get("focus_weights", None), ),
renewable_carriers=config["electricity"]["renewable_carriers"], custom_busmap=config_provider("enable", "custom_busmap", default=False),
conventional_carriers=config["electricity"].get("conventional_carriers", []), focus_weights=config_provider("focus_weights", default=None),
max_hours=config["electricity"]["max_hours"], renewable_carriers=config_provider("electricity", "renewable_carriers"),
length_factor=config["lines"]["length_factor"], conventional_carriers=config_provider(
costs=config["costs"], "electricity", "conventional_carriers", default=[]
),
max_hours=config_provider("electricity", "max_hours"),
length_factor=config_provider("lines", "length_factor"),
costs=config_provider("costs"),
input: input:
network=RESOURCES + "networks/elec_s{simpl}.nc", network=RESOURCES + "networks/elec_s{simpl}.nc",
regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}.geojson", regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}.geojson",
@ -445,9 +451,9 @@ rule cluster_network:
rule add_extra_components: rule add_extra_components:
params: params:
extendable_carriers=config["electricity"]["extendable_carriers"], extendable_carriers=config_provider("electricity", "extendable_carriers"),
max_hours=config["electricity"]["max_hours"], max_hours=config_provider("electricity", "max_hours"),
costs=config["costs"], costs=config_provider("costs"),
input: input:
network=RESOURCES + "networks/elec_s{simpl}_{clusters}.nc", network=RESOURCES + "networks/elec_s{simpl}_{clusters}.nc",
tech_costs=COSTS, tech_costs=COSTS,
@ -468,13 +474,13 @@ rule add_extra_components:
rule prepare_network: rule prepare_network:
params: params:
links=config["links"], links=config_provider("links"),
lines=config["lines"], lines=config_provider("lines"),
co2base=config["electricity"]["co2base"], co2base=config_provider("electricity", "co2base"),
co2limit=config["electricity"]["co2limit"], co2limit=config_provider("electricity", "co2limit"),
gaslimit=config["electricity"].get("gaslimit"), gaslimit=config_provider("electricity", "gaslimit"),
max_hours=config["electricity"]["max_hours"], max_hours=config_provider("electricity", "max_hours"),
costs=config["costs"], costs=config_provider("costs"),
input: input:
RESOURCES + "networks/elec_s{simpl}_{clusters}_ec.nc", RESOURCES + "networks/elec_s{simpl}_{clusters}_ec.nc",
tech_costs=COSTS, tech_costs=COSTS,


@ -141,7 +141,7 @@ if not (config["sector"]["gas_network"] or config["sector"]["H2_retrofit"]):
rule build_heat_demands: rule build_heat_demands:
params: params:
snapshots=config["snapshots"], snapshots=config_provider("snapshots"),
input: input:
pop_layout=RESOURCES + "pop_layout_{scope}.nc", pop_layout=RESOURCES + "pop_layout_{scope}.nc",
regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
@ -163,7 +163,7 @@ rule build_heat_demands:
rule build_temperature_profiles: rule build_temperature_profiles:
params: params:
snapshots=config["snapshots"], snapshots=config_provider("snapshots"),
input: input:
pop_layout=RESOURCES + "pop_layout_{scope}.nc", pop_layout=RESOURCES + "pop_layout_{scope}.nc",
regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
@ -186,7 +186,7 @@ rule build_temperature_profiles:
rule build_cop_profiles: rule build_cop_profiles:
params: params:
heat_pump_sink_T=config["sector"]["heat_pump_sink_T"], heat_pump_sink_T=config_provider("sector", "heat_pump_sink_T"),
input: input:
temp_soil_total=RESOURCES + "temp_soil_total_elec_s{simpl}_{clusters}.nc", temp_soil_total=RESOURCES + "temp_soil_total_elec_s{simpl}_{clusters}.nc",
temp_soil_rural=RESOURCES + "temp_soil_rural_elec_s{simpl}_{clusters}.nc", temp_soil_rural=RESOURCES + "temp_soil_rural_elec_s{simpl}_{clusters}.nc",
@ -215,8 +215,8 @@ rule build_cop_profiles:
rule build_solar_thermal_profiles: rule build_solar_thermal_profiles:
params: params:
snapshots=config["snapshots"], snapshots=config_provider("snapshots"),
solar_thermal=config["solar_thermal"], solar_thermal=config_provider("solar_thermal"),
input: input:
pop_layout=RESOURCES + "pop_layout_{scope}.nc", pop_layout=RESOURCES + "pop_layout_{scope}.nc",
regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
@ -238,8 +238,8 @@ rule build_solar_thermal_profiles:
rule build_energy_totals: rule build_energy_totals:
params: params:
countries=config["countries"], countries=config_provider("countries"),
energy=config["energy"], energy=config_provider("energy"),
input: input:
nuts3_shapes=RESOURCES + "nuts3_shapes.geojson", nuts3_shapes=RESOURCES + "nuts3_shapes.geojson",
co2="data/eea/UNFCCC_v23.csv", co2="data/eea/UNFCCC_v23.csv",
@ -266,7 +266,7 @@ rule build_energy_totals:
rule build_biomass_potentials: rule build_biomass_potentials:
params: params:
biomass=config["biomass"], biomass=config_provider("biomass"),
input: input:
enspreso_biomass=HTTP.remote( enspreso_biomass=HTTP.remote(
"https://cidportal.jrc.ec.europa.eu/ftp/jrc-opendata/ENSPRESO/ENSPRESO_BIOMASS.xlsx", "https://cidportal.jrc.ec.europa.eu/ftp/jrc-opendata/ENSPRESO/ENSPRESO_BIOMASS.xlsx",
@ -329,9 +329,9 @@ if config["sector"]["regional_co2_sequestration_potential"]["enable"]:
rule build_sequestration_potentials: rule build_sequestration_potentials:
params: params:
sequestration_potential=config["sector"][ sequestration_potential=config_provider(
"regional_co2_sequestration_potential" "sector", "regional_co2_sequestration_potential"
], ),
input: input:
sequestration_potential=HTTP.remote( sequestration_potential=HTTP.remote(
"https://raw.githubusercontent.com/ericzhou571/Co2Storage/main/resources/complete_map_2020_unit_Mt.geojson", "https://raw.githubusercontent.com/ericzhou571/Co2Storage/main/resources/complete_map_2020_unit_Mt.geojson",
@ -386,7 +386,7 @@ rule build_salt_cavern_potentials:
rule build_ammonia_production: rule build_ammonia_production:
params: params:
countries=config["countries"], countries=config_provider("countries"),
input: input:
usgs="data/myb1-2017-nitro.xls", usgs="data/myb1-2017-nitro.xls",
output: output:
@ -406,8 +406,8 @@ rule build_ammonia_production:
rule build_industry_sector_ratios: rule build_industry_sector_ratios:
params: params:
industry=config["industry"], industry=config_provider("industry"),
ammonia=config["sector"].get("ammonia", False), ammonia=config_provider("sector", "ammonia", default=False),
input: input:
ammonia_production=RESOURCES + "ammonia_production.csv", ammonia_production=RESOURCES + "ammonia_production.csv",
idees="data/jrc-idees-2015", idees="data/jrc-idees-2015",
@ -428,8 +428,8 @@ rule build_industry_sector_ratios:
rule build_industrial_production_per_country: rule build_industrial_production_per_country:
params: params:
industry=config["industry"], industry=config_provider("industry"),
countries=config["countries"], countries=config_provider("countries"),
input: input:
ammonia_production=RESOURCES + "ammonia_production.csv", ammonia_production=RESOURCES + "ammonia_production.csv",
jrc="data/jrc-idees-2015", jrc="data/jrc-idees-2015",
@ -452,7 +452,7 @@ rule build_industrial_production_per_country:
rule build_industrial_production_per_country_tomorrow: rule build_industrial_production_per_country_tomorrow:
params: params:
industry=config["industry"], industry=config_provider("industry"),
input: input:
industrial_production_per_country=RESOURCES industrial_production_per_country=RESOURCES
+ "industrial_production_per_country.csv", + "industrial_production_per_country.csv",
@ -478,8 +478,10 @@ rule build_industrial_production_per_country_tomorrow:
rule build_industrial_distribution_key: rule build_industrial_distribution_key:
params: params:
hotmaps_locate_missing=config["industry"].get("hotmaps_locate_missing", False), hotmaps_locate_missing=config_provider(
countries=config["countries"], "industry", "hotmaps_locate_missing", default=False
),
countries=config_provider("countries"),
input: input:
regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv", clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv",
@ -555,8 +557,8 @@ rule build_industrial_energy_demand_per_node:
rule build_industrial_energy_demand_per_country_today: rule build_industrial_energy_demand_per_country_today:
params: params:
countries=config["countries"], countries=config_provider("countries"),
industry=config["industry"], industry=config_provider("industry"),
input: input:
jrc="data/jrc-idees-2015", jrc="data/jrc-idees-2015",
ammonia_production=RESOURCES + "ammonia_production.csv", ammonia_production=RESOURCES + "ammonia_production.csv",
@ -604,8 +606,8 @@ if config["sector"]["retrofitting"]["retro_endogen"]:
rule build_retro_cost: rule build_retro_cost:
params: params:
retrofitting=config["sector"]["retrofitting"], retrofitting=config_provider("sector", "retrofitting"),
countries=config["countries"], countries=config_provider("countries"),
input: input:
building_stock="data/retro/data_building_stock.csv", building_stock="data/retro/data_building_stock.csv",
data_tabula="data/retro/tabula-calculator-calcsetbuilding.csv", data_tabula="data/retro/tabula-calculator-calcsetbuilding.csv",
@ -677,8 +679,8 @@ rule build_shipping_demand:
rule build_transport_demand: rule build_transport_demand:
params: params:
snapshots=config["snapshots"], snapshots=config_provider("snapshots"),
sector=config["sector"], sector=config_provider("sector"),
input: input:
clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv", clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv",
pop_weighted_energy_totals=RESOURCES pop_weighted_energy_totals=RESOURCES
@ -705,18 +707,20 @@ rule build_transport_demand:
rule prepare_sector_network: rule prepare_sector_network:
params: params:
co2_budget=config["co2_budget"], co2_budget=config_provider("co2_budget"),
conventional_carriers=config["existing_capacities"]["conventional_carriers"], conventional_carriers=config_provider(
foresight=config["foresight"], "existing_capacities", "conventional_carriers"
costs=config["costs"], ),
sector=config["sector"], foresight=config_provider("foresight"),
industry=config["industry"], costs=config_provider("costs"),
pypsa_eur=config["pypsa_eur"], sector=config_provider("sector"),
length_factor=config["lines"]["length_factor"], industry=config_provider("industry"),
planning_horizons=config["scenario"]["planning_horizons"], pypsa_eur=config_provider("pypsa_eur"),
countries=config["countries"], length_factor=config_provider("lines", "length_factor"),
emissions_scope=config["energy"]["emissions"], planning_horizons=config_provider("scenario", "planning_horizons"),
eurostat_report_year=config["energy"]["eurostat_report_year"], countries=config_provider("countries"),
emissions_scope=config_provider("energy", "emissions"),
eurostat_report_year=config_provider("energy", "eurostat_report_year"),
RDIR=RDIR, RDIR=RDIR,
input: input:
**build_retro_cost_output, **build_retro_cost_output,


@ -22,13 +22,19 @@ rule all:
rule cluster_networks: rule cluster_networks:
input: input:
expand(RESOURCES + "networks/elec_s{simpl}_{clusters}.nc", **config["scenario"]), expand(
RESOURCES + "networks/elec_s{simpl}_{clusters}.nc",
**config["scenario"],
run=config["run"]["name"]
),
rule extra_components_networks: rule extra_components_networks:
input: input:
expand( expand(
RESOURCES + "networks/elec_s{simpl}_{clusters}_ec.nc", **config["scenario"] RESOURCES + "networks/elec_s{simpl}_{clusters}_ec.nc",
**config["scenario"],
run=config["run"]["name"]
), ),
@ -36,7 +42,8 @@ rule prepare_elec_networks:
input: input:
expand( expand(
RESOURCES + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", RESOURCES + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
**config["scenario"] **config["scenario"],
run=config["run"]["name"]
), ),
@ -45,7 +52,8 @@ rule prepare_sector_networks:
expand( expand(
RESULTS RESULTS
+ "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
**config["scenario"] **config["scenario"],
run=config["run"]["name"]
), ),
@ -53,7 +61,8 @@ rule solve_elec_networks:
input: input:
expand( expand(
RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
**config["scenario"] **config["scenario"],
run=config["run"]["name"]
), ),
@ -62,7 +71,8 @@ rule solve_sector_networks:
expand( expand(
RESULTS RESULTS
+ "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
**config["scenario"] **config["scenario"],
run=config["run"]["name"]
), ),
@ -71,7 +81,8 @@ rule plot_networks:
expand( expand(
RESULTS RESULTS
+ "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf", + "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf",
**config["scenario"] **config["scenario"],
run=config["run"]["name"]
), ),
@ -80,11 +91,13 @@ rule validate_elec_networks:
expand( expand(
RESULTS RESULTS
+ "figures/.statistics_plots_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}", + "figures/.statistics_plots_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}",
**config["scenario"] **config["scenario"],
run=config["run"]["name"]
), ),
expand( expand(
RESULTS RESULTS
+ "figures/.validation_{kind}_plots_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}", + "figures/.validation_{kind}_plots_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}",
**config["scenario"], **config["scenario"],
run=config["run"]["name"],
kind=["production", "prices", "cross_border"] kind=["production", "prices", "cross_border"]
), ),
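A sketch of what the added run=config["run"]["name"] keyword does to these target lists, assuming RESOURCES resolves to "resources/" + RDIR with RDIR = "{run}/" (not shown in this diff) and using the two scenario names from the test config:

from snakemake.io import expand

paths = expand(
    "resources/{run}/networks/elec_s{simpl}_{clusters}.nc",
    simpl=[""],
    clusters=[5],
    run=["test-elec-no-offshore-wind", "test-elec-no-onshore-wind"],
)
print(paths)
# ['resources/test-elec-no-offshore-wind/networks/elec_s_5.nc',
#  'resources/test-elec-no-onshore-wind/networks/elec_s_5.nc']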


@ -2,6 +2,57 @@
# #
# SPDX-License-Identifier: MIT # SPDX-License-Identifier: MIT
import copy
def get_config(keys, config, default=None):
"""Retrieve a nested value from a dictionary using a tuple of keys."""
value = config
for key in keys:
value = value.get(key, default)
if value == default:
return default
return value
def merge_configs(base_config, scenario_config):
"""Merge base config with a specific scenario without modifying the original."""
merged = copy.deepcopy(base_config)
for key, value in scenario_config.items():
if key in merged and isinstance(merged[key], dict):
merged[key] = merge_configs(merged[key], value)
else:
merged[key] = value
return merged
def config_provider(*keys, default=None):
"""Dynamically provide config values based on 'run' -> 'name'.
Usage in Snakemake rules would look something like:
params:
my_param=config_provider("key1", "key2", default="some_default_value")
"""
def static_getter(wildcards):
"""Getter function for static config values."""
return get_config(keys, config, default)
def dynamic_getter(wildcards):
"""Getter function for dynamic config values based on scenario."""
scenario_name = wildcards.run
if scenario_name not in scenarios:
raise ValueError(
f"Scenario {scenario_name} not found in file {config['scenariofile']}."
)
merged_config = merge_configs(config, scenarios[scenario_name])
return get_config(keys, merged_config, default)
if config["run"].get("scenarios", False):
return dynamic_getter
else:
return static_getter
def memory(w): def memory(w):
factor = 3.0 factor = 3.0
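A standalone toy demonstration of the merge semantics implemented by merge_configs above; the function body is repeated here only so the snippet runs outside the workflow, and the override values mirror the test scenarios:

import copy

def merge_configs(base_config, scenario_config):
    merged = copy.deepcopy(base_config)
    for key, value in scenario_config.items():
        if key in merged and isinstance(merged[key], dict):
            merged[key] = merge_configs(merged[key], value)
        else:
            merged[key] = value
    return merged

base = {"electricity": {"co2limit": 100.0e6, "renewable_carriers": ["solar", "onwind", "offwind-ac"]}}
override = {"electricity": {"renewable_carriers": ["solar", "onwind"]}}

merged = merge_configs(base, override)
print(merged["electricity"]["co2limit"])            # 100000000.0, kept from the base config
print(merged["electricity"]["renewable_carriers"])  # ['solar', 'onwind'], overridden by the scenario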


@ -10,8 +10,8 @@ localrules:
rule plot_network: rule plot_network:
params: params:
foresight=config["foresight"], foresight=config_provider("foresight"),
plotting=config["plotting"], plotting=config_provider("plotting"),
input: input:
network=RESULTS network=RESULTS
+ "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
@ -53,16 +53,17 @@ rule copy_config:
rule make_summary: rule make_summary:
params: params:
foresight=config["foresight"], foresight=config_provider("foresight"),
costs=config["costs"], costs=config_provider("costs"),
snapshots=config["snapshots"], snapshots=config_provider("snapshots"),
scenario=config["scenario"], scenario=config_provider("scenario"),
RDIR=RDIR, RDIR=RDIR,
input: input:
networks=expand( networks=expand(
RESULTS RESULTS
+ "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
**config["scenario"] **config["scenario"],
run=config["run"]["name"]
), ),
costs="data/costs_{}.csv".format(config["costs"]["year"]) costs="data/costs_{}.csv".format(config["costs"]["year"])
if config["foresight"] == "overnight" if config["foresight"] == "overnight"
@ -70,7 +71,8 @@ rule make_summary:
plots=expand( plots=expand(
RESULTS RESULTS
+ "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf", + "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf",
**config["scenario"] **config["scenario"],
run=config["run"]["name"]
), ),
output: output:
nodal_costs=RESULTS + "csvs/nodal_costs.csv", nodal_costs=RESULTS + "csvs/nodal_costs.csv",
@ -103,10 +105,10 @@ rule make_summary:
rule plot_summary: rule plot_summary:
params: params:
countries=config["countries"], countries=config_provider("countries"),
planning_horizons=config["scenario"]["planning_horizons"], planning_horizons=config_provider("scenario", "planning_horizons"),
sector_opts=config["scenario"]["sector_opts"], sector_opts=config_provider("scenario", "sector_opts"),
plotting=config["plotting"], plotting=config_provider("plotting"),
RDIR=RDIR, RDIR=RDIR,
input: input:
costs=RESULTS + "csvs/costs.csv", costs=RESULTS + "csvs/costs.csv",
@ -145,7 +147,7 @@ STATISTICS_BARPLOTS = [
rule plot_elec_statistics: rule plot_elec_statistics:
params: params:
plotting=config["plotting"], plotting=config_provider("plotting"),
barplots=STATISTICS_BARPLOTS, barplots=STATISTICS_BARPLOTS,
input: input:
network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",


@ -29,7 +29,7 @@ if config["enable"]["retrieve"] and config["enable"].get("retrieve_databundle",
output: output:
expand("data/bundle/{file}", file=datafiles), expand("data/bundle/{file}", file=datafiles),
log: log:
LOGS + "retrieve_databundle.log", "logs/retrieve_databundle.log",
resources: resources:
mem_mb=1000, mem_mb=1000,
retries: 2 retries: 2
@ -72,7 +72,7 @@ if config["enable"]["retrieve"] and config["enable"].get("retrieve_cost_data", T
output: output:
"data/costs_{year}.csv", "data/costs_{year}.csv",
log: log:
LOGS + "retrieve_cost_data_{year}.log", "logs/retrieve_cost_data_{year}.log",
resources: resources:
mem_mb=1000, mem_mb=1000,
retries: 2 retries: 2
@ -123,7 +123,7 @@ if config["enable"]["retrieve"] and config["enable"].get(
output: output:
*datafiles, *datafiles,
log: log:
LOGS + "retrieve_sector_databundle.log", "logs/retrieve_sector_databundle.log",
retries: 2 retries: 2
conda: conda:
"../envs/environment.yaml" "../envs/environment.yaml"
@ -145,7 +145,7 @@ if config["enable"]["retrieve"] and (
output: output:
expand("data/gas_network/scigrid-gas/data/{files}", files=datafiles), expand("data/gas_network/scigrid-gas/data/{files}", files=datafiles),
log: log:
LOGS + "retrieve_gas_infrastructure_data.log", "logs/retrieve_gas_infrastructure_data.log",
retries: 2 retries: 2
conda: conda:
"../envs/environment.yaml" "../envs/environment.yaml"
@ -169,7 +169,7 @@ if config["enable"]["retrieve"]:
output: output:
"data/load_raw.csv", "data/load_raw.csv",
log: log:
LOGS + "retrieve_electricity_demand.log", "logs/retrieve_electricity_demand.log",
resources: resources:
mem_mb=5000, mem_mb=5000,
retries: 2 retries: 2
@ -189,7 +189,7 @@ if config["enable"]["retrieve"]:
output: output:
"data/shipdensity_global.zip", "data/shipdensity_global.zip",
log: log:
LOGS + "retrieve_ship_raster.log", "logs/retrieve_ship_raster.log",
resources: resources:
mem_mb=5000, mem_mb=5000,
retries: 2 retries: 2
@ -209,7 +209,7 @@ if config["enable"]["retrieve"]:
output: output:
"data/validation/emission-spot-primary-market-auction-report-2019-data.xls", "data/validation/emission-spot-primary-market-auction-report-2019-data.xls",
log: log:
LOGS + "retrieve_monthly_co2_prices.log", "logs/retrieve_monthly_co2_prices.log",
resources: resources:
mem_mb=5000, mem_mb=5000,
retries: 2 retries: 2
@ -223,7 +223,7 @@ if config["enable"]["retrieve"]:
output: output:
"data/validation/energy-price-trends-xlsx-5619002.xlsx", "data/validation/energy-price-trends-xlsx-5619002.xlsx",
log: log:
LOGS + "retrieve_monthly_fuel_prices.log", "logs/retrieve_monthly_fuel_prices.log",
resources: resources:
mem_mb=5000, mem_mb=5000,
retries: 2 retries: 2


@ -14,6 +14,7 @@ import pytz
import yaml import yaml
from pypsa.components import component_attrs, components from pypsa.components import component_attrs, components
from pypsa.descriptors import Dict from pypsa.descriptors import Dict
from snakemake.utils import update_config
from tqdm import tqdm from tqdm import tqdm
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -29,6 +30,13 @@ def mute_print():
yield yield
def set_scenario_config(snakemake):
if "scenario_config" in snakemake.input:
with open(snakemake.input.scenario_config, "r") as f:
scenario_config = yaml.safe_load(f)
update_config(snakemake.config, scenario_config)
def configure_logging(snakemake, skip_handlers=False): def configure_logging(snakemake, skip_handlers=False):
""" """
Configure the basic behaviour for the logging module. Configure the basic behaviour for the logging module.


@ -92,7 +92,7 @@ import powerplantmatching as pm
import pypsa import pypsa
import scipy.sparse as sparse import scipy.sparse as sparse
import xarray as xr import xarray as xr
from _helpers import configure_logging, update_p_nom_max from _helpers import configure_logging, set_scenario_config, update_p_nom_max
from powerplantmatching.export import map_country_bus from powerplantmatching.export import map_country_bus
from shapely.prepared import prep from shapely.prepared import prep
@ -809,6 +809,7 @@ if __name__ == "__main__":
snakemake = mock_snakemake("add_electricity") snakemake = mock_snakemake("add_electricity")
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
params = snakemake.params params = snakemake.params


@ -55,7 +55,7 @@ import logging
import numpy as np import numpy as np
import pandas as pd import pandas as pd
import pypsa import pypsa
from _helpers import configure_logging from _helpers import configure_logging, set_scenario_config
from add_electricity import load_costs, sanitize_carriers from add_electricity import load_costs, sanitize_carriers
idx = pd.IndexSlice idx = pd.IndexSlice
@ -231,6 +231,7 @@ if __name__ == "__main__":
snakemake = mock_snakemake("add_extra_components", simpl="", clusters=5) snakemake = mock_snakemake("add_extra_components", simpl="", clusters=5)
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
n = pypsa.Network(snakemake.input.network) n = pypsa.Network(snakemake.input.network)
extendable_carriers = snakemake.params.extendable_carriers extendable_carriers = snakemake.params.extendable_carriers


@ -77,7 +77,7 @@ import shapely
import shapely.prepared import shapely.prepared
import shapely.wkt import shapely.wkt
import yaml import yaml
from _helpers import configure_logging from _helpers import configure_logging, set_scenario_config
from scipy import spatial from scipy import spatial
from scipy.sparse import csgraph from scipy.sparse import csgraph
from shapely.geometry import LineString, Point from shapely.geometry import LineString, Point
@ -745,6 +745,7 @@ if __name__ == "__main__":
snakemake = mock_snakemake("base_network") snakemake = mock_snakemake("base_network")
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
n = base_network( n = base_network(
snakemake.input.eg_buses, snakemake.input.eg_buses,


@ -47,7 +47,7 @@ import geopandas as gpd
import numpy as np import numpy as np
import pandas as pd import pandas as pd
import pypsa import pypsa
from _helpers import REGION_COLS, configure_logging from _helpers import REGION_COLS, configure_logging, set_scenario_config
from scipy.spatial import Voronoi from scipy.spatial import Voronoi
from shapely.geometry import Polygon from shapely.geometry import Polygon
@ -115,6 +115,7 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_bus_regions") snakemake = mock_snakemake("build_bus_regions")
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
countries = snakemake.params.countries countries = snakemake.params.countries


@ -8,7 +8,7 @@ import logging
import pandas as pd import pandas as pd
import pypsa import pypsa
from _helpers import configure_logging from _helpers import configure_logging, set_scenario_config
from entsoe import EntsoePandasClient from entsoe import EntsoePandasClient
from entsoe.exceptions import InvalidBusinessParameterError, NoMatchingDataError from entsoe.exceptions import InvalidBusinessParameterError, NoMatchingDataError
from requests import HTTPError from requests import HTTPError
@ -21,6 +21,7 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_cross_border_flows") snakemake = mock_snakemake("build_cross_border_flows")
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
api_key = snakemake.config["private"]["keys"]["entsoe_api"] api_key = snakemake.config["private"]["keys"]["entsoe_api"]
client = EntsoePandasClient(api_key=api_key) client = EntsoePandasClient(api_key=api_key)


@ -95,7 +95,7 @@ import logging
import atlite import atlite
import geopandas as gpd import geopandas as gpd
import pandas as pd import pandas as pd
from _helpers import configure_logging from _helpers import configure_logging, set_scenario_config
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -105,6 +105,7 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_cutout", cutout="europe-2013-era5") snakemake = mock_snakemake("build_cutout", cutout="europe-2013-era5")
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
cutout_params = snakemake.params.cutouts[snakemake.wildcards.cutout] cutout_params = snakemake.params.cutouts[snakemake.wildcards.cutout]


@ -45,7 +45,7 @@ logger = logging.getLogger(__name__)
import dateutil import dateutil
import numpy as np import numpy as np
import pandas as pd import pandas as pd
from _helpers import configure_logging from _helpers import configure_logging, set_scenario_config
from pandas import Timedelta as Delta from pandas import Timedelta as Delta
@ -288,6 +288,7 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_electricity_demand") snakemake = mock_snakemake("build_electricity_demand")
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
powerstatistics = snakemake.params.load["power_statistics"] powerstatistics = snakemake.params.load["power_statistics"]
interpolate_limit = snakemake.params.load["interpolate_limit"] interpolate_limit = snakemake.params.load["interpolate_limit"]


@ -7,7 +7,7 @@
import logging import logging
import pandas as pd import pandas as pd
from _helpers import configure_logging from _helpers import configure_logging, set_scenario_config
from entsoe import EntsoePandasClient from entsoe import EntsoePandasClient
from entsoe.exceptions import NoMatchingDataError from entsoe.exceptions import NoMatchingDataError
@ -19,6 +19,7 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_cross_border_flows") snakemake = mock_snakemake("build_cross_border_flows")
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
api_key = snakemake.config["private"]["keys"]["entsoe_api"] api_key = snakemake.config["private"]["keys"]["entsoe_api"]
client = EntsoePandasClient(api_key=api_key) client = EntsoePandasClient(api_key=api_key)


@ -7,7 +7,7 @@
import logging import logging
import pandas as pd import pandas as pd
from _helpers import configure_logging from _helpers import configure_logging, set_scenario_config
from entsoe import EntsoePandasClient from entsoe import EntsoePandasClient
from entsoe.exceptions import NoMatchingDataError from entsoe.exceptions import NoMatchingDataError
@ -39,6 +39,7 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_electricity_production") snakemake = mock_snakemake("build_electricity_production")
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
api_key = snakemake.config["private"]["keys"]["entsoe_api"] api_key = snakemake.config["private"]["keys"]["entsoe_api"]
client = EntsoePandasClient(api_key=api_key) client = EntsoePandasClient(api_key=api_key)


@ -65,7 +65,7 @@ import atlite
import country_converter as coco import country_converter as coco
import geopandas as gpd import geopandas as gpd
import pandas as pd import pandas as pd
from _helpers import configure_logging from _helpers import configure_logging, set_scenario_config
cc = coco.CountryConverter() cc = coco.CountryConverter()
@ -129,6 +129,7 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_hydro_profile") snakemake = mock_snakemake("build_hydro_profile")
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
params_hydro = snakemake.params.hydro params_hydro = snakemake.params.hydro
cutout = atlite.Cutout(snakemake.input.cutout) cutout = atlite.Cutout(snakemake.input.cutout)


@ -59,7 +59,7 @@ import numpy as np
import pandas as pd import pandas as pd
import pypsa import pypsa
import xarray as xr import xarray as xr
from _helpers import configure_logging from _helpers import configure_logging, set_scenario_config
from shapely.geometry import LineString as Line from shapely.geometry import LineString as Line
from shapely.geometry import Point from shapely.geometry import Point
@ -147,6 +147,7 @@ if __name__ == "__main__":
opts="Co2L-4H", opts="Co2L-4H",
) )
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
n = pypsa.Network(snakemake.input.base_network) n = pypsa.Network(snakemake.input.base_network)
time = pd.date_range(freq="h", **snakemake.config["snapshots"]) time = pd.date_range(freq="h", **snakemake.config["snapshots"])


@ -46,7 +46,7 @@ Data was accessed at 16.5.2023
import logging import logging
import pandas as pd import pandas as pd
from _helpers import configure_logging from _helpers import configure_logging, set_scenario_config
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -114,6 +114,7 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_monthly_prices") snakemake = mock_snakemake("build_monthly_prices")
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
fuel_price = get_fuel_price() fuel_price = get_fuel_price()
fuel_price.to_csv(snakemake.output.fuel_price) fuel_price.to_csv(snakemake.output.fuel_price)


@ -46,7 +46,7 @@ import logging
import atlite import atlite
import geopandas as gpd import geopandas as gpd
import rasterio as rio import rasterio as rio
from _helpers import configure_logging from _helpers import configure_logging, set_scenario_config
from rasterio.features import geometry_mask from rasterio.features import geometry_mask
from rasterio.warp import transform_bounds from rasterio.warp import transform_bounds
@ -92,6 +92,7 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_natura_raster") snakemake = mock_snakemake("build_natura_raster")
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
cutouts = snakemake.input.cutouts cutouts = snakemake.input.cutouts
xs, Xs, ys, Ys = zip(*(determine_cutout_xXyY(cutout) for cutout in cutouts)) xs, Xs, ys, Ys = zip(*(determine_cutout_xXyY(cutout) for cutout in cutouts))


@ -80,7 +80,7 @@ import logging
import pandas as pd import pandas as pd
import powerplantmatching as pm import powerplantmatching as pm
import pypsa import pypsa
from _helpers import configure_logging from _helpers import configure_logging, set_scenario_config
from powerplantmatching.export import map_country_bus from powerplantmatching.export import map_country_bus
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -115,6 +115,7 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_powerplants") snakemake = mock_snakemake("build_powerplants")
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
n = pypsa.Network(snakemake.input.base_network) n = pypsa.Network(snakemake.input.base_network)
countries = snakemake.params.countries countries = snakemake.params.countries


@ -188,7 +188,7 @@ import geopandas as gpd
import numpy as np import numpy as np
import pandas as pd import pandas as pd
import xarray as xr import xarray as xr
from _helpers import configure_logging from _helpers import configure_logging, set_scenario_config
from dask.distributed import Client from dask.distributed import Client
from pypsa.geo import haversine from pypsa.geo import haversine
from shapely.geometry import LineString from shapely.geometry import LineString
@ -202,6 +202,7 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_renewable_profiles", technology="solar") snakemake = mock_snakemake("build_renewable_profiles", technology="solar")
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
nprocesses = int(snakemake.threads) nprocesses = int(snakemake.threads)
noprogress = snakemake.config["run"].get("disable_progressbar", True) noprogress = snakemake.config["run"].get("disable_progressbar", True)


@ -77,7 +77,7 @@ import geopandas as gpd
import numpy as np import numpy as np
import pandas as pd import pandas as pd
import pycountry as pyc import pycountry as pyc
from _helpers import configure_logging from _helpers import configure_logging, set_scenario_config
from shapely.geometry import MultiPolygon, Polygon from shapely.geometry import MultiPolygon, Polygon
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -254,6 +254,7 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_shapes") snakemake = mock_snakemake("build_shapes")
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
country_shapes = countries(snakemake.input.naturalearth, snakemake.params.countries) country_shapes = countries(snakemake.input.naturalearth, snakemake.params.countries)
country_shapes.reset_index().to_file(snakemake.output.country_shapes) country_shapes.reset_index().to_file(snakemake.output.country_shapes)


@ -44,9 +44,10 @@ Description
import logging import logging
import os import os
import zipfile import zipfile
from pathlib import Path
import rioxarray import rioxarray
from _helpers import configure_logging from _helpers import configure_logging, set_scenario_config
from build_natura_raster import determine_cutout_xXyY from build_natura_raster import determine_cutout_xXyY
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -57,16 +58,19 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_ship_raster") snakemake = mock_snakemake("build_ship_raster")
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
cutouts = snakemake.input.cutouts cutouts = snakemake.input.cutouts
xs, Xs, ys, Ys = zip(*(determine_cutout_xXyY(cutout) for cutout in cutouts)) xs, Xs, ys, Ys = zip(*(determine_cutout_xXyY(cutout) for cutout in cutouts))
with zipfile.ZipFile(snakemake.input.ship_density) as zip_f: with zipfile.ZipFile(snakemake.input.ship_density) as zip_f:
zip_f.extract("shipdensity_global.tif") resources = Path(snakemake.output[0]).parent
with rioxarray.open_rasterio("shipdensity_global.tif") as ship_density: fn = "shipdensity_global.tif"
zip_f.extract(fn, resources)
with rioxarray.open_rasterio(resources / fn) as ship_density:
ship_density = ship_density.drop(["band"]).sel( ship_density = ship_density.drop(["band"]).sel(
x=slice(min(xs), max(Xs)), y=slice(max(Ys), min(ys)) x=slice(min(xs), max(Xs)), y=slice(max(Ys), min(ys))
) )
ship_density.rio.to_raster(snakemake.output[0]) ship_density.rio.to_raster(snakemake.output[0])
os.remove("shipdensity_global.tif") (resources / fn).unlink()


@ -133,7 +133,7 @@ import pandas as pd
import pyomo.environ as po import pyomo.environ as po
import pypsa import pypsa
import seaborn as sns import seaborn as sns
from _helpers import configure_logging, update_p_nom_max from _helpers import configure_logging, set_scenario_config, update_p_nom_max
from pypsa.clustering.spatial import ( from pypsa.clustering.spatial import (
busmap_by_greedy_modularity, busmap_by_greedy_modularity,
busmap_by_hac, busmap_by_hac,
@ -463,6 +463,7 @@ if __name__ == "__main__":
snakemake = mock_snakemake("cluster_network", simpl="", clusters="37") snakemake = mock_snakemake("cluster_network", simpl="", clusters="37")
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
params = snakemake.params params = snakemake.params
solver_name = snakemake.config["solving"]["solver"]["name"] solver_name = snakemake.config["solving"]["solver"]["name"]


@ -7,7 +7,7 @@
import matplotlib.pyplot as plt import matplotlib.pyplot as plt
import pypsa import pypsa
import seaborn as sns import seaborn as sns
from _helpers import configure_logging from _helpers import configure_logging, set_scenario_config
sns.set_theme("paper", style="whitegrid") sns.set_theme("paper", style="whitegrid")
@ -24,6 +24,7 @@ if __name__ == "__main__":
ll="v1.0", ll="v1.0",
) )
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
n = pypsa.Network(snakemake.input.network) n = pypsa.Network(snakemake.input.network)


@ -9,7 +9,7 @@ import matplotlib.pyplot as plt
import pandas as pd import pandas as pd
import pypsa import pypsa
import seaborn as sns import seaborn as sns
from _helpers import configure_logging from _helpers import configure_logging, set_scenario_config
sns.set_theme("paper", style="whitegrid") sns.set_theme("paper", style="whitegrid")
@ -195,6 +195,7 @@ if __name__ == "__main__":
ll="v1.0", ll="v1.0",
) )
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
countries = snakemake.params.countries countries = snakemake.params.countries


@ -8,7 +8,7 @@ import matplotlib.pyplot as plt
import pandas as pd import pandas as pd
import pypsa import pypsa
import seaborn as sns import seaborn as sns
from _helpers import configure_logging from _helpers import configure_logging, set_scenario_config
from pypsa.statistics import get_bus_and_carrier from pypsa.statistics import get_bus_and_carrier
sns.set_theme("paper", style="whitegrid") sns.set_theme("paper", style="whitegrid")
@ -25,6 +25,7 @@ if __name__ == "__main__":
ll="v1.0", ll="v1.0",
) )
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
n = pypsa.Network(snakemake.input.network) n = pypsa.Network(snakemake.input.network)
n.loads.carrier = "load" n.loads.carrier = "load"


@ -8,7 +8,7 @@ import matplotlib.pyplot as plt
import pandas as pd import pandas as pd
import pypsa import pypsa
import seaborn as sns import seaborn as sns
from _helpers import configure_logging from _helpers import configure_logging, set_scenario_config
from pypsa.statistics import get_bus_and_carrier from pypsa.statistics import get_bus_and_carrier
sns.set_theme("paper", style="whitegrid") sns.set_theme("paper", style="whitegrid")
@ -35,6 +35,7 @@ if __name__ == "__main__":
ll="v1.0", ll="v1.0",
) )
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
n = pypsa.Network(snakemake.input.network) n = pypsa.Network(snakemake.input.network)
n.loads.carrier = "load" n.loads.carrier = "load"


@ -40,7 +40,7 @@ Description
import logging import logging
import pandas as pd import pandas as pd
from _helpers import configure_logging from _helpers import configure_logging, set_scenario_config
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -69,6 +69,7 @@ if __name__ == "__main__":
snakemake = mock_snakemake("prepare_links_p_nom", simpl="") snakemake = mock_snakemake("prepare_links_p_nom", simpl="")
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
links_p_nom = pd.read_html( links_p_nom = pd.read_html(
"https://en.wikipedia.org/wiki/List_of_HVDC_projects", header=0, match="SwePol" "https://en.wikipedia.org/wiki/List_of_HVDC_projects", header=0, match="SwePol"


@ -63,7 +63,7 @@ import re
import numpy as np import numpy as np
import pandas as pd import pandas as pd
import pypsa import pypsa
from _helpers import configure_logging from _helpers import configure_logging, set_scenario_config
from add_electricity import load_costs, update_transmission_costs from add_electricity import load_costs, update_transmission_costs
from pypsa.descriptors import expand_series from pypsa.descriptors import expand_series
@ -283,6 +283,7 @@ if __name__ == "__main__":
"prepare_network", simpl="", clusters="37", ll="v1.0", opts="Ept" "prepare_network", simpl="", clusters="37", ll="v1.0", opts="Ept"
) )
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
opts = snakemake.wildcards.opts.split("-") opts = snakemake.wildcards.opts.split("-")


@ -36,7 +36,7 @@ import logging
import tarfile import tarfile
from pathlib import Path from pathlib import Path
from _helpers import configure_logging, progress_retrieve from _helpers import configure_logging, progress_retrieve, set_scenario_config
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)


@ -12,7 +12,7 @@ logger = logging.getLogger(__name__)
from pathlib import Path from pathlib import Path
from _helpers import configure_logging, progress_retrieve from _helpers import configure_logging, progress_retrieve, set_scenario_config
if __name__ == "__main__": if __name__ == "__main__":
if "snakemake" not in globals(): if "snakemake" not in globals():
@ -23,6 +23,7 @@ if __name__ == "__main__":
else: else:
rootpath = "." rootpath = "."
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
url = "https://www.destatis.de/EN/Themes/Economy/Prices/Publications/Downloads-Energy-Price-Trends/energy-price-trends-xlsx-5619002.xlsx?__blob=publicationFile" url = "https://www.destatis.de/EN/Themes/Economy/Prices/Publications/Downloads-Energy-Price-Trends/energy-price-trends-xlsx-5619002.xlsx?__blob=publicationFile"


@ -13,7 +13,7 @@ logger = logging.getLogger(__name__)
import tarfile import tarfile
from pathlib import Path from pathlib import Path
from _helpers import configure_logging, progress_retrieve from _helpers import configure_logging, progress_retrieve, set_scenario_config
if __name__ == "__main__": if __name__ == "__main__":
if "snakemake" not in globals(): if "snakemake" not in globals():
@ -24,6 +24,7 @@ if __name__ == "__main__":
else: else:
rootpath = "." rootpath = "."
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
url = "https://zenodo.org/record/5824485/files/pypsa-eur-sec-data-bundle.tar.gz" url = "https://zenodo.org/record/5824485/files/pypsa-eur-sec-data-bundle.tar.gz"


@ -92,7 +92,7 @@ import numpy as np
import pandas as pd import pandas as pd
import pypsa import pypsa
import scipy as sp import scipy as sp
from _helpers import configure_logging, update_p_nom_max from _helpers import configure_logging, set_scenario_config, update_p_nom_max
from add_electricity import load_costs from add_electricity import load_costs
from cluster_network import cluster_regions, clustering_for_n_clusters from cluster_network import cluster_regions, clustering_for_n_clusters
from pypsa.clustering.spatial import ( from pypsa.clustering.spatial import (
@ -531,6 +531,7 @@ if __name__ == "__main__":
snakemake = mock_snakemake("simplify_network", simpl="") snakemake = mock_snakemake("simplify_network", simpl="")
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
params = snakemake.params params = snakemake.params
solver_name = snakemake.config["solving"]["solver"]["name"] solver_name = snakemake.config["solving"]["solver"]["name"]


@ -33,7 +33,11 @@ import numpy as np
import pandas as pd import pandas as pd
import pypsa import pypsa
import xarray as xr import xarray as xr
from _helpers import configure_logging, update_config_with_sector_opts from _helpers import (
configure_logging,
set_scenario_config,
update_config_with_sector_opts,
)
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
pypsa.pf.logger.setLevel(logging.WARNING) pypsa.pf.logger.setLevel(logging.WARNING)
@ -657,6 +661,7 @@ if __name__ == "__main__":
planning_horizons="2020", planning_horizons="2020",
) )
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
if "sector_opts" in snakemake.wildcards.keys(): if "sector_opts" in snakemake.wildcards.keys():
update_config_with_sector_opts( update_config_with_sector_opts(
snakemake.config, snakemake.wildcards.sector_opts snakemake.config, snakemake.wildcards.sector_opts


@ -11,7 +11,11 @@ import logging
import numpy as np import numpy as np
import pypsa import pypsa
from _helpers import configure_logging, update_config_with_sector_opts from _helpers import (
configure_logging,
set_scenario_config,
update_config_with_sector_opts,
)
from solve_network import prepare_network, solve_network from solve_network import prepare_network, solve_network
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -33,6 +37,7 @@ if __name__ == "__main__":
) )
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake)
update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts) update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts)
opts = (snakemake.wildcards.opts + "-" + snakemake.wildcards.sector_opts).split("-") opts = (snakemake.wildcards.opts + "-" + snakemake.wildcards.sector_opts).split("-")