Merge pull request #663 from PyPSA/post-merge-param

Post-merge `snakemake.params`
This commit is contained in:
Fabian Hofmann 2023-06-15 22:00:15 +02:00 committed by GitHub
commit bdb5381730
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
47 changed files with 548 additions and 417 deletions

View File

@ -31,6 +31,14 @@ snapshots:
end: "2013-03-08" end: "2013-03-08"
electricity: electricity:
co2limit: 100.e+6
extendable_carriers:
Generator: [OCGT]
StorageUnit: [battery]
Store: [H2]
Link: [H2 pipeline]
renewable_carriers: [solar, onwind, offwind-ac, offwind-dc] renewable_carriers: [solar, onwind, offwind-ac, offwind-dc]
atlite: atlite:

View File

@ -28,6 +28,14 @@ snapshots:
end: "2013-03-08" end: "2013-03-08"
electricity: electricity:
co2limit: 100.e+6
extendable_carriers:
Generator: [OCGT]
StorageUnit: [battery]
Store: [H2]
Link: [H2 pipeline]
renewable_carriers: [solar, onwind, offwind-ac, offwind-dc] renewable_carriers: [solar, onwind, offwind-ac, offwind-dc]
atlite: atlite:

View File

@ -10,6 +10,7 @@ Release Notes
Upcoming Release
================
* ``params:`` sections in the rule definitions have been added to track changed settings in ``config.yaml``. The goal is to automatically re-execute rules whose parameters have changed. See `Non-file parameters for rules <https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules>`_ in the snakemake documentation.
* **Important:** The configuration files are now located in the ``config`` directory. This counts for ``config.default.yaml``, ``config.yaml`` as well as the test configuration files which are now located in ``config/test``. Config files that are still in the root directory will be ignored.

View File

@ -19,6 +19,10 @@ if config["enable"].get("prepare_links_p_nom", False):
rule build_electricity_demand: rule build_electricity_demand:
params:
snapshots=config["snapshots"],
countries=config["countries"],
load=config["load"],
input: input:
ancient("data/load_raw.csv"), ancient("data/load_raw.csv"),
output: output:
@ -34,6 +38,10 @@ rule build_electricity_demand:
rule build_powerplants: rule build_powerplants:
params:
powerplants_filter=config["electricity"]["powerplants_filter"],
custom_powerplants=config["electricity"]["custom_powerplants"],
countries=config["countries"],
input: input:
base_network=RESOURCES + "networks/base.nc", base_network=RESOURCES + "networks/base.nc",
custom_powerplants="data/custom_powerplants.csv", custom_powerplants="data/custom_powerplants.csv",
@ -79,6 +87,8 @@ rule base_network:
rule build_shapes: rule build_shapes:
params:
countries=config["countries"],
input: input:
naturalearth=ancient("data/bundle/naturalearth/ne_10m_admin_0_countries.shp"), naturalearth=ancient("data/bundle/naturalearth/ne_10m_admin_0_countries.shp"),
eez=ancient("data/bundle/eez/World_EEZ_v8_2014.shp"), eez=ancient("data/bundle/eez/World_EEZ_v8_2014.shp"),
@ -104,6 +114,8 @@ rule build_shapes:
rule build_bus_regions: rule build_bus_regions:
params:
countries=config["countries"],
input: input:
country_shapes=RESOURCES + "country_shapes.geojson", country_shapes=RESOURCES + "country_shapes.geojson",
offshore_shapes=RESOURCES + "offshore_shapes.geojson", offshore_shapes=RESOURCES + "offshore_shapes.geojson",
@ -125,6 +137,9 @@ rule build_bus_regions:
if config["enable"].get("build_cutout", False): if config["enable"].get("build_cutout", False):
rule build_cutout: rule build_cutout:
params:
snapshots=config["snapshots"],
cutouts=config["atlite"]["cutouts"],
input: input:
regions_onshore=RESOURCES + "regions_onshore.geojson", regions_onshore=RESOURCES + "regions_onshore.geojson",
regions_offshore=RESOURCES + "regions_offshore.geojson", regions_offshore=RESOURCES + "regions_offshore.geojson",
@ -186,6 +201,8 @@ rule build_ship_raster:
rule build_renewable_profiles: rule build_renewable_profiles:
params:
renewable=config["renewable"],
input: input:
base_network=RESOURCES + "networks/base.nc", base_network=RESOURCES + "networks/base.nc",
corine=ancient("data/bundle/corine/g250_clc06_V18_5.tif"), corine=ancient("data/bundle/corine/g250_clc06_V18_5.tif"),
@ -235,6 +252,9 @@ rule build_renewable_profiles:
rule build_hydro_profile: rule build_hydro_profile:
params:
hydro=config["renewable"]["hydro"],
countries=config["countries"],
input: input:
country_shapes=RESOURCES + "country_shapes.geojson", country_shapes=RESOURCES + "country_shapes.geojson",
eia_hydro_generation="data/eia_hydro_annual_generation.csv", eia_hydro_generation="data/eia_hydro_annual_generation.csv",
@ -252,6 +272,14 @@ rule build_hydro_profile:
rule add_electricity: rule add_electricity:
params:
length_factor=config["lines"]["length_factor"],
scaling_factor=config["load"]["scaling_factor"],
countries=config["countries"],
renewable=config["renewable"],
electricity=config["electricity"],
conventional=config.get("conventional", {}),
costs=config["costs"],
input: input:
**{ **{
f"profile_{tech}": RESOURCES + f"profile_{tech}.nc" f"profile_{tech}": RESOURCES + f"profile_{tech}.nc"
@ -287,6 +315,15 @@ rule add_electricity:
rule simplify_network: rule simplify_network:
params:
simplify_network=config["clustering"]["simplify_network"],
aggregation_strategies=config["clustering"].get("aggregation_strategies", {}),
focus_weights=config.get("focus_weights", None),
renewable_carriers=config["electricity"]["renewable_carriers"],
max_hours=config["electricity"]["max_hours"],
length_factor=config["lines"]["length_factor"],
p_max_pu=config["links"].get("p_max_pu", 1.0),
costs=config["costs"],
input: input:
network=RESOURCES + "networks/elec.nc", network=RESOURCES + "networks/elec.nc",
tech_costs=COSTS, tech_costs=COSTS,
@ -312,6 +349,16 @@ rule simplify_network:
rule cluster_network: rule cluster_network:
params:
cluster_network=config["clustering"]["cluster_network"],
aggregation_strategies=config["clustering"].get("aggregation_strategies", {}),
custom_busmap=config["enable"].get("custom_busmap", False),
focus_weights=config.get("focus_weights", None),
renewable_carriers=config["electricity"]["renewable_carriers"],
conventional_carriers=config["electricity"].get("conventional_carriers", []),
max_hours=config["electricity"]["max_hours"],
length_factor=config["lines"]["length_factor"],
costs=config["costs"],
input: input:
network=RESOURCES + "networks/elec_s{simpl}.nc", network=RESOURCES + "networks/elec_s{simpl}.nc",
regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}.geojson", regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}.geojson",
@ -343,6 +390,10 @@ rule cluster_network:
rule add_extra_components: rule add_extra_components:
params:
extendable_carriers=config["electricity"]["extendable_carriers"],
max_hours=config["electricity"]["max_hours"],
costs=config["costs"],
input: input:
network=RESOURCES + "networks/elec_s{simpl}_{clusters}.nc", network=RESOURCES + "networks/elec_s{simpl}_{clusters}.nc",
tech_costs=COSTS, tech_costs=COSTS,
@ -362,6 +413,14 @@ rule add_extra_components:
rule prepare_network: rule prepare_network:
params:
links=config["links"],
lines=config["lines"],
co2base=config["electricity"]["co2base"],
co2limit=config["electricity"]["co2limit"],
gaslimit=config["electricity"].get("gaslimit"),
max_hours=config["electricity"]["max_hours"],
costs=config["costs"],
input: input:
RESOURCES + "networks/elec_s{simpl}_{clusters}_ec.nc", RESOURCES + "networks/elec_s{simpl}_{clusters}_ec.nc",
tech_costs=COSTS, tech_costs=COSTS,

View File

@ -140,6 +140,8 @@ if not (config["sector"]["gas_network"] or config["sector"]["H2_retrofit"]):
rule build_heat_demands: rule build_heat_demands:
params:
snapshots=config["snapshots"],
input: input:
pop_layout=RESOURCES + "pop_layout_{scope}.nc", pop_layout=RESOURCES + "pop_layout_{scope}.nc",
regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
@ -160,6 +162,8 @@ rule build_heat_demands:
rule build_temperature_profiles: rule build_temperature_profiles:
params:
snapshots=config["snapshots"],
input: input:
pop_layout=RESOURCES + "pop_layout_{scope}.nc", pop_layout=RESOURCES + "pop_layout_{scope}.nc",
regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
@ -181,6 +185,8 @@ rule build_temperature_profiles:
rule build_cop_profiles: rule build_cop_profiles:
params:
heat_pump_sink_T=config["sector"]["heat_pump_sink_T"],
input: input:
temp_soil_total=RESOURCES + "temp_soil_total_elec_s{simpl}_{clusters}.nc", temp_soil_total=RESOURCES + "temp_soil_total_elec_s{simpl}_{clusters}.nc",
temp_soil_rural=RESOURCES + "temp_soil_rural_elec_s{simpl}_{clusters}.nc", temp_soil_rural=RESOURCES + "temp_soil_rural_elec_s{simpl}_{clusters}.nc",
@ -208,6 +214,9 @@ rule build_cop_profiles:
rule build_solar_thermal_profiles: rule build_solar_thermal_profiles:
params:
snapshots=config["snapshots"],
solar_thermal=config["solar_thermal"],
input: input:
pop_layout=RESOURCES + "pop_layout_{scope}.nc", pop_layout=RESOURCES + "pop_layout_{scope}.nc",
regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
@ -228,6 +237,9 @@ rule build_solar_thermal_profiles:
rule build_energy_totals: rule build_energy_totals:
params:
countries=config["countries"],
energy=config["energy"],
input: input:
nuts3_shapes=RESOURCES + "nuts3_shapes.geojson", nuts3_shapes=RESOURCES + "nuts3_shapes.geojson",
co2="data/eea/UNFCCC_v23.csv", co2="data/eea/UNFCCC_v23.csv",
@ -253,6 +265,8 @@ rule build_energy_totals:
rule build_biomass_potentials: rule build_biomass_potentials:
params:
biomass=config["biomass"],
input: input:
enspreso_biomass=HTTP.remote( enspreso_biomass=HTTP.remote(
"https://cidportal.jrc.ec.europa.eu/ftp/jrc-opendata/ENSPRESO/ENSPRESO_BIOMASS.xlsx", "https://cidportal.jrc.ec.europa.eu/ftp/jrc-opendata/ENSPRESO/ENSPRESO_BIOMASS.xlsx",
@ -315,6 +329,10 @@ if not config["sector"]["biomass_transport"]:
if config["sector"]["regional_co2_sequestration_potential"]["enable"]: if config["sector"]["regional_co2_sequestration_potential"]["enable"]:
rule build_sequestration_potentials: rule build_sequestration_potentials:
params:
sequestration_potential=config["sector"][
"regional_co2_sequestration_potential"
],
input: input:
sequestration_potential=HTTP.remote( sequestration_potential=HTTP.remote(
"https://raw.githubusercontent.com/ericzhou571/Co2Storage/main/resources/complete_map_2020_unit_Mt.geojson", "https://raw.githubusercontent.com/ericzhou571/Co2Storage/main/resources/complete_map_2020_unit_Mt.geojson",
@ -368,6 +386,8 @@ rule build_salt_cavern_potentials:
rule build_ammonia_production: rule build_ammonia_production:
params:
countries=config["countries"],
input: input:
usgs="data/myb1-2017-nitro.xls", usgs="data/myb1-2017-nitro.xls",
output: output:
@ -386,6 +406,9 @@ rule build_ammonia_production:
rule build_industry_sector_ratios: rule build_industry_sector_ratios:
params:
industry=config["industry"],
ammonia=config["sector"].get("ammonia", False),
input: input:
ammonia_production=RESOURCES + "ammonia_production.csv", ammonia_production=RESOURCES + "ammonia_production.csv",
idees="data/jrc-idees-2015", idees="data/jrc-idees-2015",
@ -405,6 +428,9 @@ rule build_industry_sector_ratios:
rule build_industrial_production_per_country: rule build_industrial_production_per_country:
params:
industry=config["industry"],
countries=config["countries"],
input: input:
ammonia_production=RESOURCES + "ammonia_production.csv", ammonia_production=RESOURCES + "ammonia_production.csv",
jrc="data/jrc-idees-2015", jrc="data/jrc-idees-2015",
@ -426,6 +452,8 @@ rule build_industrial_production_per_country:
rule build_industrial_production_per_country_tomorrow: rule build_industrial_production_per_country_tomorrow:
params:
industry=config["industry"],
input: input:
industrial_production_per_country=RESOURCES industrial_production_per_country=RESOURCES
+ "industrial_production_per_country.csv", + "industrial_production_per_country.csv",
@ -450,6 +478,9 @@ rule build_industrial_production_per_country_tomorrow:
rule build_industrial_distribution_key: rule build_industrial_distribution_key:
params:
hotmaps_locate_missing=config["industry"].get("hotmaps_locate_missing", False),
countries=config["countries"],
input: input:
regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv", clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv",
@ -524,6 +555,9 @@ rule build_industrial_energy_demand_per_node:
rule build_industrial_energy_demand_per_country_today: rule build_industrial_energy_demand_per_country_today:
params:
countries=config["countries"],
industry=config["industry"],
input: input:
jrc="data/jrc-idees-2015", jrc="data/jrc-idees-2015",
ammonia_production=RESOURCES + "ammonia_production.csv", ammonia_production=RESOURCES + "ammonia_production.csv",
@ -570,6 +604,9 @@ rule build_industrial_energy_demand_per_node_today:
if config["sector"]["retrofitting"]["retro_endogen"]: if config["sector"]["retrofitting"]["retro_endogen"]:
rule build_retro_cost: rule build_retro_cost:
params:
retrofitting=config["sector"]["retrofitting"],
countries=config["countries"],
input: input:
building_stock="data/retro/data_building_stock.csv", building_stock="data/retro/data_building_stock.csv",
data_tabula="data/retro/tabula-calculator-calcsetbuilding.csv", data_tabula="data/retro/tabula-calculator-calcsetbuilding.csv",
@ -640,6 +677,9 @@ rule build_shipping_demand:
rule build_transport_demand: rule build_transport_demand:
params:
snapshots=config["snapshots"],
sector=config["sector"],
input: input:
clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv", clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv",
pop_weighted_energy_totals=RESOURCES pop_weighted_energy_totals=RESOURCES
@ -666,6 +706,18 @@ rule build_transport_demand:
rule prepare_sector_network: rule prepare_sector_network:
params: params:
co2_budget=config["co2_budget"],
conventional_carriers=config["existing_capacities"]["conventional_carriers"],
foresight=config["foresight"],
costs=config["costs"],
sector=config["sector"],
industry=config["industry"],
pypsa_eur=config["pypsa_eur"],
length_factor=config["lines"]["length_factor"],
planning_horizons=config["scenario"]["planning_horizons"],
countries=config["countries"],
emissions_scope=config["energy"]["emissions"],
eurostat_report_year=config["energy"]["eurostat_report_year"],
RDIR=RDIR, RDIR=RDIR,
input: input:
**build_retro_cost_output, **build_retro_cost_output,

View File

@ -9,6 +9,9 @@ localrules:
rule plot_network: rule plot_network:
params:
foresight=config["foresight"],
plotting=config["plotting"],
input: input:
overrides="data/override_component_attrs", overrides="data/override_component_attrs",
network=RESULTS network=RESULTS
@ -67,6 +70,10 @@ rule copy_conda_env:
rule make_summary: rule make_summary:
params: params:
foresight=config["foresight"],
costs=config["costs"],
snapshots=config["snapshots"],
scenario=config["scenario"],
RDIR=RDIR, RDIR=RDIR,
input: input:
overrides="data/override_component_attrs", overrides="data/override_component_attrs",
@ -114,6 +121,10 @@ rule make_summary:
rule plot_summary: rule plot_summary:
params: params:
countries=config["countries"],
planning_horizons=config["scenario"]["planning_horizons"],
sector_opts=config["scenario"]["sector_opts"],
plotting=config["plotting"],
RDIR=RDIR, RDIR=RDIR,
input: input:
costs=RESULTS + "csvs/costs.csv", costs=RESULTS + "csvs/costs.csv",

View File

@ -19,6 +19,8 @@ if config["enable"].get("retrieve_databundle", True):
datafiles.extend(["natura/Natura2000_end2015.shp", "GEBCO_2014_2D.nc"]) datafiles.extend(["natura/Natura2000_end2015.shp", "GEBCO_2014_2D.nc"])
rule retrieve_databundle: rule retrieve_databundle:
params:
tutorial=config["tutorial"],
output: output:
expand("data/bundle/{file}", file=datafiles), expand("data/bundle/{file}", file=datafiles),
log: log:

View File

@ -4,6 +4,13 @@
rule solve_network: rule solve_network:
params:
solving=config["solving"],
foresight=config["foresight"],
planning_horizons=config["scenario"]["planning_horizons"],
co2_sequestration_potential=config["sector"].get(
"co2_sequestration_potential", 200
),
input: input:
network=RESOURCES + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", network=RESOURCES + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
output: output:
@ -28,6 +35,8 @@ rule solve_network:
rule solve_operations_network: rule solve_operations_network:
params:
options=config["solving"]["options"],
input: input:
network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
output: output:

View File

@ -4,6 +4,11 @@
rule add_existing_baseyear: rule add_existing_baseyear:
params:
baseyear=config["scenario"]["planning_horizons"][0],
sector=config["sector"],
existing_capacities=config["existing_capacities"],
costs=config["costs"],
input: input:
overrides="data/override_component_attrs", overrides="data/override_component_attrs",
network=RESULTS network=RESULTS
@ -42,6 +47,10 @@ rule add_existing_baseyear:
rule add_brownfield: rule add_brownfield:
params:
H2_retrofit=config["sector"]["H2_retrofit"],
H2_retrofit_capacity_per_CH4=config["sector"]["H2_retrofit_capacity_per_CH4"],
threshold_capacity=config["existing_capacities"]["threshold_capacity"],
input: input:
overrides="data/override_component_attrs", overrides="data/override_component_attrs",
network=RESULTS network=RESULTS
@ -74,6 +83,13 @@ ruleorder: add_existing_baseyear > add_brownfield
rule solve_sector_network_myopic: rule solve_sector_network_myopic:
params:
solving=config["solving"],
foresight=config["foresight"],
planning_horizons=config["scenario"]["planning_horizons"],
co2_sequestration_potential=config["sector"].get(
"co2_sequestration_potential", 200
),
input: input:
overrides="data/override_component_attrs", overrides="data/override_component_attrs",
network=RESULTS network=RESULTS

View File

@ -4,6 +4,13 @@
rule solve_sector_network: rule solve_sector_network:
params:
solving=config["solving"],
foresight=config["foresight"],
planning_horizons=config["scenario"]["planning_horizons"],
co2_sequestration_potential=config["sector"].get(
"co2_sequestration_potential", 200
),
input: input:
overrides="data/override_component_attrs", overrides="data/override_component_attrs",
network=RESULTS network=RESULTS

View File

@ -82,7 +82,7 @@ def load_network(import_name=None, custom_components=None):
As in pypsa.Network(import_name) As in pypsa.Network(import_name)
custom_components : dict custom_components : dict
Dictionary listing custom components. Dictionary listing custom components.
For using ``snakemake.config['override_components']`` For using ``snakemake.params['override_components']``
in ``config/config.yaml`` define: in ``config/config.yaml`` define:
.. code:: yaml .. code:: yaml

View File

@ -49,7 +49,7 @@ def add_brownfield(n, n_p, year):
) )
] ]
threshold = snakemake.config["existing_capacities"]["threshold_capacity"] threshold = snakemake.params.threshold_capacity
if not chp_heat.empty: if not chp_heat.empty:
threshold_chp_heat = ( threshold_chp_heat = (
@ -87,7 +87,7 @@ def add_brownfield(n, n_p, year):
# deal with gas network # deal with gas network
pipe_carrier = ["gas pipeline"] pipe_carrier = ["gas pipeline"]
if snakemake.config["sector"]["H2_retrofit"]: if snakemake.params.H2_retrofit:
# drop capacities of previous year to avoid duplicating # drop capacities of previous year to avoid duplicating
to_drop = n.links.carrier.isin(pipe_carrier) & (n.links.build_year != year) to_drop = n.links.carrier.isin(pipe_carrier) & (n.links.build_year != year)
n.mremove("Link", n.links.loc[to_drop].index) n.mremove("Link", n.links.loc[to_drop].index)
@ -98,7 +98,7 @@ def add_brownfield(n, n_p, year):
& (n.links.build_year != year) & (n.links.build_year != year)
].index ].index
gas_pipes_i = n.links[n.links.carrier.isin(pipe_carrier)].index gas_pipes_i = n.links[n.links.carrier.isin(pipe_carrier)].index
CH4_per_H2 = 1 / snakemake.config["sector"]["H2_retrofit_capacity_per_CH4"] CH4_per_H2 = 1 / snakemake.params.H2_retrofit_capacity_per_CH4
fr = "H2 pipeline retrofitted" fr = "H2 pipeline retrofitted"
to = "gas pipeline" to = "gas pipeline"
# today's pipe capacity # today's pipe capacity

View File

@ -137,7 +137,7 @@ def _add_missing_carriers_from_costs(n, costs, carriers):
n.import_components_from_dataframe(emissions, "Carrier") n.import_components_from_dataframe(emissions, "Carrier")
def load_costs(tech_costs, config, elec_config, Nyears=1.0): def load_costs(tech_costs, config, max_hours, Nyears=1.0):
# set all asset costs and other parameters # set all asset costs and other parameters
costs = pd.read_csv(tech_costs, index_col=[0, 1]).sort_index() costs = pd.read_csv(tech_costs, index_col=[0, 1]).sort_index()
@ -180,7 +180,6 @@ def load_costs(tech_costs, config, elec_config, Nyears=1.0):
dict(capital_cost=capital_cost, marginal_cost=0.0, co2_emissions=0.0) dict(capital_cost=capital_cost, marginal_cost=0.0, co2_emissions=0.0)
) )
max_hours = elec_config["max_hours"]
costs.loc["battery"] = costs_for_storage( costs.loc["battery"] = costs_for_storage(
costs.loc["battery storage"], costs.loc["battery storage"],
costs.loc["battery inverter"], costs.loc["battery inverter"],
@ -371,7 +370,7 @@ def attach_conventional_generators(
ppl, ppl,
conventional_carriers, conventional_carriers,
extendable_carriers, extendable_carriers,
conventional_config, conventional_params,
conventional_inputs, conventional_inputs,
): ):
carriers = set(conventional_carriers) | set(extendable_carriers["Generator"]) carriers = set(conventional_carriers) | set(extendable_carriers["Generator"])
@ -408,12 +407,12 @@ def attach_conventional_generators(
lifetime=(ppl.dateout - ppl.datein).fillna(np.inf), lifetime=(ppl.dateout - ppl.datein).fillna(np.inf),
) )
for carrier in conventional_config: for carrier in conventional_params:
# Generators with technology affected # Generators with technology affected
idx = n.generators.query("carrier == @carrier").index idx = n.generators.query("carrier == @carrier").index
for attr in list(set(conventional_config[carrier]) & set(n.generators)): for attr in list(set(conventional_params[carrier]) & set(n.generators)):
values = conventional_config[carrier][attr] values = conventional_params[carrier][attr]
if f"conventional_{carrier}_{attr}" in conventional_inputs: if f"conventional_{carrier}_{attr}" in conventional_inputs:
# Values affecting generators of technology k country-specific # Values affecting generators of technology k country-specific
@ -430,7 +429,7 @@ def attach_conventional_generators(
n.generators.loc[idx, attr] = values n.generators.loc[idx, attr] = values
def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **config): def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **params):
_add_missing_carriers_from_costs(n, costs, carriers) _add_missing_carriers_from_costs(n, costs, carriers)
ppl = ( ppl = (
@ -485,9 +484,9 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **con
) )
if "PHS" in carriers and not phs.empty: if "PHS" in carriers and not phs.empty:
# fill missing max hours to config value and # fill missing max hours to params value and
# assume no natural inflow due to lack of data # assume no natural inflow due to lack of data
max_hours = config.get("PHS_max_hours", 6) max_hours = params.get("PHS_max_hours", 6)
phs = phs.replace({"max_hours": {0: max_hours}}) phs = phs.replace({"max_hours": {0: max_hours}})
n.madd( n.madd(
"StorageUnit", "StorageUnit",
@ -503,7 +502,7 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **con
) )
if "hydro" in carriers and not hydro.empty: if "hydro" in carriers and not hydro.empty:
hydro_max_hours = config.get("hydro_max_hours") hydro_max_hours = params.get("hydro_max_hours")
assert hydro_max_hours is not None, "No path for hydro capacities given." assert hydro_max_hours is not None, "No path for hydro capacities given."
@ -653,16 +652,7 @@ def attach_OPSD_renewables(n, tech_map):
n.generators.p_nom_min.update(gens.bus.map(caps).dropna()) n.generators.p_nom_min.update(gens.bus.map(caps).dropna())
def estimate_renewable_capacities(n, config): def estimate_renewable_capacities(n, year, tech_map, expansion_limit, countries):
year = config["electricity"]["estimate_renewable_capacities"]["year"]
tech_map = config["electricity"]["estimate_renewable_capacities"][
"technology_mapping"
]
countries = config["countries"]
expansion_limit = config["electricity"]["estimate_renewable_capacities"][
"expansion_limit"
]
if not len(countries) or not len(tech_map): if not len(countries) or not len(tech_map):
return return
@ -725,48 +715,33 @@ if __name__ == "__main__":
snakemake = mock_snakemake("add_electricity") snakemake = mock_snakemake("add_electricity")
configure_logging(snakemake) configure_logging(snakemake)
params = snakemake.params
n = pypsa.Network(snakemake.input.base_network) n = pypsa.Network(snakemake.input.base_network)
Nyears = n.snapshot_weightings.objective.sum() / 8760.0 Nyears = n.snapshot_weightings.objective.sum() / 8760.0
costs = load_costs( costs = load_costs(
snakemake.input.tech_costs, snakemake.input.tech_costs,
snakemake.config["costs"], params.costs,
snakemake.config["electricity"], params.electricity["max_hours"],
Nyears, Nyears,
) )
ppl = load_powerplants(snakemake.input.powerplants) ppl = load_powerplants(snakemake.input.powerplants)
if "renewable_carriers" in snakemake.config["electricity"]:
renewable_carriers = set(snakemake.config["electricity"]["renewable_carriers"])
else:
logger.warning(
"Missing key `renewable_carriers` under config entry `electricity`. "
"In future versions, this will raise an error. "
"Falling back to carriers listed under `renewable`."
)
renewable_carriers = snakemake.config["renewable"]
extendable_carriers = snakemake.config["electricity"]["extendable_carriers"]
if not (set(renewable_carriers) & set(extendable_carriers["Generator"])):
logger.warning(
"No renewables found in config entry `extendable_carriers`. "
"In future versions, these have to be explicitly listed. "
"Falling back to all renewables."
)
conventional_carriers = snakemake.config["electricity"]["conventional_carriers"]
attach_load( attach_load(
n, n,
snakemake.input.regions, snakemake.input.regions,
snakemake.input.load, snakemake.input.load,
snakemake.input.nuts3_shapes, snakemake.input.nuts3_shapes,
snakemake.config["countries"], params.countries,
snakemake.config["load"]["scaling_factor"], params.scaling_factor,
) )
update_transmission_costs(n, costs, snakemake.config["lines"]["length_factor"]) update_transmission_costs(n, costs, params.length_factor)
renewable_carriers = set(params.electricity["renewable_carriers"])
extendable_carriers = params.electricity["extendable_carriers"]
conventional_carriers = params.electricity["conventional_carriers"]
conventional_inputs = { conventional_inputs = {
k: v for k, v in snakemake.input.items() if k.startswith("conventional_") k: v for k, v in snakemake.input.items() if k.startswith("conventional_")
} }
@ -776,7 +751,7 @@ if __name__ == "__main__":
ppl, ppl,
conventional_carriers, conventional_carriers,
extendable_carriers, extendable_carriers,
snakemake.config.get("conventional", {}), params.conventional,
conventional_inputs, conventional_inputs,
) )
@ -786,67 +761,32 @@ if __name__ == "__main__":
snakemake.input, snakemake.input,
renewable_carriers, renewable_carriers,
extendable_carriers, extendable_carriers,
snakemake.config["lines"]["length_factor"], params.length_factor,
) )
if "hydro" in renewable_carriers: if "hydro" in renewable_carriers:
conf = snakemake.config["renewable"]["hydro"] para = params.renewable["hydro"]
attach_hydro( attach_hydro(
n, n,
costs, costs,
ppl, ppl,
snakemake.input.profile_hydro, snakemake.input.profile_hydro,
snakemake.input.hydro_capacities, snakemake.input.hydro_capacities,
conf.pop("carriers", []), para.pop("carriers", []),
**conf, **para,
) )
if "estimate_renewable_capacities" not in snakemake.config["electricity"]: estimate_renewable_caps = params.electricity["estimate_renewable_capacities"]
logger.warning(
"Missing key `estimate_renewable_capacities` under config entry `electricity`. "
"In future versions, this will raise an error. "
"Falling back to whether ``estimate_renewable_capacities_from_capacity_stats`` is in the config."
)
if (
"estimate_renewable_capacities_from_capacity_stats"
in snakemake.config["electricity"]
):
estimate_renewable_caps = {
"enable": True,
**snakemake.config["electricity"][
"estimate_renewable_capacities_from_capacity_stats"
],
}
else:
estimate_renewable_caps = {"enable": False}
else:
estimate_renewable_caps = snakemake.config["electricity"][
"estimate_renewable_capacities"
]
if "enable" not in estimate_renewable_caps:
logger.warning(
"Missing key `enable` under config entry `estimate_renewable_capacities`. "
"In future versions, this will raise an error. Falling back to False."
)
estimate_renewable_caps = {"enable": False}
if "from_opsd" not in estimate_renewable_caps:
logger.warning(
"Missing key `from_opsd` under config entry `estimate_renewable_capacities`. "
"In future versions, this will raise an error. "
"Falling back to whether `renewable_capacities_from_opsd` is non-empty."
)
from_opsd = bool(
snakemake.config["electricity"].get("renewable_capacities_from_opsd", False)
)
estimate_renewable_caps["from_opsd"] = from_opsd
if estimate_renewable_caps["enable"]: if estimate_renewable_caps["enable"]:
tech_map = estimate_renewable_caps["technology_mapping"]
expansion_limit = estimate_renewable_caps["expansion_limit"]
year = estimate_renewable_caps["year"]
if estimate_renewable_caps["from_opsd"]: if estimate_renewable_caps["from_opsd"]:
tech_map = snakemake.config["electricity"]["estimate_renewable_capacities"][
"technology_mapping"
]
attach_OPSD_renewables(n, tech_map) attach_OPSD_renewables(n, tech_map)
estimate_renewable_capacities(n, snakemake.config) estimate_renewable_capacities(
n, year, tech_map, expansion_limit, params.countries
)
update_p_nom_max(n) update_p_nom_max(n)

View File

@ -157,7 +157,7 @@ def add_power_capacities_installed_before_baseyear(n, grouping_years, costs, bas
# Fill missing DateOut # Fill missing DateOut
dateout = ( dateout = (
df_agg.loc[biomass_i, "DateIn"] df_agg.loc[biomass_i, "DateIn"]
+ snakemake.config["costs"]["fill_values"]["lifetime"] + snakemake.params.costs["fill_values"]["lifetime"]
) )
df_agg.loc[biomass_i, "DateOut"] = df_agg.loc[biomass_i, "DateOut"].fillna(dateout) df_agg.loc[biomass_i, "DateOut"] = df_agg.loc[biomass_i, "DateOut"].fillna(dateout)
@ -218,7 +218,7 @@ def add_power_capacities_installed_before_baseyear(n, grouping_years, costs, bas
capacity = df.loc[grouping_year, generator] capacity = df.loc[grouping_year, generator]
capacity = capacity[~capacity.isna()] capacity = capacity[~capacity.isna()]
capacity = capacity[ capacity = capacity[
capacity > snakemake.config["existing_capacities"]["threshold_capacity"] capacity > snakemake.params.existing_capacities["threshold_capacity"]
] ]
suffix = "-ac" if generator == "offwind" else "" suffix = "-ac" if generator == "offwind" else ""
name_suffix = f" {generator}{suffix}-{grouping_year}" name_suffix = f" {generator}{suffix}-{grouping_year}"
@ -582,7 +582,7 @@ def add_heating_capacities_installed_before_baseyear(
) )
# delete links with capacities below threshold # delete links with capacities below threshold
threshold = snakemake.config["existing_capacities"]["threshold_capacity"] threshold = snakemake.params.existing_capacities["threshold_capacity"]
n.mremove( n.mremove(
"Link", "Link",
[ [
@ -612,10 +612,10 @@ if __name__ == "__main__":
update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts) update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts)
options = snakemake.config["sector"] options = snakemake.params.sector
opts = snakemake.wildcards.sector_opts.split("-") opts = snakemake.wildcards.sector_opts.split("-")
baseyear = snakemake.config["scenario"]["planning_horizons"][0] baseyear = snakemake.params.baseyear
overrides = override_component_attrs(snakemake.input.overrides) overrides = override_component_attrs(snakemake.input.overrides)
n = pypsa.Network(snakemake.input.network, override_component_attrs=overrides) n = pypsa.Network(snakemake.input.network, override_component_attrs=overrides)
@ -626,14 +626,12 @@ if __name__ == "__main__":
Nyears = n.snapshot_weightings.generators.sum() / 8760.0 Nyears = n.snapshot_weightings.generators.sum() / 8760.0
costs = prepare_costs( costs = prepare_costs(
snakemake.input.costs, snakemake.input.costs,
snakemake.config["costs"], snakemake.params.costs,
Nyears, Nyears,
) )
grouping_years_power = snakemake.config["existing_capacities"][ grouping_years_power = snakemake.params.existing_capacities["grouping_years_power"]
"grouping_years_power" grouping_years_heat = snakemake.params.existing_capacities["grouping_years_heat"]
]
grouping_years_heat = snakemake.config["existing_capacities"]["grouping_years_heat"]
add_power_capacities_installed_before_baseyear( add_power_capacities_installed_before_baseyear(
n, grouping_years_power, costs, baseyear n, grouping_years_power, costs, baseyear
) )
@ -650,7 +648,7 @@ if __name__ == "__main__":
.to_pandas() .to_pandas()
.reindex(index=n.snapshots) .reindex(index=n.snapshots)
) )
default_lifetime = snakemake.config["costs"]["fill_values"]["lifetime"] default_lifetime = snakemake.params.costs["fill_values"]["lifetime"]
add_heating_capacities_installed_before_baseyear( add_heating_capacities_installed_before_baseyear(
n, n,
baseyear, baseyear,

View File

@ -67,9 +67,8 @@ idx = pd.IndexSlice
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def attach_storageunits(n, costs, elec_opts): def attach_storageunits(n, costs, extendable_carriers, max_hours):
carriers = elec_opts["extendable_carriers"]["StorageUnit"] carriers = extendable_carriers["StorageUnit"]
max_hours = elec_opts["max_hours"]
_add_missing_carriers_from_costs(n, costs, carriers) _add_missing_carriers_from_costs(n, costs, carriers)
@ -99,8 +98,8 @@ def attach_storageunits(n, costs, elec_opts):
) )
def attach_stores(n, costs, elec_opts): def attach_stores(n, costs, extendable_carriers):
carriers = elec_opts["extendable_carriers"]["Store"] carriers = extendable_carriers["Store"]
_add_missing_carriers_from_costs(n, costs, carriers) _add_missing_carriers_from_costs(n, costs, carriers)
@ -187,11 +186,10 @@ def attach_stores(n, costs, elec_opts):
) )
def attach_hydrogen_pipelines(n, costs, elec_opts): def attach_hydrogen_pipelines(n, costs, extendable_carriers):
ext_carriers = elec_opts["extendable_carriers"] as_stores = extendable_carriers.get("Store", [])
as_stores = ext_carriers.get("Store", [])
if "H2 pipeline" not in ext_carriers.get("Link", []): if "H2 pipeline" not in extendable_carriers.get("Link", []):
return return
assert "H2" in as_stores, ( assert "H2" in as_stores, (
@ -235,16 +233,17 @@ if __name__ == "__main__":
configure_logging(snakemake) configure_logging(snakemake)
n = pypsa.Network(snakemake.input.network) n = pypsa.Network(snakemake.input.network)
elec_config = snakemake.config["electricity"] extendable_carriers = snakemake.params.extendable_carriers
max_hours = snakemake.params.max_hours
Nyears = n.snapshot_weightings.objective.sum() / 8760.0 Nyears = n.snapshot_weightings.objective.sum() / 8760.0
costs = load_costs( costs = load_costs(
snakemake.input.tech_costs, snakemake.config["costs"], elec_config, Nyears snakemake.input.tech_costs, snakemake.params.costs, max_hours, Nyears
) )
attach_storageunits(n, costs, elec_config) attach_storageunits(n, costs, extendable_carriers, max_hours)
attach_stores(n, costs, elec_config) attach_stores(n, costs, extendable_carriers)
attach_hydrogen_pipelines(n, costs, elec_config) attach_hydrogen_pipelines(n, costs, extendable_carriers)
add_nice_carrier_names(n, snakemake.config) add_nice_carrier_names(n, snakemake.config)

View File

@ -30,7 +30,7 @@ if __name__ == "__main__":
ammonia.index = cc.convert(ammonia.index, to="iso2") ammonia.index = cc.convert(ammonia.index, to="iso2")
years = [str(i) for i in range(2013, 2018)] years = [str(i) for i in range(2013, 2018)]
countries = ammonia.index.intersection(snakemake.config["countries"]) countries = ammonia.index.intersection(snakemake.params.countries)
ammonia = ammonia.loc[countries, years].astype(float) ammonia = ammonia.loc[countries, years].astype(float)
# convert from ktonN to ktonNH3 # convert from ktonN to ktonNH3

View File

@ -210,9 +210,9 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_biomass_potentials", simpl="", clusters="5") snakemake = mock_snakemake("build_biomass_potentials", simpl="", clusters="5")
config = snakemake.config["biomass"] params = snakemake.params.biomass
year = config["year"] year = params["year"]
scenario = config["scenario"] scenario = params["scenario"]
enspreso = enspreso_biomass_potentials(year, scenario) enspreso = enspreso_biomass_potentials(year, scenario)
@ -228,7 +228,7 @@ if __name__ == "__main__":
df.to_csv(snakemake.output.biomass_potentials_all) df.to_csv(snakemake.output.biomass_potentials_all)
grouper = {v: k for k, vv in config["classes"].items() for v in vv} grouper = {v: k for k, vv in params["classes"].items() for v in vv}
df = df.groupby(grouper, axis=1).sum() df = df.groupby(grouper, axis=1).sum()
df *= 1e6 # TWh/a to MWh/a df *= 1e6 # TWh/a to MWh/a

View File

@ -116,7 +116,7 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_bus_regions") snakemake = mock_snakemake("build_bus_regions")
configure_logging(snakemake) configure_logging(snakemake)
countries = snakemake.config["countries"] countries = snakemake.params.countries
n = pypsa.Network(snakemake.input.base_network) n = pypsa.Network(snakemake.input.base_network)

View File

@ -39,7 +39,7 @@ if __name__ == "__main__":
for source in ["air", "soil"]: for source in ["air", "soil"]:
source_T = xr.open_dataarray(snakemake.input[f"temp_{source}_{area}"]) source_T = xr.open_dataarray(snakemake.input[f"temp_{source}_{area}"])
delta_T = snakemake.config["sector"]["heat_pump_sink_T"] - source_T delta_T = snakemake.params.heat_pump_sink_T - source_T
cop = coefficient_of_performance(delta_T, source) cop = coefficient_of_performance(delta_T, source)

View File

@ -106,9 +106,9 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_cutout", cutout="europe-2013-era5") snakemake = mock_snakemake("build_cutout", cutout="europe-2013-era5")
configure_logging(snakemake) configure_logging(snakemake)
cutout_params = snakemake.config["atlite"]["cutouts"][snakemake.wildcards.cutout] cutout_params = snakemake.params.cutouts[snakemake.wildcards.cutout]
snapshots = pd.date_range(freq="h", **snakemake.config["snapshots"]) snapshots = pd.date_range(freq="h", **snakemake.params.snapshots)
time = [snapshots[0], snapshots[-1]] time = [snapshots[0], snapshots[-1]]
cutout_params["time"] = slice(*cutout_params.get("time", time)) cutout_params["time"] = slice(*cutout_params.get("time", time))

View File

@ -279,16 +279,16 @@ if __name__ == "__main__":
configure_logging(snakemake) configure_logging(snakemake)
powerstatistics = snakemake.config["load"]["power_statistics"] powerstatistics = snakemake.params.load["power_statistics"]
interpolate_limit = snakemake.config["load"]["interpolate_limit"] interpolate_limit = snakemake.params.load["interpolate_limit"]
countries = snakemake.config["countries"] countries = snakemake.params.countries
snapshots = pd.date_range(freq="h", **snakemake.config["snapshots"]) snapshots = pd.date_range(freq="h", **snakemake.params.snapshots)
years = slice(snapshots[0], snapshots[-1]) years = slice(snapshots[0], snapshots[-1])
time_shift = snakemake.config["load"]["time_shift_for_large_gaps"] time_shift = snakemake.params.load["time_shift_for_large_gaps"]
load = load_timeseries(snakemake.input[0], years, countries, powerstatistics) load = load_timeseries(snakemake.input[0], years, countries, powerstatistics)
if snakemake.config["load"]["manual_adjustments"]: if snakemake.params.load["manual_adjustments"]:
load = manual_adjustment(load, snakemake.input[0], powerstatistics) load = manual_adjustment(load, snakemake.input[0], powerstatistics)
logger.info(f"Linearly interpolate gaps of size {interpolate_limit} and less.") logger.info(f"Linearly interpolate gaps of size {interpolate_limit} and less.")

View File

@ -737,16 +737,16 @@ if __name__ == "__main__":
logging.basicConfig(level=snakemake.config["logging"]["level"]) logging.basicConfig(level=snakemake.config["logging"]["level"])
config = snakemake.config["energy"] params = snakemake.params.energy
nuts3 = gpd.read_file(snakemake.input.nuts3_shapes).set_index("index") nuts3 = gpd.read_file(snakemake.input.nuts3_shapes).set_index("index")
population = nuts3["pop"].groupby(nuts3.country).sum() population = nuts3["pop"].groupby(nuts3.country).sum()
countries = snakemake.config["countries"] countries = snakemake.params.countries
idees_countries = pd.Index(countries).intersection(eu28) idees_countries = pd.Index(countries).intersection(eu28)
data_year = config["energy_totals_year"] data_year = params["energy_totals_year"]
report_year = snakemake.config["energy"]["eurostat_report_year"] report_year = snakemake.params.energy["eurostat_report_year"]
input_eurostat = snakemake.input.eurostat input_eurostat = snakemake.input.eurostat
eurostat = build_eurostat(input_eurostat, countries, report_year, data_year) eurostat = build_eurostat(input_eurostat, countries, report_year, data_year)
swiss = build_swiss(data_year) swiss = build_swiss(data_year)
@ -755,8 +755,8 @@ if __name__ == "__main__":
energy = build_energy_totals(countries, eurostat, swiss, idees) energy = build_energy_totals(countries, eurostat, swiss, idees)
energy.to_csv(snakemake.output.energy_name) energy.to_csv(snakemake.output.energy_name)
base_year_emissions = config["base_emissions_year"] base_year_emissions = params["base_emissions_year"]
emissions_scope = snakemake.config["energy"]["emissions"] emissions_scope = snakemake.params.energy["emissions"]
eea_co2 = build_eea_co2(snakemake.input.co2, base_year_emissions, emissions_scope) eea_co2 = build_eea_co2(snakemake.input.co2, base_year_emissions, emissions_scope)
eurostat_co2 = build_eurostat_co2( eurostat_co2 = build_eurostat_co2(
input_eurostat, countries, report_year, base_year_emissions input_eurostat, countries, report_year, base_year_emissions

View File

@ -27,7 +27,7 @@ if __name__ == "__main__":
cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1) cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
client = Client(cluster, asynchronous=True) client = Client(cluster, asynchronous=True)
time = pd.date_range(freq="h", **snakemake.config["snapshots"]) time = pd.date_range(freq="h", **snakemake.params.snapshots)
cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time) cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time)
clustered_regions = ( clustered_regions = (

View File

@ -130,10 +130,10 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_hydro_profile") snakemake = mock_snakemake("build_hydro_profile")
configure_logging(snakemake) configure_logging(snakemake)
config_hydro = snakemake.config["renewable"]["hydro"] params_hydro = snakemake.params.hydro
cutout = atlite.Cutout(snakemake.input.cutout) cutout = atlite.Cutout(snakemake.input.cutout)
countries = snakemake.config["countries"] countries = snakemake.params.countries
country_shapes = ( country_shapes = (
gpd.read_file(snakemake.input.country_shapes) gpd.read_file(snakemake.input.country_shapes)
.set_index("name")["geometry"] .set_index("name")["geometry"]
@ -151,7 +151,7 @@ if __name__ == "__main__":
normalize_using_yearly=eia_stats, normalize_using_yearly=eia_stats,
) )
if "clip_min_inflow" in config_hydro: if "clip_min_inflow" in params_hydro:
inflow = inflow.where(inflow > config_hydro["clip_min_inflow"], 0) inflow = inflow.where(inflow > params_hydro["clip_min_inflow"], 0)
inflow.to_netcdf(snakemake.output[0]) inflow.to_netcdf(snakemake.output[0])

View File

@ -73,7 +73,7 @@ def prepare_hotmaps_database(regions):
df[["srid", "coordinates"]] = df.geom.str.split(";", expand=True) df[["srid", "coordinates"]] = df.geom.str.split(";", expand=True)
if snakemake.config["industry"].get("hotmaps_locate_missing", False): if snakemake.params.hotmaps_locate_missing:
df = locate_missing_industrial_sites(df) df = locate_missing_industrial_sites(df)
# remove those sites without valid locations # remove those sites without valid locations
@ -143,7 +143,7 @@ if __name__ == "__main__":
logging.basicConfig(level=snakemake.config["logging"]["level"]) logging.basicConfig(level=snakemake.config["logging"]["level"])
countries = snakemake.config["countries"] countries = snakemake.params.countries
regions = gpd.read_file(snakemake.input.regions_onshore).set_index("name") regions = gpd.read_file(snakemake.input.regions_onshore).set_index("name")

View File

@ -101,8 +101,8 @@ def add_ammonia_energy_demand(demand):
def get_ammonia_by_fuel(x): def get_ammonia_by_fuel(x):
fuels = { fuels = {
"gas": config["MWh_CH4_per_tNH3_SMR"], "gas": params["MWh_CH4_per_tNH3_SMR"],
"electricity": config["MWh_elec_per_tNH3_SMR"], "electricity": params["MWh_elec_per_tNH3_SMR"],
} }
return pd.Series({k: x * v for k, v in fuels.items()}) return pd.Series({k: x * v for k, v in fuels.items()})
@ -112,7 +112,7 @@ def add_ammonia_energy_demand(demand):
index=demand.index, fill_value=0.0 index=demand.index, fill_value=0.0
) )
ammonia = pd.DataFrame({"ammonia": ammonia * config["MWh_NH3_per_tNH3"]}).T ammonia = pd.DataFrame({"ammonia": ammonia * params["MWh_NH3_per_tNH3"]}).T
demand["Ammonia"] = ammonia.unstack().reindex(index=demand.index, fill_value=0.0) demand["Ammonia"] = ammonia.unstack().reindex(index=demand.index, fill_value=0.0)
@ -178,9 +178,9 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_industrial_energy_demand_per_country_today") snakemake = mock_snakemake("build_industrial_energy_demand_per_country_today")
config = snakemake.config["industry"] params = snakemake.params.industry
year = config.get("reference_year", 2015) year = params.get("reference_year", 2015)
countries = pd.Index(snakemake.config["countries"]) countries = pd.Index(snakemake.params.countries)
demand = industrial_energy_demand(countries.intersection(eu28), year) demand = industrial_energy_demand(countries.intersection(eu28), year)

View File

@ -264,9 +264,9 @@ def separate_basic_chemicals(demand, year):
# assume HVC, methanol, chlorine production proportional to non-ammonia basic chemicals # assume HVC, methanol, chlorine production proportional to non-ammonia basic chemicals
distribution_key = demand["Basic chemicals"] / demand["Basic chemicals"].sum() distribution_key = demand["Basic chemicals"] / demand["Basic chemicals"].sum()
demand["HVC"] = config["HVC_production_today"] * 1e3 * distribution_key demand["HVC"] = params["HVC_production_today"] * 1e3 * distribution_key
demand["Chlorine"] = config["chlorine_production_today"] * 1e3 * distribution_key demand["Chlorine"] = params["chlorine_production_today"] * 1e3 * distribution_key
demand["Methanol"] = config["methanol_production_today"] * 1e3 * distribution_key demand["Methanol"] = params["methanol_production_today"] * 1e3 * distribution_key
demand.drop(columns=["Basic chemicals"], inplace=True) demand.drop(columns=["Basic chemicals"], inplace=True)
@ -279,11 +279,11 @@ if __name__ == "__main__":
logging.basicConfig(level=snakemake.config["logging"]["level"]) logging.basicConfig(level=snakemake.config["logging"]["level"])
countries = snakemake.config["countries"] countries = snakemake.params.countries
year = snakemake.config["industry"]["reference_year"] year = snakemake.params.industry["reference_year"]
config = snakemake.config["industry"] params = snakemake.params.industry
jrc_dir = snakemake.input.jrc jrc_dir = snakemake.input.jrc
eurostat_dir = snakemake.input.eurostat eurostat_dir = snakemake.input.eurostat

View File

@ -15,7 +15,7 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_industrial_production_per_country_tomorrow") snakemake = mock_snakemake("build_industrial_production_per_country_tomorrow")
config = snakemake.config["industry"] params = snakemake.params.industry
investment_year = int(snakemake.wildcards.planning_horizons) investment_year = int(snakemake.wildcards.planning_horizons)
@ -25,8 +25,8 @@ if __name__ == "__main__":
keys = ["Integrated steelworks", "Electric arc"] keys = ["Integrated steelworks", "Electric arc"]
total_steel = production[keys].sum(axis=1) total_steel = production[keys].sum(axis=1)
st_primary_fraction = get(config["St_primary_fraction"], investment_year) st_primary_fraction = get(params["St_primary_fraction"], investment_year)
dri_fraction = get(config["DRI_fraction"], investment_year) dri_fraction = get(params["DRI_fraction"], investment_year)
int_steel = production["Integrated steelworks"].sum() int_steel = production["Integrated steelworks"].sum()
fraction_persistent_primary = st_primary_fraction * total_steel.sum() / int_steel fraction_persistent_primary = st_primary_fraction * total_steel.sum() / int_steel
@ -51,7 +51,7 @@ if __name__ == "__main__":
key_pri = "Aluminium - primary production" key_pri = "Aluminium - primary production"
key_sec = "Aluminium - secondary production" key_sec = "Aluminium - secondary production"
al_primary_fraction = get(config["Al_primary_fraction"], investment_year) al_primary_fraction = get(params["Al_primary_fraction"], investment_year)
fraction_persistent_primary = ( fraction_persistent_primary = (
al_primary_fraction * total_aluminium.sum() / production[key_pri].sum() al_primary_fraction * total_aluminium.sum() / production[key_pri].sum()
) )
@ -60,15 +60,15 @@ if __name__ == "__main__":
production[key_sec] = total_aluminium - production[key_pri] production[key_sec] = total_aluminium - production[key_pri]
production["HVC (mechanical recycling)"] = ( production["HVC (mechanical recycling)"] = (
get(config["HVC_mechanical_recycling_fraction"], investment_year) get(params["HVC_mechanical_recycling_fraction"], investment_year)
* production["HVC"] * production["HVC"]
) )
production["HVC (chemical recycling)"] = ( production["HVC (chemical recycling)"] = (
get(config["HVC_chemical_recycling_fraction"], investment_year) get(params["HVC_chemical_recycling_fraction"], investment_year)
* production["HVC"] * production["HVC"]
) )
production["HVC"] *= get(config["HVC_primary_fraction"], investment_year) production["HVC"] *= get(params["HVC_primary_fraction"], investment_year)
fn = snakemake.output.industrial_production_per_country_tomorrow fn = snakemake.output.industrial_production_per_country_tomorrow
production.to_csv(fn, float_format="%.2f") production.to_csv(fn, float_format="%.2f")

View File

@ -185,10 +185,10 @@ def iron_and_steel():
df[sector] = df["Electric arc"] df[sector] = df["Electric arc"]
# add H2 consumption for DRI at 1.7 MWh H2 /ton steel # add H2 consumption for DRI at 1.7 MWh H2 /ton steel
df.at["hydrogen", sector] = config["H2_DRI"] df.at["hydrogen", sector] = params["H2_DRI"]
# add electricity consumption in DRI shaft (0.322 MWh/tSl) # add electricity consumption in DRI shaft (0.322 MWh/tSl)
df.at["elec", sector] += config["elec_DRI"] df.at["elec", sector] += params["elec_DRI"]
## Integrated steelworks ## Integrated steelworks
# could be used in combination with CCS) # could be used in combination with CCS)
@ -383,19 +383,19 @@ def chemicals_industry():
assert s_emi.index[0] == sector assert s_emi.index[0] == sector
# convert from MtHVC/a to ktHVC/a # convert from MtHVC/a to ktHVC/a
s_out = config["HVC_production_today"] * 1e3 s_out = params["HVC_production_today"] * 1e3
# tCO2/t material # tCO2/t material
df.loc["process emission", sector] += ( df.loc["process emission", sector] += (
s_emi["Process emissions"] s_emi["Process emissions"]
- config["petrochemical_process_emissions"] * 1e3 - params["petrochemical_process_emissions"] * 1e3
- config["NH3_process_emissions"] * 1e3 - params["NH3_process_emissions"] * 1e3
) / s_out ) / s_out
# emissions originating from feedstock, could be non-fossil origin # emissions originating from feedstock, could be non-fossil origin
# tCO2/t material # tCO2/t material
df.loc["process emission from feedstock", sector] += ( df.loc["process emission from feedstock", sector] += (
config["petrochemical_process_emissions"] * 1e3 params["petrochemical_process_emissions"] * 1e3
) / s_out ) / s_out
# convert from ktoe/a to GWh/a # convert from ktoe/a to GWh/a
@ -405,18 +405,18 @@ def chemicals_industry():
# subtract ammonia energy demand (in ktNH3/a) # subtract ammonia energy demand (in ktNH3/a)
ammonia = pd.read_csv(snakemake.input.ammonia_production, index_col=0) ammonia = pd.read_csv(snakemake.input.ammonia_production, index_col=0)
ammonia_total = ammonia.loc[ammonia.index.intersection(eu28), str(year)].sum() ammonia_total = ammonia.loc[ammonia.index.intersection(eu28), str(year)].sum()
df.loc["methane", sector] -= ammonia_total * config["MWh_CH4_per_tNH3_SMR"] df.loc["methane", sector] -= ammonia_total * params["MWh_CH4_per_tNH3_SMR"]
df.loc["elec", sector] -= ammonia_total * config["MWh_elec_per_tNH3_SMR"] df.loc["elec", sector] -= ammonia_total * params["MWh_elec_per_tNH3_SMR"]
# subtract chlorine demand # subtract chlorine demand
chlorine_total = config["chlorine_production_today"] chlorine_total = params["chlorine_production_today"]
df.loc["hydrogen", sector] -= chlorine_total * config["MWh_H2_per_tCl"] df.loc["hydrogen", sector] -= chlorine_total * params["MWh_H2_per_tCl"]
df.loc["elec", sector] -= chlorine_total * config["MWh_elec_per_tCl"] df.loc["elec", sector] -= chlorine_total * params["MWh_elec_per_tCl"]
# subtract methanol demand # subtract methanol demand
methanol_total = config["methanol_production_today"] methanol_total = params["methanol_production_today"]
df.loc["methane", sector] -= methanol_total * config["MWh_CH4_per_tMeOH"] df.loc["methane", sector] -= methanol_total * params["MWh_CH4_per_tMeOH"]
df.loc["elec", sector] -= methanol_total * config["MWh_elec_per_tMeOH"] df.loc["elec", sector] -= methanol_total * params["MWh_elec_per_tMeOH"]
# MWh/t material # MWh/t material
df.loc[sources, sector] = df.loc[sources, sector] / s_out df.loc[sources, sector] = df.loc[sources, sector] / s_out
@ -427,37 +427,37 @@ def chemicals_industry():
sector = "HVC (mechanical recycling)" sector = "HVC (mechanical recycling)"
df[sector] = 0.0 df[sector] = 0.0
df.loc["elec", sector] = config["MWh_elec_per_tHVC_mechanical_recycling"] df.loc["elec", sector] = params["MWh_elec_per_tHVC_mechanical_recycling"]
# HVC chemical recycling # HVC chemical recycling
sector = "HVC (chemical recycling)" sector = "HVC (chemical recycling)"
df[sector] = 0.0 df[sector] = 0.0
df.loc["elec", sector] = config["MWh_elec_per_tHVC_chemical_recycling"] df.loc["elec", sector] = params["MWh_elec_per_tHVC_chemical_recycling"]
# Ammonia # Ammonia
sector = "Ammonia" sector = "Ammonia"
df[sector] = 0.0 df[sector] = 0.0
if snakemake.config["sector"].get("ammonia", False): if snakemake.params.ammonia:
df.loc["ammonia", sector] = config["MWh_NH3_per_tNH3"] df.loc["ammonia", sector] = params["MWh_NH3_per_tNH3"]
else: else:
df.loc["hydrogen", sector] = config["MWh_H2_per_tNH3_electrolysis"] df.loc["hydrogen", sector] = params["MWh_H2_per_tNH3_electrolysis"]
df.loc["elec", sector] = config["MWh_elec_per_tNH3_electrolysis"] df.loc["elec", sector] = params["MWh_elec_per_tNH3_electrolysis"]
# Chlorine # Chlorine
sector = "Chlorine" sector = "Chlorine"
df[sector] = 0.0 df[sector] = 0.0
df.loc["hydrogen", sector] = config["MWh_H2_per_tCl"] df.loc["hydrogen", sector] = params["MWh_H2_per_tCl"]
df.loc["elec", sector] = config["MWh_elec_per_tCl"] df.loc["elec", sector] = params["MWh_elec_per_tCl"]
# Methanol # Methanol
sector = "Methanol" sector = "Methanol"
df[sector] = 0.0 df[sector] = 0.0
df.loc["methane", sector] = config["MWh_CH4_per_tMeOH"] df.loc["methane", sector] = params["MWh_CH4_per_tMeOH"]
df.loc["elec", sector] = config["MWh_elec_per_tMeOH"] df.loc["elec", sector] = params["MWh_elec_per_tMeOH"]
# Other chemicals # Other chemicals
@ -1465,10 +1465,10 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_industry_sector_ratios") snakemake = mock_snakemake("build_industry_sector_ratios")
# TODO make config option # TODO make params option
year = 2015 year = 2015
config = snakemake.config["industry"] params = snakemake.params.industry
df = pd.concat( df = pd.concat(
[ [

View File

@ -115,7 +115,7 @@ if __name__ == "__main__":
configure_logging(snakemake) configure_logging(snakemake)
n = pypsa.Network(snakemake.input.base_network) n = pypsa.Network(snakemake.input.base_network)
countries = snakemake.config["countries"] countries = snakemake.params.countries
ppl = ( ppl = (
pm.powerplants(from_url=True) pm.powerplants(from_url=True)
@ -134,12 +134,12 @@ if __name__ == "__main__":
ppl = ppl.query('not (Country in @available_countries and Fueltype == "Bioenergy")') ppl = ppl.query('not (Country in @available_countries and Fueltype == "Bioenergy")')
ppl = pd.concat([ppl, opsd]) ppl = pd.concat([ppl, opsd])
ppl_query = snakemake.config["electricity"]["powerplants_filter"] ppl_query = snakemake.params.powerplants_filter
if isinstance(ppl_query, str): if isinstance(ppl_query, str):
ppl.query(ppl_query, inplace=True) ppl.query(ppl_query, inplace=True)
# add carriers from own powerplant files: # add carriers from own powerplant files:
custom_ppl_query = snakemake.config["electricity"]["custom_powerplants"] custom_ppl_query = snakemake.params.custom_powerplants
ppl = add_custom_powerplants( ppl = add_custom_powerplants(
ppl, snakemake.input.custom_powerplants, custom_ppl_query ppl, snakemake.input.custom_powerplants, custom_ppl_query
) )

View File

@ -64,7 +64,7 @@ Inputs
- ``resources/offshore_shapes.geojson``: confer :ref:`shapes` - ``resources/offshore_shapes.geojson``: confer :ref:`shapes`
- ``resources/regions_onshore.geojson``: (if not offshore wind), confer :ref:`busregions` - ``resources/regions_onshore.geojson``: (if not offshore wind), confer :ref:`busregions`
- ``resources/regions_offshore.geojson``: (if offshore wind), :ref:`busregions` - ``resources/regions_offshore.geojson``: (if offshore wind), :ref:`busregions`
- ``"cutouts/" + config["renewable"][{technology}]['cutout']``: :ref:`cutout` - ``"cutouts/" + params["renewable"][{technology}]['cutout']``: :ref:`cutout`
- ``networks/base.nc``: :ref:`base` - ``networks/base.nc``: :ref:`base`
Outputs Outputs
@ -204,14 +204,14 @@ if __name__ == "__main__":
nprocesses = int(snakemake.threads) nprocesses = int(snakemake.threads)
noprogress = snakemake.config["run"].get("disable_progressbar", True) noprogress = snakemake.config["run"].get("disable_progressbar", True)
config = snakemake.config["renewable"][snakemake.wildcards.technology] params = snakemake.params.renewable[snakemake.wildcards.technology]
resource = config["resource"] # pv panel config / wind turbine config resource = params["resource"] # pv panel params / wind turbine params
correction_factor = config.get("correction_factor", 1.0) correction_factor = params.get("correction_factor", 1.0)
capacity_per_sqkm = config["capacity_per_sqkm"] capacity_per_sqkm = params["capacity_per_sqkm"]
p_nom_max_meth = config.get("potential", "conservative") p_nom_max_meth = params.get("potential", "conservative")
if isinstance(config.get("corine", {}), list): if isinstance(params.get("corine", {}), list):
config["corine"] = {"grid_codes": config["corine"]} params["corine"] = {"grid_codes": params["corine"]}
if correction_factor != 1.0: if correction_factor != 1.0:
logger.info(f"correction_factor is set as {correction_factor}") logger.info(f"correction_factor is set as {correction_factor}")
@ -229,13 +229,13 @@ if __name__ == "__main__":
regions = regions.set_index("name").rename_axis("bus") regions = regions.set_index("name").rename_axis("bus")
buses = regions.index buses = regions.index
res = config.get("excluder_resolution", 100) res = params.get("excluder_resolution", 100)
excluder = atlite.ExclusionContainer(crs=3035, res=res) excluder = atlite.ExclusionContainer(crs=3035, res=res)
if config["natura"]: if params["natura"]:
excluder.add_raster(snakemake.input.natura, nodata=0, allow_no_overlap=True) excluder.add_raster(snakemake.input.natura, nodata=0, allow_no_overlap=True)
corine = config.get("corine", {}) corine = params.get("corine", {})
if "grid_codes" in corine: if "grid_codes" in corine:
codes = corine["grid_codes"] codes = corine["grid_codes"]
excluder.add_raster(snakemake.input.corine, codes=codes, invert=True, crs=3035) excluder.add_raster(snakemake.input.corine, codes=codes, invert=True, crs=3035)
@ -246,28 +246,28 @@ if __name__ == "__main__":
snakemake.input.corine, codes=codes, buffer=buffer, crs=3035 snakemake.input.corine, codes=codes, buffer=buffer, crs=3035
) )
if "ship_threshold" in config: if "ship_threshold" in params:
shipping_threshold = ( shipping_threshold = (
config["ship_threshold"] * 8760 * 6 params["ship_threshold"] * 8760 * 6
) # approximation because 6 years of data which is hourly collected ) # approximation because 6 years of data which is hourly collected
func = functools.partial(np.less, shipping_threshold) func = functools.partial(np.less, shipping_threshold)
excluder.add_raster( excluder.add_raster(
snakemake.input.ship_density, codes=func, crs=4326, allow_no_overlap=True snakemake.input.ship_density, codes=func, crs=4326, allow_no_overlap=True
) )
if config.get("max_depth"): if params.get("max_depth"):
# lambda not supported for atlite + multiprocessing # lambda not supported for atlite + multiprocessing
# use named function np.greater with partially frozen argument instead # use named function np.greater with partially frozen argument instead
# and exclude areas where: -max_depth > grid cell depth # and exclude areas where: -max_depth > grid cell depth
func = functools.partial(np.greater, -config["max_depth"]) func = functools.partial(np.greater, -params["max_depth"])
excluder.add_raster(snakemake.input.gebco, codes=func, crs=4326, nodata=-1000) excluder.add_raster(snakemake.input.gebco, codes=func, crs=4326, nodata=-1000)
if "min_shore_distance" in config: if "min_shore_distance" in params:
buffer = config["min_shore_distance"] buffer = params["min_shore_distance"]
excluder.add_geometry(snakemake.input.country_shapes, buffer=buffer) excluder.add_geometry(snakemake.input.country_shapes, buffer=buffer)
if "max_shore_distance" in config: if "max_shore_distance" in params:
buffer = config["max_shore_distance"] buffer = params["max_shore_distance"]
excluder.add_geometry( excluder.add_geometry(
snakemake.input.country_shapes, buffer=buffer, invert=True snakemake.input.country_shapes, buffer=buffer, invert=True
) )
@ -358,13 +358,13 @@ if __name__ == "__main__":
# select only buses with some capacity and minimal capacity factor # select only buses with some capacity and minimal capacity factor
ds = ds.sel( ds = ds.sel(
bus=( bus=(
(ds["profile"].mean("time") > config.get("min_p_max_pu", 0.0)) (ds["profile"].mean("time") > params.get("min_p_max_pu", 0.0))
& (ds["p_nom_max"] > config.get("min_p_nom_max", 0.0)) & (ds["p_nom_max"] > params.get("min_p_nom_max", 0.0))
) )
) )
if "clip_p_max_pu" in config: if "clip_p_max_pu" in params:
min_p_max_pu = config["clip_p_max_pu"] min_p_max_pu = params["clip_p_max_pu"]
ds["profile"] = ds["profile"].where(ds["profile"] >= min_p_max_pu, 0) ds["profile"] = ds["profile"].where(ds["profile"] >= min_p_max_pu, 0)
ds.to_netcdf(snakemake.output.profile) ds.to_netcdf(snakemake.output.profile)

View File

@ -305,7 +305,7 @@ def prepare_building_stock_data():
u_values.set_index(["country_code", "subsector", "bage", "type"], inplace=True) u_values.set_index(["country_code", "subsector", "bage", "type"], inplace=True)
# only take in config.yaml specified countries into account # only take in config.yaml specified countries into account
countries = snakemake.config["countries"] countries = snakemake.params.countries
area_tot = area_tot.loc[countries] area_tot = area_tot.loc[countries]
return u_values, country_iso_dic, countries, area_tot, area return u_values, country_iso_dic, countries, area_tot, area
@ -1040,7 +1040,7 @@ if __name__ == "__main__":
# ******** config ********************************************************* # ******** config *********************************************************
retro_opts = snakemake.config["sector"]["retrofitting"] retro_opts = snakemake.params.retrofitting
interest_rate = retro_opts["interest_rate"] interest_rate = retro_opts["interest_rate"]
annualise_cost = retro_opts["annualise_cost"] # annualise the investment costs annualise_cost = retro_opts["annualise_cost"] # annualise the investment costs
tax_weighting = retro_opts[ tax_weighting = retro_opts[

View File

@ -41,7 +41,7 @@ if __name__ == "__main__":
"build_sequestration_potentials", simpl="", clusters="181" "build_sequestration_potentials", simpl="", clusters="181"
) )
cf = snakemake.config["sector"]["regional_co2_sequestration_potential"] cf = snakemake.params.sequestration_potential
gdf = gpd.read_file(snakemake.input.sequestration_potential[0]) gdf = gpd.read_file(snakemake.input.sequestration_potential[0])

View File

@ -255,13 +255,11 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_shapes") snakemake = mock_snakemake("build_shapes")
configure_logging(snakemake) configure_logging(snakemake)
country_shapes = countries( country_shapes = countries(snakemake.input.naturalearth, snakemake.params.countries)
snakemake.input.naturalearth, snakemake.config["countries"]
)
country_shapes.reset_index().to_file(snakemake.output.country_shapes) country_shapes.reset_index().to_file(snakemake.output.country_shapes)
offshore_shapes = eez( offshore_shapes = eez(
country_shapes, snakemake.input.eez, snakemake.config["countries"] country_shapes, snakemake.input.eez, snakemake.params.countries
) )
offshore_shapes.reset_index().to_file(snakemake.output.offshore_shapes) offshore_shapes.reset_index().to_file(snakemake.output.offshore_shapes)

View File

@ -27,9 +27,9 @@ if __name__ == "__main__":
cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1) cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
client = Client(cluster, asynchronous=True) client = Client(cluster, asynchronous=True)
config = snakemake.config["solar_thermal"] config = snakemake.params.solar_thermal
time = pd.date_range(freq="h", **snakemake.config["snapshots"]) time = pd.date_range(freq="h", **snakemake.params.snapshots)
cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time) cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time)
clustered_regions = ( clustered_regions = (

View File

@ -27,7 +27,7 @@ if __name__ == "__main__":
cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1) cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
client = Client(cluster, asynchronous=True) client = Client(cluster, asynchronous=True)
time = pd.date_range(freq="h", **snakemake.config["snapshots"]) time = pd.date_range(freq="h", **snakemake.params.snapshots)
cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time) cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time)
clustered_regions = ( clustered_regions = (

View File

@ -175,9 +175,9 @@ if __name__ == "__main__":
snakemake.input.pop_weighted_energy_totals, index_col=0 snakemake.input.pop_weighted_energy_totals, index_col=0
) )
options = snakemake.config["sector"] options = snakemake.params.sector
snapshots = pd.date_range(freq="h", **snakemake.config["snapshots"], tz="UTC") snapshots = pd.date_range(freq="h", **snakemake.params.snapshots, tz="UTC")
nyears = len(snapshots) / 8760 nyears = len(snapshots) / 8760

View File

@ -186,7 +186,7 @@ def get_feature_for_hac(n, buses_i=None, feature=None):
if "offwind" in carriers: if "offwind" in carriers:
carriers.remove("offwind") carriers.remove("offwind")
carriers = np.append( carriers = np.append(
carriers, network.generators.carrier.filter(like="offwind").unique() carriers, n.generators.carrier.filter(like="offwind").unique()
) )
if feature.split("-")[1] == "cap": if feature.split("-")[1] == "cap":
@ -463,28 +463,18 @@ if __name__ == "__main__":
snakemake = mock_snakemake("cluster_network", simpl="", clusters="5") snakemake = mock_snakemake("cluster_network", simpl="", clusters="5")
configure_logging(snakemake) configure_logging(snakemake)
params = snakemake.params
solver_name = snakemake.config["solving"]["solver"]["name"]
n = pypsa.Network(snakemake.input.network) n = pypsa.Network(snakemake.input.network)
focus_weights = snakemake.config.get("focus_weights", None) exclude_carriers = params.cluster_network["exclude_carriers"]
renewable_carriers = pd.Index(
[
tech
for tech in n.generators.carrier.unique()
if tech in snakemake.config["renewable"]
]
)
exclude_carriers = snakemake.config["clustering"]["cluster_network"].get(
"exclude_carriers", []
)
aggregate_carriers = set(n.generators.carrier) - set(exclude_carriers) aggregate_carriers = set(n.generators.carrier) - set(exclude_carriers)
if snakemake.wildcards.clusters.endswith("m"): if snakemake.wildcards.clusters.endswith("m"):
n_clusters = int(snakemake.wildcards.clusters[:-1]) n_clusters = int(snakemake.wildcards.clusters[:-1])
conventional = set( aggregate_carriers = set(params.conventional_carriers).intersection(
snakemake.config["electricity"].get("conventional_carriers", []) aggregate_carriers
) )
aggregate_carriers = conventional.intersection(aggregate_carriers)
elif snakemake.wildcards.clusters == "all": elif snakemake.wildcards.clusters == "all":
n_clusters = len(n.buses) n_clusters = len(n.buses)
else: else:
@ -498,13 +488,12 @@ if __name__ == "__main__":
n, busmap, linemap, linemap, pd.Series(dtype="O") n, busmap, linemap, linemap, pd.Series(dtype="O")
) )
else: else:
line_length_factor = snakemake.config["lines"]["length_factor"]
Nyears = n.snapshot_weightings.objective.sum() / 8760 Nyears = n.snapshot_weightings.objective.sum() / 8760
hvac_overhead_cost = load_costs( hvac_overhead_cost = load_costs(
snakemake.input.tech_costs, snakemake.input.tech_costs,
snakemake.config["costs"], params.costs,
snakemake.config["electricity"], params.max_hours,
Nyears, Nyears,
).at["HVAC overhead", "capital_cost"] ).at["HVAC overhead", "capital_cost"]
@ -515,16 +504,16 @@ if __name__ == "__main__":
).all() or x.isnull().all(), "The `potential` configuration option must agree for all renewable carriers, for now!" ).all() or x.isnull().all(), "The `potential` configuration option must agree for all renewable carriers, for now!"
return v return v
aggregation_strategies = snakemake.config["clustering"].get(
"aggregation_strategies", {}
)
# translate str entries of aggregation_strategies to pd.Series functions: # translate str entries of aggregation_strategies to pd.Series functions:
aggregation_strategies = { aggregation_strategies = {
p: {k: getattr(pd.Series, v) for k, v in aggregation_strategies[p].items()} p: {
for p in aggregation_strategies.keys() k: getattr(pd.Series, v)
for k, v in params.aggregation_strategies[p].items()
}
for p in params.aggregation_strategies.keys()
} }
custom_busmap = snakemake.config["enable"].get("custom_busmap", False) custom_busmap = params.custom_busmap
if custom_busmap: if custom_busmap:
custom_busmap = pd.read_csv( custom_busmap = pd.read_csv(
snakemake.input.custom_busmap, index_col=0, squeeze=True snakemake.input.custom_busmap, index_col=0, squeeze=True
@ -532,21 +521,18 @@ if __name__ == "__main__":
custom_busmap.index = custom_busmap.index.astype(str) custom_busmap.index = custom_busmap.index.astype(str)
logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}") logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}")
cluster_config = snakemake.config.get("clustering", {}).get(
"cluster_network", {}
)
clustering = clustering_for_n_clusters( clustering = clustering_for_n_clusters(
n, n,
n_clusters, n_clusters,
custom_busmap, custom_busmap,
aggregate_carriers, aggregate_carriers,
line_length_factor, params.length_factor,
aggregation_strategies, params.aggregation_strategies,
snakemake.config["solving"]["solver"]["name"], solver_name,
cluster_config.get("algorithm", "hac"), params.cluster_network["algorithm"],
cluster_config.get("feature", "solar+onwind-time"), params.cluster_network["feature"],
hvac_overhead_cost, hvac_overhead_cost,
focus_weights, params.focus_weights,
) )
update_p_nom_max(clustering.network) update_p_nom_max(clustering.network)

View File

@ -198,7 +198,7 @@ def calculate_costs(n, label, costs):
def calculate_cumulative_cost(): def calculate_cumulative_cost():
planning_horizons = snakemake.config["scenario"]["planning_horizons"] planning_horizons = snakemake.params.scenario["planning_horizons"]
cumulative_cost = pd.DataFrame( cumulative_cost = pd.DataFrame(
index=df["costs"].sum().index, index=df["costs"].sum().index,
@ -688,19 +688,19 @@ if __name__ == "__main__":
(cluster, ll, opt + sector_opt, planning_horizon): "results/" (cluster, ll, opt + sector_opt, planning_horizon): "results/"
+ snakemake.params.RDIR + snakemake.params.RDIR
+ f"/postnetworks/elec_s{simpl}_{cluster}_l{ll}_{opt}_{sector_opt}_{planning_horizon}.nc" + f"/postnetworks/elec_s{simpl}_{cluster}_l{ll}_{opt}_{sector_opt}_{planning_horizon}.nc"
for simpl in snakemake.config["scenario"]["simpl"] for simpl in snakemake.params.scenario["simpl"]
for cluster in snakemake.config["scenario"]["clusters"] for cluster in snakemake.params.scenario["clusters"]
for opt in snakemake.config["scenario"]["opts"] for opt in snakemake.params.scenario["opts"]
for sector_opt in snakemake.config["scenario"]["sector_opts"] for sector_opt in snakemake.params.scenario["sector_opts"]
for ll in snakemake.config["scenario"]["ll"] for ll in snakemake.params.scenario["ll"]
for planning_horizon in snakemake.config["scenario"]["planning_horizons"] for planning_horizon in snakemake.params.scenario["planning_horizons"]
} }
Nyears = len(pd.date_range(freq="h", **snakemake.config["snapshots"])) / 8760 Nyears = len(pd.date_range(freq="h", **snakemake.params.snapshots)) / 8760
costs_db = prepare_costs( costs_db = prepare_costs(
snakemake.input.costs, snakemake.input.costs,
snakemake.config["costs"], snakemake.params.costs,
Nyears, Nyears,
) )
@ -710,7 +710,7 @@ if __name__ == "__main__":
to_csv(df) to_csv(df)
if snakemake.config["foresight"] == "myopic": if snakemake.params.foresight == "myopic":
cumulative_cost = calculate_cumulative_cost() cumulative_cost = calculate_cumulative_cost()
cumulative_cost.to_csv( cumulative_cost.to_csv(
"results/" + snakemake.params.RDIR + "/csvs/cumulative_cost.csv" "results/" + snakemake.params.RDIR + "/csvs/cumulative_cost.csv"

View File

@ -70,7 +70,7 @@ def plot_map(
transmission=False, transmission=False,
with_legend=True, with_legend=True,
): ):
tech_colors = snakemake.config["plotting"]["tech_colors"] tech_colors = snakemake.params.plotting["tech_colors"]
n = network.copy() n = network.copy()
assign_location(n) assign_location(n)
@ -116,9 +116,7 @@ def plot_map(
costs = costs.stack() # .sort_index() costs = costs.stack() # .sort_index()
# hack because impossible to drop buses... # hack because impossible to drop buses...
eu_location = snakemake.config["plotting"].get( eu_location = snakemake.params.plotting.get("eu_node_location", dict(x=-5.5, y=46))
"eu_node_location", dict(x=-5.5, y=46)
)
n.buses.loc["EU gas", "x"] = eu_location["x"] n.buses.loc["EU gas", "x"] = eu_location["x"]
n.buses.loc["EU gas", "y"] = eu_location["y"] n.buses.loc["EU gas", "y"] = eu_location["y"]
@ -315,7 +313,7 @@ def plot_h2_map(network, regions):
h2_new = n.links[n.links.carrier == "H2 pipeline"] h2_new = n.links[n.links.carrier == "H2 pipeline"]
h2_retro = n.links[n.links.carrier == "H2 pipeline retrofitted"] h2_retro = n.links[n.links.carrier == "H2 pipeline retrofitted"]
if snakemake.config["foresight"] == "myopic": if snakemake.params.foresight == "myopic":
# sum capacitiy for pipelines from different investment periods # sum capacitiy for pipelines from different investment periods
h2_new = group_pipes(h2_new) h2_new = group_pipes(h2_new)
@ -558,7 +556,7 @@ def plot_ch4_map(network):
link_widths_used = max_usage / linewidth_factor link_widths_used = max_usage / linewidth_factor
link_widths_used[max_usage < line_lower_threshold] = 0.0 link_widths_used[max_usage < line_lower_threshold] = 0.0
tech_colors = snakemake.config["plotting"]["tech_colors"] tech_colors = snakemake.params.plotting["tech_colors"]
pipe_colors = { pipe_colors = {
"gas pipeline": "#f08080", "gas pipeline": "#f08080",
@ -700,7 +698,7 @@ def plot_map_without(network):
# hack because impossible to drop buses... # hack because impossible to drop buses...
if "EU gas" in n.buses.index: if "EU gas" in n.buses.index:
eu_location = snakemake.config["plotting"].get( eu_location = snakemake.params.plotting.get(
"eu_node_location", dict(x=-5.5, y=46) "eu_node_location", dict(x=-5.5, y=46)
) )
n.buses.loc["EU gas", "x"] = eu_location["x"] n.buses.loc["EU gas", "x"] = eu_location["x"]
@ -876,7 +874,7 @@ def plot_series(network, carrier="AC", name="test"):
stacked=True, stacked=True,
linewidth=0.0, linewidth=0.0,
color=[ color=[
snakemake.config["plotting"]["tech_colors"][i.replace(suffix, "")] snakemake.params.plotting["tech_colors"][i.replace(suffix, "")]
for i in new_columns for i in new_columns
], ],
) )
@ -937,7 +935,7 @@ if __name__ == "__main__":
regions = gpd.read_file(snakemake.input.regions).set_index("name") regions = gpd.read_file(snakemake.input.regions).set_index("name")
map_opts = snakemake.config["plotting"]["map"] map_opts = snakemake.params.plotting["map"]
if map_opts["boundaries"] is None: if map_opts["boundaries"] is None:
map_opts["boundaries"] = regions.total_bounds[[0, 2, 1, 3]] + [-1, 1, -1, 1] map_opts["boundaries"] = regions.total_bounds[[0, 2, 1, 3]] + [-1, 1, -1, 1]

View File

@ -142,10 +142,10 @@ def plot_costs():
df = df.groupby(df.index.map(rename_techs)).sum() df = df.groupby(df.index.map(rename_techs)).sum()
to_drop = df.index[df.max(axis=1) < snakemake.config["plotting"]["costs_threshold"]] to_drop = df.index[df.max(axis=1) < snakemake.params.plotting["costs_threshold"]]
logger.info( logger.info(
f"Dropping technology with costs below {snakemake.config['plotting']['costs_threshold']} EUR billion per year" f"Dropping technology with costs below {snakemake.params['plotting']['costs_threshold']} EUR billion per year"
) )
logger.debug(df.loc[to_drop]) logger.debug(df.loc[to_drop])
@ -165,7 +165,7 @@ def plot_costs():
kind="bar", kind="bar",
ax=ax, ax=ax,
stacked=True, stacked=True,
color=[snakemake.config["plotting"]["tech_colors"][i] for i in new_index], color=[snakemake.params.plotting["tech_colors"][i] for i in new_index],
) )
handles, labels = ax.get_legend_handles_labels() handles, labels = ax.get_legend_handles_labels()
@ -173,7 +173,7 @@ def plot_costs():
handles.reverse() handles.reverse()
labels.reverse() labels.reverse()
ax.set_ylim([0, snakemake.config["plotting"]["costs_max"]]) ax.set_ylim([0, snakemake.params.plotting["costs_max"]])
ax.set_ylabel("System Cost [EUR billion per year]") ax.set_ylabel("System Cost [EUR billion per year]")
@ -201,11 +201,11 @@ def plot_energy():
df = df.groupby(df.index.map(rename_techs)).sum() df = df.groupby(df.index.map(rename_techs)).sum()
to_drop = df.index[ to_drop = df.index[
df.abs().max(axis=1) < snakemake.config["plotting"]["energy_threshold"] df.abs().max(axis=1) < snakemake.params.plotting["energy_threshold"]
] ]
logger.info( logger.info(
f"Dropping all technology with energy consumption or production below {snakemake.config['plotting']['energy_threshold']} TWh/a" f"Dropping all technology with energy consumption or production below {snakemake.params['plotting']['energy_threshold']} TWh/a"
) )
logger.debug(df.loc[to_drop]) logger.debug(df.loc[to_drop])
@ -227,7 +227,7 @@ def plot_energy():
kind="bar", kind="bar",
ax=ax, ax=ax,
stacked=True, stacked=True,
color=[snakemake.config["plotting"]["tech_colors"][i] for i in new_index], color=[snakemake.params.plotting["tech_colors"][i] for i in new_index],
) )
handles, labels = ax.get_legend_handles_labels() handles, labels = ax.get_legend_handles_labels()
@ -237,8 +237,8 @@ def plot_energy():
ax.set_ylim( ax.set_ylim(
[ [
snakemake.config["plotting"]["energy_min"], snakemake.params.plotting["energy_min"],
snakemake.config["plotting"]["energy_max"], snakemake.params.plotting["energy_max"],
] ]
) )
@ -287,7 +287,7 @@ def plot_balances():
df = df.groupby(df.index.map(rename_techs)).sum() df = df.groupby(df.index.map(rename_techs)).sum()
to_drop = df.index[ to_drop = df.index[
df.abs().max(axis=1) < snakemake.config["plotting"]["energy_threshold"] / 10 df.abs().max(axis=1) < snakemake.params.plotting["energy_threshold"] / 10
] ]
if v[0] in co2_carriers: if v[0] in co2_carriers:
@ -296,7 +296,7 @@ def plot_balances():
units = "TWh/a" units = "TWh/a"
logger.debug( logger.debug(
f"Dropping technology energy balance smaller than {snakemake.config['plotting']['energy_threshold']/10} {units}" f"Dropping technology energy balance smaller than {snakemake.params['plotting']['energy_threshold']/10} {units}"
) )
logger.debug(df.loc[to_drop]) logger.debug(df.loc[to_drop])
@ -317,7 +317,7 @@ def plot_balances():
kind="bar", kind="bar",
ax=ax, ax=ax,
stacked=True, stacked=True,
color=[snakemake.config["plotting"]["tech_colors"][i] for i in new_index], color=[snakemake.params.plotting["tech_colors"][i] for i in new_index],
) )
handles, labels = ax.get_legend_handles_labels() handles, labels = ax.get_legend_handles_labels()
@ -455,10 +455,10 @@ def plot_carbon_budget_distribution(input_eurostat):
ax1 = plt.subplot(gs1[0, 0]) ax1 = plt.subplot(gs1[0, 0])
ax1.set_ylabel("CO$_2$ emissions (Gt per year)", fontsize=22) ax1.set_ylabel("CO$_2$ emissions (Gt per year)", fontsize=22)
ax1.set_ylim([0, 5]) ax1.set_ylim([0, 5])
ax1.set_xlim([1990, snakemake.config["scenario"]["planning_horizons"][-1] + 1]) ax1.set_xlim([1990, snakemake.params.planning_horizons[-1] + 1])
path_cb = "results/" + snakemake.params.RDIR + "/csvs/" path_cb = "results/" + snakemake.params.RDIR + "/csvs/"
countries = snakemake.config["countries"] countries = snakemake.params.countries
e_1990 = co2_emissions_year(countries, input_eurostat, opts, year=1990) e_1990 = co2_emissions_year(countries, input_eurostat, opts, year=1990)
CO2_CAP = pd.read_csv(path_cb + "carbon_budget_distribution.csv", index_col=0) CO2_CAP = pd.read_csv(path_cb + "carbon_budget_distribution.csv", index_col=0)
@ -555,7 +555,7 @@ if __name__ == "__main__":
plot_balances() plot_balances()
for sector_opts in snakemake.config["scenario"]["sector_opts"]: for sector_opts in snakemake.params.sector_opts:
opts = sector_opts.split("-") opts = sector_opts.split("-")
for o in opts: for o in opts:
if "cb" in o: if "cb" in o:

View File

@ -253,12 +253,12 @@ if __name__ == "__main__":
Nyears = n.snapshot_weightings.objective.sum() / 8760.0 Nyears = n.snapshot_weightings.objective.sum() / 8760.0
costs = load_costs( costs = load_costs(
snakemake.input.tech_costs, snakemake.input.tech_costs,
snakemake.config["costs"], snakemake.params.costs,
snakemake.config["electricity"], snakemake.params.max_hours,
Nyears, Nyears,
) )
set_line_s_max_pu(n, snakemake.config["lines"]["s_max_pu"]) set_line_s_max_pu(n, snakemake.params.lines["s_max_pu"])
for o in opts: for o in opts:
m = re.match(r"^\d+h$", o, re.IGNORECASE) m = re.match(r"^\d+h$", o, re.IGNORECASE)
@ -277,11 +277,11 @@ if __name__ == "__main__":
if "Co2L" in o: if "Co2L" in o:
m = re.findall("[0-9]*\.?[0-9]+$", o) m = re.findall("[0-9]*\.?[0-9]+$", o)
if len(m) > 0: if len(m) > 0:
co2limit = float(m[0]) * snakemake.config["electricity"]["co2base"] co2limit = float(m[0]) * snakemake.params.co2base
add_co2limit(n, co2limit, Nyears) add_co2limit(n, co2limit, Nyears)
logger.info("Setting CO2 limit according to wildcard value.") logger.info("Setting CO2 limit according to wildcard value.")
else: else:
add_co2limit(n, snakemake.config["electricity"]["co2limit"], Nyears) add_co2limit(n, snakemake.params.co2limit, Nyears)
logger.info("Setting CO2 limit according to config value.") logger.info("Setting CO2 limit according to config value.")
break break
@ -293,7 +293,7 @@ if __name__ == "__main__":
add_gaslimit(n, limit, Nyears) add_gaslimit(n, limit, Nyears)
logger.info("Setting gas usage limit according to wildcard value.") logger.info("Setting gas usage limit according to wildcard value.")
else: else:
add_gaslimit(n, snakemake.config["electricity"].get("gaslimit"), Nyears) add_gaslimit(n, snakemake.params.gaslimit, Nyears)
logger.info("Setting gas usage limit according to config value.") logger.info("Setting gas usage limit according to config value.")
break break
@ -322,7 +322,7 @@ if __name__ == "__main__":
add_emission_prices(n, dict(co2=float(m[0]))) add_emission_prices(n, dict(co2=float(m[0])))
else: else:
logger.info("Setting emission prices according to config value.") logger.info("Setting emission prices according to config value.")
add_emission_prices(n, snakemake.config["costs"]["emission_prices"]) add_emission_prices(n, snakemake.params.costs["emission_prices"])
break break
ll_type, factor = snakemake.wildcards.ll[0], snakemake.wildcards.ll[1:] ll_type, factor = snakemake.wildcards.ll[0], snakemake.wildcards.ll[1:]
@ -330,8 +330,8 @@ if __name__ == "__main__":
set_line_nom_max( set_line_nom_max(
n, n,
s_nom_max_set=snakemake.config["lines"].get("s_nom_max,", np.inf), s_nom_max_set=snakemake.params.lines.get("s_nom_max,", np.inf),
p_nom_max_set=snakemake.config["links"].get("p_nom_max,", np.inf), p_nom_max_set=snakemake.params.links.get("p_nom_max,", np.inf),
) )
if "ATK" in opts: if "ATK" in opts:

View File

@ -200,12 +200,12 @@ def co2_emissions_year(
""" """
Calculate CO2 emissions in one specific year (e.g. 1990 or 2018). Calculate CO2 emissions in one specific year (e.g. 1990 or 2018).
""" """
emissions_scope = snakemake.config["energy"]["emissions"] emissions_scope = snakemake.params.energy["emissions"]
eea_co2 = build_eea_co2(snakemake.input.co2, year, emissions_scope) eea_co2 = build_eea_co2(snakemake.input.co2, year, emissions_scope)
# TODO: read Eurostat data from year > 2014 # TODO: read Eurostat data from year > 2014
# this only affects the estimation of CO2 emissions for BA, RS, AL, ME, MK # this only affects the estimation of CO2 emissions for BA, RS, AL, ME, MK
report_year = snakemake.config["energy"]["eurostat_report_year"] report_year = snakemake.params.energy["eurostat_report_year"]
if year > 2014: if year > 2014:
eurostat_co2 = build_eurostat_co2( eurostat_co2 = build_eurostat_co2(
input_eurostat, countries, report_year, year=2014 input_eurostat, countries, report_year, year=2014
@ -241,7 +241,7 @@ def build_carbon_budget(o, input_eurostat, fn, emissions_scope, report_year):
carbon_budget = float(o[o.find("cb") + 2 : o.find("ex")]) carbon_budget = float(o[o.find("cb") + 2 : o.find("ex")])
r = float(o[o.find("ex") + 2 :]) r = float(o[o.find("ex") + 2 :])
countries = snakemake.config["countries"] countries = snakemake.params.countries
e_1990 = co2_emissions_year( e_1990 = co2_emissions_year(
countries, input_eurostat, opts, emissions_scope, report_year, year=1990 countries, input_eurostat, opts, emissions_scope, report_year, year=1990
@ -252,7 +252,7 @@ def build_carbon_budget(o, input_eurostat, fn, emissions_scope, report_year):
countries, input_eurostat, opts, emissions_scope, report_year, year=2018 countries, input_eurostat, opts, emissions_scope, report_year, year=2018
) )
planning_horizons = snakemake.config["scenario"]["planning_horizons"] planning_horizons = snakemake.params.planning_horizons
t_0 = planning_horizons[0] t_0 = planning_horizons[0]
if "be" in o: if "be" in o:
@ -391,7 +391,7 @@ def update_wind_solar_costs(n, costs):
with xr.open_dataset(profile) as ds: with xr.open_dataset(profile) as ds:
underwater_fraction = ds["underwater_fraction"].to_pandas() underwater_fraction = ds["underwater_fraction"].to_pandas()
connection_cost = ( connection_cost = (
snakemake.config["lines"]["length_factor"] snakemake.params.length_factor
* ds["average_distance"].to_pandas() * ds["average_distance"].to_pandas()
* ( * (
underwater_fraction underwater_fraction
@ -483,8 +483,8 @@ def remove_elec_base_techs(n):
batteries and H2) from base electricity-only network, since they're added batteries and H2) from base electricity-only network, since they're added
here differently using links. here differently using links.
""" """
for c in n.iterate_components(snakemake.config["pypsa_eur"]): for c in n.iterate_components(snakemake.params.pypsa_eur):
to_keep = snakemake.config["pypsa_eur"][c.name] to_keep = snakemake.params.pypsa_eur[c.name]
to_remove = pd.Index(c.df.carrier.unique()).symmetric_difference(to_keep) to_remove = pd.Index(c.df.carrier.unique()).symmetric_difference(to_keep)
if to_remove.empty: if to_remove.empty:
continue continue
@ -674,7 +674,7 @@ def add_dac(n, costs):
def add_co2limit(n, nyears=1.0, limit=0.0): def add_co2limit(n, nyears=1.0, limit=0.0):
logger.info(f"Adding CO2 budget limit as per unit of 1990 levels of {limit}") logger.info(f"Adding CO2 budget limit as per unit of 1990 levels of {limit}")
countries = snakemake.config["countries"] countries = snakemake.params.countries
sectors = emission_sectors_from_opts(opts) sectors = emission_sectors_from_opts(opts)
@ -727,7 +727,7 @@ def cycling_shift(df, steps=1):
return df return df
def prepare_costs(cost_file, config, nyears): def prepare_costs(cost_file, params, nyears):
# set all asset costs and other parameters # set all asset costs and other parameters
costs = pd.read_csv(cost_file, index_col=[0, 1]).sort_index() costs = pd.read_csv(cost_file, index_col=[0, 1]).sort_index()
@ -739,7 +739,7 @@ def prepare_costs(cost_file, config, nyears):
costs.loc[:, "value"].unstack(level=1).groupby("technology").sum(min_count=1) costs.loc[:, "value"].unstack(level=1).groupby("technology").sum(min_count=1)
) )
costs = costs.fillna(config["fill_values"]) costs = costs.fillna(params["fill_values"])
def annuity_factor(v): def annuity_factor(v):
return calculate_annuity(v["lifetime"], v["discount rate"]) + v["FOM"] / 100 return calculate_annuity(v["lifetime"], v["discount rate"]) + v["FOM"] / 100
@ -787,7 +787,7 @@ def add_ammonia(n, costs):
nodes = pop_layout.index nodes = pop_layout.index
cf_industry = snakemake.config["industry"] cf_industry = snakemake.params.industry
n.add("Carrier", "NH3") n.add("Carrier", "NH3")
@ -1102,7 +1102,7 @@ def add_storage_and_grids(n, costs):
lifetime=costs.at["OCGT", "lifetime"], lifetime=costs.at["OCGT", "lifetime"],
) )
cavern_types = snakemake.config["sector"]["hydrogen_underground_storage_locations"] cavern_types = snakemake.params.sector["hydrogen_underground_storage_locations"]
h2_caverns = pd.read_csv(snakemake.input.h2_cavern, index_col=0) h2_caverns = pd.read_csv(snakemake.input.h2_cavern, index_col=0)
if ( if (
@ -3274,7 +3274,7 @@ if __name__ == "__main__":
update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts) update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts)
options = snakemake.config["sector"] options = snakemake.params.sector
opts = snakemake.wildcards.sector_opts.split("-") opts = snakemake.wildcards.sector_opts.split("-")
@ -3289,7 +3289,7 @@ if __name__ == "__main__":
costs = prepare_costs( costs = prepare_costs(
snakemake.input.costs, snakemake.input.costs,
snakemake.config["costs"], snakemake.params.costs,
nyears, nyears,
) )
@ -3301,10 +3301,10 @@ if __name__ == "__main__":
spatial = define_spatial(pop_layout.index, options) spatial = define_spatial(pop_layout.index, options)
if snakemake.config["foresight"] == "myopic": if snakemake.params.foresight == "myopic":
add_lifetime_wind_solar(n, costs) add_lifetime_wind_solar(n, costs)
conventional = snakemake.config["existing_capacities"]["conventional_carriers"] conventional = snakemake.params.conventional_carriers
for carrier in conventional: for carrier in conventional:
add_carrier_buses(n, carrier) add_carrier_buses(n, carrier)
@ -3373,15 +3373,15 @@ if __name__ == "__main__":
n = set_temporal_aggregation(n, opts, solver_name) n = set_temporal_aggregation(n, opts, solver_name)
limit_type = "config" limit_type = "config"
limit = get(snakemake.config["co2_budget"], investment_year) limit = get(snakemake.params.co2_budget, investment_year)
for o in opts: for o in opts:
if "cb" not in o: if "cb" not in o:
continue continue
limit_type = "carbon budget" limit_type = "carbon budget"
fn = "results/" + snakemake.params.RDIR + "/csvs/carbon_budget_distribution.csv" fn = "results/" + snakemake.params.RDIR + "/csvs/carbon_budget_distribution.csv"
if not os.path.exists(fn): if not os.path.exists(fn):
emissions_scope = snakemake.config["energy"]["emissions"] emissions_scope = snakemake.params.emissions_scope
report_year = snakemake.config["energy"]["eurostat_report_year"] report_year = snakemake.params.eurostat_report_year
build_carbon_budget( build_carbon_budget(
o, snakemake.input.eurostat, fn, emissions_scope, report_year o, snakemake.input.eurostat, fn, emissions_scope, report_year
) )
@ -3416,8 +3416,8 @@ if __name__ == "__main__":
if options["electricity_grid_connection"]: if options["electricity_grid_connection"]:
add_electricity_grid_connection(n, costs) add_electricity_grid_connection(n, costs)
first_year_myopic = (snakemake.config["foresight"] == "myopic") and ( first_year_myopic = (snakemake.params.foresight == "myopic") and (
snakemake.config["scenario"]["planning_horizons"][0] == investment_year snakemake.params.planning_horizons[0] == investment_year
) )
if options.get("cluster_heat_buses", False) and not first_year_myopic: if options.get("cluster_heat_buses", False) and not first_year_myopic:

View File

@ -53,7 +53,7 @@ if __name__ == "__main__":
snakemake snakemake
) # TODO Make logging compatible with progressbar (see PR #102) ) # TODO Make logging compatible with progressbar (see PR #102)
if snakemake.config["tutorial"]: if snakemake.params.tutorial:
url = "https://zenodo.org/record/3517921/files/pypsa-eur-tutorial-data-bundle.tar.xz" url = "https://zenodo.org/record/3517921/files/pypsa-eur-tutorial-data-bundle.tar.xz"
else: else:
url = "https://zenodo.org/record/3517935/files/pypsa-eur-data-bundle.tar.xz" url = "https://zenodo.org/record/3517935/files/pypsa-eur-data-bundle.tar.xz"

View File

@ -149,17 +149,17 @@ def simplify_network_to_380(n):
return n, trafo_map return n, trafo_map
def _prepare_connection_costs_per_link(n, costs, config): def _prepare_connection_costs_per_link(n, costs, renewable_carriers, length_factor):
if n.links.empty: if n.links.empty:
return {} return {}
connection_costs_per_link = {} connection_costs_per_link = {}
for tech in config["renewable"]: for tech in renewable_carriers:
if tech.startswith("offwind"): if tech.startswith("offwind"):
connection_costs_per_link[tech] = ( connection_costs_per_link[tech] = (
n.links.length n.links.length
* config["lines"]["length_factor"] * length_factor
* ( * (
n.links.underwater_fraction n.links.underwater_fraction
* costs.at[tech + "-connection-submarine", "capital_cost"] * costs.at[tech + "-connection-submarine", "capital_cost"]
@ -172,10 +172,18 @@ def _prepare_connection_costs_per_link(n, costs, config):
def _compute_connection_costs_to_bus( def _compute_connection_costs_to_bus(
n, busmap, costs, config, connection_costs_per_link=None, buses=None n,
busmap,
costs,
renewable_carriers,
length_factor,
connection_costs_per_link=None,
buses=None,
): ):
if connection_costs_per_link is None: if connection_costs_per_link is None:
connection_costs_per_link = _prepare_connection_costs_per_link(n, costs, config) connection_costs_per_link = _prepare_connection_costs_per_link(
n, costs, renewable_carriers, length_factor
)
if buses is None: if buses is None:
buses = busmap.index[busmap.index != busmap.values] buses = busmap.index[busmap.index != busmap.values]
@ -265,7 +273,16 @@ def _aggregate_and_move_components(
n.mremove(c, df.index[df.bus0.isin(buses_to_del) | df.bus1.isin(buses_to_del)]) n.mremove(c, df.index[df.bus0.isin(buses_to_del) | df.bus1.isin(buses_to_del)])
def simplify_links(n, costs, config, output, aggregation_strategies=dict()): def simplify_links(
n,
costs,
renewables,
length_factor,
p_max_pu,
exclude_carriers,
output,
aggregation_strategies=dict(),
):
## Complex multi-node links are folded into end-points ## Complex multi-node links are folded into end-points
logger.info("Simplifying connected link components") logger.info("Simplifying connected link components")
@ -315,7 +332,9 @@ def simplify_links(n, costs, config, output, aggregation_strategies=dict()):
busmap = n.buses.index.to_series() busmap = n.buses.index.to_series()
connection_costs_per_link = _prepare_connection_costs_per_link(n, costs, config) connection_costs_per_link = _prepare_connection_costs_per_link(
n, costs, renewables, length_factor
)
connection_costs_to_bus = pd.DataFrame( connection_costs_to_bus = pd.DataFrame(
0.0, index=n.buses.index, columns=list(connection_costs_per_link) 0.0, index=n.buses.index, columns=list(connection_costs_per_link)
) )
@ -333,12 +352,17 @@ def simplify_links(n, costs, config, output, aggregation_strategies=dict()):
) )
busmap.loc[buses] = b[np.r_[0, m.argmin(axis=0), 1]] busmap.loc[buses] = b[np.r_[0, m.argmin(axis=0), 1]]
connection_costs_to_bus.loc[buses] += _compute_connection_costs_to_bus( connection_costs_to_bus.loc[buses] += _compute_connection_costs_to_bus(
n, busmap, costs, config, connection_costs_per_link, buses n,
busmap,
costs,
renewables,
length_factor,
connection_costs_per_link,
buses,
) )
all_links = [i for _, i in sum(links, [])] all_links = [i for _, i in sum(links, [])]
p_max_pu = config["links"].get("p_max_pu", 1.0)
lengths = n.links.loc[all_links, "length"] lengths = n.links.loc[all_links, "length"]
name = lengths.idxmax() + "+{}".format(len(links) - 1) name = lengths.idxmax() + "+{}".format(len(links) - 1)
params = dict( params = dict(
@ -377,10 +401,6 @@ def simplify_links(n, costs, config, output, aggregation_strategies=dict()):
logger.debug("Collecting all components using the busmap") logger.debug("Collecting all components using the busmap")
exclude_carriers = config["clustering"]["simplify_network"].get(
"exclude_carriers", []
)
_aggregate_and_move_components( _aggregate_and_move_components(
n, n,
busmap, busmap,
@ -392,19 +412,23 @@ def simplify_links(n, costs, config, output, aggregation_strategies=dict()):
return n, busmap return n, busmap
def remove_stubs(n, costs, config, output, aggregation_strategies=dict()): def remove_stubs(
n,
costs,
renewable_carriers,
length_factor,
simplify_network,
output,
aggregation_strategies=dict(),
):
logger.info("Removing stubs") logger.info("Removing stubs")
across_borders = config["clustering"]["simplify_network"].get( across_borders = simplify_network["remove_stubs_across_borders"]
"remove_stubs_across_borders", True
)
matching_attrs = [] if across_borders else ["country"] matching_attrs = [] if across_borders else ["country"]
busmap = busmap_by_stubs(n, matching_attrs) busmap = busmap_by_stubs(n, matching_attrs)
connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap, costs, config) connection_costs_to_bus = _compute_connection_costs_to_bus(
n, busmap, costs, renewable_carriers, length_factor
exclude_carriers = config["clustering"]["simplify_network"].get(
"exclude_carriers", []
) )
_aggregate_and_move_components( _aggregate_and_move_components(
@ -413,7 +437,7 @@ def remove_stubs(n, costs, config, output, aggregation_strategies=dict()):
connection_costs_to_bus, connection_costs_to_bus,
output, output,
aggregation_strategies=aggregation_strategies, aggregation_strategies=aggregation_strategies,
exclude_carriers=exclude_carriers, exclude_carriers=simplify_network["exclude_carriers"],
) )
return n, busmap return n, busmap
@ -473,26 +497,22 @@ def aggregate_to_substations(n, aggregation_strategies=dict(), buses_i=None):
def cluster( def cluster(
n, n_clusters, config, algorithm="hac", feature=None, aggregation_strategies=dict() n,
n_clusters,
focus_weights,
solver_name,
algorithm="hac",
feature=None,
aggregation_strategies=dict(),
): ):
logger.info(f"Clustering to {n_clusters} buses") logger.info(f"Clustering to {n_clusters} buses")
focus_weights = config.get("focus_weights", None)
renewable_carriers = pd.Index(
[
tech
for tech in n.generators.carrier.unique()
if tech.split("-", 2)[0] in config["renewable"]
]
)
clustering = clustering_for_n_clusters( clustering = clustering_for_n_clusters(
n, n,
n_clusters, n_clusters,
custom_busmap=False, custom_busmap=False,
aggregation_strategies=aggregation_strategies, aggregation_strategies=aggregation_strategies,
solver_name=config["solving"]["solver"]["name"], solver_name=solver_name,
algorithm=algorithm, algorithm=algorithm,
feature=feature, feature=feature,
focus_weights=focus_weights, focus_weights=focus_weights,
@ -508,67 +528,69 @@ if __name__ == "__main__":
snakemake = mock_snakemake("simplify_network", simpl="") snakemake = mock_snakemake("simplify_network", simpl="")
configure_logging(snakemake) configure_logging(snakemake)
n = pypsa.Network(snakemake.input.network) params = snakemake.params
solver_name = snakemake.config["solving"]["solver"]["name"]
n = pypsa.Network(snakemake.input.network)
Nyears = n.snapshot_weightings.objective.sum() / 8760
aggregation_strategies = snakemake.config["clustering"].get(
"aggregation_strategies", {}
)
# translate str entries of aggregation_strategies to pd.Series functions: # translate str entries of aggregation_strategies to pd.Series functions:
aggregation_strategies = { aggregation_strategies = {
p: {k: getattr(pd.Series, v) for k, v in aggregation_strategies[p].items()} p: {
for p in aggregation_strategies.keys() k: getattr(pd.Series, v)
for k, v in params.aggregation_strategies[p].items()
}
for p in params.aggregation_strategies.keys()
} }
n, trafo_map = simplify_network_to_380(n) n, trafo_map = simplify_network_to_380(n)
Nyears = n.snapshot_weightings.objective.sum() / 8760
technology_costs = load_costs( technology_costs = load_costs(
snakemake.input.tech_costs, snakemake.input.tech_costs,
snakemake.config["costs"], params.costs,
snakemake.config["electricity"], params.max_hours,
Nyears, Nyears,
) )
n, simplify_links_map = simplify_links( n, simplify_links_map = simplify_links(
n, technology_costs, snakemake.config, snakemake.output, aggregation_strategies n,
technology_costs,
params.renewable_carriers,
params.length_factor,
params.p_max_pu,
params.simplify_network["exclude_carriers"],
snakemake.output,
aggregation_strategies,
) )
busmaps = [trafo_map, simplify_links_map] busmaps = [trafo_map, simplify_links_map]
cluster_config = snakemake.config["clustering"]["simplify_network"] if params.simplify_network["remove_stubs"]:
if cluster_config.get("remove_stubs", True):
n, stub_map = remove_stubs( n, stub_map = remove_stubs(
n, n,
technology_costs, technology_costs,
snakemake.config, params.renewable_carriers,
params.length_factor,
params.simplify_network,
snakemake.output, snakemake.output,
aggregation_strategies=aggregation_strategies, aggregation_strategies=aggregation_strategies,
) )
busmaps.append(stub_map) busmaps.append(stub_map)
if cluster_config.get("to_substations", False): if params.simplify_network["to_substations"]:
n, substation_map = aggregate_to_substations(n, aggregation_strategies) n, substation_map = aggregate_to_substations(n, aggregation_strategies)
busmaps.append(substation_map) busmaps.append(substation_map)
# treatment of outliers (nodes without a profile for considered carrier): # treatment of outliers (nodes without a profile for considered carrier):
# all nodes that have no profile of the given carrier are being aggregated to closest neighbor # all nodes that have no profile of the given carrier are being aggregated to closest neighbor
if ( if params.simplify_network["algorithm"] == "hac":
snakemake.config.get("clustering", {}) carriers = params.simplify_network["feature"].split("-")[0].split("+")
.get("cluster_network", {})
.get("algorithm", "hac")
== "hac"
or cluster_config.get("algorithm", "hac") == "hac"
):
carriers = (
cluster_config.get("feature", "solar+onwind-time").split("-")[0].split("+")
)
for carrier in carriers: for carrier in carriers:
buses_i = list( buses_i = list(
set(n.buses.index) - set(n.generators.query("carrier == @carrier").bus) set(n.buses.index) - set(n.generators.query("carrier == @carrier").bus)
) )
logger.info( logger.info(
f"clustering preparaton (hac): aggregating {len(buses_i)} buses of type {carrier}." f"clustering preparation (hac): aggregating {len(buses_i)} buses of type {carrier}."
) )
n, busmap_hac = aggregate_to_substations(n, aggregation_strategies, buses_i) n, busmap_hac = aggregate_to_substations(n, aggregation_strategies, buses_i)
busmaps.append(busmap_hac) busmaps.append(busmap_hac)
@ -577,9 +599,10 @@ if __name__ == "__main__":
n, cluster_map = cluster( n, cluster_map = cluster(
n, n,
int(snakemake.wildcards.simpl), int(snakemake.wildcards.simpl),
snakemake.config, params.focus_weights,
cluster_config.get("algorithm", "hac"), solver_name,
cluster_config.get("feature", None), params.simplify_network["algorithm"],
params.simplify_network["feature"],
aggregation_strategies, aggregation_strategies,
) )
busmaps.append(cluster_map) busmaps.append(cluster_map)

View File

@ -44,14 +44,14 @@ pypsa.pf.logger.setLevel(logging.WARNING)
from pypsa.descriptors import get_switchable_as_dense as get_as_dense from pypsa.descriptors import get_switchable_as_dense as get_as_dense
def add_land_use_constraint(n, planning_horizons, config):
    """Constrain renewable capacity expansion by available land.

    Dispatches on the clustering mode: networks built with the "m"
    wildcard keep generators at a finer resolution than buses, so
    land-use accounting must use the per-planning-horizon variant;
    otherwise the plain constraint is applied.
    """
    # NOTE(review): reads the module-level `snakemake` object — assumes this
    # is only ever called from the script's __main__ entry point; confirm.
    generators_clustered_separately = "m" in snakemake.wildcards.clusters
    if generators_clustered_separately:
        _add_land_use_constraint_m(n, planning_horizons, config)
    else:
        _add_land_use_constraint(n)
def _add_land_use_constraint(n, config): def _add_land_use_constraint(n):
# warning: this will miss existing offwind which is not classed AC-DC and has carrier 'offwind' # warning: this will miss existing offwind which is not classed AC-DC and has carrier 'offwind'
for carrier in ["solar", "onwind", "offwind-ac", "offwind-dc"]: for carrier in ["solar", "onwind", "offwind-ac", "offwind-dc"]:
@ -80,10 +80,10 @@ def _add_land_use_constraint(n, config):
n.generators.p_nom_max.clip(lower=0, inplace=True) n.generators.p_nom_max.clip(lower=0, inplace=True)
def _add_land_use_constraint_m(n, config): def _add_land_use_constraint_m(n, planning_horizons, config):
# if generators clustering is lower than network clustering, land_use accounting is at generators clusters # if generators clustering is lower than network clustering, land_use accounting is at generators clusters
planning_horizons = config["scenario"]["planning_horizons"] planning_horizons = snakemake.params["planning_horizons"]
grouping_years = config["existing_capacities"]["grouping_years"] grouping_years = config["existing_capacities"]["grouping_years"]
current_horizon = snakemake.wildcards.planning_horizons current_horizon = snakemake.wildcards.planning_horizons
@ -141,7 +141,14 @@ def add_co2_sequestration_limit(n, limit=200):
) )
def prepare_network(n, solve_opts=None, config=None): def prepare_network(
n,
solve_opts=None,
config=None,
foresight=None,
planning_horizons=None,
co2_sequestration_potential=None,
):
if "clip_p_max_pu" in solve_opts: if "clip_p_max_pu" in solve_opts:
for df in ( for df in (
n.generators_t.p_max_pu, n.generators_t.p_max_pu,
@ -191,11 +198,11 @@ def prepare_network(n, solve_opts=None, config=None):
n.set_snapshots(n.snapshots[:nhours]) n.set_snapshots(n.snapshots[:nhours])
n.snapshot_weightings[:] = 8760.0 / nhours n.snapshot_weightings[:] = 8760.0 / nhours
if config["foresight"] == "myopic": if foresight == "myopic":
add_land_use_constraint(n, config) add_land_use_constraint(n, planning_horizons, config)
if n.stores.carrier.eq("co2 stored").any(): if n.stores.carrier.eq("co2 stored").any():
limit = config["sector"].get("co2_sequestration_potential", 200) limit = co2_sequestration_potential
add_co2_sequestration_limit(n, limit=limit) add_co2_sequestration_limit(n, limit=limit)
return n return n
@ -590,13 +597,11 @@ def extra_functionality(n, snapshots):
add_pipe_retrofit_constraint(n) add_pipe_retrofit_constraint(n)
def solve_network(n, config, opts="", **kwargs): def solve_network(n, config, solving, opts="", **kwargs):
set_of_options = config["solving"]["solver"]["options"] set_of_options = solving["solver"]["options"]
solver_options = ( solver_options = solving["solver_options"][set_of_options] if set_of_options else {}
config["solving"]["solver_options"][set_of_options] if set_of_options else {} solver_name = solving["solver"]["name"]
) cf_solving = solving["options"]
solver_name = config["solving"]["solver"]["name"]
cf_solving = config["solving"]["options"]
track_iterations = cf_solving.get("track_iterations", False) track_iterations = cf_solving.get("track_iterations", False)
min_iterations = cf_solving.get("min_iterations", 4) min_iterations = cf_solving.get("min_iterations", 4)
max_iterations = cf_solving.get("max_iterations", 6) max_iterations = cf_solving.get("max_iterations", 6)
@ -665,7 +670,7 @@ if __name__ == "__main__":
if "sector_opts" in snakemake.wildcards.keys(): if "sector_opts" in snakemake.wildcards.keys():
opts += "-" + snakemake.wildcards.sector_opts opts += "-" + snakemake.wildcards.sector_opts
opts = [o for o in opts.split("-") if o != ""] opts = [o for o in opts.split("-") if o != ""]
solve_opts = snakemake.config["solving"]["options"] solve_opts = snakemake.params.solving["options"]
np.random.seed(solve_opts.get("seed", 123)) np.random.seed(solve_opts.get("seed", 123))
@ -675,10 +680,21 @@ if __name__ == "__main__":
else: else:
n = pypsa.Network(snakemake.input.network) n = pypsa.Network(snakemake.input.network)
n = prepare_network(n, solve_opts, config=snakemake.config) n = prepare_network(
n,
solve_opts,
config=snakemake.config,
foresight=snakemake.params.foresight,
planning_horizons=snakemake.params.planning_horizons,
co2_sequestration_potential=snakemake.params["co2_sequestration_potential"],
)
n = solve_network( n = solve_network(
n, config=snakemake.config, opts=opts, log_fn=snakemake.log.solver n,
config=snakemake.config,
solving=snakemake.params.solving,
opts=opts,
log_fn=snakemake.log.solver,
) )
n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))

View File

@ -41,7 +41,7 @@ if __name__ == "__main__":
opts = (snakemake.wildcards.opts + "-" + snakemake.wildcards.sector_opts).split("-") opts = (snakemake.wildcards.opts + "-" + snakemake.wildcards.sector_opts).split("-")
opts = [o for o in opts if o != ""] opts = [o for o in opts if o != ""]
solve_opts = snakemake.config["solving"]["options"] solve_opts = snakemake.params.options
np.random.seed(solve_opts.get("seed", 123)) np.random.seed(solve_opts.get("seed", 123))