Merge branch 'wc-to-cf' into scenario-management

This commit is contained in:
Fabian Neumann 2024-02-17 17:16:28 +01:00
commit 35b2228891
21 changed files with 383 additions and 389 deletions

View File

@ -62,9 +62,6 @@ snapshots:
start: "2013-01-01" start: "2013-01-01"
end: "2014-01-01" end: "2014-01-01"
inclusive: 'left' inclusive: 'left'
resolution: false
segmentation: false
#representative: false
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#enable # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#enable
enable: enable:
@ -369,6 +366,11 @@ existing_capacities:
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#sector # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#sector
sector: sector:
transport: true
heating: true
biomass: true
industry: true
agriculture: true
district_heating: district_heating:
potential: 0.6 potential: 0.6
progress: progress:
@ -534,6 +536,7 @@ sector:
use_methanation_waste_heat: true use_methanation_waste_heat: true
use_fuel_cell_waste_heat: true use_fuel_cell_waste_heat: true
use_electrolysis_waste_heat: true use_electrolysis_waste_heat: true
electricity_transmission_grid: true
electricity_distribution_grid: true electricity_distribution_grid: true
electricity_distribution_grid_cost_factor: 1.0 electricity_distribution_grid_cost_factor: 1.0
electricity_grid_connection: true electricity_grid_connection: true
@ -715,6 +718,14 @@ clustering:
committable: any committable: any
ramp_limit_up: max ramp_limit_up: max
ramp_limit_down: max ramp_limit_down: max
temporal:
resolution_elec: false
resolution_sector: false
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#adjustments
adjustments:
electricity: false
sector: false
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#solving # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#solving
solving: solving:

View File

@ -0,0 +1,8 @@
,Unit,Values,Description
adjustments,,,
-- electricity,bool or dict,,"Parameter adjustments for capital cost, marginal cost, and maximum capacities of carriers. Applied in :mod:`prepare_network.`"
-- -- {attr},,,"Attribute can be ``e_nom_opt``, ``p_nom_opt``, ``marginal_cost`` or ``capital_cost``"
-- -- -- {carrier},float,per-unit,"Any carrier of the network to which parameter adjustment factor should be applied."
-- sector,bool or dict,,"Parameter adjustments for capital cost, marginal cost, and maximum capacities of carriers. Applied in :mod:`prepare_sector_network.`"
-- -- {attr},,,"Attribute can be ``e_nom_opt``, ``p_nom_opt``, ``marginal_cost`` or ``capital_cost``"
-- -- -- {carrier},float,per-unit,"Any carrier of the network to which parameter adjustment factor should be applied."
1 Unit Values Description
2 adjustments
3 -- electricity bool or dict Parameter adjustments for capital cost, marginal cost, and maximum capacities of carriers. Applied in :mod:`prepare_network.`
4 -- -- {attr} Attribute can be ``e_nom_opt``, ``p_nom_opt``, ``marginal_cost`` or ``capital_cost``
5 -- -- -- {carrier} float per-unit Any carrier of the network to which parameter adjustment factor should be applied.
6 -- sector bool or dict Parameter adjustments for capital cost, marginal cost, and maximum capacities of carriers. Applied in :mod:`prepare_sector_network.`
7 -- -- {attr} Attribute can be ``e_nom_opt``, ``p_nom_opt``, ``marginal_cost`` or ``capital_cost``
8 -- -- -- {carrier} float per-unit Any carrier of the network to which parameter adjustment factor should be applied.

View File

@ -17,3 +17,6 @@ aggregation_strategies,,,
-- -- {key},str,"{key} can be any of the component of the generator (str). Its value can be any that can be converted to pandas.Series using getattr(). For example one of {min, max, sum}.","Aggregates the component according to the given strategy. For example, if sum, then all values within each cluster are summed to represent the new generator." -- -- {key},str,"{key} can be any of the component of the generator (str). Its value can be any that can be converted to pandas.Series using getattr(). For example one of {min, max, sum}.","Aggregates the component according to the given strategy. For example, if sum, then all values within each cluster are summed to represent the new generator."
-- buses,,, -- buses,,,
-- -- {key},str,"{key} can be any of the component of the bus (str). Its value can be any that can be converted to pandas.Series using getattr(). For example one of {min, max, sum}.","Aggregates the component according to the given strategy. For example, if sum, then all values within each cluster are summed to represent the new bus." -- -- {key},str,"{key} can be any of the component of the bus (str). Its value can be any that can be converted to pandas.Series using getattr(). For example one of {min, max, sum}.","Aggregates the component according to the given strategy. For example, if sum, then all values within each cluster are summed to represent the new bus."
temporal,,,Options for temporal resolution
-- resolution_elec,--,"{false,``nH``; i.e. ``2H``-``6H``}","Resample the time-resolution by averaging over every ``n`` snapshots in :mod:`prepare_network`. **Warning:** This option should currently only be used with electricity-only networks, not for sector-coupled networks."
-- resolution_sector,--,"{false,``nH``; i.e. ``2H``-``6H``}","Resample the time-resolution by averaging over every ``n`` snapshots in :mod:`prepare_sector_network`."

1 Unit Values Description
17 -- -- {key} str {key} can be any of the component of the generator (str). Its value can be any that can be converted to pandas.Series using getattr(). For example one of {min, max, sum}. Aggregates the component according to the given strategy. For example, if sum, then all values within each cluster are summed to represent the new generator.
18 -- buses
19 -- -- {key} str {key} can be any of the component of the bus (str). Its value can be any that can be converted to pandas.Series using getattr(). For example one of {min, max, sum}. Aggregates the component according to the given strategy. For example, if sum, then all values within each cluster are summed to represent the new bus.
20 temporal Options for temporal resolution
21 -- resolution_elec -- {false,``nH``; i.e. ``2H``-``6H``} Resample the time-resolution by averaging over every ``n`` snapshots in :mod:`prepare_network`. **Warning:** This option should currently only be used with electricity-only networks, not for sector-coupled networks.
22 -- resolution_sector -- {false,``nH``; i.e. ``2H``-``6H``} Resample the time-resolution by averaging over every ``n`` snapshots in :mod:`prepare_sector_network`.

View File

@ -1,4 +1,9 @@
,Unit,Values,Description ,Unit,Values,Description
transport,--,"{true, false}",Flag to include transport sector.
heating,--,"{true, false}",Flag to include heating sector.
biomass,--,"{true, false}",Flag to include biomass sector.
industry,--,"{true, false}",Flag to include industry sector.
agriculture,--,"{true, false}",Flag to include agriculture sector.
district_heating,--,,`prepare_sector_network.py <https://github.com/PyPSA/pypsa-eur-sec/blob/master/scripts/prepare_sector_network.py>`_ district_heating,--,,`prepare_sector_network.py <https://github.com/PyPSA/pypsa-eur-sec/blob/master/scripts/prepare_sector_network.py>`_
-- potential,--,float,maximum fraction of urban demand which can be supplied by district heating -- potential,--,float,maximum fraction of urban demand which can be supplied by district heating
-- progress,--,Dictionary with planning horizons as keys., Increase of today's district heating demand to potential maximum district heating share. Progress = 0 means today's district heating share. Progress = 1 means maximum fraction of urban demand is supplied by district heating -- progress,--,Dictionary with planning horizons as keys., Increase of today's district heating demand to potential maximum district heating share. Progress = 0 means today's district heating share. Progress = 1 means maximum fraction of urban demand is supplied by district heating
@ -109,6 +114,7 @@ min_part_load _methanolisation,per unit of p_nom ,float,The minimum unit dispatc
use_fischer_tropsch _waste_heat,--,"{true, false}",Add option for using waste heat of Fischer Tropsch in district heating networks use_fischer_tropsch _waste_heat,--,"{true, false}",Add option for using waste heat of Fischer Tropsch in district heating networks
use_fuel_cell_waste_heat,--,"{true, false}",Add option for using waste heat of fuel cells in district heating networks use_fuel_cell_waste_heat,--,"{true, false}",Add option for using waste heat of fuel cells in district heating networks
use_electrolysis_waste _heat,--,"{true, false}",Add option for using waste heat of electrolysis in district heating networks use_electrolysis_waste _heat,--,"{true, false}",Add option for using waste heat of electrolysis in district heating networks
electricity_transmission _grid,--,"{true, false}",Switch for enabling/disabling the electricity transmission grid.
electricity_distribution _grid,--,"{true, false}",Add a simplified representation of the exchange capacity between transmission and distribution grid level through a link. electricity_distribution _grid,--,"{true, false}",Add a simplified representation of the exchange capacity between transmission and distribution grid level through a link.
electricity_distribution _grid_cost_factor,,,Multiplies the investment cost of the electricity distribution grid electricity_distribution _grid_cost_factor,,,Multiplies the investment cost of the electricity distribution grid
,,, ,,,

1 Unit Values Description
2 transport -- {true, false} Flag to include transport sector.
3 heating -- {true, false} Flag to include heating sector.
4 biomass -- {true, false} Flag to include biomass sector.
5 industry -- {true, false} Flag to include industry sector.
6 agriculture -- {true, false} Flag to include agriculture sector.
7 district_heating -- `prepare_sector_network.py <https://github.com/PyPSA/pypsa-eur-sec/blob/master/scripts/prepare_sector_network.py>`_
8 -- potential -- float maximum fraction of urban demand which can be supplied by district heating
9 -- progress -- Dictionary with planning horizons as keys. Increase of today's district heating demand to potential maximum district heating share. Progress = 0 means today's district heating share. Progress = 1 means maximum fraction of urban demand is supplied by district heating
114 use_fischer_tropsch _waste_heat -- {true, false} Add option for using waste heat of Fischer Tropsch in district heating networks
115 use_fuel_cell_waste_heat -- {true, false} Add option for using waste heat of fuel cells in district heating networks
116 use_electrolysis_waste _heat -- {true, false} Add option for using waste heat of electrolysis in district heating networks
117 electricity_transmission _grid -- {true, false} Switch for enabling/disabling the electricity transmission grid.
118 electricity_distribution _grid -- {true, false} Add a simplified representation of the exchange capacity between transmission and distribution grid level through a link.
119 electricity_distribution _grid_cost_factor Multiplies the investment cost of the electricity distribution grid
120

View File

@ -2,5 +2,3 @@
start,--,str or datetime-like; e.g. YYYY-MM-DD,Left bound of date range start,--,str or datetime-like; e.g. YYYY-MM-DD,Left bound of date range
end,--,str or datetime-like; e.g. YYYY-MM-DD,Right bound of date range end,--,str or datetime-like; e.g. YYYY-MM-DD,Right bound of date range
inclusive,--,"One of {'neither', 'both', left, right}","Make the time interval closed to the ``left``, ``right``, or both sides ``both`` or neither side ``None``." inclusive,--,"One of {'neither', 'both', left, right}","Make the time interval closed to the ``left``, ``right``, or both sides ``both`` or neither side ``None``."
resolution ,--,"{false,``nH``; i.e. ``2H``-``6H``}","Resample the time-resolution by averaging over every ``n`` snapshots in :mod:`prepare_network`. **Warning:** This option should currently only be used with electricity-only networks, not for sector-coupled networks."
segmentation,--,"{false,``n``; e.g. ``4380``}","Apply time series segmentation with `tsam <https://tsam.readthedocs.io/en/latest/index.html>`_ package to ``n`` adjacent snapshots of varying lengths based on capacity factors of varying renewables, hydro inflow and load in :mod:`prepare_network`. **Warning:** This option should currently only be used with electricity-only networks, not for sector-coupled networks."

1 Unit Values Description
2 start -- str or datetime-like; e.g. YYYY-MM-DD Left bound of date range
3 end -- str or datetime-like; e.g. YYYY-MM-DD Right bound of date range
4 inclusive -- One of {'neither', 'both', 'left', 'right'} Make the time interval closed to the ``left``, ``right``, or both sides ``both`` or neither side ``None``.
resolution -- {false,``nH``; i.e. ``2H``-``6H``} Resample the time-resolution by averaging over every ``n`` snapshots in :mod:`prepare_network`. **Warning:** This option should currently only be used with electricity-only networks, not for sector-coupled networks.
segmentation -- {false,``n``; e.g. ``4380``} Apply time series segmentation with `tsam <https://tsam.readthedocs.io/en/latest/index.html>`_ package to ``n`` adjacent snapshots of varying lengths based on capacity factors of varying renewables, hydro inflow and load in :mod:`prepare_network`. **Warning:** This option should currently only be used with electricity-only networks, not for sector-coupled networks.

View File

@ -561,6 +561,21 @@ The list of available biomass is given by the category in `ENSPRESO_BIOMASS <htt
use ``min`` in ``p_nom_max:`` for more ` use ``min`` in ``p_nom_max:`` for more `
conservative assumptions. conservative assumptions.
.. _adjustments_cf:
``adjustments``
===============
.. literalinclude:: ../config/config.default.yaml
:language: yaml
:start-at: adjustments:
:end-before: # docs
.. csv-table::
:header-rows: 1
:widths: 22,7,22,33
:file: configtables/adjustments.csv
.. _solving_cf: .. _solving_cf:
``solving`` ``solving``

View File

@ -20,10 +20,7 @@ if config["enable"].get("prepare_links_p_nom", False):
rule build_electricity_demand: rule build_electricity_demand:
params: params:
snapshots=lambda w: { snapshots=config_provider("snapshots"),
k: config_provider("snapshots", k)(w)
for k in ["start", "end", "inclusive"]
},
countries=config_provider("countries"), countries=config_provider("countries"),
load=config_provider("load"), load=config_provider("load"),
input: input:
@ -65,10 +62,7 @@ rule build_powerplants:
rule base_network: rule base_network:
params: params:
countries=config_provider("countries"), countries=config_provider("countries"),
snapshots=lambda w: { snapshots=config_provider("snapshots"),
k: config_provider("snapshots", k)(w)
for k in ["start", "end", "inclusive"]
},
lines=config_provider("lines"), lines=config_provider("lines"),
links=config_provider("links"), links=config_provider("links"),
transformers=config_provider("transformers"), transformers=config_provider("transformers"),
@ -151,10 +145,7 @@ if config["enable"].get("build_cutout", False):
rule build_cutout: rule build_cutout:
params: params:
snapshots={ snapshots=config_provider("snapshots"),
k: config_provider("snapshots", k)
for k in ["start", "end", "inclusive"]
},
cutouts=config_provider("atlite", "cutouts"), cutouts=config_provider("atlite", "cutouts"),
input: input:
regions_onshore=resources("regions_onshore.geojson"), regions_onshore=resources("regions_onshore.geojson"),
@ -271,10 +262,7 @@ else:
rule build_renewable_profiles: rule build_renewable_profiles:
params: params:
snapshots=lambda w: { snapshots=config_provider("snapshots"),
k: config_provider("snapshots", k)(w)
for k in ["start", "end", "inclusive"]
},
renewable=config_provider("renewable"), renewable=config_provider("renewable"),
input: input:
**opt, **opt,
@ -375,10 +363,7 @@ if config["lines"]["dynamic_line_rating"]["activate"]:
rule build_line_rating: rule build_line_rating:
params: params:
snapshots={ snapshots=config_provider("snapshots"),
k: config_provider("snapshots", k)
for k in ["start", "end", "inclusive"]
},
input: input:
base_network=resources("networks/base.nc"), base_network=resources("networks/base.nc"),
cutout="cutouts/" cutout="cutouts/"
@ -570,12 +555,7 @@ rule add_extra_components:
rule prepare_network: rule prepare_network:
params: params:
snapshots=lambda w: { time_resolution=config_provider("clustering", "temporal", "resolution_elec"),
"resolution": config_provider("snapshots", "resolution", default=False)(w),
"segmentation": config_provider(
"snapshots", "segmentation", default=False
)(w),
},
links=config_provider("links"), links=config_provider("links"),
lines=config_provider("lines"), lines=config_provider("lines"),
co2base=config_provider("electricity", "co2base"), co2base=config_provider("electricity", "co2base"),
@ -585,6 +565,7 @@ rule prepare_network:
gaslimit=config_provider("electricity", "gaslimit"), gaslimit=config_provider("electricity", "gaslimit"),
max_hours=config_provider("electricity", "max_hours"), max_hours=config_provider("electricity", "max_hours"),
costs=config_provider("costs"), costs=config_provider("costs"),
adjustments=config_provider("adjustments", "electricity"),
autarky=config_provider("electricity", "autarky", default={}), autarky=config_provider("electricity", "autarky", default={}),
input: input:
resources("networks/elec_s{simpl}_{clusters}_ec.nc"), resources("networks/elec_s{simpl}_{clusters}_ec.nc"),

View File

@ -135,10 +135,7 @@ rule cluster_gas_network:
rule build_daily_heat_demand: rule build_daily_heat_demand:
params: params:
snapshots=lambda w: { snapshots=config_provider("snapshots"),
k: config_provider("snapshots", k)(w)
for k in ["start", "end", "inclusive"]
},
input: input:
pop_layout=resources("pop_layout_{scope}.nc"), pop_layout=resources("pop_layout_{scope}.nc"),
regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"),
@ -163,10 +160,7 @@ rule build_daily_heat_demand:
rule build_hourly_heat_demand: rule build_hourly_heat_demand:
params: params:
snapshots=lambda w: { snapshots=config_provider("snapshots"),
k: config_provider("snapshots", k)(w)
for k in ["start", "end", "inclusive"]
},
input: input:
heat_profile="data/heat_load_profile_BDEW.csv", heat_profile="data/heat_load_profile_BDEW.csv",
heat_demand=resources("daily_heat_demand_{scope}_elec_s{simpl}_{clusters}.nc"), heat_demand=resources("daily_heat_demand_{scope}_elec_s{simpl}_{clusters}.nc"),
@ -187,10 +181,7 @@ rule build_hourly_heat_demand:
rule build_temperature_profiles: rule build_temperature_profiles:
params: params:
snapshots=lambda w: { snapshots=config_provider("snapshots"),
k: config_provider("snapshots", k)(w)
for k in ["start", "end", "inclusive"]
},
input: input:
pop_layout=resources("pop_layout_{scope}.nc"), pop_layout=resources("pop_layout_{scope}.nc"),
regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"),
@ -245,10 +236,7 @@ rule build_cop_profiles:
rule build_solar_thermal_profiles: rule build_solar_thermal_profiles:
params: params:
snapshots=lambda w: { snapshots=config_provider("snapshots"),
k: config_provider("snapshots", k)(w)
for k in ["start", "end", "inclusive"]
},
solar_thermal=config_provider("solar_thermal"), solar_thermal=config_provider("solar_thermal"),
input: input:
pop_layout=resources("pop_layout_{scope}.nc"), pop_layout=resources("pop_layout_{scope}.nc"),
@ -732,10 +720,7 @@ rule build_shipping_demand:
rule build_transport_demand: rule build_transport_demand:
params: params:
snapshots=lambda w: { snapshots=config_provider("snapshots"),
k: config_provider("snapshots", k)(w)
for k in ["start", "end", "inclusive"]
},
sector=config_provider("sector"), sector=config_provider("sector"),
input: input:
clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"), clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"),
@ -822,6 +807,7 @@ rule build_existing_heating_distribution:
rule prepare_sector_network: rule prepare_sector_network:
params: params:
time_resolution=config_provider("clustering", "temporal", "resolution_sector"),
co2_budget=config_provider("co2_budget"), co2_budget=config_provider("co2_budget"),
conventional_carriers=config_provider( conventional_carriers=config_provider(
"existing_capacities", "conventional_carriers" "existing_capacities", "conventional_carriers"
@ -830,10 +816,12 @@ rule prepare_sector_network:
costs=config_provider("costs"), costs=config_provider("costs"),
sector=config_provider("sector"), sector=config_provider("sector"),
industry=config_provider("industry"), industry=config_provider("industry"),
lines=config_provider("lines"),
pypsa_eur=config_provider("pypsa_eur"), pypsa_eur=config_provider("pypsa_eur"),
length_factor=config_provider("lines", "length_factor"), length_factor=config_provider("lines", "length_factor"),
planning_horizons=config_provider("scenario", "planning_horizons"), planning_horizons=config_provider("scenario", "planning_horizons"),
countries=config_provider("countries"), countries=config_provider("countries"),
adjustments=config_provider("adjustments", "sector"),
emissions_scope=config_provider("energy", "emissions"), emissions_scope=config_provider("energy", "emissions"),
eurostat_report_year=config_provider("energy", "eurostat_report_year"), eurostat_report_year=config_provider("energy", "eurostat_report_year"),
RDIR=RDIR, RDIR=RDIR,

View File

@ -159,10 +159,7 @@ rule make_summary:
params: params:
foresight=config_provider("foresight"), foresight=config_provider("foresight"),
costs=config_provider("costs"), costs=config_provider("costs"),
snapshots=lambda w: { snapshots=config_provider("snapshots"),
k: config_provider("snapshots", k)(w)
for k in ["start", "end", "inclusive"]
},
scenario=config_provider("scenario"), scenario=config_provider("scenario"),
RDIR=RDIR, RDIR=RDIR,
input: input:
@ -241,10 +238,11 @@ rule plot_summary:
params: params:
countries=config_provider("countries"), countries=config_provider("countries"),
planning_horizons=config_provider("scenario", "planning_horizons"), planning_horizons=config_provider("scenario", "planning_horizons"),
sector_opts=config_provider("scenario", "sector_opts"),
emissions_scope=config_provider("energy", "emissions"), emissions_scope=config_provider("energy", "emissions"),
eurostat_report_year=config_provider("energy", "eurostat_report_year"), eurostat_report_year=config_provider("energy", "eurostat_report_year"),
plotting=config_provider("plotting"), plotting=config_provider("plotting"),
foresight=config_provider("foresight"),
co2_budget=config_provider("co2_budget"),
RDIR=RDIR, RDIR=RDIR,
input: input:
costs=RESULTS + "csvs/costs.csv", costs=RESULTS + "csvs/costs.csv",

View File

@ -64,10 +64,7 @@ rule add_brownfield:
"sector", "H2_retrofit_capacity_per_CH4" "sector", "H2_retrofit_capacity_per_CH4"
), ),
threshold_capacity=config_provider("existing_capacities", " threshold_capacity"), threshold_capacity=config_provider("existing_capacities", " threshold_capacity"),
snapshots=lambda w: { snapshots=config_provider("snapshots"),
k: config_provider("snapshots", k)(w)
for k in ["start", "end", "inclusive"]
},
carriers=config_provider("electricity", "renewable_carriers"), carriers=config_provider("electricity", "renewable_carriers"),
input: input:
unpack(input_profile_tech_brownfield), unpack(input_profile_tech_brownfield),

View File

@ -57,6 +57,8 @@ def input_network_year(w):
rule prepare_perfect_foresight: rule prepare_perfect_foresight:
params:
costs=config["costs"],
input: input:
unpack(input_network_year), unpack(input_network_year),
brownfield_network=lambda w: ( brownfield_network=lambda w: (

View File

@ -38,10 +38,7 @@ rule build_cross_border_flows:
The data is used for validation of the optimization results. The data is used for validation of the optimization results.
""" """
params: params:
snapshots=lambda w: { snapshots=config_provider("snapshots"),
k: config_provider("snapshots", k)(w)
for k in ["start", "end", "inclusive"]
},
countries=config_provider("countries"), countries=config_provider("countries"),
input: input:
network=resources("networks/base.nc"), network=resources("networks/base.nc"),
@ -61,10 +58,7 @@ rule build_electricity_prices:
The data is used for validation of the optimization results. The data is used for validation of the optimization results.
""" """
params: params:
snapshots=lambda w: { snapshots=config_provider("snapshots"),
k: config_provider("snapshots", k)(w)
for k in ["start", "end", "inclusive"]
},
countries=config_provider("countries"), countries=config_provider("countries"),
output: output:
resources("historical_electricity_prices.csv"), resources("historical_electricity_prices.csv"),

View File

@ -114,9 +114,9 @@ def find_opt(opts, expr):
""" """
for o in opts: for o in opts:
if expr in o: if expr in o:
m = re.findall("[0-9]*\.?[0-9]+$", o) m = re.findall("^m?\d*(\.|p)?\d+$", o)
if len(m) > 0: if len(m) > 0:
return True, float(m[0]) return True, float(m[0].replace("p", ".").replace("m", "-"))
else: else:
return True, None return True, None
return False, None return False, None
@ -465,11 +465,177 @@ def parse(infix):
return {infix.pop(0): parse(infix)} return {infix.pop(0): parse(infix)}
def update_config_with_sector_opts(config, sector_opts): def update_config_from_wildcards(config, w):
for o in sector_opts.split("-"): """
if o.startswith("CF+"): Parses configuration settings from wildcards and updates the config.
infix = o.split("+")[1:]
update_config(config, parse(infix)) - TODO: Should be run inside config_provider function.
"""
if w.get("opts"):
opts = w.opts.split("-")
if nhours := get_opt(opts, r"^\d+(h|seg)$"):
config["clustering"]["temporal"]["resolution_elec"] = nhours
co2l_enable, co2l_value = find_opt(opts, "Co2L")
if co2l_enable:
config["electricity"]["co2limit_enable"] = True
if co2l_value is not None:
config["electricity"]["co2limit"] = (
co2l_value * config["electricity"]["co2base"]
)
gasl_enable, gasl_value = find_opt(opts, "CH4L")
if gasl_enable:
config["electricity"]["gaslimit_enable"] = True
if gasl_value is not None:
config["electricity"]["gaslimit"] = gasl_value * 1e6
if "Ept" in opts:
config["costs"]["emission_prices"]["co2_monthly_prices"] = True
ep_enable, ep_value = find_opt(opts, "Ep")
if ep_enable:
config["costs"]["emission_prices"]["enable"] = True
if ep_value is not None:
config["costs"]["emission_prices"]["co2"] = ep_value
if "ATK" in opts:
config["autarky"]["enable"] = True
if "ATKc" in opts:
config["autarky"]["by_country"] = True
attr_lookup = {
"p": "p_nom_max",
"e": "e_nom_max",
"c": "capital_cost",
"m": "marginal_cost",
}
for o in opts:
flags = ["+e", "+p", "+m", "+c"]
if all(flag not in o for flag in flags):
continue
carrier, attr_factor = o.split("+")
attr = attr_lookup[attr_factor[0]]
factor = float(attr_factor[1:])
if not isinstance(config["adjustments"]["electricity"], dict):
config["adjustments"]["electricity"] = dict()
update_config(
config["adjustments"]["electricity"], {attr: {carrier: factor}}
)
if w.get("sector_opts"):
opts = w.sector_opts.split("-")
if "T" in opts:
config["sector"]["transport"] = True
if "H" in opts:
config["sector"]["heating"] = True
if "B" in opts:
config["sector"]["biomass"] = True
if "I" in opts:
config["sector"]["industry"] = True
if "A" in opts:
config["sector"]["agriculture"] = True
if "CCL" in opts:
config["solving"]["constraints"]["CCL"] = True
eq_value = get_opt(opts, r"^EQ+\d*\.?\d+(c|)")
for o in opts:
if eq_value is not None:
config["solving"]["constraints"]["EQ"] = eq_value
elif "EQ" in o:
config["solving"]["constraints"]["EQ"] = True
break
if "BAU" in opts:
config["solving"]["constraints"]["BAU"] = True
if "SAFE" in opts:
config["solving"]["constraints"]["SAFE"] = True
if nhours := get_opt(opts, r"^\d+(h|sn|seg)$"):
config["clustering"]["temporal"]["resolution_sector"] = nhours
if "decentral" in opts:
config["sector"]["electricity_transmission_grid"] = False
if "noH2network" in opts:
config["sector"]["H2_network"] = False
if "nowasteheat" in opts:
config["sector"]["use_fischer_tropsch_waste_heat"] = False
config["sector"]["use_methanolisation_waste_heat"] = False
config["sector"]["use_haber_bosch_waste_heat"] = False
config["sector"]["use_methanation_waste_heat"] = False
config["sector"]["use_fuel_cell_waste_heat"] = False
config["sector"]["use_electrolysis_waste_heat"] = False
if "nodistrict" in opts:
config["sector"]["district_heating"]["progress"] = 0.0
dg_enable, dg_factor = find_opt(opts, "dist")
if dg_enable:
config["sector"]["electricity_distribution_grid"] = True
if dg_factor is not None:
config["sector"][
"electricity_distribution_grid_cost_factor"
] = dg_factor
if "biomasstransport" in opts:
config["sector"]["biomass_transport"] = True
_, maxext = find_opt(opts, "linemaxext")
if maxext is not None:
config["lines"]["max_extension"] = maxext * 1e3
config["links"]["max_extension"] = maxext * 1e3
_, co2l_value = find_opt(opts, "Co2L")
if co2l_value is not None:
config["co2_budget"] = float(co2l_value)
if co2_distribution := get_opt(opts, r"^(cb)\d+(\.\d+)?(ex|be)$"):
config["co2_budget"] = co2_distribution
if co2_budget := get_opt(opts, r"^(cb)\d+(\.\d+)?$"):
config["co2_budget"] = float(co2_budget[2:])
attr_lookup = {
"p": "p_nom_max",
"e": "e_nom_max",
"c": "capital_cost",
"m": "marginal_cost",
}
for o in opts:
flags = ["+e", "+p", "+m", "+c"]
if all(flag not in o for flag in flags):
continue
carrier, attr_factor = o.split("+")
attr = attr_lookup[attr_factor[0]]
factor = float(attr_factor[1:])
if not isinstance(config["adjustments"]["sector"], dict):
config["adjustments"]["sector"] = dict()
update_config(config["adjustments"]["sector"], {attr: {carrier: factor}})
_, sdr_value = find_opt(opts, "sdr")
if sdr_value is not None:
config["costs"]["social_discountrate"] = sdr_value / 100
_, seq_limit = find_opt(opts, "seq")
if seq_limit is not None:
config["sector"]["co2_sequestration_potential"] = seq_limit
# any config option can be represented in wildcard
for o in opts:
if o.startswith("CF+"):
infix = o.split("+")[1:]
update_config(config, parse(infix))
def get_checksum_from_zenodo(file_url): def get_checksum_from_zenodo(file_url):

View File

@ -15,7 +15,7 @@ import xarray as xr
from _helpers import ( from _helpers import (
configure_logging, configure_logging,
set_scenario_config, set_scenario_config,
update_config_with_sector_opts, update_config_from_wildcards,
) )
from add_existing_baseyear import add_build_year_to_new_assets from add_existing_baseyear import add_build_year_to_new_assets
from pypsa.clustering.spatial import normed_or_uniform from pypsa.clustering.spatial import normed_or_uniform
@ -217,7 +217,7 @@ if __name__ == "__main__":
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake) set_scenario_config(snakemake)
update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts) update_config_from_wildcards(snakemake.config, snakemake.wildcards)
logger.info(f"Preparing brownfield from the file {snakemake.input.network_p}") logger.info(f"Preparing brownfield from the file {snakemake.input.network_p}")

View File

@ -18,7 +18,7 @@ import xarray as xr
from _helpers import ( from _helpers import (
configure_logging, configure_logging,
set_scenario_config, set_scenario_config,
update_config_with_sector_opts, update_config_from_wildcards,
) )
from add_electricity import sanitize_carriers from add_electricity import sanitize_carriers
from prepare_sector_network import cluster_heat_buses, define_spatial, prepare_costs from prepare_sector_network import cluster_heat_buses, define_spatial, prepare_costs
@ -559,10 +559,9 @@ if __name__ == "__main__":
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake) set_scenario_config(snakemake)
update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts) update_config_from_wildcards(snakemake.config, snakemake.wildcards)
options = snakemake.params.sector options = snakemake.params.sector
opts = snakemake.wildcards.sector_opts.split("-")
baseyear = snakemake.params.baseyear baseyear = snakemake.params.baseyear
@ -585,7 +584,7 @@ if __name__ == "__main__":
n, grouping_years_power, costs, baseyear n, grouping_years_power, costs, baseyear
) )
if "H" in opts: if options["heating"]:
time_dep_hp_cop = options["time_dep_hp_cop"] time_dep_hp_cop = options["time_dep_hp_cop"]
ashp_cop = ( ashp_cop = (
xr.open_dataarray(snakemake.input.cop_air_total) xr.open_dataarray(snakemake.input.cop_air_total)

View File

@ -584,7 +584,8 @@ if __name__ == "__main__":
plot_balances() plot_balances()
for sector_opts in snakemake.params.sector_opts: if (
opts = sector_opts.split("-") snakemake.params["co2_budget"].startswith("cb")
if any("cb" in o for o in opts) or snakemake.config["foresight"] == "perfect": or snakemake.params["foresight"] == "perfect"
plot_carbon_budget_distribution(snakemake.input.eurostat) ):
plot_carbon_budget_distribution(snakemake.input.eurostat)

View File

@ -62,7 +62,7 @@ import logging
import numpy as np import numpy as np
import pandas as pd import pandas as pd
import pypsa import pypsa
from _helpers import configure_logging, find_opt, get_opt, set_scenario_config from _helpers import configure_logging, update_config_from_wildcards, set_scenario_config
from add_electricity import load_costs, update_transmission_costs from add_electricity import load_costs, update_transmission_costs
from pypsa.descriptors import expand_series from pypsa.descriptors import expand_series
@ -71,6 +71,28 @@ idx = pd.IndexSlice
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def maybe_adjust_costs_and_potentials(n, adjustments):
if not adjustments:
return
for attr, carrier_factor in adjustments.items():
for carrier, factor in carrier_factor.items():
# beware if factor is 0 and p_nom_max is np.inf, 0*np.inf is nan
if carrier == "AC": # lines do not have carrier
n.lines[attr] *= factor
continue
comps = {
"p_nom_max": {"Generator", "Link", "StorageUnit"},
"e_nom_max": {"Store"},
"capital_cost": {"Generator", "Link", "StorageUnit", "Store"},
"marginal_cost": {"Generator", "Link", "StorageUnit", "Store"},
}
for c in n.iterate_components(comps[attr]):
sel = c.df.index[c.df.carrier == carrier]
c.df.loc[sel, attr] *= factor
logger.info(f"changing {attr} for {carrier} by factor {factor}")
def add_co2limit(n, co2limit, Nyears=1.0): def add_co2limit(n, co2limit, Nyears=1.0):
n.add( n.add(
"GlobalConstraint", "GlobalConstraint",
@ -278,12 +300,11 @@ if __name__ == "__main__":
from _helpers import mock_snakemake from _helpers import mock_snakemake
snakemake = mock_snakemake( snakemake = mock_snakemake(
"prepare_network", simpl="", clusters="37", ll="v1.0", opts="Ept" "prepare_network", simpl="", clusters="37", ll="v1.0", opts="Co2L-4H"
) )
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake) set_scenario_config(snakemake)
update_config_from_wildcards(snakemake.config, snakemake.wildcards)
opts = snakemake.wildcards.opts.split("-")
n = pypsa.Network(snakemake.input[0]) n = pypsa.Network(snakemake.input[0])
Nyears = n.snapshot_weightings.objective.sum() / 8760.0 Nyears = n.snapshot_weightings.objective.sum() / 8760.0
@ -297,81 +318,32 @@ if __name__ == "__main__":
set_line_s_max_pu(n, snakemake.params.lines["s_max_pu"]) set_line_s_max_pu(n, snakemake.params.lines["s_max_pu"])
# temporal averaging # temporal averaging
nhours_config = snakemake.params.snapshots.get("resolution", False) if nhours := snakemake.params.time_resolution:
nhours_wildcard = get_opt(opts, r"^\d+h$")
nhours = nhours_wildcard or nhours_config
if nhours:
n = average_every_nhours(n, nhours) n = average_every_nhours(n, nhours)
# segments with package tsam # segments with package tsam
time_seg_config = snakemake.params.snapshots.get("segmentation", False) if time_seg := snakemake.params.time_resolution:
time_seg_wildcard = get_opt(opts, r"^\d+seg$")
time_seg = time_seg_wildcard or time_seg_config
if time_seg:
solver_name = snakemake.config["solving"]["solver"]["name"] solver_name = snakemake.config["solving"]["solver"]["name"]
n = apply_time_segmentation(n, time_seg.replace("seg", ""), solver_name) n = apply_time_segmentation(n, time_seg.replace("seg", ""), solver_name)
Co2L_config = snakemake.params.co2limit_enable if snakemake.params.co2limit_enable:
Co2L_wildcard, co2limit_wildcard = find_opt(opts, "Co2L") add_co2limit(n, snakemake.params.co2limit, Nyears)
if Co2L_wildcard or Co2L_config:
if co2limit_wildcard is not None:
co2limit = co2limit_wildcard * snakemake.params.co2base
add_co2limit(n, co2limit, Nyears)
logger.info("Setting CO2 limit according to wildcard value.")
else:
add_co2limit(n, snakemake.params.co2limit, Nyears)
logger.info("Setting CO2 limit according to config value.")
CH4L_config = snakemake.params.gaslimit_enable if snakemake.params.gaslimit_enable:
CH4L_wildcard, gaslimit_wildcard = find_opt(opts, "CH4L") add_gaslimit(n, snakemake.params.gaslimit, Nyears)
if CH4L_wildcard or CH4L_config:
if gaslimit_wildcard is not None:
gaslimit = gaslimit_wildcard * 1e6
add_gaslimit(n, gaslimit, Nyears)
logger.info("Setting gas usage limit according to wildcard value.")
else:
add_gaslimit(n, snakemake.params.gaslimit, Nyears)
logger.info("Setting gas usage limit according to config value.")
for o in opts: maybe_adjust_costs_and_potentials(n, snakemake.params["adjustments"])
if "+" not in o:
continue
oo = o.split("+")
suptechs = map(lambda c: c.split("-", 2)[0], n.carriers.index)
if oo[0].startswith(tuple(suptechs)):
carrier = oo[0]
# handles only p_nom_max as stores and lines have no potentials
attr_lookup = {"p": "p_nom_max", "c": "capital_cost", "m": "marginal_cost"}
attr = attr_lookup[oo[1][0]]
factor = float(oo[1][1:])
if carrier == "AC": # lines do not have carrier
n.lines[attr] *= factor
else:
comps = {"Generator", "Link", "StorageUnit", "Store"}
for c in n.iterate_components(comps):
sel = c.df.carrier.str.contains(carrier)
c.df.loc[sel, attr] *= factor
emission_prices = snakemake.params.costs["emission_prices"] emission_prices = snakemake.params.costs["emission_prices"]
Ept_config = emission_prices.get("co2_monthly_prices", False) if emission_prices["co2_monthly_prices"]:
Ept_wildcard = "Ept" in opts
Ep_config = emission_prices.get("enable", False)
Ep_wildcard, co2_wildcard = find_opt(opts, "Ep")
if Ept_wildcard or Ept_config:
logger.info( logger.info(
"Setting time dependent emission prices according spot market price" "Setting time dependent emission prices according spot market price"
) )
add_dynamic_emission_prices(n) add_dynamic_emission_prices(n)
elif Ep_wildcard or Ep_config: elif emission_prices["enable"]:
if co2_wildcard is not None: add_emission_prices(
logger.info("Setting CO2 prices according to wildcard value.") n, dict(co2=snakemake.params.costs["emission_prices"]["co2"])
add_emission_prices(n, dict(co2=co2_wildcard)) )
else:
logger.info("Setting CO2 prices according to config value.")
add_emission_prices(
n, dict(co2=snakemake.params.costs["emission_prices"]["co2"])
)
ll_type, factor = snakemake.wildcards.ll[0], snakemake.wildcards.ll[1:] ll_type, factor = snakemake.wildcards.ll[0], snakemake.wildcards.ll[1:]
set_transmission_limit(n, ll_type, factor, costs, Nyears) set_transmission_limit(n, ll_type, factor, costs, Nyears)
@ -384,11 +356,8 @@ if __name__ == "__main__":
p_nom_max_ext=snakemake.params.links.get("max_extension", np.inf), p_nom_max_ext=snakemake.params.links.get("max_extension", np.inf),
) )
autarky_config = snakemake.params.autarky if snakemake.params.autarky["enable"]:
if "ATK" in opts or autarky_config.get("enable", False): only_crossborder = snakemake.params.autarky["by_country"]
only_crossborder = False
if "ATKc" in opts or autarky_config.get("by_country", False):
only_crossborder = True
enforce_autarky(n, only_crossborder=only_crossborder) enforce_autarky(n, only_crossborder=only_crossborder)
n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))

View File

@ -15,7 +15,7 @@ import pypsa
from _helpers import ( from _helpers import (
configure_logging, configure_logging,
set_scenario_config, set_scenario_config,
update_config_with_sector_opts, update_config_from_wildcards,
) )
from add_existing_baseyear import add_build_year_to_new_assets from add_existing_baseyear import add_build_year_to_new_assets
from pypsa.descriptors import expand_series from pypsa.descriptors import expand_series
@ -308,17 +308,14 @@ def set_all_phase_outs(n):
n.mremove("Link", remove_i) n.mremove("Link", remove_i)
def set_carbon_constraints(n, opts): def set_carbon_constraints(n):
""" """
Add global constraints for carbon emissions. Add global constraints for carbon emissions.
""" """
budget = None budget = snakemake.config["co2_budget"]
for o in opts: if budget and isinstance(budget, float):
# other budgets budget *= 1e9 # convert to t CO2
m = re.match(r"^\d+p\d$", o, re.IGNORECASE)
if m is not None:
budget = snakemake.config["co2_budget"][m.group(0)] * 1e9
if budget is not None:
logger.info(f"add carbon budget of {budget}") logger.info(f"add carbon budget of {budget}")
n.add( n.add(
"GlobalConstraint", "GlobalConstraint",
@ -345,7 +342,7 @@ def set_carbon_constraints(n, opts):
) )
# set minimum CO2 emission constraint to avoid too fast reduction # set minimum CO2 emission constraint to avoid too fast reduction
if "co2min" in opts: if "co2min" in snakemake.wildcards.sector_opts.split("-"):
emissions_1990 = 4.53693 emissions_1990 = 4.53693
emissions_2019 = 3.344096 emissions_2019 = 3.344096
target_2030 = 0.45 * emissions_1990 target_2030 = 0.45 * emissions_1990
@ -491,21 +488,6 @@ def apply_time_segmentation_perfect(
return n return n
def set_temporal_aggregation_SEG(n, opts, solver_name):
"""
Aggregate network temporally with tsam.
"""
for o in opts:
# segments with package tsam
m = re.match(r"^(\d+)seg$", o, re.IGNORECASE)
if m is not None:
segments = int(m[1])
logger.info(f"Use temporal segmentation with {segments} segments")
n = apply_time_segmentation_perfect(n, segments, solver_name=solver_name)
break
return n
if __name__ == "__main__": if __name__ == "__main__":
if "snakemake" not in globals(): if "snakemake" not in globals():
from _helpers import mock_snakemake from _helpers import mock_snakemake
@ -521,14 +503,10 @@ if __name__ == "__main__":
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake) set_scenario_config(snakemake)
update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts) update_config_from_wildcards(snakemake.config, snakemake.wildcards)
# parameters ----------------------------------------------------------- # parameters -----------------------------------------------------------
years = snakemake.config["scenario"]["planning_horizons"] years = snakemake.config["scenario"]["planning_horizons"]
opts = snakemake.wildcards.sector_opts.split("-") social_discountrate = snakemake.params.costs["social_discountrate"]
social_discountrate = snakemake.config["costs"]["social_discountrate"]
for o in opts:
if "sdr" in o:
social_discountrate = float(o.replace("sdr", "")) / 100
logger.info( logger.info(
f"Concat networks of investment period {years} with social discount rate of {social_discountrate * 100}%" f"Concat networks of investment period {years} with social discount rate of {social_discountrate * 100}%"
@ -538,9 +516,10 @@ if __name__ == "__main__":
n = concat_networks(years) n = concat_networks(years)
# temporal aggregate # temporal aggregate
opts = snakemake.wildcards.sector_opts.split("-")
solver_name = snakemake.config["solving"]["solver"]["name"] solver_name = snakemake.config["solving"]["solver"]["name"]
n = set_temporal_aggregation_SEG(n, opts, solver_name) segments = snakemake.params["clustering"]["temporal"]["resolution_sector"]
if isinstance(segments, (int, float)):
n = apply_time_segmentation_perfect(n, segments, solver_name=solver_name)
# adjust global constraints lv limit if the same for all years # adjust global constraints lv limit if the same for all years
n = adjust_lvlimit(n) n = adjust_lvlimit(n)
@ -556,8 +535,7 @@ if __name__ == "__main__":
add_H2_boilers(n) add_H2_boilers(n)
# set carbon constraints # set carbon constraints
opts = snakemake.wildcards.sector_opts.split("-") n = set_carbon_constraints(n)
n = set_carbon_constraints(n, opts)
# export network # export network
n.export_to_netcdf(snakemake.output[0]) n.export_to_netcdf(snakemake.output[0])

View File

@ -9,7 +9,6 @@ technologies for the buildings, transport and industry sectors.
import logging import logging
import os import os
import re
from itertools import product from itertools import product
from types import SimpleNamespace from types import SimpleNamespace
@ -21,12 +20,13 @@ import xarray as xr
from _helpers import ( from _helpers import (
configure_logging, configure_logging,
set_scenario_config, set_scenario_config,
update_config_with_sector_opts, update_config_from_wildcards,
) )
from add_electricity import calculate_annuity, sanitize_carriers, sanitize_locations from add_electricity import calculate_annuity, sanitize_carriers, sanitize_locations
from build_energy_totals import build_co2_totals, build_eea_co2, build_eurostat_co2 from build_energy_totals import build_co2_totals, build_eea_co2, build_eurostat_co2
from networkx.algorithms import complement from networkx.algorithms import complement
from networkx.algorithms.connectivity.edge_augmentation import k_edge_augmentation from networkx.algorithms.connectivity.edge_augmentation import k_edge_augmentation
from prepare_network import maybe_adjust_costs_and_potentials
from pypsa.geo import haversine_pts from pypsa.geo import haversine_pts
from pypsa.io import import_components_from_dataframe from pypsa.io import import_components_from_dataframe
from scipy.stats import beta from scipy.stats import beta
@ -194,13 +194,13 @@ def define_spatial(nodes, options):
spatial = SimpleNamespace() spatial = SimpleNamespace()
def emission_sectors_from_opts(opts): def determine_emission_sectors(options):
sectors = ["electricity"] sectors = ["electricity"]
if "T" in opts: if options["transport"]:
sectors += ["rail non-elec", "road non-elec"] sectors += ["rail non-elec", "road non-elec"]
if "H" in opts: if options["heating"]:
sectors += ["residential non-elec", "services non-elec"] sectors += ["residential non-elec", "services non-elec"]
if "I" in opts: if options["industry"]:
sectors += [ sectors += [
"industrial non-elec", "industrial non-elec",
"industrial processes", "industrial processes",
@ -209,7 +209,7 @@ def emission_sectors_from_opts(opts):
"domestic navigation", "domestic navigation",
"international navigation", "international navigation",
] ]
if "A" in opts: if options["agriculture"]:
sectors += ["agriculture"] sectors += ["agriculture"]
return sectors return sectors
@ -223,7 +223,7 @@ def get(item, investment_year=None):
def co2_emissions_year( def co2_emissions_year(
countries, input_eurostat, opts, emissions_scope, report_year, input_co2, year countries, input_eurostat, options, emissions_scope, report_year, input_co2, year
): ):
""" """
Calculate CO2 emissions in one specific year (e.g. 1990 or 2018). Calculate CO2 emissions in one specific year (e.g. 1990 or 2018).
@ -241,7 +241,7 @@ def co2_emissions_year(
co2_totals = build_co2_totals(countries, eea_co2, eurostat_co2) co2_totals = build_co2_totals(countries, eea_co2, eurostat_co2)
sectors = emission_sectors_from_opts(opts) sectors = determine_emission_sectors(options)
co2_emissions = co2_totals.loc[countries, sectors].sum().sum() co2_emissions = co2_totals.loc[countries, sectors].sum().sum()
@ -252,11 +252,12 @@ def co2_emissions_year(
# TODO: move to own rule with sector-opts wildcard? # TODO: move to own rule with sector-opts wildcard?
def build_carbon_budget(o, input_eurostat, fn, emissions_scope, report_year): def build_carbon_budget(
o, input_eurostat, fn, emissions_scope, report_year, input_co2, options
):
""" """
Distribute carbon budget following beta or exponential transition path. Distribute carbon budget following beta or exponential transition path.
""" """
# opts?
if "be" in o: if "be" in o:
# beta decay # beta decay
@ -272,7 +273,7 @@ def build_carbon_budget(o, input_eurostat, fn, emissions_scope, report_year):
e_1990 = co2_emissions_year( e_1990 = co2_emissions_year(
countries, countries,
input_eurostat, input_eurostat,
opts, options,
emissions_scope, emissions_scope,
report_year, report_year,
input_co2, input_co2,
@ -283,7 +284,7 @@ def build_carbon_budget(o, input_eurostat, fn, emissions_scope, report_year):
e_0 = co2_emissions_year( e_0 = co2_emissions_year(
countries, countries,
input_eurostat, input_eurostat,
opts, options,
emissions_scope, emissions_scope,
report_year, report_year,
input_co2, input_co2,
@ -760,12 +761,12 @@ def add_dac(n, costs):
) )
def add_co2limit(n, nyears=1.0, limit=0.0): def add_co2limit(n, options, nyears=1.0, limit=0.0):
logger.info(f"Adding CO2 budget limit as per unit of 1990 levels of {limit}") logger.info(f"Adding CO2 budget limit as per unit of 1990 levels of {limit}")
countries = snakemake.params.countries countries = snakemake.params.countries
sectors = emission_sectors_from_opts(opts) sectors = determine_emission_sectors(options)
# convert Mt to tCO2 # convert Mt to tCO2
co2_totals = 1e6 * pd.read_csv(snakemake.input.co2_totals_name, index_col=0) co2_totals = 1e6 * pd.read_csv(snakemake.input.co2_totals_name, index_col=0)
@ -2004,13 +2005,6 @@ def add_heat(n, costs):
if options["retrofitting"]["retro_endogen"]: if options["retrofitting"]["retro_endogen"]:
logger.info("Add retrofitting endogenously") logger.info("Add retrofitting endogenously")
# resample heat demand temporal 'heat_demand_r' depending on in config
# specified temporal resolution, to not overestimate retrofitting
hours = list(filter(re.compile(r"^\d+h$", re.IGNORECASE).search, opts))
if len(hours) == 0:
hours = [n.snapshots[1] - n.snapshots[0]]
heat_demand_r = heat_demand.resample(hours[0]).mean()
# retrofitting data 'retro_data' with 'costs' [EUR/m^2] and heat # retrofitting data 'retro_data' with 'costs' [EUR/m^2] and heat
# demand 'dE' [per unit of original heat demand] for each country and # demand 'dE' [per unit of original heat demand] for each country and
# different retrofitting strengths [additional insulation thickness in m] # different retrofitting strengths [additional insulation thickness in m]
@ -2028,12 +2022,12 @@ def add_heat(n, costs):
# share of space heat demand 'w_space' of total heat demand # share of space heat demand 'w_space' of total heat demand
w_space = {} w_space = {}
for sector in sectors: for sector in sectors:
w_space[sector] = heat_demand_r[sector + " space"] / ( w_space[sector] = heat_demand[sector + " space"] / (
heat_demand_r[sector + " space"] + heat_demand_r[sector + " water"] heat_demand[sector + " space"] + heat_demand[sector + " water"]
) )
w_space["tot"] = ( w_space["tot"] = (
heat_demand_r["services space"] + heat_demand_r["residential space"] heat_demand["services space"] + heat_demand["residential space"]
) / heat_demand_r.T.groupby(level=[1]).sum().T ) / heat_demand.T.groupby(level=[1]).sum().T
for name in n.loads[ for name in n.loads[
n.loads.carrier.isin([x + " heat" for x in heat_systems]) n.loads.carrier.isin([x + " heat" for x in heat_systems])
@ -2063,7 +2057,7 @@ def add_heat(n, costs):
pop_layout.loc[node].fraction * floor_area.loc[ct, "value"] * 10**6 pop_layout.loc[node].fraction * floor_area.loc[ct, "value"] * 10**6
).loc[sec] * f ).loc[sec] * f
# total heat demand at node [MWh] # total heat demand at node [MWh]
demand = n.loads_t.p_set[name].resample(hours[0]).mean() demand = n.loads_t.p_set[name]
# space heat demand at node [MWh] # space heat demand at node [MWh]
space_heat_demand = demand * w_space[sec][node] space_heat_demand = demand * w_space[sec][node]
@ -3296,52 +3290,6 @@ def remove_h2_network(n):
n.stores.drop("EU H2 Store", inplace=True) n.stores.drop("EU H2 Store", inplace=True)
def maybe_adjust_costs_and_potentials(n, opts):
for o in opts:
flags = ["+e", "+p", "+m", "+c"]
if all(flag not in o for flag in flags):
continue
oo = o.split("+")
carrier_list = np.hstack(
(
n.generators.carrier.unique(),
n.links.carrier.unique(),
n.stores.carrier.unique(),
n.storage_units.carrier.unique(),
)
)
suptechs = map(lambda c: c.split("-", 2)[0], carrier_list)
if oo[0].startswith(tuple(suptechs)):
carrier = oo[0]
attr_lookup = {
"p": "p_nom_max",
"e": "e_nom_max",
"c": "capital_cost",
"m": "marginal_cost",
}
attr = attr_lookup[oo[1][0]]
factor = float(oo[1][1:])
# beware if factor is 0 and p_nom_max is np.inf, 0*np.inf is nan
if carrier == "AC": # lines do not have carrier
n.lines[attr] *= factor
else:
if attr == "p_nom_max":
comps = {"Generator", "Link", "StorageUnit"}
elif attr == "e_nom_max":
comps = {"Store"}
else:
comps = {"Generator", "Link", "StorageUnit", "Store"}
for c in n.iterate_components(comps):
if carrier == "solar":
sel = c.df.carrier.str.contains(
carrier
) & ~c.df.carrier.str.contains("solar rooftop")
else:
sel = c.df.carrier.str.contains(carrier)
c.df.loc[sel, attr] *= factor
logger.info(f"changing {attr} for {carrier} by factor {factor}")
def limit_individual_line_extension(n, maxext): def limit_individual_line_extension(n, maxext):
logger.info(f"Limiting new HVAC and HVDC extensions to {maxext} MW") logger.info(f"Limiting new HVAC and HVDC extensions to {maxext} MW")
n.lines["s_nom_max"] = n.lines["s_nom"] + maxext n.lines["s_nom_max"] = n.lines["s_nom"] + maxext
@ -3511,31 +3459,31 @@ def apply_time_segmentation(
return n return n
def set_temporal_aggregation(n, opts, solver_name): def set_temporal_aggregation(n, resolution, solver_name):
""" """
Aggregate network temporally. Aggregate network temporally.
""" """
for o in opts: if not resolution:
# temporal averaging return n
m = re.match(r"^\d+h$", o, re.IGNORECASE)
if m is not None: # representative snapshots
n = average_every_nhours(n, m.group(0)) if "sn" in resolution.lower():
break sn = int(resolution[:-2])
# representative snapshots logger.info("Use every %s snapshot as representative", sn)
m = re.match(r"(^\d+)sn$", o, re.IGNORECASE) n.set_snapshots(n.snapshots[::sn])
if m is not None: n.snapshot_weightings *= sn
sn = int(m[1])
logger.info(f"Use every {sn} snapshot as representative") # segments with package tsam
n.set_snapshots(n.snapshots[::sn]) elif "seg" in resolution.lower():
n.snapshot_weightings *= sn segments = int(resolution[:-3])
break logger.info("Use temporal segmentation with %s segments", segments)
# segments with package tsam n = apply_time_segmentation(n, segments, solver_name=solver_name)
m = re.match(r"^(\d+)seg$", o, re.IGNORECASE)
if m is not None: # temporal averaging
segments = int(m[1]) elif "h" in resolution.lower():
logger.info(f"Use temporal segmentation with {segments} segments") logger.info("Aggregate to frequency %s", resolution)
n = apply_time_segmentation(n, segments, solver_name=solver_name) n = average_every_nhours(n, resolution)
break
return n return n
@ -3606,13 +3554,10 @@ if __name__ == "__main__":
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake) set_scenario_config(snakemake)
update_config_from_wildcards(snakemake.config, snakemake.wildcards)
update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts)
options = snakemake.params.sector options = snakemake.params.sector
opts = snakemake.wildcards.sector_opts.split("-")
investment_year = int(snakemake.wildcards.planning_horizons[-4:]) investment_year = int(snakemake.wildcards.planning_horizons[-4:])
n = pypsa.Network(snakemake.input.network) n = pypsa.Network(snakemake.input.network)
@ -3650,56 +3595,34 @@ if __name__ == "__main__":
add_storage_and_grids(n, costs) add_storage_and_grids(n, costs)
# TODO merge with opts cost adjustment below if options["transport"]:
for o in opts:
if o[:4] == "dist":
options["electricity_distribution_grid"] = True
options["electricity_distribution_grid_cost_factor"] = float(
o[4:].replace("p", ".").replace("m", "-")
)
if o == "biomasstransport":
options["biomass_transport"] = True
if "nodistrict" in opts:
options["district_heating"]["progress"] = 0.0
if "nowasteheat" in opts:
logger.info("Disabling waste heat.")
options["use_fischer_tropsch_waste_heat"] = False
options["use_methanolisation_waste_heat"] = False
options["use_haber_bosch_waste_heat"] = False
options["use_methanation_waste_heat"] = False
options["use_fuel_cell_waste_heat"] = False
options["use_electrolysis_waste_heat"] = False
if "T" in opts:
add_land_transport(n, costs) add_land_transport(n, costs)
if "H" in opts: if options["heating"]:
add_heat(n, costs) add_heat(n, costs)
if "B" in opts: if options["biomass"]:
add_biomass(n, costs) add_biomass(n, costs)
if options["ammonia"]: if options["ammonia"]:
add_ammonia(n, costs) add_ammonia(n, costs)
if "I" in opts: if options["industry"]:
add_industry(n, costs) add_industry(n, costs)
if "H" in opts: if options["heating"]:
add_waste_heat(n) add_waste_heat(n)
if "A" in opts: # requires H and I if options["agriculture"]: # requires H and I
add_agriculture(n, costs) add_agriculture(n, costs)
if options["dac"]: if options["dac"]:
add_dac(n, costs) add_dac(n, costs)
if "decentral" in opts: if not options["electricity_transmission_grid"]:
decentral(n) decentral(n)
if "noH2network" in opts: if not options["H2_network"]:
remove_h2_network(n) remove_h2_network(n)
if options["co2network"]: if options["co2network"]:
@ -3709,51 +3632,39 @@ if __name__ == "__main__":
add_allam(n, costs) add_allam(n, costs)
solver_name = snakemake.config["solving"]["solver"]["name"] solver_name = snakemake.config["solving"]["solver"]["name"]
n = set_temporal_aggregation(n, opts, solver_name) resolution = snakemake.params.time_resolution
n = set_temporal_aggregation(n, resolution, solver_name)
limit_type = "config" co2_budget = snakemake.params.co2_budget
limit = get(snakemake.params.co2_budget, investment_year) if isinstance(co2_budget, str) and co2_budget.startswith("cb"):
for o in opts:
if "cb" not in o:
continue
limit_type = "carbon budget"
fn = "results/" + snakemake.params.RDIR + "/csvs/carbon_budget_distribution.csv" fn = "results/" + snakemake.params.RDIR + "/csvs/carbon_budget_distribution.csv"
if not os.path.exists(fn): if not os.path.exists(fn):
emissions_scope = snakemake.params.emissions_scope emissions_scope = snakemake.params.emissions_scope
report_year = snakemake.params.eurostat_report_year report_year = snakemake.params.eurostat_report_year
input_co2 = snakemake.input.co2 input_co2 = snakemake.input.co2
build_carbon_budget( build_carbon_budget(
o, co2_budget,
snakemake.input.eurostat, snakemake.input.eurostat,
fn, fn,
emissions_scope, emissions_scope,
report_year, report_year,
input_co2, input_co2,
options,
) )
co2_cap = pd.read_csv(fn, index_col=0).squeeze() co2_cap = pd.read_csv(fn, index_col=0).squeeze()
limit = co2_cap.loc[investment_year] limit = co2_cap.loc[investment_year]
break else:
for o in opts: limit = get(co2_budget, investment_year)
if "Co2L" not in o: add_co2limit(n, options, nyears, limit)
continue
limit_type = "wildcard"
limit = o[o.find("Co2L") + 4 :]
limit = float(limit.replace("p", ".").replace("m", "-"))
break
logger.info(f"Add CO2 limit from {limit_type}")
add_co2limit(n, nyears, limit)
for o in opts: maxext = snakemake.params["lines"]["max_extension"]
if not o[:10] == "linemaxext": if maxext is not None:
continue
maxext = float(o[10:]) * 1e3
limit_individual_line_extension(n, maxext) limit_individual_line_extension(n, maxext)
break
if options["electricity_distribution_grid"]: if options["electricity_distribution_grid"]:
insert_electricity_distribution_grid(n, costs) insert_electricity_distribution_grid(n, costs)
maybe_adjust_costs_and_potentials(n, opts) maybe_adjust_costs_and_potentials(n, snakemake.params["adjustments"])
if options["gas_distribution_grid"]: if options["gas_distribution_grid"]:
insert_gas_distribution_costs(n, costs) insert_gas_distribution_costs(n, costs)

View File

@ -39,9 +39,8 @@ import xarray as xr
from _benchmark import memory_logger from _benchmark import memory_logger
from _helpers import ( from _helpers import (
configure_logging, configure_logging,
get_opt,
set_scenario_config, set_scenario_config,
update_config_with_sector_opts, update_config_from_wildcards,
) )
from pypsa.descriptors import get_activity_mask from pypsa.descriptors import get_activity_mask
from pypsa.descriptors import get_switchable_as_dense as get_as_dense from pypsa.descriptors import get_switchable_as_dense as get_as_dense
@ -183,16 +182,10 @@ def _add_land_use_constraint_m(n, planning_horizons, config):
n.generators.p_nom_max.clip(lower=0, inplace=True) n.generators.p_nom_max.clip(lower=0, inplace=True)
def add_co2_sequestration_limit(n, config, limit=200): def add_co2_sequestration_limit(n, limit=200):
""" """
Add a global constraint on the amount of Mt CO2 that can be sequestered. Add a global constraint on the amount of Mt CO2 that can be sequestered.
""" """
limit = limit * 1e6
for o in opts:
if "seq" not in o:
continue
limit = float(o[o.find("seq") + 3 :]) * 1e6
break
if not n.investment_periods.empty: if not n.investment_periods.empty:
periods = n.investment_periods periods = n.investment_periods
@ -205,7 +198,7 @@ def add_co2_sequestration_limit(n, config, limit=200):
"GlobalConstraint", "GlobalConstraint",
names, names,
sense=">=", sense=">=",
constant=-limit, constant=-limit * 1e6,
type="operational_limit", type="operational_limit",
carrier_attribute="co2 sequestered", carrier_attribute="co2 sequestered",
investment_period=periods, investment_period=periods,
@ -265,7 +258,7 @@ def add_carbon_budget_constraint(n, snapshots):
n.model.add_constraints(lhs <= rhs, name=f"GlobalConstraint-{name}") n.model.add_constraints(lhs <= rhs, name=f"GlobalConstraint-{name}")
def add_max_growth(n, config): def add_max_growth(n):
""" """
Add maximum growth rates for different carriers. Add maximum growth rates for different carriers.
""" """
@ -398,11 +391,11 @@ def prepare_network(
if foresight == "perfect": if foresight == "perfect":
n = add_land_use_constraint_perfect(n) n = add_land_use_constraint_perfect(n)
if snakemake.params["sector"]["limit_max_growth"]["enable"]: if snakemake.params["sector"]["limit_max_growth"]["enable"]:
n = add_max_growth(n, config) n = add_max_growth(n)
if n.stores.carrier.eq("co2 sequestered").any(): if n.stores.carrier.eq("co2 sequestered").any():
limit = co2_sequestration_potential limit = co2_sequestration_potential
add_co2_sequestration_limit(n, config, limit=limit) add_co2_sequestration_limit(n, limit=limit)
return n return n
@ -836,30 +829,20 @@ def extra_functionality(n, snapshots):
location to add them. The arguments ``opts`` and location to add them. The arguments ``opts`` and
``snakemake.config`` are expected to be attached to the network. ``snakemake.config`` are expected to be attached to the network.
""" """
opts = n.opts
config = n.config config = n.config
constraints = config["solving"].get("constraints", {}) constraints = config["solving"].get("constraints", {})
if ( if constraints["BAU"] and n.generators.p_nom_extendable.any():
"BAU" in opts or constraints.get("BAU", False)
) and n.generators.p_nom_extendable.any():
add_BAU_constraints(n, config) add_BAU_constraints(n, config)
if ( if constraints["SAFE"] and n.generators.p_nom_extendable.any():
"SAFE" in opts or constraints.get("SAFE", False)
) and n.generators.p_nom_extendable.any():
add_SAFE_constraints(n, config) add_SAFE_constraints(n, config)
if ( if constraints["CCL"] and n.generators.p_nom_extendable.any():
"CCL" in opts or constraints.get("CCL", False)
) and n.generators.p_nom_extendable.any():
add_CCL_constraints(n, config) add_CCL_constraints(n, config)
reserve = config["electricity"].get("operational_reserve", {}) reserve = config["electricity"].get("operational_reserve", {})
if reserve.get("activate"): if reserve.get("activate"):
add_operational_reserve_margin(n, snapshots, config) add_operational_reserve_margin(n, snapshots, config)
EQ_config = constraints.get("EQ", False) if EQ_o := constraints["EQ"]:
EQ_wildcard = get_opt(opts, r"^EQ+[0-9]*\.?[0-9]+(c|)")
EQ_o = EQ_wildcard or EQ_config
if EQ_o:
add_EQ_constraints(n, EQ_o.replace("EQ", "")) add_EQ_constraints(n, EQ_o.replace("EQ", ""))
add_battery_constraints(n) add_battery_constraints(n)
@ -882,7 +865,7 @@ def extra_functionality(n, snapshots):
custom_extra_functionality(n, snapshots, snakemake) custom_extra_functionality(n, snapshots, snakemake)
def solve_network(n, config, solving, opts="", **kwargs): def solve_network(n, config, solving, **kwargs):
set_of_options = solving["solver"]["options"] set_of_options = solving["solver"]["options"]
cf_solving = solving["options"] cf_solving = solving["options"]
@ -910,7 +893,6 @@ def solve_network(n, config, solving, opts="", **kwargs):
# add to network for extra_functionality # add to network for extra_functionality
n.config = config n.config = config
n.opts = opts
if rolling_horizon: if rolling_horizon:
kwargs["horizon"] = cf_solving.get("horizon", 365) kwargs["horizon"] = cf_solving.get("horizon", 365)
@ -956,16 +938,8 @@ if __name__ == "__main__":
) )
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake) set_scenario_config(snakemake)
update_config_from_wildcards(snakemake.config, snakemake.wildcards)
if "sector_opts" in snakemake.wildcards.keys():
update_config_with_sector_opts(
snakemake.config, snakemake.wildcards.sector_opts
)
opts = snakemake.wildcards.opts
if "sector_opts" in snakemake.wildcards.keys():
opts += "-" + snakemake.wildcards.sector_opts
opts = [o for o in opts.split("-") if o != ""]
solve_opts = snakemake.params.solving["options"] solve_opts = snakemake.params.solving["options"]
np.random.seed(solve_opts.get("seed", 123)) np.random.seed(solve_opts.get("seed", 123))
@ -988,7 +962,6 @@ if __name__ == "__main__":
n, n,
config=snakemake.config, config=snakemake.config,
solving=snakemake.params.solving, solving=snakemake.params.solving,
opts=opts,
log_fn=snakemake.log.solver, log_fn=snakemake.log.solver,
) )

View File

@ -15,7 +15,7 @@ import pypsa
from _helpers import ( from _helpers import (
configure_logging, configure_logging,
set_scenario_config, set_scenario_config,
update_config_with_sector_opts, update_config_from_wildcards,
) )
from solve_network import prepare_network, solve_network from solve_network import prepare_network, solve_network
@ -39,10 +39,8 @@ if __name__ == "__main__":
configure_logging(snakemake) configure_logging(snakemake)
set_scenario_config(snakemake) set_scenario_config(snakemake)
update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts) update_config_from_wildcards(snakemake.config, snakemake.wildcards)
opts = f"{snakemake.wildcards.opts}-{snakemake.wildcards.sector_opts}".split("-")
opts = [o for o in opts if o != ""]
solve_opts = snakemake.params.options solve_opts = snakemake.params.options
np.random.seed(solve_opts.get("seed", 123)) np.random.seed(solve_opts.get("seed", 123))
@ -51,9 +49,7 @@ if __name__ == "__main__":
n.optimize.fix_optimal_capacities() n.optimize.fix_optimal_capacities()
n = prepare_network(n, solve_opts, config=snakemake.config) n = prepare_network(n, solve_opts, config=snakemake.config)
n = solve_network( n = solve_network(n, config=snakemake.config, log_fn=snakemake.log.solver)
n, config=snakemake.config, opts=opts, log_fn=snakemake.log.solver
)
n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
n.export_to_netcdf(snakemake.output[0]) n.export_to_netcdf(snakemake.output[0])