handle all {opts} and {sector_opts} wildcard values in config

Fabian Neumann 2024-02-17 11:57:16 +01:00
parent a3c0ffac44
commit dd2416a59e
21 changed files with 385 additions and 345 deletions

View File

@ -59,9 +59,6 @@ snapshots:
start: "2013-01-01"
end: "2014-01-01"
inclusive: 'left'
resolution: false
segmentation: false
#representative: false
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#enable
enable:
@ -366,6 +363,11 @@ existing_capacities:
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#sector
sector:
transport: true
heating: true
biomass: true
industry: true
agriculture: true
district_heating:
potential: 0.6
progress:
@ -531,6 +533,7 @@ sector:
use_methanation_waste_heat: true
use_fuel_cell_waste_heat: true
use_electrolysis_waste_heat: true
electricity_transmission_grid: true
electricity_distribution_grid: true
electricity_distribution_grid_cost_factor: 1.0
electricity_grid_connection: true
@ -712,6 +715,14 @@ clustering:
committable: any
ramp_limit_up: max
ramp_limit_down: max
temporal:
resolution_elec: false
resolution_sector: false
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#adjustments
adjustments:
electricity: false
sector: false
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#solving
solving:
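For orientation, a minimal sketch (not part of the commit) of how the two new config blocks above read once the YAML is loaded into the ``config`` dictionary the scripts work with; the excerpt repeats only the keys added in this diff:

import yaml

excerpt = """
clustering:
  temporal:
    resolution_elec: false
    resolution_sector: false
adjustments:
  electricity: false
  sector: false
"""
config = yaml.safe_load(excerpt)
assert config["clustering"]["temporal"]["resolution_elec"] is False
assert config["adjustments"] == {"electricity": False, "sector": False}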

View File

@ -0,0 +1,8 @@
,Unit,Values,Description
adjustments,,,
-- electricity,bool or dict,,"Parameter adjustments for capital cost, marginal cost, and maximum capacities of carriers. Applied in :mod:`prepare_network`."
-- -- {attr},,,"Attribute can be ``e_nom_max``, ``p_nom_max``, ``marginal_cost`` or ``capital_cost``"
-- -- -- {carrier},float,per-unit,"Any carrier of the network to which the parameter adjustment factor should be applied."
-- sector,bool or dict,,"Parameter adjustments for capital cost, marginal cost, and maximum capacities of carriers. Applied in :mod:`prepare_sector_network`."
-- -- {attr},,,"Attribute can be ``e_nom_max``, ``p_nom_max``, ``marginal_cost`` or ``capital_cost``"
-- -- -- {carrier},float,per-unit,"Any carrier of the network to which the parameter adjustment factor should be applied."
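The table describes a nested ``attribute -> carrier -> factor`` mapping. A minimal illustration (hypothetical carriers and factors, not taken from the commit) of a valid ``adjustments`` block and what it means:

# hypothetical content of config["adjustments"]; factors are per-unit multipliers
adjustments = {
    "electricity": {
        "capital_cost": {"solar": 0.9},  # scale capital cost of all solar components by 0.9
        "p_nom_max": {"onwind": 0.7},    # scale the maximum onshore wind potential by 0.7
    },
    "sector": False,  # a plain false disables adjustments on the sector-coupling side
}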

View File

@ -17,3 +17,6 @@ aggregation_strategies,,,
-- -- {key},str,"{key} can be any of the component of the generator (str). Its value can be any that can be converted to pandas.Series using getattr(). For example one of {min, max, sum}.","Aggregates the component according to the given strategy. For example, if sum, then all values within each cluster are summed to represent the new generator."
-- buses,,,
-- -- {key},str,"{key} can be any of the component of the bus (str). Its value can be any that can be converted to pandas.Series using getattr(). For example one of {min, max, sum}.","Aggregates the component according to the given strategy. For example, if sum, then all values within each cluster are summed to represent the new bus."
temporal,,,Options for temporal resolution
-- resolution_elec,--,"{false,``nH``; e.g. ``2H``-``6H``}","Resample the time resolution by averaging over every ``n`` snapshots in :mod:`prepare_network`. **Warning:** This option should currently only be used with electricity-only networks, not for sector-coupled networks."
-- resolution_sector,--,"{false,``nH``; e.g. ``3H``, ``nSEG``; e.g. ``4380SEG``, ``nsn``; e.g. ``365sn``}","Resample the time resolution in :mod:`prepare_sector_network` by averaging over every ``n`` snapshots (``nH``), by tsam segmentation into ``n`` segments (``nSEG``), or by using every ``n``-th snapshot as representative (``nsn``)."
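An ``nH`` value averages every ``n`` consecutive snapshots. A toy illustration of that arithmetic with plain pandas (the repository's helper additionally rescales the snapshot weightings on the PyPSA network, which is omitted here):

import numpy as np
import pandas as pd

snapshots = pd.date_range("2013-01-01", periods=12, freq="h")
load = pd.Series(np.arange(12.0), index=snapshots)
load_3h = load.resample("3h").mean()  # what a resolution of "3H" does to a time series
print(load_3h)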

View File

@ -1,4 +1,9 @@
,Unit,Values,Description
transport,--,"{true, false}",Flag to include the transport sector.
heating,--,"{true, false}",Flag to include the heating sector.
biomass,--,"{true, false}",Flag to include the biomass sector.
industry,--,"{true, false}",Flag to include the industry sector.
agriculture,--,"{true, false}",Flag to include the agriculture sector.
district_heating,--,,`prepare_sector_network.py <https://github.com/PyPSA/pypsa-eur-sec/blob/master/scripts/prepare_sector_network.py>`_
-- potential,--,float,Maximum fraction of urban demand which can be supplied by district heating.
-- progress,--,Dictionary with planning horizons as keys,Increase of today's district heating demand to the potential maximum district heating share. Progress = 0 means today's district heating share; progress = 1 means the maximum fraction of urban demand is supplied by district heating.
@ -109,6 +114,7 @@ min_part_load _methanolisation,per unit of p_nom ,float,The minimum unit dispatc
use_fischer_tropsch _waste_heat,--,"{true, false}",Add option for using waste heat of Fischer Tropsch in district heating networks
use_fuel_cell_waste_heat,--,"{true, false}",Add option for using waste heat of fuel cells in district heating networks
use_electrolysis_waste _heat,--,"{true, false}",Add option for using waste heat of electrolysis in district heating networks
electricity_transmission _grid,--,"{true, false}",Switch for enabling/disabling the electricity transmission grid.
electricity_distribution _grid,--,"{true, false}",Add a simplified representation of the exchange capacity between transmission and distribution grid level through a link.
electricity_distribution _grid_cost_factor,,,Multiplies the investment cost of the electricity distribution grid
,,,
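These five booleans replace the single-letter switches previously read from the ``sector_opts`` wildcard. A sketch of the letter-to-flag mapping that the new parsing in scripts/_helpers.py (further down in this diff) applies; the wildcard string is a hypothetical example:

config = {"sector": dict.fromkeys(
    ["transport", "heating", "biomass", "industry", "agriculture"], False)}
letter_to_flag = {"T": "transport", "H": "heating", "B": "biomass",
                  "I": "industry", "A": "agriculture"}
opts = "730H-T-H-B-I-A-dist1".split("-")  # hypothetical sector_opts value
for letter, flag in letter_to_flag.items():
    if letter in opts:
        config["sector"][flag] = True
assert all(config["sector"].values())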

View File

@ -2,5 +2,3 @@
start,--,str or datetime-like; e.g. YYYY-MM-DD,Left bound of date range
end,--,str or datetime-like; e.g. YYYY-MM-DD,Right bound of date range
inclusive,--,"One of {'neither', 'both', 'left', 'right'}","Make the time interval closed on the ``left``, the ``right``, both sides (``both``), or neither side (``neither``)."
resolution ,--,"{false,``nH``; i.e. ``2H``-``6H``}","Resample the time-resolution by averaging over every ``n`` snapshots in :mod:`prepare_network`. **Warning:** This option should currently only be used with electricity-only networks, not for sector-coupled networks."
segmentation,--,"{false,``n``; e.g. ``4380``}","Apply time series segmentation with `tsam <https://tsam.readthedocs.io/en/latest/index.html>`_ package to ``n`` adjacent snapshots of varying lengths based on capacity factors of varying renewables, hydro inflow and load in :mod:`prepare_network`. **Warning:** This option should currently only be used with electricity-only networks, not for sector-coupled networks."

View File

@ -561,6 +561,21 @@ The list of available biomass is given by the category in `ENSPRESO_BIOMASS <htt
use ``min`` in ``p_nom_max:`` for more
conservative assumptions.
.. _adjustments_cf:
``adjustments``
===============
.. literalinclude:: ../config/config.default.yaml
:language: yaml
:start-at: adjustments:
:end-before: # docs
.. csv-table::
:header-rows: 1
:widths: 22,7,22,33
:file: configtables/adjustments.csv
.. _solving_cf:
``solving``

View File

@ -20,7 +20,7 @@ if config["enable"].get("prepare_links_p_nom", False):
rule build_electricity_demand:
params:
snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]},
snapshots=config["snapshots"],
countries=config["countries"],
load=config["load"],
input:
@ -62,7 +62,7 @@ rule build_powerplants:
rule base_network:
params:
countries=config["countries"],
snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]},
snapshots=config["snapshots"],
lines=config["lines"],
links=config["links"],
transformers=config["transformers"],
@ -145,7 +145,7 @@ if config["enable"].get("build_cutout", False):
rule build_cutout:
params:
snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]},
snapshots=config["snapshots"],
cutouts=config["atlite"]["cutouts"],
input:
regions_onshore=RESOURCES + "regions_onshore.geojson",
@ -259,7 +259,7 @@ else:
rule build_renewable_profiles:
params:
snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]},
snapshots=config["snapshots"],
renewable=config["renewable"],
input:
**opt,
@ -357,7 +357,7 @@ if config["lines"]["dynamic_line_rating"]["activate"]:
rule build_line_rating:
params:
snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]},
snapshots=config["snapshots"],
input:
base_network=RESOURCES + "networks/base.nc",
cutout="cutouts/"
@ -539,10 +539,7 @@ rule add_extra_components:
rule prepare_network:
params:
snapshots={
"resolution": config["snapshots"].get("resolution", False),
"segmentation": config["snapshots"].get("segmentation", False),
},
time_resolution=config["clustering"]["temporal"]["resolution_elec"],
links=config["links"],
lines=config["lines"],
co2base=config["electricity"]["co2base"],
@ -552,6 +549,7 @@ rule prepare_network:
gaslimit=config["electricity"].get("gaslimit"),
max_hours=config["electricity"]["max_hours"],
costs=config["costs"],
adjustments=config["adjustments"]["electricity"],
autarky=config["electricity"].get("autarky", {}),
input:
RESOURCES + "networks/elec_s{simpl}_{clusters}_ec.nc",

View File

@ -125,7 +125,7 @@ rule cluster_gas_network:
rule build_daily_heat_demand:
params:
snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]},
snapshots=config["snapshots"],
input:
pop_layout=RESOURCES + "pop_layout_{scope}.nc",
regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
@ -147,7 +147,7 @@ rule build_daily_heat_demand:
rule build_hourly_heat_demand:
params:
snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]},
snapshots=config["snapshots"],
input:
heat_profile="data/heat_load_profile_BDEW.csv",
heat_demand=RESOURCES + "daily_heat_demand_{scope}_elec_s{simpl}_{clusters}.nc",
@ -168,7 +168,7 @@ rule build_hourly_heat_demand:
rule build_temperature_profiles:
params:
snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]},
snapshots=config["snapshots"],
input:
pop_layout=RESOURCES + "pop_layout_{scope}.nc",
regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
@ -220,7 +220,7 @@ rule build_cop_profiles:
rule build_solar_thermal_profiles:
params:
snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]},
snapshots=config["snapshots"],
solar_thermal=config["solar_thermal"],
input:
pop_layout=RESOURCES + "pop_layout_{scope}.nc",
@ -707,7 +707,7 @@ rule build_shipping_demand:
rule build_transport_demand:
params:
snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]},
snapshots=config["snapshots"],
sector=config["sector"],
input:
clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv",
@ -789,16 +789,19 @@ rule build_existing_heating_distribution:
rule prepare_sector_network:
params:
time_resolution=config["clustering"]["temporal"]["resolution_sector"],
co2_budget=config["co2_budget"],
conventional_carriers=config["existing_capacities"]["conventional_carriers"],
foresight=config["foresight"],
costs=config["costs"],
sector=config["sector"],
industry=config["industry"],
lines=config["lines"],
pypsa_eur=config["pypsa_eur"],
length_factor=config["lines"]["length_factor"],
planning_horizons=config["scenario"]["planning_horizons"],
countries=config["countries"],
adjustments=config["adjustments"]["sector"],
emissions_scope=config["energy"]["emissions"],
eurostat_report_year=config["energy"]["eurostat_report_year"],
RDIR=RDIR,

View File

@ -162,7 +162,7 @@ rule make_summary:
params:
foresight=config["foresight"],
costs=config["costs"],
snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]},
snapshots=config["snapshots"],
scenario=config["scenario"],
RDIR=RDIR,
input:
@ -240,10 +240,11 @@ rule plot_summary:
params:
countries=config["countries"],
planning_horizons=config["scenario"]["planning_horizons"],
sector_opts=config["scenario"]["sector_opts"],
emissions_scope=config["energy"]["emissions"],
eurostat_report_year=config["energy"]["eurostat_report_year"],
plotting=config["plotting"],
foresight=config["foresight"],
co2_budget=config["co2_budget"],
RDIR=RDIR,
input:
costs=RESULTS + "csvs/costs.csv",

View File

@ -51,7 +51,7 @@ rule add_brownfield:
H2_retrofit=config["sector"]["H2_retrofit"],
H2_retrofit_capacity_per_CH4=config["sector"]["H2_retrofit_capacity_per_CH4"],
threshold_capacity=config["existing_capacities"]["threshold_capacity"],
snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]},
snapshots=config["snapshots"],
carriers=config["electricity"]["renewable_carriers"],
input:
**{

View File

@ -46,6 +46,8 @@ rule add_existing_baseyear:
rule prepare_perfect_foresight:
params:
costs=config["costs"],
input:
**{
f"network_{year}": RESULTS

View File

@ -17,7 +17,7 @@ rule build_electricity_production:
The data is used for validation of the optimization results.
"""
params:
snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]},
snapshots=config["snapshots"],
countries=config["countries"],
output:
RESOURCES + "historical_electricity_production.csv",
@ -35,7 +35,7 @@ rule build_cross_border_flows:
The data is used for validation of the optimization results.
"""
params:
snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]},
snapshots=config["snapshots"],
countries=config["countries"],
input:
network=RESOURCES + "networks/base.nc",
@ -55,7 +55,7 @@ rule build_electricity_prices:
The data is used for validation of the optimization results.
"""
params:
snapshots={k: config["snapshots"][k] for k in ["start", "end", "inclusive"]},
snapshots=config["snapshots"],
countries=config["countries"],
output:
RESOURCES + "historical_electricity_prices.csv",

View File

@ -15,6 +15,7 @@ import pandas as pd
import pytz
import requests
import yaml
from snakemake.utils import update_config
from tqdm import tqdm
logger = logging.getLogger(__name__)
@ -43,9 +44,9 @@ def find_opt(opts, expr):
"""
for o in opts:
if expr in o:
m = re.findall("[0-9]*\.?[0-9]+$", o)
m = re.findall("^m?\d*(\.|p)?\d+$", o)
if len(m) > 0:
return True, float(m[0])
return True, float(m[0].replace("p", ".").replace("m", "-"))
else:
return True, None
return False, None
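The new return statement decodes the ``p``/``m`` convention used in wildcard values, where ``p`` stands for a decimal point and ``m`` for a minus sign. The decoding step in isolation (example strings are hypothetical):

def decode(s):
    # "0p25" -> 0.25, "m0p5" -> -0.5; mirrors the replace() chain in find_opt above
    return float(s.replace("p", ".").replace("m", "-"))

assert decode("0p25") == 0.25
assert decode("m0p5") == -0.5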
@ -379,13 +380,177 @@ def parse(infix):
return {infix.pop(0): parse(infix)}
def update_config_with_sector_opts(config, sector_opts):
from snakemake.utils import update_config
def update_config_from_wildcards(config, w):
"""
Parses configuration settings from wildcards and updates the config.
for o in sector_opts.split("-"):
if o.startswith("CF+"):
infix = o.split("+")[1:]
update_config(config, parse(infix))
- TODO: Should be run inside config_provider function.
"""
if w.get("opts"):
opts = w.opts.split("-")
if nhours := get_opt(opts, r"^\d+(h|seg)$"):
config["clustering"]["temporal"]["resolution_elec"] = nhours
co2l_enable, co2l_value = find_opt(opts, "Co2L")
if co2l_enable:
config["electricity"]["co2limit_enable"] = True
if co2l_value is not None:
config["electricity"]["co2limit"] = (
co2l_value * config["electricity"]["co2base"]
)
gasl_enable, gasl_value = find_opt(opts, "CH4L")
if gasl_enable:
config["electricity"]["gaslimit_enable"] = True
if gasl_value is not None:
config["electricity"]["gaslimit"] = gasl_value * 1e6
if "Ept" in opts:
config["costs"]["emission_prices"]["co2_monthly_prices"] = True
ep_enable, ep_value = find_opt(opts, "Ep")
if ep_enable:
config["costs"]["emission_prices"]["enable"] = True
if ep_value is not None:
config["costs"]["emission_prices"]["co2"] = ep_value
if "ATK" in opts:
config["autarky"]["enable"] = True
if "ATKc" in opts:
config["autarky"]["by_country"] = True
attr_lookup = {
"p": "p_nom_max",
"e": "e_nom_max",
"c": "capital_cost",
"m": "marginal_cost",
}
for o in opts:
flags = ["+e", "+p", "+m", "+c"]
if all(flag not in o for flag in flags):
continue
carrier, attr_factor = o.split("+")
attr = attr_lookup[attr_factor[0]]
factor = float(attr_factor[1:])
if not isinstance(config["adjustments"]["electricity"], dict):
config["adjustments"]["electricity"] = dict()
update_config(
config["adjustments"]["electricity"], {attr: {carrier: factor}}
)
if w.get("sector_opts"):
opts = w.sector_opts.split("-")
if "T" in opts:
config["sector"]["transport"] = True
if "H" in opts:
config["sector"]["heating"] = True
if "B" in opts:
config["sector"]["biomass"] = True
if "I" in opts:
config["sector"]["industry"] = True
if "A" in opts:
config["sector"]["agriculture"] = True
if "CCL" in opts:
config["solving"]["constraints"]["CCL"] = True
eq_value = get_opt(opts, r"^EQ+\d*\.?\d+(c|)")
for o in opts:
if eq_value is not None:
config["solving"]["constraints"]["EQ"] = eq_value
elif "EQ" in o:
config["solving"]["constraints"]["EQ"] = True
break
if "BAU" in opts:
config["solving"]["constraints"]["BAU"] = True
if "SAFE" in opts:
config["solving"]["constraints"]["SAFE"] = True
if nhours := get_opt(opts, r"^\d+(h|sn|seg)$"):
config["clustering"]["temporal"]["resolution_sector"] = nhours
if "decentral" in opts:
config["sector"]["electricity_transmission_grid"] = False
if "noH2network" in opts:
config["sector"]["H2_network"] = False
if "nowasteheat" in opts:
config["sector"]["use_fischer_tropsch_waste_heat"] = False
config["sector"]["use_methanolisation_waste_heat"] = False
config["sector"]["use_haber_bosch_waste_heat"] = False
config["sector"]["use_methanation_waste_heat"] = False
config["sector"]["use_fuel_cell_waste_heat"] = False
config["sector"]["use_electrolysis_waste_heat"] = False
if "nodistrict" in opts:
config["sector"]["district_heating"]["progress"] = 0.0
dg_enable, dg_factor = find_opt(opts, "dist")
if dg_enable:
config["sector"]["electricity_distribution_grid"] = True
if dg_factor is not None:
config["sector"][
"electricity_distribution_grid_cost_factor"
] = dg_factor
if "biomasstransport" in opts:
config["sector"]["biomass_transport"] = True
_, maxext = find_opt(opts, "linemaxext")
if maxext is not None:
config["lines"]["max_extension"] = maxext * 1e3
config["links"]["max_extension"] = maxext * 1e3
_, co2l_value = find_opt(opts, "Co2L")
if co2l_value is not None:
config["co2_budget"] = float(co2l_value)
if co2_distribution := get_opt(opts, r"^(cb)\d+(\.\d+)?(ex|be)$"):
config["co2_budget"] = co2_distribution
if co2_budget := get_opt(opts, r"^(cb)\d+(\.\d+)?$"):
config["co2_budget"] = float(co2_budget[2:])
attr_lookup = {
"p": "p_nom_max",
"e": "e_nom_max",
"c": "capital_cost",
"m": "marginal_cost",
}
for o in opts:
flags = ["+e", "+p", "+m", "+c"]
if all(flag not in o for flag in flags):
continue
carrier, attr_factor = o.split("+")
attr = attr_lookup[attr_factor[0]]
factor = float(attr_factor[1:])
if not isinstance(config["adjustments"]["sector"], dict):
config["adjustments"]["sector"] = dict()
update_config(config["adjustments"]["sector"], {attr: {carrier: factor}})
_, sdr_value = find_opt(opts, "sdr")
if sdr_value is not None:
config["costs"]["social_discountrate"] = sdr_value / 100
_, seq_limit = find_opt(opts, "seq")
if seq_limit is not None:
config["sector"]["co2_sequestration_potential"] = seq_limit
# any config option can be represented in wildcard
for o in opts:
if o.startswith("CF+"):
infix = o.split("+")[1:]
update_config(config, parse(infix))
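To make the per-carrier adjustment tokens and the generic ``CF+`` syntax concrete, here is a small self-contained sketch; it re-implements just these two translation steps for illustration (it is not the helper above), and it assumes ``parse`` reads the last element of the ``+``-chain as YAML, as the recursive definition earlier suggests:

import yaml

def parse(infix):
    # assumed behaviour of the repo's parse(): the last chain element is YAML-parsed
    if len(infix) == 1:
        return yaml.safe_load(infix[0])
    return {infix.pop(0): parse(infix)}

attr_lookup = {"p": "p_nom_max", "e": "e_nom_max",
               "c": "capital_cost", "m": "marginal_cost"}

adjustments = {}
for o in ["solar+c0.5", "onwind+p0.3"]:  # hypothetical sector_opts tokens
    carrier, attr_factor = o.split("+")
    adjustments.setdefault(attr_lookup[attr_factor[0]], {})[carrier] = float(attr_factor[1:])
# adjustments == {"capital_cost": {"solar": 0.5}, "p_nom_max": {"onwind": 0.3}}

override = parse("CF+sector+co2network+true".split("+")[1:])
# override == {"sector": {"co2network": True}}, merged into config via update_config
print(adjustments, override)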
def get_checksum_from_zenodo(file_url):

View File

@ -12,7 +12,7 @@ import numpy as np
import pandas as pd
import pypsa
import xarray as xr
from _helpers import update_config_with_sector_opts
from _helpers import update_config_from_wildcards
from add_existing_baseyear import add_build_year_to_new_assets
from pypsa.clustering.spatial import normed_or_uniform
@ -212,7 +212,7 @@ if __name__ == "__main__":
logging.basicConfig(level=snakemake.config["logging"]["level"])
update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts)
update_config_from_wildcards(snakemake.config, snakemake.wildcards)
logger.info(f"Preparing brownfield from the file {snakemake.input.network_p}")

View File

@ -15,7 +15,7 @@ import numpy as np
import pandas as pd
import pypsa
import xarray as xr
from _helpers import update_config_with_sector_opts
from _helpers import update_config_from_wildcards
from add_electricity import sanitize_carriers
from prepare_sector_network import cluster_heat_buses, define_spatial, prepare_costs
@ -554,10 +554,9 @@ if __name__ == "__main__":
logging.basicConfig(level=snakemake.config["logging"]["level"])
update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts)
update_config_from_wildcards(snakemake.config, snakemake.wildcards)
options = snakemake.params.sector
opts = snakemake.wildcards.sector_opts.split("-")
baseyear = snakemake.params.baseyear
@ -580,7 +579,7 @@ if __name__ == "__main__":
n, grouping_years_power, costs, baseyear
)
if "H" in opts:
if options["heating"]:
time_dep_hp_cop = options["time_dep_hp_cop"]
ashp_cop = (
xr.open_dataarray(snakemake.input.cop_air_total)

View File

@ -582,7 +582,8 @@ if __name__ == "__main__":
plot_balances()
for sector_opts in snakemake.params.sector_opts:
opts = sector_opts.split("-")
if any("cb" in o for o in opts) or snakemake.config["foresight"] == "perfect":
plot_carbon_budget_distribution(snakemake.input.eurostat)
if (
snakemake.params["co2_budget"].startswith("cb")
or snakemake.params["foresight"] == "perfect"
):
plot_carbon_budget_distribution(snakemake.input.eurostat)

View File

@ -62,7 +62,7 @@ import logging
import numpy as np
import pandas as pd
import pypsa
from _helpers import configure_logging, find_opt, get_opt
from _helpers import configure_logging, update_config_from_wildcards
from add_electricity import load_costs, update_transmission_costs
from pypsa.descriptors import expand_series
@ -71,6 +71,28 @@ idx = pd.IndexSlice
logger = logging.getLogger(__name__)
def maybe_adjust_costs_and_potentials(n, adjustments):
if not adjustments:
return
for attr, carrier_factor in adjustments.items():
for carrier, factor in carrier_factor.items():
# beware if factor is 0 and p_nom_max is np.inf, 0*np.inf is nan
if carrier == "AC": # lines do not have carrier
n.lines[attr] *= factor
continue
comps = {
"p_nom_max": {"Generator", "Link", "StorageUnit"},
"e_nom_max": {"Store"},
"capital_cost": {"Generator", "Link", "StorageUnit", "Store"},
"marginal_cost": {"Generator", "Link", "StorageUnit", "Store"},
}
for c in n.iterate_components(comps[attr]):
sel = c.df.index[c.df.carrier == carrier]
c.df.loc[sel, attr] *= factor
logger.info(f"changing {attr} for {carrier} by factor {factor}")
def add_co2limit(n, co2limit, Nyears=1.0):
n.add(
"GlobalConstraint",
@ -278,11 +300,10 @@ if __name__ == "__main__":
from _helpers import mock_snakemake
snakemake = mock_snakemake(
"prepare_network", simpl="", clusters="37", ll="v1.0", opts="Ept"
"prepare_network", simpl="", clusters="37", ll="v1.0", opts="Co2L-4H"
)
configure_logging(snakemake)
opts = snakemake.wildcards.opts.split("-")
update_config_from_wildcards(snakemake.config, snakemake.wildcards)
n = pypsa.Network(snakemake.input[0])
Nyears = n.snapshot_weightings.objective.sum() / 8760.0
@ -296,81 +317,32 @@ if __name__ == "__main__":
set_line_s_max_pu(n, snakemake.params.lines["s_max_pu"])
# temporal averaging
nhours_config = snakemake.params.snapshots.get("resolution", False)
nhours_wildcard = get_opt(opts, r"^\d+h$")
nhours = nhours_wildcard or nhours_config
if nhours:
if nhours := snakemake.params.time_resolution:
n = average_every_nhours(n, nhours)
# segments with package tsam
time_seg_config = snakemake.params.snapshots.get("segmentation", False)
time_seg_wildcard = get_opt(opts, r"^\d+seg$")
time_seg = time_seg_wildcard or time_seg_config
if time_seg:
if time_seg := snakemake.params.time_resolution:
solver_name = snakemake.config["solving"]["solver"]["name"]
n = apply_time_segmentation(n, time_seg.replace("seg", ""), solver_name)
Co2L_config = snakemake.params.co2limit_enable
Co2L_wildcard, co2limit_wildcard = find_opt(opts, "Co2L")
if Co2L_wildcard or Co2L_config:
if co2limit_wildcard is not None:
co2limit = co2limit_wildcard * snakemake.params.co2base
add_co2limit(n, co2limit, Nyears)
logger.info("Setting CO2 limit according to wildcard value.")
else:
add_co2limit(n, snakemake.params.co2limit, Nyears)
logger.info("Setting CO2 limit according to config value.")
if snakemake.params.co2limit_enable:
add_co2limit(n, snakemake.params.co2limit, Nyears)
CH4L_config = snakemake.params.gaslimit_enable
CH4L_wildcard, gaslimit_wildcard = find_opt(opts, "CH4L")
if CH4L_wildcard or CH4L_config:
if gaslimit_wildcard is not None:
gaslimit = gaslimit_wildcard * 1e6
add_gaslimit(n, gaslimit, Nyears)
logger.info("Setting gas usage limit according to wildcard value.")
else:
add_gaslimit(n, snakemake.params.gaslimit, Nyears)
logger.info("Setting gas usage limit according to config value.")
if snakemake.params.gaslimit_enable:
add_gaslimit(n, snakemake.params.gaslimit, Nyears)
for o in opts:
if "+" not in o:
continue
oo = o.split("+")
suptechs = map(lambda c: c.split("-", 2)[0], n.carriers.index)
if oo[0].startswith(tuple(suptechs)):
carrier = oo[0]
# handles only p_nom_max as stores and lines have no potentials
attr_lookup = {"p": "p_nom_max", "c": "capital_cost", "m": "marginal_cost"}
attr = attr_lookup[oo[1][0]]
factor = float(oo[1][1:])
if carrier == "AC": # lines do not have carrier
n.lines[attr] *= factor
else:
comps = {"Generator", "Link", "StorageUnit", "Store"}
for c in n.iterate_components(comps):
sel = c.df.carrier.str.contains(carrier)
c.df.loc[sel, attr] *= factor
maybe_adjust_costs_and_potentials(n, snakemake.params["adjustments"])
emission_prices = snakemake.params.costs["emission_prices"]
Ept_config = emission_prices.get("co2_monthly_prices", False)
Ept_wildcard = "Ept" in opts
Ep_config = emission_prices.get("enable", False)
Ep_wildcard, co2_wildcard = find_opt(opts, "Ep")
if Ept_wildcard or Ept_config:
if emission_prices["co2_monthly_prices"]:
logger.info(
"Setting time dependent emission prices according spot market price"
)
add_dynamic_emission_prices(n)
elif Ep_wildcard or Ep_config:
if co2_wildcard is not None:
logger.info("Setting CO2 prices according to wildcard value.")
add_emission_prices(n, dict(co2=co2_wildcard))
else:
logger.info("Setting CO2 prices according to config value.")
add_emission_prices(
n, dict(co2=snakemake.params.costs["emission_prices"]["co2"])
)
elif emission_prices["enable"]:
add_emission_prices(
n, dict(co2=snakemake.params.costs["emission_prices"]["co2"])
)
ll_type, factor = snakemake.wildcards.ll[0], snakemake.wildcards.ll[1:]
set_transmission_limit(n, ll_type, factor, costs, Nyears)
@ -383,11 +355,8 @@ if __name__ == "__main__":
p_nom_max_ext=snakemake.params.links.get("max_extension", np.inf),
)
autarky_config = snakemake.params.autarky
if "ATK" in opts or autarky_config.get("enable", False):
only_crossborder = False
if "ATKc" in opts or autarky_config.get("by_country", False):
only_crossborder = True
if snakemake.params.autarky["enable"]:
only_crossborder = snakemake.params.autarky["by_country"]
enforce_autarky(n, only_crossborder=only_crossborder)
n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))

View File

@ -12,7 +12,7 @@ import re
import numpy as np
import pandas as pd
import pypsa
from _helpers import update_config_with_sector_opts
from _helpers import update_config_from_wildcards
from add_existing_baseyear import add_build_year_to_new_assets
from pypsa.descriptors import expand_series
from pypsa.io import import_components_from_dataframe
@ -304,17 +304,14 @@ def set_all_phase_outs(n):
n.mremove("Link", remove_i)
def set_carbon_constraints(n, opts):
def set_carbon_constraints(n):
"""
Add global constraints for carbon emissions.
"""
budget = None
for o in opts:
# other budgets
m = re.match(r"^\d+p\d$", o, re.IGNORECASE)
if m is not None:
budget = snakemake.config["co2_budget"][m.group(0)] * 1e9
if budget is not None:
budget = snakemake.config["co2_budget"]
if budget and isinstance(budget, float):
budget *= 1e9 # convert to t CO2
logger.info(f"add carbon budget of {budget}")
n.add(
"GlobalConstraint",
@ -341,7 +338,7 @@ def set_carbon_constraints(n, opts):
)
# set minimum CO2 emission constraint to avoid too fast reduction
if "co2min" in opts:
if "co2min" in snakemake.wildcards.sector_opts.split("-"):
emissions_1990 = 4.53693
emissions_2019 = 3.344096
target_2030 = 0.45 * emissions_1990
@ -487,21 +484,6 @@ def apply_time_segmentation_perfect(
return n
def set_temporal_aggregation_SEG(n, opts, solver_name):
"""
Aggregate network temporally with tsam.
"""
for o in opts:
# segments with package tsam
m = re.match(r"^(\d+)seg$", o, re.IGNORECASE)
if m is not None:
segments = int(m[1])
logger.info(f"Use temporal segmentation with {segments} segments")
n = apply_time_segmentation_perfect(n, segments, solver_name=solver_name)
break
return n
if __name__ == "__main__":
if "snakemake" not in globals():
from _helpers import mock_snakemake
@ -515,14 +497,10 @@ if __name__ == "__main__":
sector_opts="1p7-4380H-T-H-B-I-A-dist1",
)
update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts)
update_config_from_wildcards(snakemake.config, snakemake.wildcards)
# parameters -----------------------------------------------------------
years = snakemake.config["scenario"]["planning_horizons"]
opts = snakemake.wildcards.sector_opts.split("-")
social_discountrate = snakemake.config["costs"]["social_discountrate"]
for o in opts:
if "sdr" in o:
social_discountrate = float(o.replace("sdr", "")) / 100
social_discountrate = snakemake.params.costs["social_discountrate"]
logger.info(
f"Concat networks of investment period {years} with social discount rate of {social_discountrate * 100}%"
@ -532,9 +510,10 @@ if __name__ == "__main__":
n = concat_networks(years)
# temporal aggregate
opts = snakemake.wildcards.sector_opts.split("-")
solver_name = snakemake.config["solving"]["solver"]["name"]
n = set_temporal_aggregation_SEG(n, opts, solver_name)
segments = snakemake.params["clustering"]["temporal"]["resolution_sector"]
if isinstance(segments, (int, float)):
n = apply_time_segmentation_perfect(n, segments, solver_name=solver_name)
# adjust global constraints lv limit if the same for all years
n = adjust_lvlimit(n)
@ -550,8 +529,7 @@ if __name__ == "__main__":
add_H2_boilers(n)
# set carbon constraints
opts = snakemake.wildcards.sector_opts.split("-")
n = set_carbon_constraints(n, opts)
n = set_carbon_constraints(n)
# export network
n.export_to_netcdf(snakemake.output[0])

View File

@ -9,7 +9,6 @@ technologies for the buildings, transport and industry sectors.
import logging
import os
import re
from itertools import product
from types import SimpleNamespace
@ -18,11 +17,12 @@ import numpy as np
import pandas as pd
import pypsa
import xarray as xr
from _helpers import update_config_with_sector_opts
from _helpers import configure_logging, update_config_from_wildcards
from add_electricity import calculate_annuity, sanitize_carriers, sanitize_locations
from build_energy_totals import build_co2_totals, build_eea_co2, build_eurostat_co2
from networkx.algorithms import complement
from networkx.algorithms.connectivity.edge_augmentation import k_edge_augmentation
from prepare_network import maybe_adjust_costs_and_potentials
from pypsa.geo import haversine_pts
from pypsa.io import import_components_from_dataframe
from scipy.stats import beta
@ -190,13 +190,13 @@ def define_spatial(nodes, options):
spatial = SimpleNamespace()
def emission_sectors_from_opts(opts):
def determine_emission_sectors(options):
sectors = ["electricity"]
if "T" in opts:
if options["transport"]:
sectors += ["rail non-elec", "road non-elec"]
if "H" in opts:
if options["heating"]:
sectors += ["residential non-elec", "services non-elec"]
if "I" in opts:
if options["industry"]:
sectors += [
"industrial non-elec",
"industrial processes",
@ -205,7 +205,7 @@ def emission_sectors_from_opts(opts):
"domestic navigation",
"international navigation",
]
if "A" in opts:
if options["agriculture"]:
sectors += ["agriculture"]
return sectors
@ -219,7 +219,7 @@ def get(item, investment_year=None):
def co2_emissions_year(
countries, input_eurostat, opts, emissions_scope, report_year, input_co2, year
countries, input_eurostat, options, emissions_scope, report_year, input_co2, year
):
"""
Calculate CO2 emissions in one specific year (e.g. 1990 or 2018).
@ -237,7 +237,7 @@ def co2_emissions_year(
co2_totals = build_co2_totals(countries, eea_co2, eurostat_co2)
sectors = emission_sectors_from_opts(opts)
sectors = determine_emission_sectors(options)
co2_emissions = co2_totals.loc[countries, sectors].sum().sum()
@ -248,11 +248,12 @@ def co2_emissions_year(
# TODO: move to own rule with sector-opts wildcard?
def build_carbon_budget(o, input_eurostat, fn, emissions_scope, report_year):
def build_carbon_budget(
o, input_eurostat, fn, emissions_scope, report_year, input_co2, options
):
"""
Distribute carbon budget following beta or exponential transition path.
"""
# opts?
if "be" in o:
# beta decay
@ -268,7 +269,7 @@ def build_carbon_budget(o, input_eurostat, fn, emissions_scope, report_year):
e_1990 = co2_emissions_year(
countries,
input_eurostat,
opts,
options,
emissions_scope,
report_year,
input_co2,
@ -279,7 +280,7 @@ def build_carbon_budget(o, input_eurostat, fn, emissions_scope, report_year):
e_0 = co2_emissions_year(
countries,
input_eurostat,
opts,
options,
emissions_scope,
report_year,
input_co2,
@ -756,12 +757,12 @@ def add_dac(n, costs):
)
def add_co2limit(n, nyears=1.0, limit=0.0):
def add_co2limit(n, options, nyears=1.0, limit=0.0):
logger.info(f"Adding CO2 budget limit as per unit of 1990 levels of {limit}")
countries = snakemake.params.countries
sectors = emission_sectors_from_opts(opts)
sectors = determine_emission_sectors(options)
# convert Mt to tCO2
co2_totals = 1e6 * pd.read_csv(snakemake.input.co2_totals_name, index_col=0)
@ -2000,13 +2001,6 @@ def add_heat(n, costs):
if options["retrofitting"]["retro_endogen"]:
logger.info("Add retrofitting endogenously")
# resample heat demand temporal 'heat_demand_r' depending on in config
# specified temporal resolution, to not overestimate retrofitting
hours = list(filter(re.compile(r"^\d+h$", re.IGNORECASE).search, opts))
if len(hours) == 0:
hours = [n.snapshots[1] - n.snapshots[0]]
heat_demand_r = heat_demand.resample(hours[0]).mean()
# retrofitting data 'retro_data' with 'costs' [EUR/m^2] and heat
# demand 'dE' [per unit of original heat demand] for each country and
# different retrofitting strengths [additional insulation thickness in m]
@ -2024,12 +2018,12 @@ def add_heat(n, costs):
# share of space heat demand 'w_space' of total heat demand
w_space = {}
for sector in sectors:
w_space[sector] = heat_demand_r[sector + " space"] / (
heat_demand_r[sector + " space"] + heat_demand_r[sector + " water"]
w_space[sector] = heat_demand[sector + " space"] / (
heat_demand[sector + " space"] + heat_demand[sector + " water"]
)
w_space["tot"] = (
heat_demand_r["services space"] + heat_demand_r["residential space"]
) / heat_demand_r.T.groupby(level=[1]).sum().T
heat_demand["services space"] + heat_demand["residential space"]
) / heat_demand.T.groupby(level=[1]).sum().T
for name in n.loads[
n.loads.carrier.isin([x + " heat" for x in heat_systems])
@ -2059,7 +2053,7 @@ def add_heat(n, costs):
pop_layout.loc[node].fraction * floor_area.loc[ct, "value"] * 10**6
).loc[sec] * f
# total heat demand at node [MWh]
demand = n.loads_t.p_set[name].resample(hours[0]).mean()
demand = n.loads_t.p_set[name]
# space heat demand at node [MWh]
space_heat_demand = demand * w_space[sec][node]
@ -3292,52 +3286,6 @@ def remove_h2_network(n):
n.stores.drop("EU H2 Store", inplace=True)
def maybe_adjust_costs_and_potentials(n, opts):
for o in opts:
flags = ["+e", "+p", "+m", "+c"]
if all(flag not in o for flag in flags):
continue
oo = o.split("+")
carrier_list = np.hstack(
(
n.generators.carrier.unique(),
n.links.carrier.unique(),
n.stores.carrier.unique(),
n.storage_units.carrier.unique(),
)
)
suptechs = map(lambda c: c.split("-", 2)[0], carrier_list)
if oo[0].startswith(tuple(suptechs)):
carrier = oo[0]
attr_lookup = {
"p": "p_nom_max",
"e": "e_nom_max",
"c": "capital_cost",
"m": "marginal_cost",
}
attr = attr_lookup[oo[1][0]]
factor = float(oo[1][1:])
# beware if factor is 0 and p_nom_max is np.inf, 0*np.inf is nan
if carrier == "AC": # lines do not have carrier
n.lines[attr] *= factor
else:
if attr == "p_nom_max":
comps = {"Generator", "Link", "StorageUnit"}
elif attr == "e_nom_max":
comps = {"Store"}
else:
comps = {"Generator", "Link", "StorageUnit", "Store"}
for c in n.iterate_components(comps):
if carrier == "solar":
sel = c.df.carrier.str.contains(
carrier
) & ~c.df.carrier.str.contains("solar rooftop")
else:
sel = c.df.carrier.str.contains(carrier)
c.df.loc[sel, attr] *= factor
logger.info(f"changing {attr} for {carrier} by factor {factor}")
def limit_individual_line_extension(n, maxext):
logger.info(f"Limiting new HVAC and HVDC extensions to {maxext} MW")
n.lines["s_nom_max"] = n.lines["s_nom"] + maxext
@ -3507,31 +3455,31 @@ def apply_time_segmentation(
return n
def set_temporal_aggregation(n, opts, solver_name):
def set_temporal_aggregation(n, resolution, solver_name):
"""
Aggregate network temporally.
"""
for o in opts:
# temporal averaging
m = re.match(r"^\d+h$", o, re.IGNORECASE)
if m is not None:
n = average_every_nhours(n, m.group(0))
break
# representative snapshots
m = re.match(r"(^\d+)sn$", o, re.IGNORECASE)
if m is not None:
sn = int(m[1])
logger.info(f"Use every {sn} snapshot as representative")
n.set_snapshots(n.snapshots[::sn])
n.snapshot_weightings *= sn
break
# segments with package tsam
m = re.match(r"^(\d+)seg$", o, re.IGNORECASE)
if m is not None:
segments = int(m[1])
logger.info(f"Use temporal segmentation with {segments} segments")
n = apply_time_segmentation(n, segments, solver_name=solver_name)
break
if not resolution:
return n
# representative snapshots
if "sn" in resolution.lower():
sn = int(resolution[:-2])
logger.info("Use every %s snapshot as representative", sn)
n.set_snapshots(n.snapshots[::sn])
n.snapshot_weightings *= sn
# segments with package tsam
elif "seg" in resolution.lower():
segments = int(resolution[:-3])
logger.info("Use temporal segmentation with %s segments", segments)
n = apply_time_segmentation(n, segments, solver_name=solver_name)
# temporal averaging
elif "h" in resolution.lower():
logger.info("Aggregate to frequency %s", resolution)
n = average_every_nhours(n, resolution)
return n
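The rewritten function dispatches on the resolution string instead of looping over wildcard tokens. A small stand-alone illustration of which branch each kind of value takes (the values are examples; the slicing mirrors the code above):

for resolution in [False, "365sn", "4380seg", "3H"]:
    if not resolution:
        branch = "no temporal aggregation"
    elif "sn" in resolution.lower():
        branch = f"keep every {int(resolution[:-2])}-th snapshot as representative"
    elif "seg" in resolution.lower():
        branch = f"tsam segmentation into {int(resolution[:-3])} segments"
    elif "h" in resolution.lower():
        branch = f"average to a {resolution} frequency"
    print(resolution, "->", branch)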
@ -3600,14 +3548,12 @@ if __name__ == "__main__":
planning_horizons="2030",
)
logging.basicConfig(level=snakemake.config["logging"]["level"])
configure_logging(snakemake)
update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts)
update_config_from_wildcards(snakemake.config, snakemake.wildcards)
options = snakemake.params.sector
opts = snakemake.wildcards.sector_opts.split("-")
investment_year = int(snakemake.wildcards.planning_horizons[-4:])
n = pypsa.Network(snakemake.input.network)
@ -3645,56 +3591,34 @@ if __name__ == "__main__":
add_storage_and_grids(n, costs)
# TODO merge with opts cost adjustment below
for o in opts:
if o[:4] == "dist":
options["electricity_distribution_grid"] = True
options["electricity_distribution_grid_cost_factor"] = float(
o[4:].replace("p", ".").replace("m", "-")
)
if o == "biomasstransport":
options["biomass_transport"] = True
if "nodistrict" in opts:
options["district_heating"]["progress"] = 0.0
if "nowasteheat" in opts:
logger.info("Disabling waste heat.")
options["use_fischer_tropsch_waste_heat"] = False
options["use_methanolisation_waste_heat"] = False
options["use_haber_bosch_waste_heat"] = False
options["use_methanation_waste_heat"] = False
options["use_fuel_cell_waste_heat"] = False
options["use_electrolysis_waste_heat"] = False
if "T" in opts:
if options["transport"]:
add_land_transport(n, costs)
if "H" in opts:
if options["heating"]:
add_heat(n, costs)
if "B" in opts:
if options["biomass"]:
add_biomass(n, costs)
if options["ammonia"]:
add_ammonia(n, costs)
if "I" in opts:
if options["industry"]:
add_industry(n, costs)
if "H" in opts:
if options["heating"]:
add_waste_heat(n)
if "A" in opts: # requires H and I
if options["agriculture"]: # requires H and I
add_agriculture(n, costs)
if options["dac"]:
add_dac(n, costs)
if "decentral" in opts:
if not options["electricity_transmission_grid"]:
decentral(n)
if "noH2network" in opts:
if not options["H2_network"]:
remove_h2_network(n)
if options["co2network"]:
@ -3704,51 +3628,39 @@ if __name__ == "__main__":
add_allam(n, costs)
solver_name = snakemake.config["solving"]["solver"]["name"]
n = set_temporal_aggregation(n, opts, solver_name)
resolution = snakemake.params.time_resolution
n = set_temporal_aggregation(n, resolution, solver_name)
limit_type = "config"
limit = get(snakemake.params.co2_budget, investment_year)
for o in opts:
if "cb" not in o:
continue
limit_type = "carbon budget"
co2_budget = snakemake.params.co2_budget
if isinstance(co2_budget, str) and co2_budget.startswith("cb"):
fn = "results/" + snakemake.params.RDIR + "/csvs/carbon_budget_distribution.csv"
if not os.path.exists(fn):
emissions_scope = snakemake.params.emissions_scope
report_year = snakemake.params.eurostat_report_year
input_co2 = snakemake.input.co2
build_carbon_budget(
o,
co2_budget,
snakemake.input.eurostat,
fn,
emissions_scope,
report_year,
input_co2,
options,
)
co2_cap = pd.read_csv(fn, index_col=0).squeeze()
limit = co2_cap.loc[investment_year]
break
for o in opts:
if "Co2L" not in o:
continue
limit_type = "wildcard"
limit = o[o.find("Co2L") + 4 :]
limit = float(limit.replace("p", ".").replace("m", "-"))
break
logger.info(f"Add CO2 limit from {limit_type}")
add_co2limit(n, nyears, limit)
else:
limit = get(co2_budget, investment_year)
add_co2limit(n, options, nyears, limit)
for o in opts:
if not o[:10] == "linemaxext":
continue
maxext = float(o[10:]) * 1e3
maxext = snakemake.params["lines"]["max_extension"]
if maxext is not None:
limit_individual_line_extension(n, maxext)
break
if options["electricity_distribution_grid"]:
insert_electricity_distribution_grid(n, costs)
maybe_adjust_costs_and_potentials(n, opts)
maybe_adjust_costs_and_potentials(n, snakemake.params["adjustments"])
if options["gas_distribution_grid"]:
insert_gas_distribution_costs(n, costs)

View File

@ -37,7 +37,7 @@ import pandas as pd
import pypsa
import xarray as xr
from _benchmark import memory_logger
from _helpers import configure_logging, get_opt, update_config_with_sector_opts
from _helpers import configure_logging, update_config_from_wildcards
from pypsa.descriptors import get_activity_mask
from pypsa.descriptors import get_switchable_as_dense as get_as_dense
@ -178,16 +178,10 @@ def _add_land_use_constraint_m(n, planning_horizons, config):
n.generators.p_nom_max.clip(lower=0, inplace=True)
def add_co2_sequestration_limit(n, config, limit=200):
def add_co2_sequestration_limit(n, limit=200):
"""
Add a global constraint on the amount of Mt CO2 that can be sequestered.
"""
limit = limit * 1e6
for o in opts:
if "seq" not in o:
continue
limit = float(o[o.find("seq") + 3 :]) * 1e6
break
if not n.investment_periods.empty:
periods = n.investment_periods
@ -200,7 +194,7 @@ def add_co2_sequestration_limit(n, config, limit=200):
"GlobalConstraint",
names,
sense=">=",
constant=-limit,
constant=-limit * 1e6,
type="operational_limit",
carrier_attribute="co2 sequestered",
investment_period=periods,
@ -260,7 +254,7 @@ def add_carbon_budget_constraint(n, snapshots):
n.model.add_constraints(lhs <= rhs, name=f"GlobalConstraint-{name}")
def add_max_growth(n, config):
def add_max_growth(n):
"""
Add maximum growth rates for different carriers.
"""
@ -393,11 +387,11 @@ def prepare_network(
if foresight == "perfect":
n = add_land_use_constraint_perfect(n)
if snakemake.params["sector"]["limit_max_growth"]["enable"]:
n = add_max_growth(n, config)
n = add_max_growth(n)
if n.stores.carrier.eq("co2 sequestered").any():
limit = co2_sequestration_potential
add_co2_sequestration_limit(n, config, limit=limit)
add_co2_sequestration_limit(n, limit=limit)
return n
@ -831,30 +825,20 @@ def extra_functionality(n, snapshots):
location to add them. The arguments ``opts`` and
``snakemake.config`` are expected to be attached to the network.
"""
opts = n.opts
config = n.config
constraints = config["solving"].get("constraints", {})
if (
"BAU" in opts or constraints.get("BAU", False)
) and n.generators.p_nom_extendable.any():
if constraints["BAU"] and n.generators.p_nom_extendable.any():
add_BAU_constraints(n, config)
if (
"SAFE" in opts or constraints.get("SAFE", False)
) and n.generators.p_nom_extendable.any():
if constraints["SAFE"] and n.generators.p_nom_extendable.any():
add_SAFE_constraints(n, config)
if (
"CCL" in opts or constraints.get("CCL", False)
) and n.generators.p_nom_extendable.any():
if constraints["CCL"] and n.generators.p_nom_extendable.any():
add_CCL_constraints(n, config)
reserve = config["electricity"].get("operational_reserve", {})
if reserve.get("activate"):
add_operational_reserve_margin(n, snapshots, config)
EQ_config = constraints.get("EQ", False)
EQ_wildcard = get_opt(opts, r"^EQ+[0-9]*\.?[0-9]+(c|)")
EQ_o = EQ_wildcard or EQ_config
if EQ_o:
if EQ_o := constraints["EQ"]:
add_EQ_constraints(n, EQ_o.replace("EQ", ""))
add_battery_constraints(n)
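The ``BAU``, ``SAFE``, ``CCL`` and ``EQ`` switches are now read from ``config["solving"]["constraints"]`` rather than from the wildcard string; the keys match what update_config_from_wildcards sets. A sketch with hypothetical values, including the walrus-style EQ check used above (a string such as ``0.7c`` carries the equity level, with the trailing ``c`` presumably selecting the per-country variant as in the former ``EQ0.7c`` wildcard):

constraints = {"CCL": False, "BAU": False, "SAFE": False, "EQ": "0.7c"}
if EQ_o := constraints["EQ"]:
    level = EQ_o.replace("EQ", "")  # mirrors the call above; a plain "0.7c" is unchanged
    print("add_EQ_constraints with level", level)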
@ -877,7 +861,7 @@ def extra_functionality(n, snapshots):
custom_extra_functionality(n, snapshots, snakemake)
def solve_network(n, config, solving, opts="", **kwargs):
def solve_network(n, config, solving, **kwargs):
set_of_options = solving["solver"]["options"]
cf_solving = solving["options"]
@ -905,7 +889,6 @@ def solve_network(n, config, solving, opts="", **kwargs):
# add to network for extra_functionality
n.config = config
n.opts = opts
if rolling_horizon:
kwargs["horizon"] = cf_solving.get("horizon", 365)
@ -950,15 +933,8 @@ if __name__ == "__main__":
planning_horizons="2030",
)
configure_logging(snakemake)
if "sector_opts" in snakemake.wildcards.keys():
update_config_with_sector_opts(
snakemake.config, snakemake.wildcards.sector_opts
)
update_config_from_wildcards(snakemake.config, snakemake.wildcards)
opts = snakemake.wildcards.opts
if "sector_opts" in snakemake.wildcards.keys():
opts += "-" + snakemake.wildcards.sector_opts
opts = [o for o in opts.split("-") if o != ""]
solve_opts = snakemake.params.solving["options"]
np.random.seed(solve_opts.get("seed", 123))
@ -981,7 +957,6 @@ if __name__ == "__main__":
n,
config=snakemake.config,
solving=snakemake.params.solving,
opts=opts,
log_fn=snakemake.log.solver,
)

View File

@ -12,7 +12,7 @@ import logging
import numpy as np
import pypsa
from _helpers import configure_logging, update_config_with_sector_opts
from _helpers import configure_logging, update_config_from_wildcards
from solve_network import prepare_network, solve_network
logger = logging.getLogger(__name__)
@ -34,10 +34,8 @@ if __name__ == "__main__":
)
configure_logging(snakemake)
update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts)
update_config_from_wildcards(snakemake.config, snakemake.wildcards)
opts = f"{snakemake.wildcards.opts}-{snakemake.wildcards.sector_opts}".split("-")
opts = [o for o in opts if o != ""]
solve_opts = snakemake.params.options
np.random.seed(solve_opts.get("seed", 123))
@ -46,9 +44,7 @@ if __name__ == "__main__":
n.optimize.fix_optimal_capacities()
n = prepare_network(n, solve_opts, config=snakemake.config)
n = solve_network(
n, config=snakemake.config, opts=opts, log_fn=snakemake.log.solver
)
n = solve_network(n, config=snakemake.config, log_fn=snakemake.log.solver)
n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
n.export_to_netcdf(snakemake.output[0])