Clustering: build renewable profiles and add all assets after clustering (#1201)
* Cluster first: build renewable profiles and add all assets after clustering
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)
* correction: pass landfall_lengths through functions
* assign landfall_lengths correctly
* remove parameter add_land_use_constraint
* fix network_dict
* calculate distance to shoreline, remove underwater_fraction
* adjust simplification parameter to exclude Crete from offshore wind connections
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)
* remove unused geth2015 hydro capacities
* remove remaining traces of {simpl} wildcard
* add release notes and update workflow graphics
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: lisazeyen <lisa.zeyen@web.de>
This commit is contained in:
parent
40351fbf9b
commit
013b705ee4
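One of the headline changes in this commit is how offshore wind connection costs are determined: the underwater distance to the shoreline plus a configurable ``landfall_length`` (default 10 km), instead of the distance to the region's centroid (see the release notes in the diff below). The following is only an illustrative sketch of that idea; it is not code from this commit, and the geometries, names and numbers are made up.

.. code:: python

    # Illustrative sketch only -- not the PyPSA-Eur implementation.
    # Idea: connection length = underwater distance to the shoreline
    # plus a fixed onshore landfall length (new ``landfall_length`` option).
    from shapely.geometry import LineString, Point

    shoreline = LineString([(0, 0), (100_000, 0)])   # toy shoreline, coordinates in metres
    site = Point(40_000, 25_000)                     # toy offshore wind site

    landfall_length_km = 10.0                        # default in config.default.yaml
    underwater_km = site.distance(shoreline) / 1e3   # 25 km in this toy setup
    connection_length_km = underwater_km + landfall_length_km
    print(connection_length_km)                      # -> 35.0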
.gitignore (vendored): 2 lines changed
@@ -73,3 +73,5 @@ d1gam3xoknrgr2.cloudfront.net/
 *.ipynb

 merger-todos.md
+
+*.html
@@ -39,7 +39,6 @@ localrules:


 wildcard_constraints:
-    simpl="[a-zA-Z0-9]*",
     clusters="[0-9]+(m|c)?|all",
     ll=r"(v|c)([0-9\.]+|opt)",
     opts=r"[-+a-zA-Z0-9\.]*",
@@ -37,8 +37,6 @@ foresight: overnight
 # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#scenario
 # Wildcard docs in https://pypsa-eur.readthedocs.io/en/latest/wildcards.html
 scenario:
-  simpl:
-  - ''
   ll:
   - vopt
   clusters:
@@ -188,6 +186,7 @@ renewable:
     max_shore_distance: 30000
     excluder_resolution: 200
     clip_p_max_pu: 1.e-2
+    landfall_length: 10
   offwind-dc:
     cutout: europe-2013-sarah3-era5
     resource:
@@ -205,6 +204,7 @@ renewable:
     min_shore_distance: 30000
     excluder_resolution: 200
     clip_p_max_pu: 1.e-2
+    landfall_length: 10
  offwind-float:
     cutout: europe-2013-sarah3-era5
     resource:
@@ -225,6 +225,7 @@ renewable:
     min_depth: 60
     max_depth: 1000
     clip_p_max_pu: 1.e-2
+    landfall_length: 10
   solar:
     cutout: europe-2013-sarah3-era5
     resource:
@@ -301,6 +302,7 @@ links:
   p_max_pu: 1.0
   p_nom_max: .inf
   max_extension: 30000 #MW
+  length_factor: 1.25
   under_construction: 'keep' # 'zero': set capacity to zero, 'remove': remove, 'keep': with full capacity for lines in grid extract

 # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#transmission_projects
@@ -335,6 +337,9 @@ load:
   scaling_factor: 1.0
   fixed_year: false # false or year (e.g. 2013)
   supplement_synthetic: true
+  distribution_key:
+    gdp: 0.6
+    population: 0.4

 # docs
 # TODO: PyPSA-Eur merge issue in prepare_sector_network.py
@@ -849,14 +854,13 @@ clustering:
   focus_weights: false
   simplify_network:
     to_substations: false
-    algorithm: kmeans # choose from: [hac, kmeans]
-    feature: solar+onwind-time
-    exclude_carriers: []
     remove_stubs: true
-    remove_stubs_across_borders: true
+    remove_stubs_across_borders: false
   cluster_network:
     algorithm: kmeans
-    feature: solar+onwind-time
+    hac_features:
+    - wnd100m
+    - influx_direct
     exclude_carriers: []
     consider_efficiency_classes: false
   aggregation_strategies:
@@ -10,8 +10,6 @@ run:
   shared_cutouts: true

 scenario:
-  simpl:
-  - ''
   ll:
   - vopt
   clusters:
@@ -10,8 +10,6 @@ foresight: perfect
 # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#scenario
 # Wildcard docs in https://pypsa-eur.readthedocs.io/en/latest/wildcards.html
 scenario:
-  simpl:
-  - ''
   ll:
   - v1.0
   clusters:
@@ -2,16 +2,14 @@
 focus_weights,,,Optionally specify the focus weights for the clustering of countries. For instance: `DE: 0.8` will distribute 80% of all nodes to Germany and 20% to the rest of the countries.
 simplify_network,,,
 -- to_substations,bool,"{'true','false'}","Aggregates all nodes without power injection (positive or negative, i.e. demand or generation) to electrically closest ones"
--- algorithm,str,"One of {‘kmeans’, ‘hac’, ‘modularity‘}",
--- feature,str,"Str in the format ‘carrier1+carrier2+...+carrierN-X’, where CarrierI can be from {‘solar’, ‘onwind’, ‘offwind’, ‘ror’} and X is one of {‘cap’, ‘time’}.",
 -- exclude_carriers,list,"List of Str like [ 'solar', 'onwind'] or empy list []","List of carriers which will not be aggregated. If empty, all carriers will be aggregated."
 -- remove stubs,bool,"{'true','false'}",Controls whether radial parts of the network should be recursively aggregated. Defaults to true.
 -- remove_stubs_across_borders,bool,"{'true','false'}",Controls whether radial parts of the network should be recursively aggregated across borders. Defaults to true.
 cluster_network,,,
 -- algorithm,str,"One of {‘kmeans’, ‘hac’}",
--- feature,str,"Str in the format ‘carrier1+carrier2+...+carrierN-X’, where CarrierI can be from {‘solar’, ‘onwind’, ‘offwind’, ‘ror’} and X is one of {‘cap’, ‘time’}.",
+-- hac_features,list,"List of meteorological variables contained in the weather data cutout that should be considered for hierarchical clustering.",
--- exclude_carriers,list,"List of Str like [ 'solar', 'onwind'] or empy list []","List of carriers which will not be aggregated. If empty, all carriers will be aggregated."
+exclude_carriers,list,"List of Str like [ 'solar', 'onwind'] or empy list []","List of carriers which will not be aggregated. If empty, all carriers will be aggregated."
--- consider_efficiency_classes,bool,"{'true','false'}","Aggregated each carriers into the top 10-quantile (high), the bottom 90-quantile (low), and everything in between (medium)."
+consider_efficiency_classes,bool,"{'true','false'}","Aggregated each carriers into the top 10-quantile (high), the bottom 90-quantile (low), and everything in between (medium)."
 aggregation_strategies,,,
 -- generators,,,
 -- -- {key},str,"{key} can be any of the component of the generator (str). It’s value can be any that can be converted to pandas.Series using getattr(). For example one of {min, max, sum}.","Aggregates the component according to the given strategy. For example, if sum, then all values within each cluster are summed to represent the new generator."
@@ -27,7 +27,7 @@ custom_powerplants,--,"use `pandas.query <https://pandas.pydata.org/pandas-docs/
 ,,,
 everywhere_powerplants,--,"Any subset of {nuclear, oil, OCGT, CCGT, coal, lignite, geothermal, biomass}","List of conventional power plants to add to every node in the model with zero initial capacity. To be used in combination with ``extendable_carriers`` to allow for building conventional powerplants irrespective of existing locations."
 ,,,
-conventional_carriers,--,"Any subset of {nuclear, oil, OCGT, CCGT, coal, lignite, geothermal, biomass}","List of conventional power plants to include in the model from ``resources/powerplants.csv``. If an included carrier is also listed in ``extendable_carriers``, the capacity is taken as a lower bound."
+conventional_carriers,--,"Any subset of {nuclear, oil, OCGT, CCGT, coal, lignite, geothermal, biomass}","List of conventional power plants to include in the model from ``resources/powerplants_s_{clusters}.csv``. If an included carrier is also listed in ``extendable_carriers``, the capacity is taken as a lower bound."
 ,,,
 renewable_carriers,--,"Any subset of {solar, onwind, offwind-ac, offwind-dc, offwind-float, hydro}",List of renewable generators to include in the model.
 estimate_renewable_capacities,,,
@@ -4,5 +4,5 @@ retrieve_databundle,bool,"{true, false}","Switch to retrieve databundle from zen
 retrieve_cost_data,bool,"{true, false}","Switch to retrieve technology cost data from `technology-data repository <https://github.com/PyPSA/technology-data>`_."
 build_cutout,bool,"{true, false}","Switch to enable the building of cutouts via the rule :mod:`build_cutout`."
 retrieve_cutout,bool,"{true, false}","Switch to enable the retrieval of cutouts from zenodo with :mod:`retrieve_cutout`."
-custom_busmap,bool,"{true, false}","Switch to enable the use of custom busmaps in rule :mod:`cluster_network`. If activated the rule looks for provided busmaps at ``data/busmaps/elec_s{simpl}_{clusters}_{base_network}.csv`` which should have the same format as ``resources/busmap_elec_s{simpl}_{clusters}.csv``, i.e. the index should contain the buses of ``networks/elec_s{simpl}.nc``. {base_network} is the name of the selected base_network in electricity, e.g. ``gridkit``, ``osm-prebuilt``, or ``osm-raw``."
+custom_busmap,bool,"{true, false}","Switch to enable the use of custom busmaps in rule :mod:`cluster_network`. If activated the rule looks for provided busmaps at ``data/busmaps/base_s_{clusters}_{base_network}.csv`` which should have the same format as ``resources/busmap_base_s_{clusters}.csv``, i.e. the index should contain the buses of ``networks/base_s.nc``. {base_network} is the name of the selected base_network in electricity, e.g. ``gridkit``, ``osm-prebuilt``, or ``osm-raw``."
 drop_leap_day,bool,"{true, false}","Switch to drop February 29 from all time-dependent data in leap years"
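For orientation, a custom busmap matching the renamed format described above could be produced as follows. Only the shape of the file is taken from the documentation row (index = buses of ``networks/base_s.nc``, values = assigned clusters); all bus and cluster names here are hypothetical.

.. code:: python

    # Hypothetical example of a custom busmap file (names are made up).
    import pandas as pd

    custom_busmap = pd.Series(
        {"bus0": "DE0 0", "bus1": "DE0 0", "bus2": "FR0 1"}, name="busmap"
    )
    # would be stored e.g. as data/busmaps/base_s_{clusters}_{base_network}.csv
    custom_busmap.to_csv("custom_busmap_example.csv")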
@@ -2,4 +2,5 @@
 p_max_pu,--,"Value in [0.,1.]","Correction factor for link capacities ``p_nom``."
 p_nom_max,MW,"float","Global upper limit for the maximum capacity of each extendable DC link."
 max_extension,MW,"float","Upper limit for the extended capacity of each extendable DC link."
+length_factor,--,float,"Correction factor to account for the fact that buses are *not* connected by links through air-line distance."
 under_construction,--,"One of {'zero': set capacity to zero, 'remove': remove completely, 'keep': keep with full capacity}","Specifies how to handle lines which are currently under construction."
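The new ``length_factor`` entry documented above scales the air-line (great-circle) distance between two bus locations to approximate a realistic route length. A minimal sketch of that correction, with made-up coordinates and the default factor of 1.25 from ``config.default.yaml``:

.. code:: python

    # Sketch of the length_factor idea (illustrative, not project code).
    from math import asin, cos, radians, sin, sqrt

    def haversine_km(lon1, lat1, lon2, lat2):
        """Great-circle distance in km between two (lon, lat) points."""
        lon1, lat1, lon2, lat2 = map(radians, (lon1, lat1, lon2, lat2))
        a = sin((lat2 - lat1) / 2) ** 2 + cos(lat1) * cos(lat2) * sin((lon2 - lon1) / 2) ** 2
        return 2 * 6371.0 * asin(sqrt(a))

    length_factor = 1.25                              # cf. ``links: length_factor:``
    airline_km = haversine_km(4.9, 52.4, 1.3, 52.6)   # two made-up bus locations
    link_length_km = length_factor * airline_km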
@@ -5,3 +5,6 @@ manual_adjustments,bool,"{true, false}","Whether to adjust the load data manuall
 scaling_factor,--,float,"Global correction factor for the load time series."
 fixed_year,--,Year or False,"To specify a fixed year for the load time series that deviates from the snapshots' year"
 supplement_synthetic,bool,"{true, false}","Whether to supplement missing data for selected time period should be supplemented by synthetic data from https://zenodo.org/records/10820928."
+distribution_key,--,--,"Distribution key for spatially disaggregating the per-country electricity demand data."
+-- gdp,float,"[0, 1]","Weighting factor for the GDP data in the distribution key."
+-- population,float,"[0, 1]","Weighting factor for the population data in the distribution key."
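The ``distribution_key`` options documented above blend GDP and population shares into one key for spatially disaggregating per-country demand. The following is a hedged sketch of such a blending, not the PyPSA-Eur code; region names, indicator values and the national load series are invented, while the 0.6/0.4 weights are the new defaults.

.. code:: python

    # Illustrative sketch of a GDP/population distribution key (not project code).
    import pandas as pd

    regions = pd.DataFrame(
        {"gdp": [120.0, 80.0], "population": [4.0, 6.0]},  # invented values
        index=["region_a", "region_b"],
    )
    weights = {"gdp": 0.6, "population": 0.4}  # cf. ``load: distribution_key:``

    # normalise each indicator within the country, then blend with the weights
    shares = sum(w * regions[col] / regions[col].sum() for col, w in weights.items())

    national_load = pd.Series(
        [50.0, 55.0], index=pd.date_range("2013-01-01", periods=2, freq="h")
    )
    regional_load = pd.DataFrame({r: national_load * s for r, s in shares.items()})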
@@ -15,3 +15,4 @@ max_depth,m,float,"Maximum sea water depth at which wind turbines can be build.
 min_shore_distance,m,float,"Minimum distance to the shore below which wind turbines cannot be build. Such areas close to the shore are excluded in the process of calculating the AC-connected offshore wind potential."
 max_shore_distance,m,float,"Maximum distance to the shore above which wind turbines cannot be build. Such areas close to the shore are excluded in the process of calculating the AC-connected offshore wind potential."
 clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero."
+landfall_length,km,float,"Fixed length of the cable connection that is onshore landfall in km. If 'centroid', the length is calculated as the distance to centroid of the onshore bus."
@@ -15,3 +15,4 @@ max_depth,m,float,"Maximum sea water depth at which wind turbines can be build.
 min_shore_distance,m,float,"Minimum distance to the shore below which wind turbines cannot be build."
 max_shore_distance,m,float,"Maximum distance to the shore above which wind turbines cannot be build."
 clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero."
+landfall_length,km,float,"Fixed length of the cable connection that is onshore landfall in km. If 'centroid', the length is calculated as the distance to centroid of the onshore bus."
@@ -1,5 +1,4 @@
 ,Unit,Values,Description
-simpl,--,cf. :ref:`simpl`,"List of ``{simpl}`` wildcards to run."
 clusters,--,cf. :ref:`clusters`,"List of ``{clusters}`` wildcards to run."
 ll,--,cf. :ref:`ll`,"List of ``{ll}`` wildcards to run."
 opts,--,cf. :ref:`opts`,"List of ``{opts}`` wildcards to run."
@@ -87,7 +87,7 @@ facilitate running multiple scenarios through a single command

 For each wildcard, a **list of values** is provided. The rule
 ``solve_all_elec_networks`` will trigger the rules for creating
-``results/networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc`` for **all
+``results/networks/base_s_{clusters}_elec_l{ll}_{opts}.nc`` for **all
 combinations** of the provided wildcard values as defined by Python's
 `itertools.product(...)
 <https://docs.python.org/2/library/itertools.html#itertools.product>`__ function
Binary image file changed (before: 122 KiB, after: 436 KiB); not shown.
@@ -35,7 +35,7 @@ For instance, an invocation to

 .. code:: bash

-    .../pypsa-eur % snakemake -call results/networks/elec_s_128_ec_lvopt_.nc
+    .../pypsa-eur % snakemake -call results/networks/base_s_128_elec_lvopt_.nc

 follows this dependency graph

@@ -50,7 +50,7 @@ preceding rules which another rule takes as input data.

 .. note::
     The dependency graph was generated using
-    ``snakemake --dag results/networks/elec_s_128_ec_lvopt_.nc -F | sed -n "/digraph/,/}/p" | dot -Tpng -o doc/img/intro-workflow.png``
+    ``snakemake --dag results/networks/base_s_128_elec_lvopt_.nc -F | sed -n "/digraph/,/}/p" | dot -Tpng -o doc/img/intro-workflow.png``

 For the use of ``snakemake``, it makes sense to familiarize yourself quickly
 with the `basic tutorial
|
@ -27,11 +27,12 @@ Then the process continues by calculating conventional power plant capacities, p
|
|||||||
|
|
||||||
- :mod:`build_powerplants` for today's thermal power plant capacities using `powerplantmatching <https://github.com/PyPSA/powerplantmatching>`__ allocating these to the closest substation for each powerplant,
|
- :mod:`build_powerplants` for today's thermal power plant capacities using `powerplantmatching <https://github.com/PyPSA/powerplantmatching>`__ allocating these to the closest substation for each powerplant,
|
||||||
- :mod:`build_ship_raster` for building shipping traffic density,
|
- :mod:`build_ship_raster` for building shipping traffic density,
|
||||||
|
- :mod:`determine_availability_matrix` for the land eligibility analysis of each cutout grid cell for PV, onshore and offshore wind,
|
||||||
- :mod:`build_renewable_profiles` for the hourly capacity factors and installation potentials constrained by land-use in each substation's Voronoi cell for PV, onshore and offshore wind, and
|
- :mod:`build_renewable_profiles` for the hourly capacity factors and installation potentials constrained by land-use in each substation's Voronoi cell for PV, onshore and offshore wind, and
|
||||||
- :mod:`build_hydro_profile` for the hourly per-unit hydro power availability time series.
|
- :mod:`build_hydro_profile` for the hourly per-unit hydro power availability time series.
|
||||||
|
|
||||||
The central rule :mod:`add_electricity` then ties all the different data inputs
|
The central rule :mod:`add_electricity` then ties all the different data inputs
|
||||||
together into a detailed PyPSA network stored in ``networks/elec.nc``.
|
together into a detailed PyPSA network stored in ``networks/base_s_{clusters}_elec.nc``.
|
||||||
|
|
||||||
.. _cutout:
|
.. _cutout:
|
||||||
|
|
||||||
@@ -115,6 +116,15 @@ Rule ``determine_availability_matrix_MD_UA``

 .. automodule:: determine_availability_matrix_MD_UA


+.. _renewableprofiles:
+
+Rule ``determine_availability_matrix``
+======================================
+
+.. automodule:: determine_availability_matrix
+
+
 .. _renewableprofiles:

 Rule ``build_renewable_profiles``
|
|||||||
===============================
|
===============================
|
||||||
|
|
||||||
.. automodule:: build_hydro_profile
|
.. automodule:: build_hydro_profile
|
||||||
|
|
||||||
.. _electricity:
|
|
||||||
|
|
||||||
Rule ``add_electricity``
|
|
||||||
=============================
|
|
||||||
|
|
||||||
.. automodule:: add_electricity
|
|
||||||
|
@@ -8,7 +8,8 @@
 Release Notes
 ##########################################

-.. Upcoming Release
+Upcoming Release
+================

 PyPSA-Eur 0.13.0 (13th September 2024)
 ======================================
|
|||||||
* The sources of nearly all data files are now listed in the documentation.
|
* The sources of nearly all data files are now listed in the documentation.
|
||||||
(https://github.com/PyPSA/pypsa-eur/pull/1284)
|
(https://github.com/PyPSA/pypsa-eur/pull/1284)
|
||||||
|
|
||||||
|
* Rearranged workflow to cluster the electricity network before calculating
|
||||||
|
renewable profiles and adding further electricity system components.
|
||||||
|
|
||||||
|
- Moved rules ``simplify_network`` and ``cluster_network`` before
|
||||||
|
``add_electricity`` and ``build_renewable_profiles``.
|
||||||
|
|
||||||
|
- Split rule ``build_renewable_profiles`` into two separate rules,
|
||||||
|
``determine_availability_matrix`` for land eligibility analysis and
|
||||||
|
``build_renewable_profiles``, which now only computes the profiles and total
|
||||||
|
potentials from the pre-computed availability matrix.
|
||||||
|
|
||||||
|
- Removed variables ``weight``, ``underwater_fraction``, and ``potential`` from the
|
||||||
|
output of ``build_renewable_profiles`` as it is no longer needed.
|
||||||
|
|
||||||
|
- HAC-clustering is now based on wind speeds and irradiation time series
|
||||||
|
rather than capacity factors of wind and solar power plants.
|
||||||
|
|
||||||
|
- Added new rule ``build_hac_features`` that aggregates cutout weather data to
|
||||||
|
base regions in preparation for ``cluster_network``.
|
||||||
|
|
||||||
|
- Removed ``{simpl}`` wildcard and all associated code of the ``m`` suffix of
|
||||||
|
the ``{cluster}`` wildcard. This means that the option to pre-cluster the
|
||||||
|
network in ``simplify_network`` was removed. It will be superseded by
|
||||||
|
clustering renewable profiles and potentials within clustered regions by
|
||||||
|
resource classes soon.
|
||||||
|
|
||||||
|
- Added new rule ``add_transmission_projects_and_dlr`` which adds the outputs
|
||||||
|
from ``build_line_rating`` and ``build_transmission_projects`` to the output
|
||||||
|
of ``base_network``.
|
||||||
|
|
||||||
|
- The rule ``add_extra_components`` was integrated into ``add_electricity``
|
||||||
|
|
||||||
|
- Added new rule ``build_electricity_demand_base`` to determine the load
|
||||||
|
distribution of the substations in the base network (which was previously
|
||||||
|
done in ``add_electricity``). This time series is used as weights for
|
||||||
|
kmeans-clustering in ``cluster_network`` and is later added to the network in
|
||||||
|
``add_electricity`` in aggregated form.
|
||||||
|
|
||||||
|
- The weights of the kmeans clustering algorithm are now exclusively based on
|
||||||
|
the load distribution. Previously, they also included the distribution of
|
||||||
|
thermal capacity.
|
||||||
|
|
||||||
|
- Since the networks no longer start with the whole electricity system added
|
||||||
|
pre-clustering, the files have been renamed from ``elec...nc`` to
|
||||||
|
``base...nc`` to identify them as derivatives of ``base.nc``.
|
||||||
|
|
||||||
|
- The scripts ``simplify_network.py`` and ``cluster_network.py`` were
|
||||||
|
simplified to become less nested and profited from the removed need to deal
|
||||||
|
with cost data.
|
||||||
|
|
||||||
|
- New configuration options to calculate connection costs of offshore wind
|
||||||
|
plants. Offshore connection costs are now calculated based on the underwater
|
||||||
|
distance to the shoreline plus a configurable ``landfall_length`` which
|
||||||
|
defaults to 10 km. Previously the distance to the region's centroid was
|
||||||
|
used, which is not practical when the regions are already aggregated.
|
||||||
|
|
||||||
PyPSA-Eur 0.12.0 (30th August 2024)
|
PyPSA-Eur 0.12.0 (30th August 2024)
|
||||||
===================================
|
===================================
|
||||||
|
|
||||||
|
@@ -9,7 +9,7 @@
 Simplifying Electricity Networks
 ##########################################

-The simplification ``snakemake`` rules prepare **approximations** of the full model, for which it is computationally viable to co-optimize generation, storage and transmission capacities.
+The simplification ``snakemake`` rules prepare **approximations** of the network model, for which it is computationally viable to co-optimize generation, storage and transmission capacities.

 - :mod:`simplify_network` transforms the transmission grid to a 380 kV only equivalent network, while
 - :mod:`cluster_network` uses a `k-means <https://en.wikipedia.org/wiki/K-means_clustering>`__ based clustering technique to partition the network into a given number of zones and then reduce the network to a representation with one bus per zone.
@@ -18,7 +18,7 @@ The simplification and clustering steps are described in detail in the paper

 - Jonas Hörsch and Tom Brown. `The role of spatial scale in joint optimisations of generation and transmission for European highly renewable scenarios <https://arxiv.org/abs/1705.07617>`__), *14th International Conference on the European Energy Market*, 2017. `arXiv:1705.07617 <https://arxiv.org/abs/1705.07617>`__, `doi:10.1109/EEM.2017.7982024 <https://doi.org/10.1109/EEM.2017.7982024>`__.

-After simplification and clustering of the network, additional components may be appended in the rule :mod:`add_extra_components` and the network is prepared for solving in :mod:`prepare_network`.
+After simplification and clustering of the network, further electricity network components may be appended in the rule :mod:`add_electricity` and the network is prepared for solving in :mod:`prepare_network`.

 .. _simplify:

@@ -34,13 +34,12 @@ Rule ``cluster_network``

 .. automodule:: cluster_network

-.. _extra_components:
+.. _electricity:

-Rule ``add_extra_components``
+Rule ``add_electricity``
 =============================

-.. automodule:: add_extra_components
+.. automodule:: add_electricity


 .. _prepare:

doc/tutorial.rst: 221 lines changed
@@ -32,7 +32,7 @@ configuration, execute
 .. code:: bash
     :class: full-width

-    snakemake -call results/test-elec/networks/elec_s_6_ec_lcopt_.nc --configfile config/test/config.electricity.yaml
+    snakemake -call results/test-elec/networks/base_s_6_elec_lcopt_.nc --configfile config/test/config.electricity.yaml

 This configuration is set to download a reduced cutout via the rule :mod:`retrieve_cutout`.
 For more information on the data dependencies of PyPSA-Eur, continue reading :ref:`data`.
@@ -114,9 +114,9 @@ clustered down to 6 buses and every 24 hours aggregated to one snapshot. The com

 .. code:: bash

-    snakemake -call results/test-elec/networks/elec_s_6_ec_lcopt_.nc --configfile config/test/config.electricity.yaml
+    snakemake -call results/test-elec/networks/base_s_6_elec_lcopt_.nc --configfile config/test/config.electricity.yaml

-orders ``snakemake`` to run the rule :mod:`solve_network` that produces the solved network and stores it in ``results/networks`` with the name ``elec_s_6_ec_lcopt_.nc``:
+orders ``snakemake`` to run the rule :mod:`solve_network` that produces the solved network and stores it in ``results/networks`` with the name ``base_s_6_elec_lcopt_.nc``:

 .. literalinclude:: ../rules/solve_electricity.smk
    :start-at: rule solve_network:
@@ -132,98 +132,129 @@ This triggers a workflow of multiple preceding jobs that depend on each rule's i
     graph[bgcolor=white, margin=0];
     node[shape=box, style=rounded, fontname=sans, fontsize=10, penwidth=2];
     edge[penwidth=2, color=grey];
-    0[label = "solve_network", color = "0.16 0.6 0.85", style="rounded"];
+    0[label = "solve_network", color = "0.19 0.6 0.85", style="rounded"];
-    1[label = "prepare_network\nll: copt\nopts: ", color = "0.40 0.6 0.85", style="rounded"];
+    1[label = "prepare_network\nll: copt\nopts: ", color = "0.24 0.6 0.85", style="rounded"];
-    2[label = "add_extra_components", color = "0.03 0.6 0.85", style="rounded"];
+    2[label = "add_electricity", color = "0.35 0.6 0.85", style="rounded"];
-    3[label = "cluster_network\nclusters: 6", color = "0.26 0.6 0.85", style="rounded"];
+    3[label = "build_renewable_profiles", color = "0.15 0.6 0.85", style="rounded"];
-    4[label = "simplify_network\nsimpl: ", color = "0.17 0.6 0.85", style="rounded"];
+    4[label = "determine_availability_matrix\ntechnology: solar", color = "0.39 0.6 0.85", style="rounded"];
-    5[label = "add_electricity", color = "0.39 0.6 0.85", style="rounded"];
+    5[label = "retrieve_databundle", color = "0.65 0.6 0.85", style="rounded"];
-    6[label = "build_renewable_profiles\ntechnology: solar", color = "0.13 0.6 0.85", style="rounded"];
+    6[label = "build_shapes", color = "0.45 0.6 0.85", style="rounded"];
-    7[label = "base_network", color = "0.01 0.6 0.85", style="rounded"];
+    7[label = "retrieve_naturalearth_countries", color = "0.03 0.6 0.85", style="rounded"];
-    8[label = "retrieve_osm_prebuilt", color = "0.27 0.6 0.85", style="rounded"];
+    8[label = "retrieve_eez", color = "0.17 0.6 0.85", style="rounded"];
-    9[label = "build_shapes", color = "0.18 0.6 0.85", style="rounded"];
+    9[label = "cluster_network\nclusters: 6", color = "0.38 0.6 0.85", style="rounded"];
-    10[label = "retrieve_naturalearth_countries", color = "0.41 0.6 0.85", style="rounded"];
+    10[label = "simplify_network", color = "0.14 0.6 0.85", style="rounded"];
-    11[label = "retrieve_eez", color = "0.14 0.6 0.85", style="rounded"];
+    11[label = "add_transmission_projects_and_dlr", color = "0.61 0.6 0.85", style="rounded"];
-    12[label = "retrieve_databundle", color = "0.38 0.6 0.85", style="rounded"];
+    12[label = "base_network", color = "0.36 0.6 0.85", style="rounded"];
-    13[label = "retrieve_cutout\ncutout: be-03-2013-era5", color = "0.51 0.6 0.85", style="rounded"];
+    13[label = "retrieve_osm_prebuilt", color = "0.22 0.6 0.85", style="rounded"];
-    14[label = "build_renewable_profiles\ntechnology: solar-hsat", color = "0.13 0.6 0.85", style="rounded"];
+    14[label = "build_line_rating", color = "0.50 0.6 0.85", style="rounded"];
-    15[label = "build_renewable_profiles\ntechnology: onwind", color = "0.13 0.6 0.85", style="rounded"];
+    15[label = "retrieve_cutout\ncutout: be-03-2013-era5", color = "0.02 0.6 0.85", style="rounded"];
-    16[label = "build_renewable_profiles\ntechnology: offwind-ac", color = "0.13 0.6 0.85", style="rounded"];
+    16[label = "build_transmission_projects", color = "0.08 0.6 0.85", style="rounded"];
-    17[label = "build_ship_raster", color = "0.16 0.6 0.85", style="rounded"];
+    17[label = "build_electricity_demand_base", color = "0.11 0.6 0.85", style="rounded"];
-    18[label = "retrieve_ship_raster", color = "0.53 0.6 0.85", style="rounded"];
+    18[label = "build_electricity_demand", color = "0.60 0.6 0.85", style="rounded"];
-    19[label = "build_renewable_profiles\ntechnology: offwind-dc", color = "0.13 0.6 0.85", style="rounded"];
+    19[label = "retrieve_electricity_demand", color = "0.60 0.6 0.85", style="rounded"];
-    20[label = "build_renewable_profiles\ntechnology: offwind-float", color = "0.13 0.6 0.85", style="rounded"];
+    20[label = "retrieve_synthetic_electricity_demand", color = "0.32 0.6 0.85", style="rounded"];
-    21[label = "build_line_rating", color = "0.46 0.6 0.85", style="rounded"];
+    21[label = "build_renewable_profiles", color = "0.15 0.6 0.85", style="rounded"];
-    22[label = "build_transmission_projects", color = "0.29 0.6 0.85", style="rounded"];
+    22[label = "determine_availability_matrix\ntechnology: solar-hsat", color = "0.39 0.6 0.85", style="rounded"];
-    23[label = "retrieve_cost_data\nyear: 2030", color = "0.11 0.6 0.85", style="rounded"];
+    23[label = "build_renewable_profiles", color = "0.15 0.6 0.85", style="rounded"];
-    24[label = "build_powerplants", color = "0.18 0.6 0.85", style="rounded"];
+    24[label = "determine_availability_matrix\ntechnology: onwind", color = "0.39 0.6 0.85", style="rounded"];
-    25[label = "build_electricity_demand", color = "0.30 0.6 0.85", style="rounded"];
+    25[label = "build_renewable_profiles", color = "0.15 0.6 0.85", style="rounded"];
-    26[label = "retrieve_electricity_demand", color = "0.13 0.6 0.85", style="rounded"];
+    26[label = "determine_availability_matrix\ntechnology: offwind-ac", color = "0.39 0.6 0.85", style="rounded"];
-    27[label = "retrieve_synthetic_electricity_demand", color = "0.43 0.6 0.85", style="rounded"];
+    27[label = "build_ship_raster", color = "0.12 0.6 0.85", style="rounded"];
+    28[label = "retrieve_ship_raster", color = "0.44 0.6 0.85", style="rounded"];
+    29[label = "build_renewable_profiles", color = "0.15 0.6 0.85", style="rounded"];
+    30[label = "determine_availability_matrix\ntechnology: offwind-dc", color = "0.39 0.6 0.85", style="rounded"];
+    31[label = "build_renewable_profiles", color = "0.15 0.6 0.85", style="rounded"];
+    32[label = "determine_availability_matrix\ntechnology: offwind-float", color = "0.39 0.6 0.85", style="rounded"];
+    33[label = "retrieve_cost_data\nyear: 2030", color = "0.01 0.6 0.85", style="rounded"];
+    34[label = "build_powerplants", color = "0.52 0.6 0.85", style="rounded"];
     1 -> 0
     2 -> 1
-    23 -> 1
+    33 -> 1
     3 -> 2
+    21 -> 2
     23 -> 2
+    25 -> 2
+    29 -> 2
+    31 -> 2
+    9 -> 2
+    33 -> 2
+    34 -> 2
+    17 -> 2
     4 -> 3
-    23 -> 3
+    6 -> 3
+    9 -> 3
+    15 -> 3
     5 -> 4
-    23 -> 4
+    6 -> 4
-    7 -> 4
+    9 -> 4
-    6 -> 5
+    15 -> 4
-    14 -> 5
-    15 -> 5
-    16 -> 5
-    19 -> 5
-    20 -> 5
-    7 -> 5
-    21 -> 5
-    22 -> 5
-    23 -> 5
-    24 -> 5
-    25 -> 5
-    9 -> 5
     7 -> 6
-    12 -> 6
+    8 -> 6
-    9 -> 6
+    5 -> 6
-    13 -> 6
-    8 -> 7
-    9 -> 7
     10 -> 9
-    11 -> 9
+    17 -> 9
-    12 -> 9
+    11 -> 10
-    7 -> 14
+    12 -> 10
+    12 -> 11
+    14 -> 11
+    16 -> 11
+    13 -> 12
+    6 -> 12
     12 -> 14
-    9 -> 14
+    15 -> 14
-    13 -> 14
-    7 -> 15
-    12 -> 15
-    9 -> 15
-    13 -> 15
-    7 -> 16
     12 -> 16
-    17 -> 16
+    6 -> 16
-    9 -> 16
+    10 -> 17
-    13 -> 16
+    6 -> 17
     18 -> 17
-    13 -> 17
+    19 -> 18
-    7 -> 19
+    20 -> 18
-    12 -> 19
+    22 -> 21
-    17 -> 19
+    6 -> 21
-    9 -> 19
+    9 -> 21
-    13 -> 19
+    15 -> 21
-    7 -> 20
+    5 -> 22
-    12 -> 20
+    6 -> 22
-    17 -> 20
-    9 -> 20
-    13 -> 20
-    7 -> 21
-    13 -> 21
-    7 -> 22
     9 -> 22
-    7 -> 24
+    15 -> 22
+    24 -> 23
+    6 -> 23
+    9 -> 23
+    15 -> 23
+    5 -> 24
+    6 -> 24
+    9 -> 24
+    15 -> 24
     26 -> 25
-    27 -> 25
+    6 -> 25
+    9 -> 25
+    15 -> 25
+    5 -> 26
+    27 -> 26
+    6 -> 26
+    9 -> 26
+    15 -> 26
+    28 -> 27
+    15 -> 27
+    30 -> 29
+    6 -> 29
+    9 -> 29
+    15 -> 29
+    5 -> 30
+    27 -> 30
+    6 -> 30
+    9 -> 30
+    15 -> 30
+    32 -> 31
+    6 -> 31
+    9 -> 31
+    15 -> 31
+    5 -> 32
+    27 -> 32
+    6 -> 32
+    9 -> 32
+    15 -> 32
+    9 -> 34
 }

@@ -237,9 +268,10 @@ In the terminal, this will show up as a list of jobs to be run:
     job                                       count
     -------------------------------------   -------
     add_electricity                                1
-    add_extra_components                           1
+    add_transmission_projects_and_dlr              1
     base_network                                   1
     build_electricity_demand                       1
+    build_electricity_demand_base                  1
     build_line_rating                              1
     build_powerplants                              1
     build_renewable_profiles                       6
|
|||||||
build_ship_raster 1
|
build_ship_raster 1
|
||||||
build_transmission_projects 1
|
build_transmission_projects 1
|
||||||
cluster_network 1
|
cluster_network 1
|
||||||
|
determine_availability_matrix 6
|
||||||
prepare_network 1
|
prepare_network 1
|
||||||
retrieve_cost_data 1
|
retrieve_cost_data 1
|
||||||
retrieve_cutout 1
|
retrieve_cutout 1
|
||||||
@ -259,7 +292,7 @@ In the terminal, this will show up as a list of jobs to be run:
|
|||||||
retrieve_synthetic_electricity_demand 1
|
retrieve_synthetic_electricity_demand 1
|
||||||
simplify_network 1
|
simplify_network 1
|
||||||
solve_network 1
|
solve_network 1
|
||||||
total 28
|
total 35
|
||||||
|
|
||||||
|
|
||||||
``snakemake`` then runs these jobs in the correct order.
|
``snakemake`` then runs these jobs in the correct order.
|
||||||
@ -269,13 +302,12 @@ A job (here ``simplify_network``) will display its attributes and normally some
|
|||||||
.. code:: bash
|
.. code:: bash
|
||||||
|
|
||||||
rule simplify_network:
|
rule simplify_network:
|
||||||
input: resources/test/networks/elec.nc, resources/test/costs_2030.csv, resources/test/regions_onshore.geojson, resources/test/regions_offshore.geojson
|
input: resources/test/networks/base_extended.nc, resources/test/regions_onshore.geojson, resources/test/regions_offshore.geojson
|
||||||
output: resources/test/networks/elec_s.nc, resources/test/regions_onshore_elec_s.geojson, resources/test/regions_offshore_elec_s.geojson, resources/test/busmap_elec_s.csv
|
output: resources/test/networks/base_s.nc, resources/test/regions_onshore_base_s.geojson, resources/test/regions_offshore_base_s.geojson, resources/test/busmap_base_s.csv
|
||||||
log: logs/test/simplify_network/elec_s.log
|
log: logs/test/simplify_network.log
|
||||||
jobid: 4
|
jobid: 10
|
||||||
benchmark: benchmarks/test/simplify_network/elec_s
|
benchmark: benchmarks/test/simplify_network_b
|
||||||
reason: Forced execution
|
reason: Forced execution
|
||||||
wildcards: simpl=
|
|
||||||
resources: tmpdir=<TBD>, mem_mb=12000, mem_mib=11445
|
resources: tmpdir=<TBD>, mem_mb=12000, mem_mib=11445
|
||||||
|
|
||||||
Once the whole worktree is finished, it should state so in the terminal.
|
Once the whole worktree is finished, it should state so in the terminal.
|
||||||
@ -291,10 +323,9 @@ You can produce any output file occurring in the ``Snakefile`` by running
|
|||||||
For example, you can explore the evolution of the PyPSA networks by running
|
For example, you can explore the evolution of the PyPSA networks by running
|
||||||
|
|
||||||
#. ``snakemake resources/networks/base.nc -call --configfile config/test/config.electricity.yaml``
|
#. ``snakemake resources/networks/base.nc -call --configfile config/test/config.electricity.yaml``
|
||||||
#. ``snakemake resources/networks/elec.nc -call --configfile config/test/config.electricity.yaml``
|
#. ``snakemake resources/networks/base_s.nc -call --configfile config/test/config.electricity.yaml``
|
||||||
#. ``snakemake resources/networks/elec_s.nc -call --configfile config/test/config.electricity.yaml``
|
#. ``snakemake resources/networks/base_s_6.nc -call --configfile config/test/config.electricity.yaml``
|
||||||
#. ``snakemake resources/networks/elec_s_6.nc -call --configfile config/test/config.electricity.yaml``
|
#. ``snakemake resources/networks/base_s_6_elec_lcopt_.nc -call --configfile config/test/config.electricity.yaml``
|
||||||
#. ``snakemake resources/networks/elec_s_6_ec_lcopt_.nc -call --configfile config/test/config.electricity.yaml``
|
|
||||||
|
|
||||||
To run all combinations of wildcard values provided in the ``config/config.yaml`` under ``scenario:``,
|
To run all combinations of wildcard values provided in the ``config/config.yaml`` under ``scenario:``,
|
||||||
you can use the collection rule ``solve_elec_networks``.
|
you can use the collection rule ``solve_elec_networks``.
|
||||||
@ -332,6 +363,6 @@ Jupyter Notebooks).
|
|||||||
|
|
||||||
import pypsa
|
import pypsa
|
||||||
|
|
||||||
n = pypsa.Network("results/networks/elec_s_6_ec_lcopt_.nc")
|
n = pypsa.Network("results/networks/base_s_6_elec_lcopt_.nc")
|
||||||
|
|
||||||
For inspiration, read the `examples section in the PyPSA documentation <https://pypsa.readthedocs.io/en/latest/examples-basic.html>`__.
|
For inspiration, read the `examples section in the PyPSA documentation <https://pypsa.readthedocs.io/en/latest/examples-basic.html>`__.
|
||||||
|
File diff suppressed because it is too large
Load Diff
@ -38,15 +38,6 @@ series and potentials using the rule :mod:`build_renewable_profiles`.
|
|||||||
It can take the values ``onwind``, ``offwind-ac``, ``offwind-dc``, ``offwind-float``, and ``solar`` but **not** ``hydro``
|
It can take the values ``onwind``, ``offwind-ac``, ``offwind-dc``, ``offwind-float``, and ``solar`` but **not** ``hydro``
|
||||||
(since hydroelectric plant profiles are created by a different rule)``
|
(since hydroelectric plant profiles are created by a different rule)``
|
||||||
|
|
||||||
.. _simpl:
|
|
||||||
|
|
||||||
The ``{simpl}`` wildcard
|
|
||||||
========================
|
|
||||||
|
|
||||||
The ``{simpl}`` wildcard specifies number of buses a detailed
|
|
||||||
network model should be pre-clustered to in the rule
|
|
||||||
:mod:`simplify_network` (before :mod:`cluster_network`).
|
|
||||||
|
|
||||||
.. _clusters:
|
.. _clusters:
|
||||||
|
|
||||||
The ``{clusters}`` wildcard
|
The ``{clusters}`` wildcard
|
||||||
@ -57,11 +48,6 @@ network model should be reduced to in the rule :mod:`cluster_network`.
|
|||||||
The number of clusters must be lower than the total number of nodes
|
The number of clusters must be lower than the total number of nodes
|
||||||
and higher than the number of countries. However, a country counts twice if
|
and higher than the number of countries. However, a country counts twice if
|
||||||
it has two asynchronous subnetworks (e.g. Denmark or Italy).
|
it has two asynchronous subnetworks (e.g. Denmark or Italy).
|
||||||
|
|
||||||
If an `m` is placed behind the number of clusters (e.g. ``100m``),
|
|
||||||
generators are only moved to the clustered buses but not aggregated
|
|
||||||
by carrier; i.e. the clustered bus may have more than one e.g. wind generator.
|
|
||||||
|
|
||||||
.. _ll:
|
.. _ll:
|
||||||
|
|
||||||
The ``{ll}`` wildcard
|
The ``{ll}`` wildcard
|
||||||
|
@@ -35,12 +35,14 @@ rule build_powerplants:
         everywhere_powerplants=config_provider("electricity", "everywhere_powerplants"),
         countries=config_provider("countries"),
     input:
-        base_network=resources("networks/base.nc"),
+        network=resources("networks/base_s_{clusters}.nc"),
         custom_powerplants="data/custom_powerplants.csv",
     output:
-        resources("powerplants.csv"),
+        resources("powerplants_s_{clusters}.csv"),
     log:
-        logs("build_powerplants.log"),
+        logs("build_powerplants_s_{clusters}.log"),
+    benchmark:
+        benchmarks("build_powerplants_s_{clusters}")
     threads: 1
     resources:
         mem_mb=7000,
@@ -169,6 +171,8 @@ rule build_ship_raster:


 rule determine_availability_matrix_MD_UA:
+    params:
+        renewable=config_provider("renewable"),
     input:
         copernicus="data/Copernicus_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif",
         wdpa="data/WDPA.gpkg",
@@ -186,18 +190,20 @@ rule determine_availability_matrix_MD_UA:
         country_shapes=resources("country_shapes.geojson"),
         offshore_shapes=resources("offshore_shapes.geojson"),
         regions=lambda w: (
-            resources("regions_onshore.geojson")
+            resources("regions_onshore_base_s_{clusters}.geojson")
             if w.technology in ("onwind", "solar", "solar-hsat")
-            else resources("regions_offshore.geojson")
+            else resources("regions_offshore_base_s_{clusters}.geojson")
         ),
         cutout=lambda w: "cutouts/"
         + CDIR
         + config_provider("renewable", w.technology, "cutout")(w)
         + ".nc",
     output:
-        availability_matrix=resources("availability_matrix_MD-UA_{technology}.nc"),
+        availability_matrix=resources(
+            "availability_matrix_MD-UA_{clusters}_{technology}.nc"
+        ),
     log:
-        logs("determine_availability_matrix_MD_UA_{technology}.log"),
+        logs("determine_availability_matrix_MD_UA_{clusters}_{technology}.log"),
     threads: config["atlite"].get("nprocesses", 4)
     resources:
         mem_mb=config["atlite"].get("nprocesses", 4) * 5000,
@@ -213,20 +219,17 @@ def input_ua_md_availability_matrix(w):
     if {"UA", "MD"}.intersection(countries):
         return {
             "availability_matrix_MD_UA": resources(
-                "availability_matrix_MD-UA_{technology}.nc"
+                "availability_matrix_MD-UA_{clusters}_{technology}.nc"
             )
         }
     return {}


-rule build_renewable_profiles:
+rule determine_availability_matrix:
     params:
-        snapshots=config_provider("snapshots"),
-        drop_leap_day=config_provider("enable", "drop_leap_day"),
         renewable=config_provider("renewable"),
     input:
         unpack(input_ua_md_availability_matrix),
-        base_network=resources("networks/base.nc"),
         corine=ancient("data/bundle/corine/g250_clc06_V18_5.tif"),
         natura=lambda w: (
             "data/bundle/natura/natura.tiff"
|
|||||||
country_shapes=resources("country_shapes.geojson"),
|
country_shapes=resources("country_shapes.geojson"),
|
||||||
offshore_shapes=resources("offshore_shapes.geojson"),
|
offshore_shapes=resources("offshore_shapes.geojson"),
|
||||||
regions=lambda w: (
|
regions=lambda w: (
|
||||||
resources("regions_onshore.geojson")
|
resources("regions_onshore_base_s_{clusters}.geojson")
|
||||||
if w.technology in ("onwind", "solar", "solar-hsat")
|
if w.technology in ("onwind", "solar", "solar-hsat")
|
||||||
else resources("regions_offshore.geojson")
|
else resources("regions_offshore_base_s_{clusters}.geojson")
|
||||||
),
|
),
|
||||||
cutout=lambda w: "cutouts/"
|
cutout=lambda w: "cutouts/"
|
||||||
+ CDIR
|
+ CDIR
|
||||||
+ config_provider("renewable", w.technology, "cutout")(w)
|
+ config_provider("renewable", w.technology, "cutout")(w)
|
||||||
+ ".nc",
|
+ ".nc",
|
||||||
output:
|
output:
|
||||||
profile=resources("profile_{technology}.nc"),
|
resources("availability_matrix_{clusters}_{technology}.nc"),
|
||||||
log:
|
log:
|
||||||
logs("build_renewable_profile_{technology}.log"),
|
logs("determine_availability_matrix_{clusters}_{technology}.log"),
|
||||||
benchmark:
|
benchmark:
|
||||||
benchmarks("build_renewable_profiles_{technology}")
|
benchmarks("determine_availability_matrix_{clusters}_{technology}")
|
||||||
|
threads: config["atlite"].get("nprocesses", 4)
|
||||||
|
resources:
|
||||||
|
mem_mb=config["atlite"].get("nprocesses", 4) * 5000,
|
||||||
|
conda:
|
||||||
|
"../envs/environment.yaml"
|
||||||
|
script:
|
||||||
|
"../scripts/determine_availability_matrix.py"
|
||||||
|
|
||||||
|
|
||||||
|
rule build_renewable_profiles:
|
||||||
|
params:
|
||||||
|
snapshots=config_provider("snapshots"),
|
||||||
|
drop_leap_day=config_provider("enable", "drop_leap_day"),
|
||||||
|
renewable=config_provider("renewable"),
|
||||||
|
input:
|
||||||
|
availability_matrix=resources("availability_matrix_{clusters}_{technology}.nc"),
|
||||||
|
offshore_shapes=resources("offshore_shapes.geojson"),
|
||||||
|
regions=resources("regions_onshore_base_s_{clusters}.geojson"),
|
||||||
|
cutout=lambda w: "cutouts/"
|
||||||
|
+ CDIR
|
||||||
|
+ config_provider("renewable", w.technology, "cutout")(w)
|
||||||
|
+ ".nc",
|
||||||
|
output:
|
||||||
|
profile=resources("profile_{clusters}_{technology}.nc"),
|
||||||
|
log:
|
||||||
|
logs("build_renewable_profile_{clusters}_{technology}.log"),
|
||||||
|
benchmark:
|
||||||
|
benchmarks("build_renewable_profiles_{clusters}_{technology}")
|
||||||
threads: config["atlite"].get("nprocesses", 4)
|
threads: config["atlite"].get("nprocesses", 4)
|
||||||
resources:
|
resources:
|
||||||
mem_mb=config["atlite"].get("nprocesses", 4) * 5000,
|
mem_mb=config["atlite"].get("nprocesses", 4) * 5000,
|
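
Illustration (not part of the diff): with the {simpl} wildcard gone, each renewable technology now runs through two rules keyed only on {clusters}: determine_availability_matrix writes the availability matrix, which build_renewable_profiles turns into a capacity-factor profile on the already clustered regions. A minimal Python sketch of the resulting file-name chain, assuming the default "resources/" prefix used by resources():

# Sketch only: the per-technology file-name chain implied by the two rules above.
def expected_chain(clusters: str, technology: str) -> dict[str, str]:
    prefix = "resources/"  # assumption: default resources directory
    return {
        "availability_matrix": f"{prefix}availability_matrix_{clusters}_{technology}.nc",
        "profile": f"{prefix}profile_{clusters}_{technology}.nc",
    }

print(expected_chain("39", "onwind"))
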
@@ -337,7 +368,7 @@ rule build_line_rating:
        + config_provider("lines", "dynamic_line_rating", "cutout")(w)
        + ".nc",
    output:
-        output=resources("networks/line_rating.nc"),
+        output=resources("dlr.nc"),
    log:
        logs("build_line_rating.log"),
    benchmark:
@ -385,6 +416,44 @@ rule build_transmission_projects:
|
|||||||
"../scripts/build_transmission_projects.py"
|
"../scripts/build_transmission_projects.py"
|
||||||
|
|
||||||
|
|
||||||
|
rule add_transmission_projects_and_dlr:
|
||||||
|
params:
|
||||||
|
transmission_projects=config_provider("transmission_projects"),
|
||||||
|
dlr=config_provider("lines", "dynamic_line_rating"),
|
||||||
|
s_max_pu=config_provider("lines", "s_max_pu"),
|
||||||
|
input:
|
||||||
|
network=resources("networks/base.nc"),
|
||||||
|
dlr=lambda w: (
|
||||||
|
resources("dlr.nc")
|
||||||
|
if config_provider("lines", "dynamic_line_rating", "activate")(w)
|
||||||
|
else []
|
||||||
|
),
|
||||||
|
transmission_projects=lambda w: (
|
||||||
|
[
|
||||||
|
resources("transmission_projects/new_buses.csv"),
|
||||||
|
resources("transmission_projects/new_lines.csv"),
|
||||||
|
resources("transmission_projects/new_links.csv"),
|
||||||
|
resources("transmission_projects/adjust_lines.csv"),
|
||||||
|
resources("transmission_projects/adjust_links.csv"),
|
||||||
|
]
|
||||||
|
if config_provider("transmission_projects", "enable")(w)
|
||||||
|
else []
|
||||||
|
),
|
||||||
|
output:
|
||||||
|
network=resources("networks/base_extended.nc"),
|
||||||
|
log:
|
||||||
|
logs("add_transmission_projects_and_dlr.log"),
|
||||||
|
benchmark:
|
||||||
|
benchmarks("add_transmission_projects_and_dlr")
|
||||||
|
threads: 1
|
||||||
|
resources:
|
||||||
|
mem_mb=4000,
|
||||||
|
conda:
|
||||||
|
"../envs/environment.yaml"
|
||||||
|
script:
|
||||||
|
"../scripts/add_transmission_projects_and_dlr.py"
|
||||||
|
|
||||||
|
|
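
Illustration (not part of the diff): the dlr and transmission_projects inputs above use the common Snakemake pattern of collapsing to an empty list when a feature is disabled, so the corresponding files simply drop out of the DAG. A hedged sketch of that pattern, with a hand-written dict standing in for config_provider():

# Sketch of the conditional-input pattern; the config dict is a stand-in,
# not the real workflow configuration.
config = {"lines": {"dynamic_line_rating": {"activate": False}}}

def dlr_input(cfg):
    # Mirrors the lambda above: the DLR file when activated, otherwise an
    # empty list, which Snakemake treats as "no input".
    return ["resources/dlr.nc"] if cfg["lines"]["dynamic_line_rating"]["activate"] else []

print(dlr_input(config))  # -> []
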
def input_profile_tech(w):
    return {
        f"profile_{tech}": resources(f"profile_{tech}.nc")
@@ -414,8 +483,8 @@ rule build_gdp_pop_non_nuts3:
    params:
        countries=config_provider("countries"),
    input:
-        base_network=resources("networks/base.nc"),
+        base_network=resources("networks/base_s.nc"),
-        regions=resources("regions_onshore.geojson"),
+        regions=resources("regions_onshore_base_s.geojson"),
        gdp_non_nuts3="data/bundle/GDP_per_capita_PPP_1990_2015_v2.nc",
        pop_non_nuts3="data/bundle/ppp_2013_1km_Aggregated.tif",
    output:
@@ -433,97 +502,76 @@ rule build_gdp_pop_non_nuts3:
        "../scripts/build_gdp_pop_non_nuts3.py"

-rule add_electricity:
+rule build_electricity_demand_base:
    params:
-        length_factor=config_provider("lines", "length_factor"),
+        distribution_key=config_provider("load", "distribution_key"),
-        scaling_factor=config_provider("load", "scaling_factor"),
-        countries=config_provider("countries"),
-        snapshots=config_provider("snapshots"),
-        renewable=config_provider("renewable"),
-        electricity=config_provider("electricity"),
-        conventional=config_provider("conventional"),
-        costs=config_provider("costs"),
-        foresight=config_provider("foresight"),
-        drop_leap_day=config_provider("enable", "drop_leap_day"),
-        transmission_projects=config_provider("transmission_projects"),
    input:
-        unpack(input_profile_tech),
-        unpack(input_conventional),
        unpack(input_gdp_pop_non_nuts3),
-        base_network=resources("networks/base.nc"),
+        base_network=resources("networks/base_s.nc"),
-        line_rating=lambda w: (
+        regions=resources("regions_onshore_base_s.geojson"),
-            resources("networks/line_rating.nc")
+        nuts3=resources("nuts3_shapes.geojson"),
-            if config_provider("lines", "dynamic_line_rating", "activate")(w)
-            else resources("networks/base.nc")
-        ),
-        transmission_projects=lambda w: (
-            [
-                resources("transmission_projects/new_buses.csv"),
-                resources("transmission_projects/new_lines.csv"),
-                resources("transmission_projects/new_links.csv"),
-                resources("transmission_projects/adjust_lines.csv"),
-                resources("transmission_projects/adjust_links.csv"),
-            ]
-            if config_provider("transmission_projects", "enable")(w)
-            else []
-        ),
-        tech_costs=lambda w: resources(
-            f"costs_{config_provider('costs', 'year')(w)}.csv"
-        ),
-        regions=resources("regions_onshore.geojson"),
-        powerplants=resources("powerplants.csv"),
-        hydro_capacities=ancient("data/hydro_capacities.csv"),
-        unit_commitment="data/unit_commitment.csv",
-        fuel_price=lambda w: (
-            resources("monthly_fuel_price.csv")
-            if config_provider("conventional", "dynamic_fuel_price")(w)
-            else []
-        ),
        load=resources("electricity_demand.csv"),
-        nuts3_shapes=resources("nuts3_shapes.geojson"),
    output:
-        resources("networks/elec.nc"),
+        resources("electricity_demand_base_s.nc"),
    log:
-        logs("add_electricity.log"),
+        logs("build_electricity_demand_base_s.log"),
    benchmark:
-        benchmarks("add_electricity")
+        benchmarks("build_electricity_demand_base_s")
-    threads: 1
+    resources:
+        mem_mb=5000,
+    conda:
+        "../envs/environment.yaml"
+    script:
+        "../scripts/build_electricity_demand_base.py"


+rule build_hac_features:
+    params:
+        snapshots=config_provider("snapshots"),
+        drop_leap_day=config_provider("enable", "drop_leap_day"),
+        features=config_provider("clustering", "cluster_network", "hac_features"),
+    input:
+        cutout=lambda w: "cutouts/"
+        + CDIR
+        + config_provider("atlite", "default_cutout")(w)
+        + ".nc",
+        regions=resources("regions_onshore_base_s.geojson"),
+    output:
+        resources("hac_features.nc"),
+    log:
+        logs("build_hac_features.log"),
+    benchmark:
+        benchmarks("build_hac_features")
+    threads: config["atlite"].get("nprocesses", 4)
    resources:
        mem_mb=10000,
    conda:
        "../envs/environment.yaml"
    script:
-        "../scripts/add_electricity.py"
+        "../scripts/build_hac_features.py"

rule simplify_network:
    params:
        simplify_network=config_provider("clustering", "simplify_network"),
+        cluster_network=config_provider("clustering", "cluster_network"),
        aggregation_strategies=config_provider(
            "clustering", "aggregation_strategies", default={}
        ),
-        focus_weights=config_provider("clustering", "focus_weights", default=None),
-        renewable_carriers=config_provider("electricity", "renewable_carriers"),
-        max_hours=config_provider("electricity", "max_hours"),
-        length_factor=config_provider("lines", "length_factor"),
        p_max_pu=config_provider("links", "p_max_pu", default=1.0),
-        costs=config_provider("costs"),
    input:
-        network=resources("networks/elec.nc"),
+        network=resources("networks/base_extended.nc"),
-        tech_costs=lambda w: resources(
-            f"costs_{config_provider('costs', 'year')(w)}.csv"
-        ),
        regions_onshore=resources("regions_onshore.geojson"),
        regions_offshore=resources("regions_offshore.geojson"),
    output:
-        network=resources("networks/elec_s{simpl}.nc"),
+        network=resources("networks/base_s.nc"),
-        regions_onshore=resources("regions_onshore_elec_s{simpl}.geojson"),
+        regions_onshore=resources("regions_onshore_base_s.geojson"),
-        regions_offshore=resources("regions_offshore_elec_s{simpl}.geojson"),
+        regions_offshore=resources("regions_offshore_base_s.geojson"),
-        busmap=resources("busmap_elec_s{simpl}.csv"),
+        busmap=resources("busmap_base_s.csv"),
    log:
-        logs("simplify_network/elec_s{simpl}.log"),
+        logs("simplify_network.log"),
    benchmark:
-        benchmarks("simplify_network/elec_s{simpl}")
+        benchmarks("simplify_network_b")
    threads: 1
    resources:
        mem_mb=12000,
@@ -537,7 +585,7 @@ rule simplify_network:
def input_cluster_network(w):
    if config_provider("enable", "custom_busmap", default=False)(w):
        base_network = config_provider("electricity", "base_network")(w)
-        custom_busmap = f"data/busmaps/elec_s{w.simpl}_{w.clusters}_{base_network}.csv"
+        custom_busmap = f"data/busmaps/base_s_{w.clusters}_{base_network}.csv"
        return {"custom_busmap": custom_busmap}
    return {"custom_busmap": []}

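Illustration (not part of the diff): with {simpl} dropped, a user-supplied custom busmap is now looked up under the base_s naming scheme. A hedged sketch of how the expected path is assembled; the base_network label is only an example value, not a value taken from this diff:

# Sketch only: path layout implied by input_cluster_network() above.
def custom_busmap_path(clusters: str, base_network: str = "osm-prebuilt") -> str:
    # "osm-prebuilt" is an illustrative label; substitute whatever
    # electricity.base_network is set to in the run's config.
    return f"data/busmaps/base_s_{clusters}_{base_network}.csv"

print(custom_busmap_path("39"))
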
@@ -556,26 +604,29 @@ rule cluster_network:
        ),
        max_hours=config_provider("electricity", "max_hours"),
        length_factor=config_provider("lines", "length_factor"),
-        costs=config_provider("costs"),
    input:
        unpack(input_cluster_network),
-        network=resources("networks/elec_s{simpl}.nc"),
+        network=resources("networks/base_s.nc"),
-        regions_onshore=resources("regions_onshore_elec_s{simpl}.geojson"),
+        regions_onshore=resources("regions_onshore_base_s.geojson"),
-        regions_offshore=resources("regions_offshore_elec_s{simpl}.geojson"),
+        regions_offshore=resources("regions_offshore_base_s.geojson"),
-        busmap=ancient(resources("busmap_elec_s{simpl}.csv")),
+        busmap=ancient(resources("busmap_base_s.csv")),
-        tech_costs=lambda w: resources(
+        hac_features=lambda w: (
-            f"costs_{config_provider('costs', 'year')(w)}.csv"
+            resources("hac_features.nc")
+            if config_provider("clustering", "cluster_network", "algorithm")(w)
+            == "hac"
+            else []
        ),
+        load=resources("electricity_demand_base_s.nc"),
    output:
-        network=resources("networks/elec_s{simpl}_{clusters}.nc"),
+        network=resources("networks/base_s_{clusters}.nc"),
-        regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"),
+        regions_onshore=resources("regions_onshore_base_s_{clusters}.geojson"),
-        regions_offshore=resources("regions_offshore_elec_s{simpl}_{clusters}.geojson"),
+        regions_offshore=resources("regions_offshore_base_s_{clusters}.geojson"),
-        busmap=resources("busmap_elec_s{simpl}_{clusters}.csv"),
+        busmap=resources("busmap_base_s_{clusters}.csv"),
-        linemap=resources("linemap_elec_s{simpl}_{clusters}.csv"),
+        linemap=resources("linemap_base_s_{clusters}.csv"),
    log:
-        logs("cluster_network/elec_s{simpl}_{clusters}.log"),
+        logs("cluster_network_base_s_{clusters}.log"),
    benchmark:
-        benchmarks("cluster_network/elec_s{simpl}_{clusters}")
+        benchmarks("cluster_network_base_s_{clusters}")
    threads: 1
    resources:
        mem_mb=10000,
@@ -585,29 +636,76 @@ rule cluster_network:
        "../scripts/cluster_network.py"

-rule add_extra_components:
+def input_profile_tech(w):
+    return {
+        f"profile_{tech}": resources(
+            "profile_{clusters}_" + tech + ".nc"
+            if tech != "hydro"
+            else f"profile_{tech}.nc"
+        )
+        for tech in config_provider("electricity", "renewable_carriers")(w)
+    }
+
+
+def input_conventional(w):
+    return {
+        f"conventional_{carrier}_{attr}": fn
+        for carrier, d in config_provider("conventional", default={None: {}})(w).items()
+        if carrier in config_provider("electricity", "conventional_carriers")(w)
+        for attr, fn in d.items()
+        if str(fn).startswith("data/")
+    }
+
+
+rule add_electricity:
    params:
-        extendable_carriers=config_provider("electricity", "extendable_carriers"),
+        line_length_factor=config_provider("lines", "length_factor"),
-        max_hours=config_provider("electricity", "max_hours"),
+        link_length_factor=config_provider("links", "length_factor"),
+        scaling_factor=config_provider("load", "scaling_factor"),
+        countries=config_provider("countries"),
+        snapshots=config_provider("snapshots"),
+        renewable=config_provider("renewable"),
+        electricity=config_provider("electricity"),
+        conventional=config_provider("conventional"),
        costs=config_provider("costs"),
+        foresight=config_provider("foresight"),
+        drop_leap_day=config_provider("enable", "drop_leap_day"),
+        consider_efficiency_classes=config_provider(
+            "clustering", "consider_efficiency_classes"
+        ),
+        aggregation_strategies=config_provider("clustering", "aggregation_strategies"),
+        exclude_carriers=config_provider("clustering", "exclude_carriers"),
    input:
-        network=resources("networks/elec_s{simpl}_{clusters}.nc"),
+        unpack(input_profile_tech),
+        unpack(input_conventional),
+        base_network=resources("networks/base_s_{clusters}.nc"),
        tech_costs=lambda w: resources(
            f"costs_{config_provider('costs', 'year')(w)}.csv"
        ),
+        regions=resources("regions_onshore_base_s_{clusters}.geojson"),
+        powerplants=resources("powerplants_s_{clusters}.csv"),
+        hydro_capacities=ancient("data/hydro_capacities.csv"),
+        unit_commitment="data/unit_commitment.csv",
+        fuel_price=lambda w: (
+            resources("monthly_fuel_price.csv")
+            if config_provider("conventional", "dynamic_fuel_price")(w)
+            else []
+        ),
+        load=resources("electricity_demand_base_s.nc"),
+        busmap=resources("busmap_base_s_{clusters}.csv"),
    output:
-        resources("networks/elec_s{simpl}_{clusters}_ec.nc"),
+        resources("networks/base_s_{clusters}_elec.nc"),
    log:
-        logs("add_extra_components/elec_s{simpl}_{clusters}.log"),
+        logs("add_electricity_{clusters}.log"),
    benchmark:
-        benchmarks("add_extra_components/elec_s{simpl}_{clusters}_ec")
+        benchmarks("add_electricity_{clusters}")
    threads: 1
    resources:
-        mem_mb=4000,
+        mem_mb=10000,
    conda:
        "../envs/environment.yaml"
    script:
-        "../scripts/add_extra_components.py"
+        "../scripts/add_electricity.py"

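Illustration (not part of the diff): input_profile_tech above keeps hydro on an unclustered profile while every other carrier reads the clustered profile, and it leaves "{clusters}" as a literal wildcard for Snakemake to fill in. A minimal sketch of what the helper resolves to, with a hard-coded carrier list standing in for config_provider("electricity", "renewable_carriers"):

# Sketch only: the dict of profile inputs produced per carrier.
def profile_inputs(carriers):
    return {
        f"profile_{tech}": (
            "resources/profile_{clusters}_" + tech + ".nc"  # wildcard kept on purpose
            if tech != "hydro"
            else f"resources/profile_{tech}.nc"
        )
        for tech in carriers
    }

print(profile_inputs(["onwind", "offwind-ac", "hydro"]))
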
rule prepare_network:
@@ -626,17 +724,17 @@ rule prepare_network:
        autarky=config_provider("electricity", "autarky", default={}),
        drop_leap_day=config_provider("enable", "drop_leap_day"),
    input:
-        resources("networks/elec_s{simpl}_{clusters}_ec.nc"),
+        resources("networks/base_s_{clusters}_elec.nc"),
        tech_costs=lambda w: resources(
            f"costs_{config_provider('costs', 'year')(w)}.csv"
        ),
        co2_price=lambda w: resources("co2_price.csv") if "Ept" in w.opts else [],
    output:
-        resources("networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"),
+        resources("networks/base_s_{clusters}_elec_l{ll}_{opts}.nc"),
    log:
-        logs("prepare_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.log"),
+        logs("prepare_network_base_s_{clusters}_elec_l{ll}_{opts}.log"),
    benchmark:
-        (benchmarks("prepare_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}"))
+        benchmarks("prepare_network_base_s_{clusters}_elec_l{ll}_{opts}")
    threads: 1
    resources:
        mem_mb=4000,
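
Illustration (not part of the diff): the prepared-network file name loses the simpl and "ec" parts. A hedged sketch comparing the old and new patterns for one example wildcard combination (the concrete values are illustrative only, and {simpl} is shown as the typically empty string):

# Sketch only: old vs. new prepared-network file names.
clusters, ll, opts = "39", "vopt", "Co2L"
old = f"resources/networks/elec_s_{clusters}_ec_l{ll}_{opts}.nc"   # {simpl} = ""
new = f"resources/networks/base_s_{clusters}_elec_l{ll}_{opts}.nc"
print(old)
print(new)
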
@@ -33,19 +33,19 @@ rule build_clustered_population_layouts:
        pop_layout_total=resources("pop_layout_total.nc"),
        pop_layout_urban=resources("pop_layout_urban.nc"),
        pop_layout_rural=resources("pop_layout_rural.nc"),
-        regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"),
+        regions_onshore=resources("regions_onshore_base_s_{clusters}.geojson"),
        cutout=lambda w: "cutouts/"
        + CDIR
        + config_provider("atlite", "default_cutout")(w)
        + ".nc",
    output:
-        clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"),
+        clustered_pop_layout=resources("pop_layout_base_s_{clusters}.csv"),
    log:
-        logs("build_clustered_population_layouts_{simpl}_{clusters}.log"),
+        logs("build_clustered_population_layouts_s_{clusters}.log"),
    resources:
        mem_mb=10000,
    benchmark:
-        benchmarks("build_clustered_population_layouts/s{simpl}_{clusters}")
+        benchmarks("build_clustered_population_layouts/s_{clusters}")
    conda:
        "../envs/environment.yaml"
    script:
||||||
@ -57,19 +57,19 @@ rule build_simplified_population_layouts:
|
|||||||
pop_layout_total=resources("pop_layout_total.nc"),
|
pop_layout_total=resources("pop_layout_total.nc"),
|
||||||
pop_layout_urban=resources("pop_layout_urban.nc"),
|
pop_layout_urban=resources("pop_layout_urban.nc"),
|
||||||
pop_layout_rural=resources("pop_layout_rural.nc"),
|
pop_layout_rural=resources("pop_layout_rural.nc"),
|
||||||
regions_onshore=resources("regions_onshore_elec_s{simpl}.geojson"),
|
regions_onshore=resources("regions_onshore_base_s.geojson"),
|
||||||
cutout=lambda w: "cutouts/"
|
cutout=lambda w: "cutouts/"
|
||||||
+ CDIR
|
+ CDIR
|
||||||
+ config_provider("atlite", "default_cutout")(w)
|
+ config_provider("atlite", "default_cutout")(w)
|
||||||
+ ".nc",
|
+ ".nc",
|
||||||
output:
|
output:
|
||||||
clustered_pop_layout=resources("pop_layout_elec_s{simpl}.csv"),
|
clustered_pop_layout=resources("pop_layout_base_s.csv"),
|
||||||
resources:
|
resources:
|
||||||
mem_mb=10000,
|
mem_mb=10000,
|
||||||
log:
|
log:
|
||||||
logs("build_simplified_population_layouts_{simpl}"),
|
logs("build_simplified_population_layouts_s"),
|
||||||
benchmark:
|
benchmark:
|
||||||
benchmarks("build_simplified_population_layouts/s{simpl}")
|
benchmarks("build_simplified_population_layouts/s")
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -96,17 +96,17 @@ rule build_gas_input_locations:
|
|||||||
gem="data/gem/Europe-Gas-Tracker-2024-05.xlsx",
|
gem="data/gem/Europe-Gas-Tracker-2024-05.xlsx",
|
||||||
entry="data/gas_network/scigrid-gas/data/IGGIELGN_BorderPoints.geojson",
|
entry="data/gas_network/scigrid-gas/data/IGGIELGN_BorderPoints.geojson",
|
||||||
storage="data/gas_network/scigrid-gas/data/IGGIELGN_Storages.geojson",
|
storage="data/gas_network/scigrid-gas/data/IGGIELGN_Storages.geojson",
|
||||||
regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"),
|
regions_onshore=resources("regions_onshore_base_s_{clusters}.geojson"),
|
||||||
regions_offshore=resources("regions_offshore_elec_s{simpl}_{clusters}.geojson"),
|
regions_offshore=resources("regions_offshore_base_s_{clusters}.geojson"),
|
||||||
output:
|
output:
|
||||||
gas_input_nodes=resources("gas_input_locations_s{simpl}_{clusters}.geojson"),
|
gas_input_nodes=resources("gas_input_locations_s_{clusters}.geojson"),
|
||||||
gas_input_nodes_simplified=resources(
|
gas_input_nodes_simplified=resources(
|
||||||
"gas_input_locations_s{simpl}_{clusters}_simplified.csv"
|
"gas_input_locations_s_{clusters}_simplified.csv"
|
||||||
),
|
),
|
||||||
resources:
|
resources:
|
||||||
mem_mb=2000,
|
mem_mb=2000,
|
||||||
log:
|
log:
|
||||||
logs("build_gas_input_locations_s{simpl}_{clusters}.log"),
|
logs("build_gas_input_locations_s_{clusters}.log"),
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -116,14 +116,14 @@ rule build_gas_input_locations:
|
|||||||
rule cluster_gas_network:
|
rule cluster_gas_network:
|
||||||
input:
|
input:
|
||||||
cleaned_gas_network=resources("gas_network.csv"),
|
cleaned_gas_network=resources("gas_network.csv"),
|
||||||
regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"),
|
regions_onshore=resources("regions_onshore_base_s_{clusters}.geojson"),
|
||||||
regions_offshore=resources("regions_offshore_elec_s{simpl}_{clusters}.geojson"),
|
regions_offshore=resources("regions_offshore_base_s_{clusters}.geojson"),
|
||||||
output:
|
output:
|
||||||
clustered_gas_network=resources("gas_network_elec_s{simpl}_{clusters}.csv"),
|
clustered_gas_network=resources("gas_network_base_s_{clusters}.csv"),
|
||||||
resources:
|
resources:
|
||||||
mem_mb=4000,
|
mem_mb=4000,
|
||||||
log:
|
log:
|
||||||
logs("cluster_gas_network_s{simpl}_{clusters}.log"),
|
logs("cluster_gas_network_{clusters}.log"),
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -149,17 +149,17 @@ rule build_daily_heat_demand:
|
|||||||
drop_leap_day=config_provider("enable", "drop_leap_day"),
|
drop_leap_day=config_provider("enable", "drop_leap_day"),
|
||||||
input:
|
input:
|
||||||
pop_layout=resources("pop_layout_total.nc"),
|
pop_layout=resources("pop_layout_total.nc"),
|
||||||
regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"),
|
regions_onshore=resources("regions_onshore_base_s_{clusters}.geojson"),
|
||||||
cutout=heat_demand_cutout,
|
cutout=heat_demand_cutout,
|
||||||
output:
|
output:
|
||||||
heat_demand=resources("daily_heat_demand_total_elec_s{simpl}_{clusters}.nc"),
|
heat_demand=resources("daily_heat_demand_total_base_s_{clusters}.nc"),
|
||||||
resources:
|
resources:
|
||||||
mem_mb=20000,
|
mem_mb=20000,
|
||||||
threads: 8
|
threads: 8
|
||||||
log:
|
log:
|
||||||
logs("build_daily_heat_demand_total_{simpl}_{clusters}.loc"),
|
logs("build_daily_heat_demand_total_s_{clusters}.loc"),
|
||||||
benchmark:
|
benchmark:
|
||||||
benchmarks("build_daily_heat_demand/total_s{simpl}_{clusters}")
|
benchmarks("build_daily_heat_demand/total_s_{clusters}")
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -172,16 +172,16 @@ rule build_hourly_heat_demand:
|
|||||||
drop_leap_day=config_provider("enable", "drop_leap_day"),
|
drop_leap_day=config_provider("enable", "drop_leap_day"),
|
||||||
input:
|
input:
|
||||||
heat_profile="data/heat_load_profile_BDEW.csv",
|
heat_profile="data/heat_load_profile_BDEW.csv",
|
||||||
heat_demand=resources("daily_heat_demand_total_elec_s{simpl}_{clusters}.nc"),
|
heat_demand=resources("daily_heat_demand_total_base_s_{clusters}.nc"),
|
||||||
output:
|
output:
|
||||||
heat_demand=resources("hourly_heat_demand_total_elec_s{simpl}_{clusters}.nc"),
|
heat_demand=resources("hourly_heat_demand_total_base_s_{clusters}.nc"),
|
||||||
resources:
|
resources:
|
||||||
mem_mb=2000,
|
mem_mb=2000,
|
||||||
threads: 8
|
threads: 8
|
||||||
log:
|
log:
|
||||||
logs("build_hourly_heat_demand_total_{simpl}_{clusters}.loc"),
|
logs("build_hourly_heat_demand_total_s_{clusters}.loc"),
|
||||||
benchmark:
|
benchmark:
|
||||||
benchmarks("build_hourly_heat_demand/total_s{simpl}_{clusters}")
|
benchmarks("build_hourly_heat_demand/total_s_{clusters}")
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -194,18 +194,18 @@ rule build_temperature_profiles:
|
|||||||
drop_leap_day=config_provider("enable", "drop_leap_day"),
|
drop_leap_day=config_provider("enable", "drop_leap_day"),
|
||||||
input:
|
input:
|
||||||
pop_layout=resources("pop_layout_total.nc"),
|
pop_layout=resources("pop_layout_total.nc"),
|
||||||
regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"),
|
regions_onshore=resources("regions_onshore_base_s_{clusters}.geojson"),
|
||||||
cutout=heat_demand_cutout,
|
cutout=heat_demand_cutout,
|
||||||
output:
|
output:
|
||||||
temp_soil=resources("temp_soil_total_elec_s{simpl}_{clusters}.nc"),
|
temp_soil=resources("temp_soil_total_base_s_{clusters}.nc"),
|
||||||
temp_air=resources("temp_air_total_elec_s{simpl}_{clusters}.nc"),
|
temp_air=resources("temp_air_total_base_s_{clusters}.nc"),
|
||||||
resources:
|
resources:
|
||||||
mem_mb=20000,
|
mem_mb=20000,
|
||||||
threads: 8
|
threads: 8
|
||||||
log:
|
log:
|
||||||
logs("build_temperature_profiles_total_{simpl}_{clusters}.log"),
|
logs("build_temperature_profiles_total_s_{clusters}.log"),
|
||||||
benchmark:
|
benchmark:
|
||||||
benchmarks("build_temperature_profiles/total_s{simpl}_{clusters}")
|
benchmarks("build_temperature_profiles/total_{clusters}")
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -252,21 +252,21 @@ rule build_central_heating_temperature_profiles:
|
|||||||
"rolling_window_ambient_temperature",
|
"rolling_window_ambient_temperature",
|
||||||
),
|
),
|
||||||
input:
|
input:
|
||||||
temp_air_total=resources("temp_air_total_elec_s{simpl}_{clusters}.nc"),
|
temp_air_total=resources("temp_air_total_base_s_{clusters}.nc"),
|
||||||
regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"),
|
regions_onshore=resources("regions_onshore_base_s_{clusters}.geojson"),
|
||||||
output:
|
output:
|
||||||
central_heating_forward_temperature_profiles=resources(
|
central_heating_forward_temperature_profiles=resources(
|
||||||
"central_heating_forward_temperature_profiles_elec_s{simpl}_{clusters}.nc"
|
"central_heating_forward_temperature_profiles_base_s_{clusters}.nc"
|
||||||
),
|
),
|
||||||
central_heating_return_temperature_profiles=resources(
|
central_heating_return_temperature_profiles=resources(
|
||||||
"central_heating_return_temperature_profiles_elec_s{simpl}_{clusters}.nc"
|
"central_heating_return_temperature_profiles_base_s_{clusters}.nc"
|
||||||
),
|
),
|
||||||
resources:
|
resources:
|
||||||
mem_mb=20000,
|
mem_mb=20000,
|
||||||
log:
|
log:
|
||||||
logs("build_central_heating_temperature_profiles_s{simpl}_{clusters}.log"),
|
logs("build_central_heating_temperature_profiles_s_{clusters}.log"),
|
||||||
benchmark:
|
benchmark:
|
||||||
benchmarks("build_central_heating_temperature_profiles/s{simpl}_{clusters}")
|
benchmarks("build_central_heating_temperature_profiles/s_{clusters}")
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -288,22 +288,22 @@ rule build_cop_profiles:
|
|||||||
snapshots=config_provider("snapshots"),
|
snapshots=config_provider("snapshots"),
|
||||||
input:
|
input:
|
||||||
central_heating_forward_temperature_profiles=resources(
|
central_heating_forward_temperature_profiles=resources(
|
||||||
"central_heating_forward_temperature_profiles_elec_s{simpl}_{clusters}.nc"
|
"central_heating_forward_temperature_profiles_base_s_{clusters}.nc"
|
||||||
),
|
),
|
||||||
central_heating_return_temperature_profiles=resources(
|
central_heating_return_temperature_profiles=resources(
|
||||||
"central_heating_return_temperature_profiles_elec_s{simpl}_{clusters}.nc"
|
"central_heating_return_temperature_profiles_base_s_{clusters}.nc"
|
||||||
),
|
),
|
||||||
temp_soil_total=resources("temp_soil_total_elec_s{simpl}_{clusters}.nc"),
|
temp_soil_total=resources("temp_soil_total_base_s_{clusters}.nc"),
|
||||||
temp_air_total=resources("temp_air_total_elec_s{simpl}_{clusters}.nc"),
|
temp_air_total=resources("temp_air_total_base_s_{clusters}.nc"),
|
||||||
regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"),
|
regions_onshore=resources("regions_onshore_base_s_{clusters}.geojson"),
|
||||||
output:
|
output:
|
||||||
cop_profiles=resources("cop_profiles_elec_s{simpl}_{clusters}.nc"),
|
cop_profiles=resources("cop_profiles_base_s_{clusters}.nc"),
|
||||||
resources:
|
resources:
|
||||||
mem_mb=20000,
|
mem_mb=20000,
|
||||||
log:
|
log:
|
||||||
logs("build_cop_profiles_s{simpl}_{clusters}.log"),
|
logs("build_cop_profiles_s_{clusters}.log"),
|
||||||
benchmark:
|
benchmark:
|
||||||
benchmarks("build_cop_profiles/s{simpl}_{clusters}")
|
benchmarks("build_cop_profiles/s_{clusters}")
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -330,17 +330,17 @@ rule build_solar_thermal_profiles:
|
|||||||
solar_thermal=config_provider("solar_thermal"),
|
solar_thermal=config_provider("solar_thermal"),
|
||||||
input:
|
input:
|
||||||
pop_layout=resources("pop_layout_total.nc"),
|
pop_layout=resources("pop_layout_total.nc"),
|
||||||
regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"),
|
regions_onshore=resources("regions_onshore_base_s_{clusters}.geojson"),
|
||||||
cutout=solar_thermal_cutout,
|
cutout=solar_thermal_cutout,
|
||||||
output:
|
output:
|
||||||
solar_thermal=resources("solar_thermal_total_elec_s{simpl}_{clusters}.nc"),
|
solar_thermal=resources("solar_thermal_total_base_s_{clusters}.nc"),
|
||||||
resources:
|
resources:
|
||||||
mem_mb=20000,
|
mem_mb=20000,
|
||||||
threads: 16
|
threads: 16
|
||||||
log:
|
log:
|
||||||
logs("build_solar_thermal_profiles_total_s{simpl}_{clusters}.log"),
|
logs("build_solar_thermal_profiles_total_s_{clusters}.log"),
|
||||||
benchmark:
|
benchmark:
|
||||||
benchmarks("build_solar_thermal_profiles/total_s{simpl}_{clusters}")
|
benchmarks("build_solar_thermal_profiles/total_{clusters}")
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -406,25 +406,25 @@ rule build_biomass_potentials:
|
|||||||
enspreso_biomass="data/ENSPRESO_BIOMASS.xlsx",
|
enspreso_biomass="data/ENSPRESO_BIOMASS.xlsx",
|
||||||
eurostat="data/eurostat/Balances-April2023",
|
eurostat="data/eurostat/Balances-April2023",
|
||||||
nuts2="data/nuts/NUTS_RG_03M_2013_4326_LEVL_2.geojson",
|
nuts2="data/nuts/NUTS_RG_03M_2013_4326_LEVL_2.geojson",
|
||||||
regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"),
|
regions_onshore=resources("regions_onshore_base_s_{clusters}.geojson"),
|
||||||
nuts3_population=ancient("data/bundle/nama_10r_3popgdp.tsv.gz"),
|
nuts3_population=ancient("data/bundle/nama_10r_3popgdp.tsv.gz"),
|
||||||
swiss_cantons=ancient("data/ch_cantons.csv"),
|
swiss_cantons=ancient("data/ch_cantons.csv"),
|
||||||
swiss_population=ancient("data/bundle/je-e-21.03.02.xls"),
|
swiss_population=ancient("data/bundle/je-e-21.03.02.xls"),
|
||||||
country_shapes=resources("country_shapes.geojson"),
|
country_shapes=resources("country_shapes.geojson"),
|
||||||
output:
|
output:
|
||||||
biomass_potentials_all=resources(
|
biomass_potentials_all=resources(
|
||||||
"biomass_potentials_all_s{simpl}_{clusters}_{planning_horizons}.csv"
|
"biomass_potentials_all_{clusters}_{planning_horizons}.csv"
|
||||||
),
|
),
|
||||||
biomass_potentials=resources(
|
biomass_potentials=resources(
|
||||||
"biomass_potentials_s{simpl}_{clusters}_{planning_horizons}.csv"
|
"biomass_potentials_s_{clusters}_{planning_horizons}.csv"
|
||||||
),
|
),
|
||||||
threads: 8
|
threads: 8
|
||||||
resources:
|
resources:
|
||||||
mem_mb=1000,
|
mem_mb=1000,
|
||||||
log:
|
log:
|
||||||
logs("build_biomass_potentials_s{simpl}_{clusters}_{planning_horizons}.log"),
|
logs("build_biomass_potentials_s_{clusters}_{planning_horizons}.log"),
|
||||||
benchmark:
|
benchmark:
|
||||||
benchmarks("build_biomass_potentials_s{simpl}_{clusters}_{planning_horizons}")
|
benchmarks("build_biomass_potentials_s_{clusters}_{planning_horizons}")
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -457,19 +457,19 @@ rule build_sequestration_potentials:
|
|||||||
),
|
),
|
||||||
input:
|
input:
|
||||||
sequestration_potential="data/complete_map_2020_unit_Mt.geojson",
|
sequestration_potential="data/complete_map_2020_unit_Mt.geojson",
|
||||||
regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"),
|
regions_onshore=resources("regions_onshore_base_s_{clusters}.geojson"),
|
||||||
regions_offshore=resources("regions_offshore_elec_s{simpl}_{clusters}.geojson"),
|
regions_offshore=resources("regions_offshore_base_s_{clusters}.geojson"),
|
||||||
output:
|
output:
|
||||||
sequestration_potential=resources(
|
sequestration_potential=resources(
|
||||||
"co2_sequestration_potential_elec_s{simpl}_{clusters}.csv"
|
"co2_sequestration_potential_base_s_{clusters}.csv"
|
||||||
),
|
),
|
||||||
threads: 1
|
threads: 1
|
||||||
resources:
|
resources:
|
||||||
mem_mb=4000,
|
mem_mb=4000,
|
||||||
log:
|
log:
|
||||||
logs("build_sequestration_potentials_s{simpl}_{clusters}.log"),
|
logs("build_sequestration_potentials_{clusters}.log"),
|
||||||
benchmark:
|
benchmark:
|
||||||
benchmarks("build_sequestration_potentials_s{simpl}_{clusters}")
|
benchmarks("build_sequestration_potentials_{clusters}")
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -479,17 +479,17 @@ rule build_sequestration_potentials:
|
|||||||
rule build_salt_cavern_potentials:
|
rule build_salt_cavern_potentials:
|
||||||
input:
|
input:
|
||||||
salt_caverns="data/bundle/h2_salt_caverns_GWh_per_sqkm.geojson",
|
salt_caverns="data/bundle/h2_salt_caverns_GWh_per_sqkm.geojson",
|
||||||
regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"),
|
regions_onshore=resources("regions_onshore_base_s_{clusters}.geojson"),
|
||||||
regions_offshore=resources("regions_offshore_elec_s{simpl}_{clusters}.geojson"),
|
regions_offshore=resources("regions_offshore_base_s_{clusters}.geojson"),
|
||||||
output:
|
output:
|
||||||
h2_cavern_potential=resources("salt_cavern_potentials_s{simpl}_{clusters}.csv"),
|
h2_cavern_potential=resources("salt_cavern_potentials_s_{clusters}.csv"),
|
||||||
threads: 1
|
threads: 1
|
||||||
resources:
|
resources:
|
||||||
mem_mb=2000,
|
mem_mb=2000,
|
||||||
log:
|
log:
|
||||||
logs("build_salt_cavern_potentials_s{simpl}_{clusters}.log"),
|
logs("build_salt_cavern_potentials_s_{clusters}.log"),
|
||||||
benchmark:
|
benchmark:
|
||||||
benchmarks("build_salt_cavern_potentials_s{simpl}_{clusters}")
|
benchmarks("build_salt_cavern_potentials_s_{clusters}")
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -625,8 +625,8 @@ rule build_industrial_distribution_key:
|
|||||||
),
|
),
|
||||||
countries=config_provider("countries"),
|
countries=config_provider("countries"),
|
||||||
input:
|
input:
|
||||||
regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"),
|
regions_onshore=resources("regions_onshore_base_s_{clusters}.geojson"),
|
||||||
clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"),
|
clustered_pop_layout=resources("pop_layout_base_s_{clusters}.csv"),
|
||||||
hotmaps="data/Industrial_Database.csv",
|
hotmaps="data/Industrial_Database.csv",
|
||||||
gem_gspt="data/gem/Global-Steel-Plant-Tracker-April-2024-Standard-Copy-V1.xlsx",
|
gem_gspt="data/gem/Global-Steel-Plant-Tracker-April-2024-Standard-Copy-V1.xlsx",
|
||||||
ammonia="data/ammonia_plants.csv",
|
ammonia="data/ammonia_plants.csv",
|
||||||
@ -634,15 +634,15 @@ rule build_industrial_distribution_key:
|
|||||||
refineries_supplement="data/refineries-noneu.csv",
|
refineries_supplement="data/refineries-noneu.csv",
|
||||||
output:
|
output:
|
||||||
industrial_distribution_key=resources(
|
industrial_distribution_key=resources(
|
||||||
"industrial_distribution_key_elec_s{simpl}_{clusters}.csv"
|
"industrial_distribution_key_base_s_{clusters}.csv"
|
||||||
),
|
),
|
||||||
threads: 1
|
threads: 1
|
||||||
resources:
|
resources:
|
||||||
mem_mb=1000,
|
mem_mb=1000,
|
||||||
log:
|
log:
|
||||||
logs("build_industrial_distribution_key_s{simpl}_{clusters}.log"),
|
logs("build_industrial_distribution_key_{clusters}.log"),
|
||||||
benchmark:
|
benchmark:
|
||||||
benchmarks("build_industrial_distribution_key/s{simpl}_{clusters}")
|
benchmarks("build_industrial_distribution_key/s_{clusters}")
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -652,26 +652,24 @@ rule build_industrial_distribution_key:
|
|||||||
rule build_industrial_production_per_node:
|
rule build_industrial_production_per_node:
|
||||||
input:
|
input:
|
||||||
industrial_distribution_key=resources(
|
industrial_distribution_key=resources(
|
||||||
"industrial_distribution_key_elec_s{simpl}_{clusters}.csv"
|
"industrial_distribution_key_base_s_{clusters}.csv"
|
||||||
),
|
),
|
||||||
industrial_production_per_country_tomorrow=resources(
|
industrial_production_per_country_tomorrow=resources(
|
||||||
"industrial_production_per_country_tomorrow_{planning_horizons}.csv"
|
"industrial_production_per_country_tomorrow_{planning_horizons}.csv"
|
||||||
),
|
),
|
||||||
output:
|
output:
|
||||||
industrial_production_per_node=resources(
|
industrial_production_per_node=resources(
|
||||||
"industrial_production_elec_s{simpl}_{clusters}_{planning_horizons}.csv"
|
"industrial_production_base_s_{clusters}_{planning_horizons}.csv"
|
||||||
),
|
),
|
||||||
threads: 1
|
threads: 1
|
||||||
resources:
|
resources:
|
||||||
mem_mb=1000,
|
mem_mb=1000,
|
||||||
log:
|
log:
|
||||||
logs(
|
logs("build_industrial_production_per_node_{clusters}_{planning_horizons}.log"),
|
||||||
"build_industrial_production_per_node_s{simpl}_{clusters}_{planning_horizons}.log"
|
|
||||||
),
|
|
||||||
benchmark:
|
benchmark:
|
||||||
(
|
(
|
||||||
benchmarks(
|
benchmarks(
|
||||||
"build_industrial_production_per_node/s{simpl}_{clusters}_{planning_horizons}"
|
"build_industrial_production_per_node/s_{clusters}_{planning_horizons}"
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
conda:
|
conda:
|
||||||
@ -686,26 +684,26 @@ rule build_industrial_energy_demand_per_node:
|
|||||||
"industry_sector_ratios_{planning_horizons}.csv"
|
"industry_sector_ratios_{planning_horizons}.csv"
|
||||||
),
|
),
|
||||||
industrial_production_per_node=resources(
|
industrial_production_per_node=resources(
|
||||||
"industrial_production_elec_s{simpl}_{clusters}_{planning_horizons}.csv"
|
"industrial_production_base_s_{clusters}_{planning_horizons}.csv"
|
||||||
),
|
),
|
||||||
industrial_energy_demand_per_node_today=resources(
|
industrial_energy_demand_per_node_today=resources(
|
||||||
"industrial_energy_demand_today_elec_s{simpl}_{clusters}.csv"
|
"industrial_energy_demand_today_base_s_{clusters}.csv"
|
||||||
),
|
),
|
||||||
output:
|
output:
|
||||||
industrial_energy_demand_per_node=resources(
|
industrial_energy_demand_per_node=resources(
|
||||||
"industrial_energy_demand_elec_s{simpl}_{clusters}_{planning_horizons}.csv"
|
"industrial_energy_demand_base_s_{clusters}_{planning_horizons}.csv"
|
||||||
),
|
),
|
||||||
threads: 1
|
threads: 1
|
||||||
resources:
|
resources:
|
||||||
mem_mb=1000,
|
mem_mb=1000,
|
||||||
log:
|
log:
|
||||||
logs(
|
logs(
|
||||||
"build_industrial_energy_demand_per_node_s{simpl}_{clusters}_{planning_horizons}.log"
|
"build_industrial_energy_demand_per_node_{clusters}_{planning_horizons}.log"
|
||||||
),
|
),
|
||||||
benchmark:
|
benchmark:
|
||||||
(
|
(
|
||||||
benchmarks(
|
benchmarks(
|
||||||
"build_industrial_energy_demand_per_node/s{simpl}_{clusters}_{planning_horizons}"
|
"build_industrial_energy_demand_per_node/s_{clusters}_{planning_horizons}"
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
conda:
|
conda:
|
||||||
@ -744,22 +742,22 @@ rule build_industrial_energy_demand_per_country_today:
|
|||||||
rule build_industrial_energy_demand_per_node_today:
|
rule build_industrial_energy_demand_per_node_today:
|
||||||
input:
|
input:
|
||||||
industrial_distribution_key=resources(
|
industrial_distribution_key=resources(
|
||||||
"industrial_distribution_key_elec_s{simpl}_{clusters}.csv"
|
"industrial_distribution_key_base_s_{clusters}.csv"
|
||||||
),
|
),
|
||||||
industrial_energy_demand_per_country_today=resources(
|
industrial_energy_demand_per_country_today=resources(
|
||||||
"industrial_energy_demand_per_country_today.csv"
|
"industrial_energy_demand_per_country_today.csv"
|
||||||
),
|
),
|
||||||
output:
|
output:
|
||||||
industrial_energy_demand_per_node_today=resources(
|
industrial_energy_demand_per_node_today=resources(
|
||||||
"industrial_energy_demand_today_elec_s{simpl}_{clusters}.csv"
|
"industrial_energy_demand_today_base_s_{clusters}.csv"
|
||||||
),
|
),
|
||||||
threads: 1
|
threads: 1
|
||||||
resources:
|
resources:
|
||||||
mem_mb=1000,
|
mem_mb=1000,
|
||||||
log:
|
log:
|
||||||
logs("build_industrial_energy_demand_per_node_today_s{simpl}_{clusters}.log"),
|
logs("build_industrial_energy_demand_per_node_today_{clusters}.log"),
|
||||||
benchmark:
|
benchmark:
|
||||||
benchmarks("build_industrial_energy_demand_per_node_today/s{simpl}_{clusters}")
|
benchmarks("build_industrial_energy_demand_per_node_today/s_{clusters}")
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -773,23 +771,23 @@ rule build_retro_cost:
|
|||||||
input:
|
input:
|
||||||
building_stock="data/retro/data_building_stock.csv",
|
building_stock="data/retro/data_building_stock.csv",
|
||||||
data_tabula="data/bundle/retro/tabula-calculator-calcsetbuilding.csv",
|
data_tabula="data/bundle/retro/tabula-calculator-calcsetbuilding.csv",
|
||||||
air_temperature=resources("temp_air_total_elec_s{simpl}_{clusters}.nc"),
|
air_temperature=resources("temp_air_total_base_s_{clusters}.nc"),
|
||||||
u_values_PL="data/retro/u_values_poland.csv",
|
u_values_PL="data/retro/u_values_poland.csv",
|
||||||
tax_w="data/retro/electricity_taxes_eu.csv",
|
tax_w="data/retro/electricity_taxes_eu.csv",
|
||||||
construction_index="data/retro/comparative_level_investment.csv",
|
construction_index="data/retro/comparative_level_investment.csv",
|
||||||
floor_area_missing="data/retro/floor_area_missing.csv",
|
floor_area_missing="data/retro/floor_area_missing.csv",
|
||||||
clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"),
|
clustered_pop_layout=resources("pop_layout_base_s_{clusters}.csv"),
|
||||||
cost_germany="data/retro/retro_cost_germany.csv",
|
cost_germany="data/retro/retro_cost_germany.csv",
|
||||||
window_assumptions="data/retro/window_assumptions.csv",
|
window_assumptions="data/retro/window_assumptions.csv",
|
||||||
output:
|
output:
|
||||||
retro_cost=resources("retro_cost_elec_s{simpl}_{clusters}.csv"),
|
retro_cost=resources("retro_cost_base_s_{clusters}.csv"),
|
||||||
floor_area=resources("floor_area_elec_s{simpl}_{clusters}.csv"),
|
floor_area=resources("floor_area_base_s_{clusters}.csv"),
|
||||||
resources:
|
resources:
|
||||||
mem_mb=1000,
|
mem_mb=1000,
|
||||||
log:
|
log:
|
||||||
logs("build_retro_cost_s{simpl}_{clusters}.log"),
|
logs("build_retro_cost_{clusters}.log"),
|
||||||
benchmark:
|
benchmark:
|
||||||
benchmarks("build_retro_cost/s{simpl}_{clusters}")
|
benchmarks("build_retro_cost/s_{clusters}")
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -801,14 +799,14 @@ rule build_population_weighted_energy_totals:
|
|||||||
snapshots=config_provider("snapshots"),
|
snapshots=config_provider("snapshots"),
|
||||||
input:
|
input:
|
||||||
energy_totals=resources("{kind}_totals.csv"),
|
energy_totals=resources("{kind}_totals.csv"),
|
||||||
clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"),
|
clustered_pop_layout=resources("pop_layout_base_s_{clusters}.csv"),
|
||||||
output:
|
output:
|
||||||
resources("pop_weighted_{kind}_totals_s{simpl}_{clusters}.csv"),
|
resources("pop_weighted_{kind}_totals_s_{clusters}.csv"),
|
||||||
threads: 1
|
threads: 1
|
||||||
resources:
|
resources:
|
||||||
mem_mb=2000,
|
mem_mb=2000,
|
||||||
log:
|
log:
|
||||||
logs("build_population_weighted_{kind}_totals_s{simpl}_{clusters}.log"),
|
logs("build_population_weighted_{kind}_totals_{clusters}.log"),
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -819,17 +817,17 @@ rule build_shipping_demand:
|
|||||||
input:
|
input:
|
||||||
ports="data/attributed_ports.json",
|
ports="data/attributed_ports.json",
|
||||||
scope=resources("europe_shape.geojson"),
|
scope=resources("europe_shape.geojson"),
|
||||||
regions=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"),
|
regions=resources("regions_onshore_base_s_{clusters}.geojson"),
|
||||||
demand=resources("energy_totals.csv"),
|
demand=resources("energy_totals.csv"),
|
||||||
params:
|
params:
|
||||||
energy_totals_year=config_provider("energy", "energy_totals_year"),
|
energy_totals_year=config_provider("energy", "energy_totals_year"),
|
||||||
output:
|
output:
|
||||||
resources("shipping_demand_s{simpl}_{clusters}.csv"),
|
resources("shipping_demand_s_{clusters}.csv"),
|
||||||
threads: 1
|
threads: 1
|
||||||
resources:
|
resources:
|
||||||
mem_mb=2000,
|
mem_mb=2000,
|
||||||
log:
|
log:
|
||||||
logs("build_shipping_demand_s{simpl}_{clusters}.log"),
|
logs("build_shipping_demand_s_{clusters}.log"),
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -843,24 +841,24 @@ rule build_transport_demand:
|
|||||||
sector=config_provider("sector"),
|
sector=config_provider("sector"),
|
||||||
energy_totals_year=config_provider("energy", "energy_totals_year"),
|
energy_totals_year=config_provider("energy", "energy_totals_year"),
|
||||||
input:
|
input:
|
||||||
clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"),
|
clustered_pop_layout=resources("pop_layout_base_s_{clusters}.csv"),
|
||||||
pop_weighted_energy_totals=resources(
|
pop_weighted_energy_totals=resources(
|
||||||
"pop_weighted_energy_totals_s{simpl}_{clusters}.csv"
|
"pop_weighted_energy_totals_s_{clusters}.csv"
|
||||||
),
|
),
|
||||||
transport_data=resources("transport_data.csv"),
|
transport_data=resources("transport_data.csv"),
|
||||||
traffic_data_KFZ="data/bundle/emobility/KFZ__count",
|
traffic_data_KFZ="data/bundle/emobility/KFZ__count",
|
||||||
traffic_data_Pkw="data/bundle/emobility/Pkw__count",
|
traffic_data_Pkw="data/bundle/emobility/Pkw__count",
|
||||||
temp_air_total=resources("temp_air_total_elec_s{simpl}_{clusters}.nc"),
|
temp_air_total=resources("temp_air_total_base_s_{clusters}.nc"),
|
||||||
output:
|
output:
|
||||||
transport_demand=resources("transport_demand_s{simpl}_{clusters}.csv"),
|
transport_demand=resources("transport_demand_s_{clusters}.csv"),
|
||||||
transport_data=resources("transport_data_s{simpl}_{clusters}.csv"),
|
transport_data=resources("transport_data_s_{clusters}.csv"),
|
||||||
avail_profile=resources("avail_profile_s{simpl}_{clusters}.csv"),
|
avail_profile=resources("avail_profile_s_{clusters}.csv"),
|
||||||
dsm_profile=resources("dsm_profile_s{simpl}_{clusters}.csv"),
|
dsm_profile=resources("dsm_profile_s_{clusters}.csv"),
|
||||||
threads: 1
|
threads: 1
|
||||||
resources:
|
resources:
|
||||||
mem_mb=2000,
|
mem_mb=2000,
|
||||||
log:
|
log:
|
||||||
logs("build_transport_demand_s{simpl}_{clusters}.log"),
|
logs("build_transport_demand_s_{clusters}.log"),
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -873,16 +871,16 @@ rule build_district_heat_share:
|
|||||||
energy_totals_year=config_provider("energy", "energy_totals_year"),
|
energy_totals_year=config_provider("energy", "energy_totals_year"),
|
||||||
input:
|
input:
|
||||||
district_heat_share=resources("district_heat_share.csv"),
|
district_heat_share=resources("district_heat_share.csv"),
|
||||||
clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"),
|
clustered_pop_layout=resources("pop_layout_base_s_{clusters}.csv"),
|
||||||
output:
|
output:
|
||||||
district_heat_share=resources(
|
district_heat_share=resources(
|
||||||
"district_heat_share_elec_s{simpl}_{clusters}_{planning_horizons}.csv"
|
"district_heat_share_base_s_{clusters}_{planning_horizons}.csv"
|
||||||
),
|
),
|
||||||
threads: 1
|
threads: 1
|
||||||
resources:
|
resources:
|
||||||
mem_mb=1000,
|
mem_mb=1000,
|
||||||
log:
|
log:
|
||||||
logs("build_district_heat_share_s{simpl}_{clusters}_{planning_horizons}.log"),
|
logs("build_district_heat_share_{clusters}_{planning_horizons}.log"),
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -896,27 +894,27 @@ rule build_existing_heating_distribution:
|
|||||||
existing_capacities=config_provider("existing_capacities"),
|
existing_capacities=config_provider("existing_capacities"),
|
||||||
input:
|
input:
|
||||||
existing_heating="data/existing_infrastructure/existing_heating_raw.csv",
|
existing_heating="data/existing_infrastructure/existing_heating_raw.csv",
|
||||||
clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"),
|
clustered_pop_layout=resources("pop_layout_base_s_{clusters}.csv"),
|
||||||
clustered_pop_energy_layout=resources(
|
clustered_pop_energy_layout=resources(
|
||||||
"pop_weighted_energy_totals_s{simpl}_{clusters}.csv"
|
"pop_weighted_energy_totals_s_{clusters}.csv"
|
||||||
),
|
),
|
||||||
district_heat_share=resources(
|
district_heat_share=resources(
|
||||||
"district_heat_share_elec_s{simpl}_{clusters}_{planning_horizons}.csv"
|
"district_heat_share_base_s_{clusters}_{planning_horizons}.csv"
|
||||||
),
|
),
|
||||||
output:
|
output:
|
||||||
existing_heating_distribution=resources(
|
existing_heating_distribution=resources(
|
||||||
"existing_heating_distribution_elec_s{simpl}_{clusters}_{planning_horizons}.csv"
|
"existing_heating_distribution_base_s_{clusters}_{planning_horizons}.csv"
|
||||||
),
|
),
|
||||||
threads: 1
|
threads: 1
|
||||||
resources:
|
resources:
|
||||||
mem_mb=2000,
|
mem_mb=2000,
|
||||||
log:
|
log:
|
||||||
logs(
|
logs(
|
||||||
"build_existing_heating_distribution_elec_s{simpl}_{clusters}_{planning_horizons}.log"
|
"build_existing_heating_distribution_base_s_{clusters}_{planning_horizons}.log"
|
||||||
),
|
),
|
||||||
benchmark:
|
benchmark:
|
||||||
benchmarks(
|
benchmarks(
|
||||||
"build_existing_heating_distribution/elec_s{simpl}_{clusters}_{planning_horizons}"
|
"build_existing_heating_distribution/base_s_{clusters}_{planning_horizons}"
|
||||||
)
|
)
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
@ -930,28 +928,28 @@ rule time_aggregation:
|
|||||||
drop_leap_day=config_provider("enable", "drop_leap_day"),
|
drop_leap_day=config_provider("enable", "drop_leap_day"),
|
||||||
solver_name=config_provider("solving", "solver", "name"),
|
solver_name=config_provider("solving", "solver", "name"),
|
||||||
input:
|
input:
|
||||||
network=resources("networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"),
|
network=resources("networks/base_s_{clusters}_elec_l{ll}_{opts}.nc"),
|
||||||
hourly_heat_demand_total=lambda w: (
|
hourly_heat_demand_total=lambda w: (
|
||||||
resources("hourly_heat_demand_total_elec_s{simpl}_{clusters}.nc")
|
resources("hourly_heat_demand_total_base_s_{clusters}.nc")
|
||||||
if config_provider("sector", "heating")(w)
|
if config_provider("sector", "heating")(w)
|
||||||
else []
|
else []
|
||||||
),
|
),
|
||||||
solar_thermal_total=lambda w: (
|
solar_thermal_total=lambda w: (
|
||||||
resources("solar_thermal_total_elec_s{simpl}_{clusters}.nc")
|
resources("solar_thermal_total_base_s_{clusters}.nc")
|
||||||
if config_provider("sector", "solar_thermal")(w)
|
if config_provider("sector", "solar_thermal")(w)
|
||||||
else []
|
else []
|
||||||
),
|
),
|
||||||
output:
|
output:
|
||||||
snapshot_weightings=resources(
|
snapshot_weightings=resources(
|
||||||
"snapshot_weightings_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.csv"
|
"snapshot_weightings_base_s_{clusters}_elec_l{ll}_{opts}.csv"
|
||||||
),
|
),
|
||||||
threads: 1
|
threads: 1
|
||||||
resources:
|
resources:
|
||||||
mem_mb=5000,
|
mem_mb=5000,
|
||||||
log:
|
log:
|
||||||
logs("time_aggregation_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.log"),
|
logs("time_aggregation_base_s_{clusters}_elec_l{ll}_{opts}.log"),
|
||||||
benchmark:
|
benchmark:
|
||||||
benchmarks("time_aggregation_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}")
|
benchmarks("time_aggregation_base_s_{clusters}_elec_l{ll}_{opts}")
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
@@ -960,7 +958,7 @@ rule time_aggregation:

def input_profile_offwind(w):
    return {
-        f"profile_{tech}": resources(f"profile_{tech}.nc")
+        f"profile_{tech}": resources("profile_{clusters}_" + tech + ".nc")
        for tech in ["offwind-ac", "offwind-dc", "offwind-float"]
        if (tech in config_provider("electricity", "renewable_carriers")(w))
    }
||||||
@ -973,21 +971,21 @@ rule build_egs_potentials:
|
|||||||
costs=config_provider("costs"),
|
costs=config_provider("costs"),
|
||||||
input:
|
input:
|
||||||
egs_cost="data/egs_costs.json",
|
egs_cost="data/egs_costs.json",
|
||||||
regions=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"),
|
regions=resources("regions_onshore_base_s_{clusters}.geojson"),
|
||||||
air_temperature=(
|
air_temperature=(
|
||||||
resources("temp_air_total_elec_s{simpl}_{clusters}.nc")
|
resources("temp_air_total_base_s_{clusters}.nc")
|
||||||
if config_provider("sector", "enhanced_geothermal", "var_cf")
|
if config_provider("sector", "enhanced_geothermal", "var_cf")
|
||||||
else []
|
else []
|
||||||
),
|
),
|
||||||
output:
|
output:
|
||||||
egs_potentials=resources("egs_potentials_s{simpl}_{clusters}.csv"),
|
egs_potentials=resources("egs_potentials_{clusters}.csv"),
|
||||||
egs_overlap=resources("egs_overlap_s{simpl}_{clusters}.csv"),
|
egs_overlap=resources("egs_overlap_{clusters}.csv"),
|
||||||
egs_capacity_factors=resources("egs_capacity_factors_s{simpl}_{clusters}.csv"),
|
egs_capacity_factors=resources("egs_capacity_factors_{clusters}.csv"),
|
||||||
threads: 1
|
threads: 1
|
||||||
resources:
|
resources:
|
||||||
mem_mb=2000,
|
mem_mb=2000,
|
||||||
log:
|
log:
|
||||||
logs("build_egs_potentials_s{simpl}_{clusters}.log"),
|
logs("build_egs_potentials_{clusters}.log"),
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
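Several inputs in this and the following rule share one idiom: a resource is requested only when its config switch is on and collapses to an empty list otherwise, so Snakemake simply omits it. A small hedged sketch of that pattern outside Snakemake; the config dict below is a stand-in for config_provider, not the project's API:

    # Sketch of the optional-input idiom used for var_cf, retrofitting,
    # solar_thermal and the EGS inputs; values are illustrative.
    config = {"sector": {"enhanced_geothermal": {"var_cf": True}}}

    def optional_input(path, enabled):
        return path if enabled else []

    air_temperature = optional_input(
        "resources/temp_air_total_base_s_37.nc",
        config["sector"]["enhanced_geothermal"]["var_cf"],
    )
    print(air_temperature)  # resources/temp_air_total_base_s_37.nc
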
@ -1005,6 +1003,7 @@ rule prepare_sector_network:
|
|||||||
costs=config_provider("costs"),
|
costs=config_provider("costs"),
|
||||||
sector=config_provider("sector"),
|
sector=config_provider("sector"),
|
||||||
industry=config_provider("industry"),
|
industry=config_provider("industry"),
|
||||||
|
renewable=config_provider("renewable"),
|
||||||
lines=config_provider("lines"),
|
lines=config_provider("lines"),
|
||||||
pypsa_eur=config_provider("pypsa_eur"),
|
pypsa_eur=config_provider("pypsa_eur"),
|
||||||
length_factor=config_provider("lines", "length_factor"),
|
length_factor=config_provider("lines", "length_factor"),
|
||||||
@ -1022,15 +1021,15 @@ rule prepare_sector_network:
|
|||||||
**rules.cluster_gas_network.output,
|
**rules.cluster_gas_network.output,
|
||||||
**rules.build_gas_input_locations.output,
|
**rules.build_gas_input_locations.output,
|
||||||
snapshot_weightings=resources(
|
snapshot_weightings=resources(
|
||||||
"snapshot_weightings_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.csv"
|
"snapshot_weightings_base_s_{clusters}_elec_l{ll}_{opts}.csv"
|
||||||
),
|
),
|
||||||
retro_cost=lambda w: (
|
retro_cost=lambda w: (
|
||||||
resources("retro_cost_elec_s{simpl}_{clusters}.csv")
|
resources("retro_cost_base_s_{clusters}.csv")
|
||||||
if config_provider("sector", "retrofitting", "retro_endogen")(w)
|
if config_provider("sector", "retrofitting", "retro_endogen")(w)
|
||||||
else []
|
else []
|
||||||
),
|
),
|
||||||
floor_area=lambda w: (
|
floor_area=lambda w: (
|
||||||
resources("floor_area_elec_s{simpl}_{clusters}.csv")
|
resources("floor_area_base_s_{clusters}.csv")
|
||||||
if config_provider("sector", "retrofitting", "retro_endogen")(w)
|
if config_provider("sector", "retrofitting", "retro_endogen")(w)
|
||||||
else []
|
else []
|
||||||
),
|
),
|
||||||
@ -1041,96 +1040,91 @@ rule prepare_sector_network:
|
|||||||
else []
|
else []
|
||||||
),
|
),
|
||||||
sequestration_potential=lambda w: (
|
sequestration_potential=lambda w: (
|
||||||
resources("co2_sequestration_potential_elec_s{simpl}_{clusters}.csv")
|
resources("co2_sequestration_potential_base_s_{clusters}.csv")
|
||||||
if config_provider(
|
if config_provider(
|
||||||
"sector", "regional_co2_sequestration_potential", "enable"
|
"sector", "regional_co2_sequestration_potential", "enable"
|
||||||
)(w)
|
)(w)
|
||||||
else []
|
else []
|
||||||
),
|
),
|
||||||
network=resources("networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"),
|
network=resources("networks/base_s_{clusters}_elec_l{ll}_{opts}.nc"),
|
||||||
eurostat="data/eurostat/Balances-April2023",
|
eurostat="data/eurostat/Balances-April2023",
|
||||||
pop_weighted_energy_totals=resources(
|
pop_weighted_energy_totals=resources(
|
||||||
"pop_weighted_energy_totals_s{simpl}_{clusters}.csv"
|
"pop_weighted_energy_totals_s_{clusters}.csv"
|
||||||
),
|
),
|
||||||
pop_weighted_heat_totals=resources(
|
pop_weighted_heat_totals=resources("pop_weighted_heat_totals_s_{clusters}.csv"),
|
||||||
"pop_weighted_heat_totals_s{simpl}_{clusters}.csv"
|
shipping_demand=resources("shipping_demand_s_{clusters}.csv"),
|
||||||
),
|
transport_demand=resources("transport_demand_s_{clusters}.csv"),
|
||||||
shipping_demand=resources("shipping_demand_s{simpl}_{clusters}.csv"),
|
transport_data=resources("transport_data_s_{clusters}.csv"),
|
||||||
transport_demand=resources("transport_demand_s{simpl}_{clusters}.csv"),
|
avail_profile=resources("avail_profile_s_{clusters}.csv"),
|
||||||
transport_data=resources("transport_data_s{simpl}_{clusters}.csv"),
|
dsm_profile=resources("dsm_profile_s_{clusters}.csv"),
|
||||||
avail_profile=resources("avail_profile_s{simpl}_{clusters}.csv"),
|
|
||||||
dsm_profile=resources("dsm_profile_s{simpl}_{clusters}.csv"),
|
|
||||||
co2_totals_name=resources("co2_totals.csv"),
|
co2_totals_name=resources("co2_totals.csv"),
|
||||||
co2="data/bundle/eea/UNFCCC_v23.csv",
|
co2="data/bundle/eea/UNFCCC_v23.csv",
|
||||||
biomass_potentials=lambda w: (
|
biomass_potentials=lambda w: (
|
||||||
resources(
|
resources(
|
||||||
"biomass_potentials_s{simpl}_{clusters}_"
|
"biomass_potentials_s_{clusters}_"
|
||||||
+ "{}.csv".format(config_provider("biomass", "year")(w))
|
+ "{}.csv".format(config_provider("biomass", "year")(w))
|
||||||
)
|
)
|
||||||
if config_provider("foresight")(w) == "overnight"
|
if config_provider("foresight")(w) == "overnight"
|
||||||
else resources(
|
else resources("biomass_potentials_s_{clusters}_{planning_horizons}.csv")
|
||||||
"biomass_potentials_s{simpl}_{clusters}_{planning_horizons}.csv"
|
|
||||||
)
|
|
||||||
),
|
),
|
||||||
costs=lambda w: (
|
costs=lambda w: (
|
||||||
resources("costs_{}.csv".format(config_provider("costs", "year")(w)))
|
resources("costs_{}.csv".format(config_provider("costs", "year")(w)))
|
||||||
if config_provider("foresight")(w) == "overnight"
|
if config_provider("foresight")(w) == "overnight"
|
||||||
else resources("costs_{planning_horizons}.csv")
|
else resources("costs_{planning_horizons}.csv")
|
||||||
),
|
),
|
||||||
h2_cavern=resources("salt_cavern_potentials_s{simpl}_{clusters}.csv"),
|
h2_cavern=resources("salt_cavern_potentials_s_{clusters}.csv"),
|
||||||
busmap_s=resources("busmap_elec_s{simpl}.csv"),
|
busmap_s=resources("busmap_base_s.csv"),
|
||||||
busmap=resources("busmap_elec_s{simpl}_{clusters}.csv"),
|
busmap=resources("busmap_base_s_{clusters}.csv"),
|
||||||
clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"),
|
clustered_pop_layout=resources("pop_layout_base_s_{clusters}.csv"),
|
||||||
simplified_pop_layout=resources("pop_layout_elec_s{simpl}.csv"),
|
|
||||||
industrial_demand=resources(
|
industrial_demand=resources(
|
||||||
"industrial_energy_demand_elec_s{simpl}_{clusters}_{planning_horizons}.csv"
|
"industrial_energy_demand_base_s_{clusters}_{planning_horizons}.csv"
|
||||||
),
|
),
|
||||||
hourly_heat_demand_total=resources(
|
hourly_heat_demand_total=resources(
|
||||||
"hourly_heat_demand_total_elec_s{simpl}_{clusters}.nc"
|
"hourly_heat_demand_total_base_s_{clusters}.nc"
|
||||||
),
|
),
|
||||||
industrial_production=resources(
|
industrial_production=resources(
|
||||||
"industrial_production_elec_s{simpl}_{clusters}_{planning_horizons}.csv"
|
"industrial_production_base_s_{clusters}_{planning_horizons}.csv"
|
||||||
),
|
),
|
||||||
district_heat_share=resources(
|
district_heat_share=resources(
|
||||||
"district_heat_share_elec_s{simpl}_{clusters}_{planning_horizons}.csv"
|
"district_heat_share_base_s_{clusters}_{planning_horizons}.csv"
|
||||||
),
|
),
|
||||||
heating_efficiencies=resources("heating_efficiencies.csv"),
|
heating_efficiencies=resources("heating_efficiencies.csv"),
|
||||||
temp_soil_total=resources("temp_soil_total_elec_s{simpl}_{clusters}.nc"),
|
temp_soil_total=resources("temp_soil_total_base_s_{clusters}.nc"),
|
||||||
temp_air_total=resources("temp_air_total_elec_s{simpl}_{clusters}.nc"),
|
temp_air_total=resources("temp_air_total_base_s_{clusters}.nc"),
|
||||||
cop_profiles=resources("cop_profiles_elec_s{simpl}_{clusters}.nc"),
|
cop_profiles=resources("cop_profiles_base_s_{clusters}.nc"),
|
||||||
solar_thermal_total=lambda w: (
|
solar_thermal_total=lambda w: (
|
||||||
resources("solar_thermal_total_elec_s{simpl}_{clusters}.nc")
|
resources("solar_thermal_total_base_s_{clusters}.nc")
|
||||||
if config_provider("sector", "solar_thermal")(w)
|
if config_provider("sector", "solar_thermal")(w)
|
||||||
else []
|
else []
|
||||||
),
|
),
|
||||||
egs_potentials=lambda w: (
|
egs_potentials=lambda w: (
|
||||||
resources("egs_potentials_s{simpl}_{clusters}.csv")
|
resources("egs_potentials_{clusters}.csv")
|
||||||
if config_provider("sector", "enhanced_geothermal", "enable")(w)
|
if config_provider("sector", "enhanced_geothermal", "enable")(w)
|
||||||
else []
|
else []
|
||||||
),
|
),
|
||||||
egs_overlap=lambda w: (
|
egs_overlap=lambda w: (
|
||||||
resources("egs_overlap_s{simpl}_{clusters}.csv")
|
resources("egs_overlap_{clusters}.csv")
|
||||||
if config_provider("sector", "enhanced_geothermal", "enable")(w)
|
if config_provider("sector", "enhanced_geothermal", "enable")(w)
|
||||||
else []
|
else []
|
||||||
),
|
),
|
||||||
egs_capacity_factors=lambda w: (
|
egs_capacity_factors=lambda w: (
|
||||||
resources("egs_capacity_factors_s{simpl}_{clusters}.csv")
|
resources("egs_capacity_factors_{clusters}.csv")
|
||||||
if config_provider("sector", "enhanced_geothermal", "enable")(w)
|
if config_provider("sector", "enhanced_geothermal", "enable")(w)
|
||||||
else []
|
else []
|
||||||
),
|
),
|
||||||
output:
|
output:
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
+ "prenetworks/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
||||||
threads: 1
|
threads: 1
|
||||||
resources:
|
resources:
|
||||||
mem_mb=2000,
|
mem_mb=2000,
|
||||||
log:
|
log:
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "logs/prepare_sector_network_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log",
|
+ "logs/prepare_sector_network_base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log",
|
||||||
benchmark:
|
benchmark:
|
||||||
(
|
(
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "benchmarks/prepare_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
|
+ "benchmarks/prepare_sector_network/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
|
||||||
)
|
)
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
|
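All prepare_sector_network outputs, logs and benchmarks now share the shorter base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons} stem. A rough sketch of how a concrete prenetwork filename is assembled from those wildcards; RESULTS and the example values are placeholders, Snakemake fills the wildcards itself:

    RESULTS = "results/test/"
    template = (
        RESULTS
        + "prenetworks/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc"
    )
    print(template.format(clusters=37, ll="vopt", opts="",
                          sector_opts="730H", planning_horizons=2030))
    # results/test/prenetworks/base_s_37_lvopt__730H_2030.nc
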
@ -6,7 +6,6 @@
|
|||||||
localrules:
|
localrules:
|
||||||
all,
|
all,
|
||||||
cluster_networks,
|
cluster_networks,
|
||||||
extra_components_networks,
|
|
||||||
prepare_elec_networks,
|
prepare_elec_networks,
|
||||||
prepare_sector_networks,
|
prepare_sector_networks,
|
||||||
solve_elec_networks,
|
solve_elec_networks,
|
||||||
@ -16,16 +15,7 @@ localrules:
|
|||||||
rule cluster_networks:
|
rule cluster_networks:
|
||||||
input:
|
input:
|
||||||
expand(
|
expand(
|
||||||
resources("networks/elec_s{simpl}_{clusters}.nc"),
|
resources("networks/base_s_{clusters}.nc"),
|
||||||
**config["scenario"],
|
|
||||||
run=config["run"]["name"],
|
|
||||||
),
|
|
||||||
|
|
||||||
|
|
||||||
rule extra_components_networks:
|
|
||||||
input:
|
|
||||||
expand(
|
|
||||||
resources("networks/elec_s{simpl}_{clusters}_ec.nc"),
|
|
||||||
**config["scenario"],
|
**config["scenario"],
|
||||||
run=config["run"]["name"],
|
run=config["run"]["name"],
|
||||||
),
|
),
|
||||||
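The collector rule now targets the clustered networks directly, and the separate extra-components stage (*_ec.nc) disappears entirely. A simplified, hedged stand-in for Snakemake's expand() shows what the new target list could look like; the path layout is illustrative:

    from itertools import product

    # Simplified stand-in for Snakemake's expand(), for illustration only.
    def expand(pattern, **wildcards):
        keys = list(wildcards)
        return [
            pattern.format(**dict(zip(keys, values)))
            for values in product(*(wildcards[k] for k in keys))
        ]

    print(expand("resources/networks/base_s_{clusters}.nc", clusters=[37, 128]))
    # ['resources/networks/base_s_37.nc', 'resources/networks/base_s_128.nc']
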
@ -34,7 +24,7 @@ rule extra_components_networks:
|
|||||||
rule prepare_elec_networks:
|
rule prepare_elec_networks:
|
||||||
input:
|
input:
|
||||||
expand(
|
expand(
|
||||||
resources("networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"),
|
resources("networks/base_s_{clusters}_elec_l{ll}_{opts}.nc"),
|
||||||
**config["scenario"],
|
**config["scenario"],
|
||||||
run=config["run"]["name"],
|
run=config["run"]["name"],
|
||||||
),
|
),
|
||||||
@ -44,7 +34,7 @@ rule prepare_sector_networks:
|
|||||||
input:
|
input:
|
||||||
expand(
|
expand(
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
+ "prenetworks/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
||||||
**config["scenario"],
|
**config["scenario"],
|
||||||
run=config["run"]["name"],
|
run=config["run"]["name"],
|
||||||
),
|
),
|
||||||
@ -53,7 +43,7 @@ rule prepare_sector_networks:
|
|||||||
rule solve_elec_networks:
|
rule solve_elec_networks:
|
||||||
input:
|
input:
|
||||||
expand(
|
expand(
|
||||||
RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
|
RESULTS + "networks/base_s_{clusters}_elec_l{ll}_{opts}.nc",
|
||||||
**config["scenario"],
|
**config["scenario"],
|
||||||
run=config["run"]["name"],
|
run=config["run"]["name"],
|
||||||
),
|
),
|
||||||
@ -63,7 +53,7 @@ rule solve_sector_networks:
|
|||||||
input:
|
input:
|
||||||
expand(
|
expand(
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
+ "postnetworks/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
||||||
**config["scenario"],
|
**config["scenario"],
|
||||||
run=config["run"]["name"],
|
run=config["run"]["name"],
|
||||||
),
|
),
|
||||||
@ -73,7 +63,7 @@ rule solve_sector_networks_perfect:
|
|||||||
input:
|
input:
|
||||||
expand(
|
expand(
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf",
|
+ "maps/base_s_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf",
|
||||||
**config["scenario"],
|
**config["scenario"],
|
||||||
run=config["run"]["name"],
|
run=config["run"]["name"],
|
||||||
),
|
),
|
||||||
@ -82,14 +72,13 @@ rule solve_sector_networks_perfect:
|
|||||||
rule validate_elec_networks:
|
rule validate_elec_networks:
|
||||||
input:
|
input:
|
||||||
expand(
|
expand(
|
||||||
RESULTS
|
RESULTS + "figures/.statistics_plots_base_s_{clusters}_elec_l{ll}_{opts}",
|
||||||
+ "figures/.statistics_plots_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}",
|
|
||||||
**config["scenario"],
|
**config["scenario"],
|
||||||
run=config["run"]["name"],
|
run=config["run"]["name"],
|
||||||
),
|
),
|
||||||
expand(
|
expand(
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "figures/.validation_{kind}_plots_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}",
|
+ "figures/.validation_{kind}_plots_base_s_{clusters}_elec_l{ll}_{opts}",
|
||||||
**config["scenario"],
|
**config["scenario"],
|
||||||
run=config["run"]["name"],
|
run=config["run"]["name"],
|
||||||
kind=["production", "prices", "cross_border"],
|
kind=["production", "prices", "cross_border"],
|
||||||
|
@ -98,9 +98,7 @@ def memory(w):
|
|||||||
if m is not None:
|
if m is not None:
|
||||||
factor *= int(m.group(1)) / 8760
|
factor *= int(m.group(1)) / 8760
|
||||||
break
|
break
|
||||||
if w.clusters.endswith("m") or w.clusters.endswith("c"):
|
if w.clusters == "all":
|
||||||
return int(factor * (55000 + 600 * int(w.clusters[:-1])))
|
|
||||||
elif w.clusters == "all":
|
|
||||||
return int(factor * (18000 + 180 * 4000))
|
return int(factor * (18000 + 180 * 4000))
|
||||||
else:
|
else:
|
||||||
return int(factor * (10000 + 195 * int(w.clusters)))
|
return int(factor * (10000 + 195 * int(w.clusters)))
|
||||||
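With the m/c-suffixed cluster wildcards no longer produced at this stage, the memory heuristic reduces to two cases. A runnable sketch of the simplified estimate, with the Snakemake wildcard object replaced by a plain string and the time-resolution factor passed in directly:

    # Rough sketch of the simplified memory heuristic after the edit.
    def estimate_mem_mb(clusters: str, factor: float = 1.0) -> int:
        if clusters == "all":
            return int(factor * (18000 + 180 * 4000))
        return int(factor * (10000 + 195 * int(clusters)))

    print(estimate_mem_mb("37"))   # 17215
    print(estimate_mem_mb("all"))  # 738000
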
@ -144,7 +142,7 @@ def solved_previous_horizon(w):
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_"
|
+ "postnetworks/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_"
|
||||||
+ planning_horizon_p
|
+ planning_horizon_p
|
||||||
+ ".nc"
|
+ ".nc"
|
||||||
)
|
)
|
||||||
|
@ -9,17 +9,15 @@ if config["foresight"] != "perfect":
|
|||||||
params:
|
params:
|
||||||
plotting=config_provider("plotting"),
|
plotting=config_provider("plotting"),
|
||||||
input:
|
input:
|
||||||
network=resources("networks/elec_s{simpl}_{clusters}.nc"),
|
network=resources("networks/base_s_{clusters}.nc"),
|
||||||
regions_onshore=resources(
|
regions_onshore=resources("regions_onshore_base_s_{clusters}.geojson"),
|
||||||
"regions_onshore_elec_s{simpl}_{clusters}.geojson"
|
|
||||||
),
|
|
||||||
output:
|
output:
|
||||||
map=resources("maps/power-network-s{simpl}-{clusters}.pdf"),
|
map=resources("maps/power-network-s-{clusters}.pdf"),
|
||||||
threads: 1
|
threads: 1
|
||||||
resources:
|
resources:
|
||||||
mem_mb=4000,
|
mem_mb=4000,
|
||||||
benchmark:
|
benchmark:
|
||||||
benchmarks("plot_power_network_clustered/elec_s{simpl}_{clusters}")
|
benchmarks("plot_power_network_clustered/base_s_{clusters}")
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -30,21 +28,21 @@ if config["foresight"] != "perfect":
|
|||||||
plotting=config_provider("plotting"),
|
plotting=config_provider("plotting"),
|
||||||
input:
|
input:
|
||||||
network=RESULTS
|
network=RESULTS
|
||||||
+ "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
+ "postnetworks/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
||||||
regions=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"),
|
regions=resources("regions_onshore_base_s_{clusters}.geojson"),
|
||||||
output:
|
output:
|
||||||
map=RESULTS
|
map=RESULTS
|
||||||
+ "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf",
|
+ "maps/base_s_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf",
|
||||||
threads: 2
|
threads: 2
|
||||||
resources:
|
resources:
|
||||||
mem_mb=10000,
|
mem_mb=10000,
|
||||||
log:
|
log:
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "logs/plot_power_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log",
|
+ "logs/plot_power_network/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log",
|
||||||
benchmark:
|
benchmark:
|
||||||
(
|
(
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "benchmarks/plot_power_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
|
+ "benchmarks/plot_power_network/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
|
||||||
)
|
)
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
@ -57,21 +55,21 @@ if config["foresight"] != "perfect":
|
|||||||
foresight=config_provider("foresight"),
|
foresight=config_provider("foresight"),
|
||||||
input:
|
input:
|
||||||
network=RESULTS
|
network=RESULTS
|
||||||
+ "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
+ "postnetworks/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
||||||
regions=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"),
|
regions=resources("regions_onshore_base_s_{clusters}.geojson"),
|
||||||
output:
|
output:
|
||||||
map=RESULTS
|
map=RESULTS
|
||||||
+ "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-h2_network_{planning_horizons}.pdf",
|
+ "maps/base_s_{clusters}_l{ll}_{opts}_{sector_opts}-h2_network_{planning_horizons}.pdf",
|
||||||
threads: 2
|
threads: 2
|
||||||
resources:
|
resources:
|
||||||
mem_mb=10000,
|
mem_mb=10000,
|
||||||
log:
|
log:
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "logs/plot_hydrogen_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log",
|
+ "logs/plot_hydrogen_network/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log",
|
||||||
benchmark:
|
benchmark:
|
||||||
(
|
(
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "benchmarks/plot_hydrogen_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
|
+ "benchmarks/plot_hydrogen_network/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
|
||||||
)
|
)
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
@ -83,21 +81,21 @@ if config["foresight"] != "perfect":
|
|||||||
plotting=config_provider("plotting"),
|
plotting=config_provider("plotting"),
|
||||||
input:
|
input:
|
||||||
network=RESULTS
|
network=RESULTS
|
||||||
+ "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
+ "postnetworks/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
||||||
regions=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"),
|
regions=resources("regions_onshore_base_s_{clusters}.geojson"),
|
||||||
output:
|
output:
|
||||||
map=RESULTS
|
map=RESULTS
|
||||||
+ "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-ch4_network_{planning_horizons}.pdf",
|
+ "maps/base_s_{clusters}_l{ll}_{opts}_{sector_opts}-ch4_network_{planning_horizons}.pdf",
|
||||||
threads: 2
|
threads: 2
|
||||||
resources:
|
resources:
|
||||||
mem_mb=10000,
|
mem_mb=10000,
|
||||||
log:
|
log:
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "logs/plot_gas_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log",
|
+ "logs/plot_gas_network/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log",
|
||||||
benchmark:
|
benchmark:
|
||||||
(
|
(
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "benchmarks/plot_gas_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
|
+ "benchmarks/plot_gas_network/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
|
||||||
)
|
)
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
@ -110,7 +108,7 @@ if config["foresight"] == "perfect":
|
|||||||
def output_map_year(w):
|
def output_map_year(w):
|
||||||
return {
|
return {
|
||||||
f"map_{year}": RESULTS
|
f"map_{year}": RESULTS
|
||||||
+ "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_"
|
+ "maps/base_s_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_"
|
||||||
+ f"{year}.pdf"
|
+ f"{year}.pdf"
|
||||||
for year in config_provider("scenario", "planning_horizons")(w)
|
for year in config_provider("scenario", "planning_horizons")(w)
|
||||||
}
|
}
|
||||||
@ -120,8 +118,8 @@ if config["foresight"] == "perfect":
|
|||||||
plotting=config_provider("plotting"),
|
plotting=config_provider("plotting"),
|
||||||
input:
|
input:
|
||||||
network=RESULTS
|
network=RESULTS
|
||||||
+ "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc",
|
+ "postnetworks/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc",
|
||||||
regions=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"),
|
regions=resources("regions_onshore_base_s_{clusters}.geojson"),
|
||||||
output:
|
output:
|
||||||
unpack(output_map_year),
|
unpack(output_map_year),
|
||||||
threads: 2
|
threads: 2
|
||||||
@ -144,7 +142,7 @@ rule make_summary:
|
|||||||
input:
|
input:
|
||||||
networks=expand(
|
networks=expand(
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
+ "postnetworks/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
||||||
**config["scenario"],
|
**config["scenario"],
|
||||||
allow_missing=True,
|
allow_missing=True,
|
||||||
),
|
),
|
||||||
@ -158,20 +156,20 @@ rule make_summary:
|
|||||||
)
|
)
|
||||||
),
|
),
|
||||||
ac_plot=expand(
|
ac_plot=expand(
|
||||||
resources("maps/power-network-s{simpl}-{clusters}.pdf"),
|
resources("maps/power-network-s-{clusters}.pdf"),
|
||||||
**config["scenario"],
|
**config["scenario"],
|
||||||
allow_missing=True,
|
allow_missing=True,
|
||||||
),
|
),
|
||||||
costs_plot=expand(
|
costs_plot=expand(
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf",
|
+ "maps/base_s_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf",
|
||||||
**config["scenario"],
|
**config["scenario"],
|
||||||
allow_missing=True,
|
allow_missing=True,
|
||||||
),
|
),
|
||||||
h2_plot=lambda w: expand(
|
h2_plot=lambda w: expand(
|
||||||
(
|
(
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-h2_network_{planning_horizons}.pdf"
|
+ "maps/base_s_{clusters}_l{ll}_{opts}_{sector_opts}-h2_network_{planning_horizons}.pdf"
|
||||||
if config_provider("sector", "H2_network")(w)
|
if config_provider("sector", "H2_network")(w)
|
||||||
else []
|
else []
|
||||||
),
|
),
|
||||||
@ -181,7 +179,7 @@ rule make_summary:
|
|||||||
ch4_plot=lambda w: expand(
|
ch4_plot=lambda w: expand(
|
||||||
(
|
(
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-ch4_network_{planning_horizons}.pdf"
|
+ "maps/base_s_{clusters}_l{ll}_{opts}_{sector_opts}-ch4_network_{planning_horizons}.pdf"
|
||||||
if config_provider("sector", "gas_network")(w)
|
if config_provider("sector", "gas_network")(w)
|
||||||
else []
|
else []
|
||||||
),
|
),
|
||||||
@ -260,19 +258,19 @@ STATISTICS_BARPLOTS = [
|
|||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
rule plot_elec_statistics:
|
rule plot_base_statistics:
|
||||||
params:
|
params:
|
||||||
plotting=config_provider("plotting"),
|
plotting=config_provider("plotting"),
|
||||||
barplots=STATISTICS_BARPLOTS,
|
barplots=STATISTICS_BARPLOTS,
|
||||||
input:
|
input:
|
||||||
network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
|
network=RESULTS + "networks/base_s_{clusters}_elec_l{ll}_{opts}.nc",
|
||||||
output:
|
output:
|
||||||
**{
|
**{
|
||||||
f"{plot}_bar": RESULTS
|
f"{plot}_bar": RESULTS
|
||||||
+ f"figures/statistics_{plot}_bar_elec_s{{simpl}}_{{clusters}}_ec_l{{ll}}_{{opts}}.pdf"
|
+ f"figures/statistics_{plot}_bar_base_s_{{clusters}}_elec_l{{ll}}_{{opts}}.pdf"
|
||||||
for plot in STATISTICS_BARPLOTS
|
for plot in STATISTICS_BARPLOTS
|
||||||
},
|
},
|
||||||
barplots_touch=RESULTS
|
barplots_touch=RESULTS
|
||||||
+ "figures/.statistics_plots_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}",
|
+ "figures/.statistics_plots_base_s_{clusters}_elec_l{ll}_{opts}",
|
||||||
script:
|
script:
|
||||||
"../scripts/plot_statistics.py"
|
"../scripts/plot_statistics.py"
|
||||||
|
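In the renamed plot_base_statistics rule, the output paths are f-strings in which {plot} is substituted immediately while the doubled braces survive as literal Snakemake wildcards. A hedged sketch with an illustrative subset of the barplot names:

    STATISTICS_BARPLOTS = ["capacity_factor", "installed_capacity"]  # illustrative subset
    outputs = {
        f"{plot}_bar": (
            "results/"
            + f"figures/statistics_{plot}_bar_base_s_{{clusters}}_elec_l{{ll}}_{{opts}}.pdf"
        )
        for plot in STATISTICS_BARPLOTS
    }
    print(outputs["capacity_factor_bar"])
    # results/figures/statistics_capacity_factor_bar_base_s_{clusters}_elec_l{ll}_{opts}.pdf
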
@ -13,19 +13,19 @@ rule solve_network:
|
|||||||
),
|
),
|
||||||
custom_extra_functionality=input_custom_extra_functionality,
|
custom_extra_functionality=input_custom_extra_functionality,
|
||||||
input:
|
input:
|
||||||
network=resources("networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"),
|
network=resources("networks/base_s_{clusters}_elec_l{ll}_{opts}.nc"),
|
||||||
output:
|
output:
|
||||||
network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
|
network=RESULTS + "networks/base_s_{clusters}_elec_l{ll}_{opts}.nc",
|
||||||
config=RESULTS + "configs/config.elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.yaml",
|
config=RESULTS + "configs/config.base_s_{clusters}_elec_l{ll}_{opts}.yaml",
|
||||||
log:
|
log:
|
||||||
solver=normpath(
|
solver=normpath(
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "logs/solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_solver.log"
|
+ "logs/solve_network/base_s_{clusters}_elec_l{ll}_{opts}_solver.log"
|
||||||
),
|
),
|
||||||
python=RESULTS
|
python=RESULTS
|
||||||
+ "logs/solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_python.log",
|
+ "logs/solve_network/base_s_{clusters}_elec_l{ll}_{opts}_python.log",
|
||||||
benchmark:
|
benchmark:
|
||||||
(RESULTS + "benchmarks/solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}")
|
(RESULTS + "benchmarks/solve_network/base_s_{clusters}_elec_l{ll}_{opts}")
|
||||||
threads: solver_threads
|
threads: solver_threads
|
||||||
resources:
|
resources:
|
||||||
mem_mb=memory,
|
mem_mb=memory,
|
||||||
@ -49,20 +49,20 @@ rule solve_operations_network:
|
|||||||
),
|
),
|
||||||
custom_extra_functionality=input_custom_extra_functionality,
|
custom_extra_functionality=input_custom_extra_functionality,
|
||||||
input:
|
input:
|
||||||
network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
|
network=RESULTS + "networks/base_s_{clusters}_elec_l{ll}_{opts}.nc",
|
||||||
output:
|
output:
|
||||||
network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op.nc",
|
network=RESULTS + "networks/base_s_{clusters}_elec_l{ll}_{opts}_op.nc",
|
||||||
log:
|
log:
|
||||||
solver=normpath(
|
solver=normpath(
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "logs/solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_solver.log"
|
+ "logs/solve_operations_network/base_s_{clusters}_elec_l{ll}_{opts}_op_solver.log"
|
||||||
),
|
),
|
||||||
python=RESULTS
|
python=RESULTS
|
||||||
+ "logs/solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_python.log",
|
+ "logs/solve_operations_network/base_s_{clusters}_elec_l{ll}_{opts}_op_python.log",
|
||||||
benchmark:
|
benchmark:
|
||||||
(
|
(
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "benchmarks/solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}"
|
+ "benchmarks/solve_operations_network/base_s_{clusters}_elec_l{ll}_{opts}"
|
||||||
)
|
)
|
||||||
threads: 4
|
threads: 4
|
||||||
resources:
|
resources:
|
||||||
|
@ -13,24 +13,24 @@ rule add_existing_baseyear:
|
|||||||
energy_totals_year=config_provider("energy", "energy_totals_year"),
|
energy_totals_year=config_provider("energy", "energy_totals_year"),
|
||||||
input:
|
input:
|
||||||
network=RESULTS
|
network=RESULTS
|
||||||
+ "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
+ "prenetworks/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
||||||
powerplants=resources("powerplants.csv"),
|
powerplants=resources("powerplants_s_{clusters}.csv"),
|
||||||
busmap_s=resources("busmap_elec_s{simpl}.csv"),
|
busmap_s=resources("busmap_base_s.csv"),
|
||||||
busmap=resources("busmap_elec_s{simpl}_{clusters}.csv"),
|
busmap=resources("busmap_base_s_{clusters}.csv"),
|
||||||
clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"),
|
clustered_pop_layout=resources("pop_layout_base_s_{clusters}.csv"),
|
||||||
costs=lambda w: resources(
|
costs=lambda w: resources(
|
||||||
"costs_{}.csv".format(
|
"costs_{}.csv".format(
|
||||||
config_provider("scenario", "planning_horizons", 0)(w)
|
config_provider("scenario", "planning_horizons", 0)(w)
|
||||||
)
|
)
|
||||||
),
|
),
|
||||||
cop_profiles=resources("cop_profiles_elec_s{simpl}_{clusters}.nc"),
|
cop_profiles=resources("cop_profiles_base_s_{clusters}.nc"),
|
||||||
existing_heating_distribution=resources(
|
existing_heating_distribution=resources(
|
||||||
"existing_heating_distribution_elec_s{simpl}_{clusters}_{planning_horizons}.csv"
|
"existing_heating_distribution_base_s_{clusters}_{planning_horizons}.csv"
|
||||||
),
|
),
|
||||||
heating_efficiencies=resources("heating_efficiencies.csv"),
|
heating_efficiencies=resources("heating_efficiencies.csv"),
|
||||||
output:
|
output:
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
+ "prenetworks-brownfield/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
||||||
wildcard_constraints:
|
wildcard_constraints:
|
||||||
# TODO: The first planning_horizon needs to be aligned across scenarios
|
# TODO: The first planning_horizon needs to be aligned across scenarios
|
||||||
# snakemake does not support passing functions to wildcard_constraints
|
# snakemake does not support passing functions to wildcard_constraints
|
||||||
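add_existing_baseyear now receives a powerplants table that is already assigned to clustered buses (powerplants_s_{clusters}.csv) instead of the raw plant list, so capacities can be aggregated per bus without applying busmaps at this point. A hedged sketch with toy data; the column names and values are illustrative, not taken from the actual dataset:

    import io
    import pandas as pd

    # Illustrative stand-in for resources("powerplants_s_{clusters}.csv"):
    # plants already carry their clustered bus.
    csv = io.StringIO(
        "Name,bus,Fueltype,Capacity\n"
        "Plant A,DE0 1,Hard Coal,750\n"
        "Plant B,DE0 1,CCGT,430\n"
        "Plant C,FR0 3,Nuclear,1300\n"
    )
    ppl = pd.read_csv(csv)
    print(ppl.groupby(["bus", "Fueltype"]).Capacity.sum())
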
@ -41,11 +41,11 @@ rule add_existing_baseyear:
|
|||||||
mem_mb=2000,
|
mem_mb=2000,
|
||||||
log:
|
log:
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "logs/add_existing_baseyear_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log",
|
+ "logs/add_existing_baseyear_base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log",
|
||||||
benchmark:
|
benchmark:
|
||||||
(
|
(
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "benchmarks/add_existing_baseyear/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
|
+ "benchmarks/add_existing_baseyear/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
|
||||||
)
|
)
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
@ -55,7 +55,7 @@ rule add_existing_baseyear:
|
|||||||
|
|
||||||
def input_profile_tech_brownfield(w):
|
def input_profile_tech_brownfield(w):
|
||||||
return {
|
return {
|
||||||
f"profile_{tech}": resources(f"profile_{tech}.nc")
|
f"profile_{tech}": resources("profile_{clusters}_" + tech + ".nc")
|
||||||
for tech in config_provider("electricity", "renewable_carriers")(w)
|
for tech in config_provider("electricity", "renewable_carriers")(w)
|
||||||
if tech != "hydro"
|
if tech != "hydro"
|
||||||
}
|
}
|
||||||
@ -74,26 +74,26 @@ rule add_brownfield:
|
|||||||
heat_pump_sources=config_provider("sector", "heat_pump_sources"),
|
heat_pump_sources=config_provider("sector", "heat_pump_sources"),
|
||||||
input:
|
input:
|
||||||
unpack(input_profile_tech_brownfield),
|
unpack(input_profile_tech_brownfield),
|
||||||
simplify_busmap=resources("busmap_elec_s{simpl}.csv"),
|
simplify_busmap=resources("busmap_base_s.csv"),
|
||||||
cluster_busmap=resources("busmap_elec_s{simpl}_{clusters}.csv"),
|
cluster_busmap=resources("busmap_base_s_{clusters}.csv"),
|
||||||
network=RESULTS
|
network=RESULTS
|
||||||
+ "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
+ "prenetworks/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
||||||
network_p=solved_previous_horizon, #solved network at previous time step
|
network_p=solved_previous_horizon, #solved network at previous time step
|
||||||
costs=resources("costs_{planning_horizons}.csv"),
|
costs=resources("costs_{planning_horizons}.csv"),
|
||||||
cop_profiles=resources("cop_profiles_elec_s{simpl}_{clusters}.nc"),
|
cop_profiles=resources("cop_profiles_base_s_{clusters}.nc"),
|
||||||
output:
|
output:
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
+ "prenetworks-brownfield/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
||||||
threads: 4
|
threads: 4
|
||||||
resources:
|
resources:
|
||||||
mem_mb=10000,
|
mem_mb=10000,
|
||||||
log:
|
log:
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "logs/add_brownfield_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log",
|
+ "logs/add_brownfield_base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log",
|
||||||
benchmark:
|
benchmark:
|
||||||
(
|
(
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "benchmarks/add_brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
|
+ "benchmarks/add_brownfield/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
|
||||||
)
|
)
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
@ -115,22 +115,22 @@ rule solve_sector_network_myopic:
|
|||||||
custom_extra_functionality=input_custom_extra_functionality,
|
custom_extra_functionality=input_custom_extra_functionality,
|
||||||
input:
|
input:
|
||||||
network=RESULTS
|
network=RESULTS
|
||||||
+ "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
+ "prenetworks-brownfield/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
||||||
costs=resources("costs_{planning_horizons}.csv"),
|
costs=resources("costs_{planning_horizons}.csv"),
|
||||||
output:
|
output:
|
||||||
network=RESULTS
|
network=RESULTS
|
||||||
+ "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
+ "postnetworks/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
||||||
config=RESULTS
|
config=RESULTS
|
||||||
+ "configs/config.elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.yaml",
|
+ "configs/config.base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.yaml",
|
||||||
shadow:
|
shadow:
|
||||||
"shallow"
|
"shallow"
|
||||||
log:
|
log:
|
||||||
solver=RESULTS
|
solver=RESULTS
|
||||||
+ "logs/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_solver.log",
|
+ "logs/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_solver.log",
|
||||||
memory=RESULTS
|
memory=RESULTS
|
||||||
+ "logs/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_memory.log",
|
+ "logs/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_memory.log",
|
||||||
python=RESULTS
|
python=RESULTS
|
||||||
+ "logs/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_python.log",
|
+ "logs/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_python.log",
|
||||||
threads: solver_threads
|
threads: solver_threads
|
||||||
resources:
|
resources:
|
||||||
mem_mb=config_provider("solving", "mem_mb"),
|
mem_mb=config_provider("solving", "mem_mb"),
|
||||||
@ -138,7 +138,7 @@ rule solve_sector_network_myopic:
|
|||||||
benchmark:
|
benchmark:
|
||||||
(
|
(
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "benchmarks/solve_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
|
+ "benchmarks/solve_sector_network/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
|
||||||
)
|
)
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
|
@ -14,21 +14,21 @@ rule solve_sector_network:
|
|||||||
custom_extra_functionality=input_custom_extra_functionality,
|
custom_extra_functionality=input_custom_extra_functionality,
|
||||||
input:
|
input:
|
||||||
network=RESULTS
|
network=RESULTS
|
||||||
+ "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
+ "prenetworks/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
||||||
output:
|
output:
|
||||||
network=RESULTS
|
network=RESULTS
|
||||||
+ "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
+ "postnetworks/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
||||||
config=RESULTS
|
config=RESULTS
|
||||||
+ "configs/config.elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.yaml",
|
+ "configs/config.base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.yaml",
|
||||||
shadow:
|
shadow:
|
||||||
"shallow"
|
"shallow"
|
||||||
log:
|
log:
|
||||||
solver=RESULTS
|
solver=RESULTS
|
||||||
+ "logs/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_solver.log",
|
+ "logs/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_solver.log",
|
||||||
memory=RESULTS
|
memory=RESULTS
|
||||||
+ "logs/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_memory.log",
|
+ "logs/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_memory.log",
|
||||||
python=RESULTS
|
python=RESULTS
|
||||||
+ "logs/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_python.log",
|
+ "logs/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_python.log",
|
||||||
threads: solver_threads
|
threads: solver_threads
|
||||||
resources:
|
resources:
|
||||||
mem_mb=config_provider("solving", "mem_mb"),
|
mem_mb=config_provider("solving", "mem_mb"),
|
||||||
@ -36,7 +36,7 @@ rule solve_sector_network:
|
|||||||
benchmark:
|
benchmark:
|
||||||
(
|
(
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "benchmarks/solve_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
|
+ "benchmarks/solve_sector_network/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
|
||||||
)
|
)
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
|
@ -11,25 +11,25 @@ rule add_existing_baseyear:
|
|||||||
energy_totals_year=config_provider("energy", "energy_totals_year"),
|
energy_totals_year=config_provider("energy", "energy_totals_year"),
|
||||||
input:
|
input:
|
||||||
network=RESULTS
|
network=RESULTS
|
||||||
+ "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
+ "prenetworks/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
||||||
powerplants=resources("powerplants.csv"),
|
powerplants=resources("powerplants_s_{clusters}.csv"),
|
||||||
busmap_s=resources("busmap_elec_s{simpl}.csv"),
|
busmap_s=resources("busmap_base_s.csv"),
|
||||||
busmap=resources("busmap_elec_s{simpl}_{clusters}.csv"),
|
busmap=resources("busmap_base_s_{clusters}.csv"),
|
||||||
clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"),
|
clustered_pop_layout=resources("pop_layout_base_s_{clusters}.csv"),
|
||||||
costs=lambda w: resources(
|
costs=lambda w: resources(
|
||||||
"costs_{}.csv".format(
|
"costs_{}.csv".format(
|
||||||
config_provider("scenario", "planning_horizons", 0)(w)
|
config_provider("scenario", "planning_horizons", 0)(w)
|
||||||
)
|
)
|
||||||
),
|
),
|
||||||
cop_profiles=resources("cop_profiles_elec_s{simpl}_{clusters}.nc"),
|
cop_profiles=resources("cop_profiles_base_s_{clusters}.nc"),
|
||||||
existing_heating_distribution=resources(
|
existing_heating_distribution=resources(
|
||||||
"existing_heating_distribution_elec_s{simpl}_{clusters}_{planning_horizons}.csv"
|
"existing_heating_distribution_base_s_{clusters}_{planning_horizons}.csv"
|
||||||
),
|
),
|
||||||
existing_heating="data/existing_infrastructure/existing_heating_raw.csv",
|
existing_heating="data/existing_infrastructure/existing_heating_raw.csv",
|
||||||
heating_efficiencies=resources("heating_efficiencies.csv"),
|
heating_efficiencies=resources("heating_efficiencies.csv"),
|
||||||
output:
|
output:
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
+ "prenetworks-brownfield/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
|
||||||
wildcard_constraints:
|
wildcard_constraints:
|
||||||
planning_horizons=config["scenario"]["planning_horizons"][0], #only applies to baseyear
|
planning_horizons=config["scenario"]["planning_horizons"][0], #only applies to baseyear
|
||||||
threads: 1
|
threads: 1
|
||||||
@ -38,11 +38,11 @@ rule add_existing_baseyear:
|
|||||||
runtime=config_provider("solving", "runtime", default="24h"),
|
runtime=config_provider("solving", "runtime", default="24h"),
|
||||||
log:
|
log:
|
||||||
logs(
|
logs(
|
||||||
"add_existing_baseyear_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log"
|
"add_existing_baseyear_base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log"
|
||||||
),
|
),
|
||||||
benchmark:
|
benchmark:
|
||||||
benchmarks(
|
benchmarks(
|
||||||
"add_existing_baseyear/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
|
"add_existing_baseyear/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
|
||||||
)
|
)
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
@ -53,7 +53,7 @@ rule add_existing_baseyear:
|
|||||||
def input_network_year(w):
|
def input_network_year(w):
|
||||||
return {
|
return {
|
||||||
f"network_{year}": RESULTS
|
f"network_{year}": RESULTS
|
||||||
+ "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}"
|
+ "prenetworks/base_s_{clusters}_l{ll}_{opts}_{sector_opts}"
|
||||||
+ f"_{year}.nc"
|
+ f"_{year}.nc"
|
||||||
for year in config_provider("scenario", "planning_horizons")(w)[1:]
|
for year in config_provider("scenario", "planning_horizons")(w)[1:]
|
||||||
}
|
}
|
||||||
@ -68,25 +68,21 @@ rule prepare_perfect_foresight:
|
|||||||
brownfield_network=lambda w: (
|
brownfield_network=lambda w: (
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "prenetworks-brownfield/"
|
+ "prenetworks-brownfield/"
|
||||||
+ "elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_"
|
+ "base_s_{clusters}_l{ll}_{opts}_{sector_opts}_"
|
||||||
+ "{}.nc".format(
|
+ "{}.nc".format(
|
||||||
str(config_provider("scenario", "planning_horizons", 0)(w))
|
str(config_provider("scenario", "planning_horizons", 0)(w))
|
||||||
)
|
)
|
||||||
),
|
),
|
||||||
output:
|
output:
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc",
|
+ "prenetworks-brownfield/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc",
|
||||||
threads: 2
|
threads: 2
|
||||||
resources:
|
resources:
|
||||||
mem_mb=10000,
|
mem_mb=10000,
|
||||||
log:
|
log:
|
||||||
logs(
|
logs("prepare_perfect_foresight_{clusters}_l{ll}_{opts}_{sector_opts}.log"),
|
||||||
"prepare_perfect_foresight{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}.log"
|
|
||||||
),
|
|
||||||
benchmark:
|
benchmark:
|
||||||
benchmarks(
|
benchmarks("prepare_perfect_foresight_{clusters}_l{ll}_{opts}_{sector_opts}")
|
||||||
"prepare_perfect_foresight{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}"
|
|
||||||
)
|
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -105,13 +101,13 @@ rule solve_sector_network_perfect:
|
|||||||
custom_extra_functionality=input_custom_extra_functionality,
|
custom_extra_functionality=input_custom_extra_functionality,
|
||||||
input:
|
input:
|
||||||
network=RESULTS
|
network=RESULTS
|
||||||
+ "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc",
|
+ "prenetworks-brownfield/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc",
|
||||||
costs=resources("costs_2030.csv"),
|
costs=resources("costs_2030.csv"),
|
||||||
output:
|
output:
|
||||||
network=RESULTS
|
network=RESULTS
|
||||||
+ "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc",
|
+ "postnetworks/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc",
|
||||||
config=RESULTS
|
config=RESULTS
|
||||||
+ "configs/config.elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.yaml",
|
+ "configs/config.base_s_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.yaml",
|
||||||
threads: solver_threads
|
threads: solver_threads
|
||||||
resources:
|
resources:
|
||||||
mem_mb=config_provider("solving", "mem"),
|
mem_mb=config_provider("solving", "mem"),
|
||||||
@ -119,15 +115,15 @@ rule solve_sector_network_perfect:
|
|||||||
"shallow"
|
"shallow"
|
||||||
log:
|
log:
|
||||||
solver=RESULTS
|
solver=RESULTS
|
||||||
+ "logs/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years_solver.log",
|
+ "logs/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years_solver.log",
|
||||||
python=RESULTS
|
python=RESULTS
|
||||||
+ "logs/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years_python.log",
|
+ "logs/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years_python.log",
|
||||||
memory=RESULTS
|
memory=RESULTS
|
||||||
+ "logs/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years_memory.log",
|
+ "logs/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years_memory.log",
|
||||||
benchmark:
|
benchmark:
|
||||||
(
|
(
|
||||||
RESULTS
|
RESULTS
|
||||||
+ "benchmarks/solve_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years}"
|
+ "benchmarks/solve_sector_network/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years}"
|
||||||
)
|
)
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
@ -137,9 +133,8 @@ rule solve_sector_network_perfect:
|
|||||||
|
|
||||||
def input_networks_make_summary_perfect(w):
|
def input_networks_make_summary_perfect(w):
|
||||||
return {
|
return {
|
||||||
f"networks_{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}": RESULTS
|
f"networks_s_{clusters}_l{ll}_{opts}_{sector_opts}": RESULTS
|
||||||
+ f"postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc"
|
+ f"postnetworks/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc"
|
||||||
for simpl in config_provider("scenario", "simpl")(w)
|
|
||||||
for clusters in config_provider("scenario", "clusters")(w)
|
for clusters in config_provider("scenario", "clusters")(w)
|
||||||
for opts in config_provider("scenario", "opts")(w)
|
for opts in config_provider("scenario", "opts")(w)
|
||||||
for sector_opts in config_provider("scenario", "sector_opts")(w)
|
for sector_opts in config_provider("scenario", "sector_opts")(w)
|
||||||
|
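Without the simpl dimension, the perfect-foresight summary inputs enumerate only clusters, ll, opts and sector_opts. A small hedged sketch of the resulting dictionary comprehension, with plain lists standing in for the config_provider calls:

    RESULTS = "results/test/"
    scenario = {"clusters": [37], "ll": ["vopt"], "opts": [""], "sector_opts": ["730H"]}

    networks = {
        f"networks_s_{clusters}_l{ll}_{opts}_{sector_opts}": RESULTS
        + f"postnetworks/base_s_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc"
        for clusters in scenario["clusters"]
        for ll in scenario["ll"]
        for opts in scenario["opts"]
        for sector_opts in scenario["sector_opts"]
    }
    print(networks)
    # {'networks_s_37_lvopt__730H':
    #  'results/test/postnetworks/base_s_37_lvopt__730H_brownfield_all_years.nc'}
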
@ -69,16 +69,16 @@ rule build_electricity_prices:
|
|||||||
|
|
||||||
rule plot_validation_electricity_production:
|
rule plot_validation_electricity_production:
|
||||||
input:
|
input:
|
||||||
network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
|
network=RESULTS + "networks/base_s_{clusters}_elec_l{ll}_{opts}.nc",
|
||||||
electricity_production=resources("historical_electricity_production.csv"),
|
electricity_production=resources("historical_electricity_production.csv"),
|
||||||
output:
|
output:
|
||||||
**{
|
**{
|
||||||
plot: RESULTS
|
plot: RESULTS
|
||||||
+ f"figures/validation_{plot}_elec_s{{simpl}}_{{clusters}}_ec_l{{ll}}_{{opts}}.pdf"
|
+ f"figures/validation_{plot}_base_s_{{clusters}}_elec_l{{ll}}_{{opts}}.pdf"
|
||||||
for plot in PRODUCTION_PLOTS
|
for plot in PRODUCTION_PLOTS
|
||||||
},
|
},
|
||||||
plots_touch=RESULTS
|
plots_touch=RESULTS
|
||||||
+ "figures/.validation_production_plots_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}",
|
+ "figures/.validation_production_plots_base_s_{clusters}_elec_l{ll}_{opts}",
|
||||||
script:
|
script:
|
||||||
"../scripts/plot_validation_electricity_production.py"
|
"../scripts/plot_validation_electricity_production.py"
|
||||||
|
|
||||||
@ -87,31 +87,31 @@ rule plot_validation_cross_border_flows:
|
|||||||
params:
|
params:
|
||||||
countries=config_provider("countries"),
|
countries=config_provider("countries"),
|
||||||
input:
|
input:
|
||||||
network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
|
network=RESULTS + "networks/base_s_{clusters}_elec_l{ll}_{opts}.nc",
|
||||||
cross_border_flows=resources("historical_cross_border_flows.csv"),
|
cross_border_flows=resources("historical_cross_border_flows.csv"),
|
||||||
output:
|
output:
|
||||||
**{
|
**{
|
||||||
plot: RESULTS
|
plot: RESULTS
|
||||||
+ f"figures/validation_{plot}_elec_s{{simpl}}_{{clusters}}_ec_l{{ll}}_{{opts}}.pdf"
|
+ f"figures/validation_{plot}_base_s_{{clusters}}_elec_l{{ll}}_{{opts}}.pdf"
|
||||||
for plot in CROSS_BORDER_PLOTS
|
for plot in CROSS_BORDER_PLOTS
|
||||||
},
|
},
|
||||||
plots_touch=RESULTS
|
plots_touch=RESULTS
|
||||||
+ "figures/.validation_cross_border_plots_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}",
|
+ "figures/.validation_cross_border_plots_base_s_{clusters}_elec_l{ll}_{opts}",
|
||||||
script:
|
script:
|
||||||
"../scripts/plot_validation_cross_border_flows.py"
|
"../scripts/plot_validation_cross_border_flows.py"
|
||||||
|
|
||||||
|
|
||||||
rule plot_validation_electricity_prices:
|
rule plot_validation_electricity_prices:
|
||||||
input:
|
input:
|
||||||
network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
|
network=RESULTS + "networks/base_s_{clusters}_elec_l{ll}_{opts}.nc",
|
||||||
electricity_prices=resources("historical_electricity_prices.csv"),
|
electricity_prices=resources("historical_electricity_prices.csv"),
|
||||||
output:
|
output:
|
||||||
**{
|
**{
|
||||||
plot: RESULTS
|
plot: RESULTS
|
||||||
+ f"figures/validation_{plot}_elec_s{{simpl}}_{{clusters}}_ec_l{{ll}}_{{opts}}.pdf"
|
+ f"figures/validation_{plot}_base_s_{{clusters}}_elec_l{{ll}}_{{opts}}.pdf"
|
||||||
for plot in PRICES_PLOTS
|
for plot in PRICES_PLOTS
|
||||||
},
|
},
|
||||||
plots_touch=RESULTS
|
plots_touch=RESULTS
|
||||||
+ "figures/.validation_prices_plots_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}",
|
+ "figures/.validation_prices_plots_base_s_{clusters}_elec_l{ll}_{opts}",
|
||||||
script:
|
script:
|
||||||
"../scripts/plot_validation_electricity_prices.py"
|
"../scripts/plot_validation_electricity_prices.py"
|
||||||
|
@ -170,14 +170,6 @@ def adjust_renewable_profiles(n, input_profiles, params, year):
|
|||||||
using the latest year below or equal to the selected year.
|
using the latest year below or equal to the selected year.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# spatial clustering
|
|
||||||
cluster_busmap = pd.read_csv(snakemake.input.cluster_busmap, index_col=0).squeeze()
|
|
||||||
simplify_busmap = pd.read_csv(
|
|
||||||
snakemake.input.simplify_busmap, index_col=0
|
|
||||||
).squeeze()
|
|
||||||
clustermaps = simplify_busmap.map(cluster_busmap)
|
|
||||||
clustermaps.index = clustermaps.index.astype(str)
|
|
||||||
|
|
||||||
# temporal clustering
|
# temporal clustering
|
||||||
dr = get_snapshots(params["snapshots"], params["drop_leap_day"])
|
dr = get_snapshots(params["snapshots"], params["drop_leap_day"])
|
||||||
snapshotmaps = (
|
snapshotmaps = (
|
||||||
@ -202,11 +194,6 @@ def adjust_renewable_profiles(n, input_profiles, params, year):
|
|||||||
.transpose("time", "bus")
|
.transpose("time", "bus")
|
||||||
.to_pandas()
|
.to_pandas()
|
||||||
)
|
)
|
||||||
|
|
||||||
# spatial clustering
|
|
||||||
weight = ds["weight"].sel(year=closest_year).to_pandas()
|
|
||||||
weight = weight.groupby(clustermaps).transform(normed_or_uniform)
|
|
||||||
p_max_pu = (p_max_pu * weight).T.groupby(clustermaps).sum().T
|
|
||||||
p_max_pu.columns = p_max_pu.columns + f" {carrier}"
|
p_max_pu.columns = p_max_pu.columns + f" {carrier}"
|
||||||
|
|
||||||
# temporal_clustering
|
# temporal_clustering
|
||||||
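Because the renewable profiles are now built on the already-clustered network, the spatial re-mapping via busmaps drops out of adjust_renewable_profiles and only the temporal aggregation onto the (possibly segmented) snapshots remains. A hedged sketch of that remaining step with toy data; the 3-hourly mapping is illustrative only:

    import numpy as np
    import pandas as pd

    # Toy per-bus availability at hourly resolution, aggregated to 3-hourly
    # snapshots by averaging -- a stand-in for the groupby on the snapshot map.
    rng = np.random.default_rng(0)
    idx = pd.date_range("2013-01-01", periods=6, freq="h")
    p_max_pu = pd.DataFrame(rng.random((6, 2)), index=idx, columns=["DE0 1", "FR0 3"])

    snapshotmap = pd.Series(idx.floor("3h"), index=idx)  # hourly -> snapshot
    p_max_pu_agg = p_max_pu.groupby(snapshotmap).mean()
    print(p_max_pu_agg)
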
@ -222,7 +209,6 @@ if __name__ == "__main__":
|
|||||||
|
|
||||||
snakemake = mock_snakemake(
|
snakemake = mock_snakemake(
|
||||||
"add_brownfield",
|
"add_brownfield",
|
||||||
simpl="",
|
|
||||||
clusters="37",
|
clusters="37",
|
||||||
opts="",
|
opts="",
|
||||||
ll="v1.0",
|
ll="v1.0",
|
||||||
|
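For interactive debugging, the mock_snakemake call simply loses the simpl keyword along with the wildcard. A minimal illustration of the remaining wildcard set; the stand-in function and the sector_opts/planning_horizons values are assumptions, not the helper from scripts/_helpers.py:

    # Stand-in for mock_snakemake, for illustration only.
    def mock_snakemake(rulename, **wildcards):
        return {"rule": rulename, "wildcards": wildcards}

    snakemake = mock_snakemake(
        "add_brownfield",
        clusters="37",
        opts="",
        ll="v1.0",
        sector_opts="730H",          # illustrative value
        planning_horizons="2030",    # illustrative value
    )
    print(snakemake["wildcards"])
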
@ -3,7 +3,8 @@
|
|||||||
#
|
#
|
||||||
# SPDX-License-Identifier: MIT
|
# SPDX-License-Identifier: MIT
|
||||||
"""
|
"""
|
||||||
Adds electrical generators and existing hydro storage units to a base network.
|
Adds existing electrical generators, hydro-electric plants as well as
|
||||||
|
greenfield and battery and hydrogen storage to the clustered network.
|
||||||
|
|
||||||
Relevant Settings
|
Relevant Settings
|
||||||
-----------------
|
-----------------
|
||||||
@ -11,19 +12,11 @@ Relevant Settings
|
|||||||
.. code:: yaml
|
.. code:: yaml
|
||||||
|
|
||||||
costs:
|
costs:
|
||||||
year:
|
year: version: dicountrate: emission_prices:
|
||||||
version:
|
|
||||||
dicountrate:
|
|
||||||
emission_prices:
|
|
||||||
|
|
||||||
electricity:
|
electricity:
|
||||||
max_hours:
|
max_hours: marginal_cost: capital_cost: conventional_carriers: co2limit:
|
||||||
marginal_cost:
|
extendable_carriers: estimate_renewable_capacities:
|
||||||
capital_cost:
|
|
||||||
conventional_carriers:
|
|
||||||
co2limit:
|
|
||||||
extendable_carriers:
|
|
||||||
estimate_renewable_capacities:
|
|
||||||
|
|
||||||
|
|
||||||
load:
|
load:
|
||||||
@ -31,13 +24,14 @@ Relevant Settings
|
|||||||
|
|
||||||
renewable:
|
renewable:
|
||||||
hydro:
|
hydro:
|
||||||
carriers:
|
carriers: hydro_max_hours: hydro_capital_cost:
|
||||||
hydro_max_hours:
|
|
||||||
hydro_capital_cost:
|
|
||||||
|
|
||||||
lines:
|
lines:
|
||||||
length_factor:
|
length_factor:
|
||||||
|
|
||||||
|
links:
|
||||||
|
length_factor:
|
||||||
|
|
||||||
.. seealso::
|
.. seealso::
|
||||||
Documentation of the configuration file ``config/config.yaml`` at :ref:`costs_cf`,
|
Documentation of the configuration file ``config/config.yaml`` at :ref:`costs_cf`,
|
||||||
:ref:`electricity_cf`, :ref:`load_cf`, :ref:`renewable_cf`, :ref:`lines_cf`
|
:ref:`electricity_cf`, :ref:`load_cf`, :ref:`renewable_cf`, :ref:`lines_cf`
|
||||||
@@ -45,23 +39,31 @@ Relevant Settings
 Inputs
 ------

-- ``resources/costs.csv``: The database of cost assumptions for all included technologies for specific years from various sources; e.g. discount rate, lifetime, investment (CAPEX), fixed operation and maintenance (FOM), variable operation and maintenance (VOM), fuel costs, efficiency, carbon-dioxide intensity.
-- ``data/hydro_capacities.csv``: Hydropower plant store/discharge power capacities, energy storage capacity, and average hourly inflow by country.
+- ``resources/costs.csv``: The database of cost assumptions for all included
+  technologies for specific years from various sources; e.g. discount rate,
+  lifetime, investment (CAPEX), fixed operation and maintenance (FOM), variable
+  operation and maintenance (VOM), fuel costs, efficiency, carbon-dioxide
+  intensity.
+- ``data/hydro_capacities.csv``: Hydropower plant store/discharge power
+  capacities, energy storage capacity, and average hourly inflow by country.

 .. image:: img/hydrocapacities.png
     :scale: 34 %

-- ``resources/electricity_demand.csv`` Hourly per-country electricity demand profiles.
-- ``resources/regions_onshore.geojson``: confer :ref:`busregions`
+- ``resources/electricity_demand_base_s.nc`` Hourly nodal electricity demand
+  profiles.
+- ``resources/regions_onshore_base_s_{clusters}.geojson``: confer
+  :ref:`busregions`
 - ``resources/nuts3_shapes.geojson``: confer :ref:`shapes`
-- ``resources/powerplants.csv``: confer :ref:`powerplants`
-- ``resources/profile_{}.nc``: all technologies in ``config["renewables"].keys()``, confer :ref:`renewableprofiles`.
-- ``networks/base.nc``: confer :ref:`base`
+- ``resources/powerplants_s_{clusters}.csv``: confer :ref:`powerplants`
+- ``resources/profile_{clusters}_{}.nc``: all technologies in
+  ``config["renewables"].keys()``, confer :ref:`renewableprofiles`.
+- ``networks/base_s_{clusters}.nc``

 Outputs
 -------

-- ``networks/elec.nc``:
+- ``networks/base_s_{clusters}_elec.nc``:

 .. image:: img/elec.png
     :scale: 33 %
@@ -69,29 +71,53 @@ Outputs

 Description
 -----------

-The rule :mod:`add_electricity` ties all the different data inputs from the preceding rules together into a detailed PyPSA network that is stored in ``networks/elec.nc``. It includes:
+The rule :mod:`add_electricity` ties all the different data inputs from the
+preceding rules together into a detailed PyPSA network that is stored in
+``networks/base_s_{clusters}_elec.nc``. It includes:

-- today's transmission topology and transfer capacities (optionally including lines which are under construction according to the config settings ``lines: under_construction`` and ``links: under_construction``),
-- today's thermal and hydro power generation capacities (for the technologies listed in the config setting ``electricity: conventional_carriers``), and
-- today's load time-series (upsampled in a top-down approach according to population and gross domestic product)
+- today's transmission topology and transfer capacities (optionally including
+  lines which are under construction according to the config settings ``lines:
+  under_construction`` and ``links: under_construction``),
+- today's thermal and hydro power generation capacities (for the technologies
+  listed in the config setting ``electricity: conventional_carriers``), and
+- today's load time-series (upsampled in a top-down approach according to
+  population and gross domestic product)

 It further adds extendable ``generators`` with **zero** capacity for

-- photovoltaic, onshore and AC- as well as DC-connected offshore wind installations with today's locational, hourly wind and solar capacity factors (but **no** current capacities),
-- additional open- and combined-cycle gas turbines (if ``OCGT`` and/or ``CCGT`` is listed in the config setting ``electricity: extendable_carriers``)
+- photovoltaic, onshore and AC- as well as DC-connected offshore wind
+  installations with today's locational, hourly wind and solar capacity factors
+  (but **no** current capacities),
+- additional open- and combined-cycle gas turbines (if ``OCGT`` and/or ``CCGT``
+  is listed in the config setting ``electricity: extendable_carriers``)
+
+Furthermore, it attaches additional extendable components to the clustered
+network with **zero** initial capacity:
+
+- ``StorageUnits`` of carrier 'H2' and/or 'battery'. If this option is chosen,
+  every bus is given an extendable ``StorageUnit`` of the corresponding carrier.
+  The energy and power capacities are linked through a parameter that specifies
+  the energy capacity as maximum hours at full dispatch power and is configured
+  in ``electricity: max_hours:``. This linkage leads to one investment variable
+  per storage unit. The default ``max_hours`` lead to long-term hydrogen and
+  short-term battery storage units.
+
+- ``Stores`` of carrier 'H2' and/or 'battery' in combination with ``Links``. If
+  this option is chosen, the script adds extra buses with corresponding carrier
+  where energy ``Stores`` are attached and which are connected to the
+  corresponding power buses via two links, one each for charging and
+  discharging. This leads to three investment variables for the energy capacity,
+  charging and discharging capacity of the storage unit.
 """

 import logging
-from itertools import product
 from pathlib import Path
 from typing import Dict, List

-import geopandas as gpd
 import numpy as np
 import pandas as pd
 import powerplantmatching as pm
 import pypsa
-import scipy.sparse as sparse
 import xarray as xr
 from _helpers import (
     configure_logging,
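As a hedged illustration of the ``max_hours`` linkage described in the docstring above (the bus name, cost value and six-hour figure are invented, not taken from the PyPSA-Eur configuration), the energy capacity of an extendable StorageUnit is not a separate decision variable but follows as max_hours times the optimised power rating:

import pypsa

# Minimal sketch: one bus, one extendable battery StorageUnit.
# With max_hours=6 the optimised energy capacity is 6 * p_nom_opt,
# i.e. a single investment variable per storage unit.
n = pypsa.Network()
n.set_snapshots(range(4))
n.add("Bus", "bus0")
n.add("Carrier", "battery")
n.add(
    "StorageUnit",
    "bus0 battery",
    bus="bus0",
    carrier="battery",
    p_nom_extendable=True,
    max_hours=6,              # energy capacity = max_hours * p_nom
    capital_cost=100,         # illustrative per-MW cost, not from costs.csv
    cyclic_state_of_charge=True,
)
# after solving, n.storage_units.p_nom_opt * 6 gives the energy capacity in MWh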
@@ -100,7 +126,7 @@ from _helpers import (
     update_p_nom_max,
 )
 from powerplantmatching.export import map_country_bus
-from shapely.prepared import prep
+from pypsa.clustering.spatial import DEFAULT_ONE_PORT_STRATEGIES, normed_or_uniform

 idx = pd.IndexSlice

@ -263,7 +289,20 @@ def load_costs(tech_costs, config, max_hours, Nyears=1.0):
|
|||||||
return costs
|
return costs
|
||||||
|
|
||||||
|
|
||||||
def load_powerplants(ppl_fn):
|
def load_and_aggregate_powerplants(
|
||||||
|
ppl_fn: str,
|
||||||
|
costs: pd.DataFrame,
|
||||||
|
consider_efficiency_classes: bool = False,
|
||||||
|
aggregation_strategies: dict = None,
|
||||||
|
exclude_carriers: list = None,
|
||||||
|
) -> pd.DataFrame:
|
||||||
|
|
||||||
|
if not aggregation_strategies:
|
||||||
|
aggregation_strategies = {}
|
||||||
|
|
||||||
|
if not exclude_carriers:
|
||||||
|
exclude_carriers = []
|
||||||
|
|
||||||
carrier_dict = {
|
carrier_dict = {
|
||||||
"ocgt": "OCGT",
|
"ocgt": "OCGT",
|
||||||
"ccgt": "CCGT",
|
"ccgt": "CCGT",
|
||||||
@ -271,94 +310,120 @@ def load_powerplants(ppl_fn):
|
|||||||
"ccgt, thermal": "CCGT",
|
"ccgt, thermal": "CCGT",
|
||||||
"hard coal": "coal",
|
"hard coal": "coal",
|
||||||
}
|
}
|
||||||
return (
|
tech_dict = {
|
||||||
|
"Run-Of-River": "ror",
|
||||||
|
"Reservoir": "hydro",
|
||||||
|
"Pumped Storage": "PHS",
|
||||||
|
}
|
||||||
|
ppl = (
|
||||||
pd.read_csv(ppl_fn, index_col=0, dtype={"bus": "str"})
|
pd.read_csv(ppl_fn, index_col=0, dtype={"bus": "str"})
|
||||||
.powerplant.to_pypsa_names()
|
.powerplant.to_pypsa_names()
|
||||||
.rename(columns=str.lower)
|
.rename(columns=str.lower)
|
||||||
.replace({"carrier": carrier_dict})
|
.replace({"carrier": carrier_dict, "technology": tech_dict})
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Replace carriers "natural gas" and "hydro" with the respective technology;
|
||||||
|
# OCGT or CCGT and hydro, PHS, or ror)
|
||||||
|
ppl["carrier"] = ppl.carrier.where(
|
||||||
|
~ppl.carrier.isin(["hydro", "natural gas"]), ppl.technology
|
||||||
|
)
|
||||||
|
|
||||||
def shapes_to_shapes(orig, dest):
|
cost_columns = [
|
||||||
"""
|
"VOM",
|
||||||
Adopted from vresutils.transfer.Shapes2Shapes()
|
"FOM",
|
||||||
"""
|
"efficiency",
|
||||||
orig_prepped = list(map(prep, orig))
|
"capital_cost",
|
||||||
transfer = sparse.lil_matrix((len(dest), len(orig)), dtype=float)
|
"marginal_cost",
|
||||||
|
"fuel",
|
||||||
|
"lifetime",
|
||||||
|
]
|
||||||
|
ppl = ppl.join(costs[cost_columns], on="carrier", rsuffix="_r")
|
||||||
|
|
||||||
for i, j in product(range(len(dest)), range(len(orig))):
|
ppl["efficiency"] = ppl.efficiency.combine_first(ppl.efficiency_r)
|
||||||
if orig_prepped[j].intersects(dest.iloc[i]):
|
ppl["lifetime"] = (ppl.dateout - ppl.datein).fillna(np.inf)
|
||||||
area = orig.iloc[j].intersection(dest.iloc[i]).area
|
ppl["build_year"] = ppl.datein.fillna(0).astype(int)
|
||||||
transfer[i, j] = area / dest.iloc[i].area
|
ppl["marginal_cost"] = (
|
||||||
|
ppl.carrier.map(costs.VOM) + ppl.carrier.map(costs.fuel) / ppl.efficiency
|
||||||
|
)
|
||||||
|
|
||||||
return transfer
|
strategies = {
|
||||||
|
**DEFAULT_ONE_PORT_STRATEGIES,
|
||||||
|
**{"country": "first"},
|
||||||
|
**aggregation_strategies.get("generators", {}),
|
||||||
|
}
|
||||||
|
strategies = {k: v for k, v in strategies.items() if k in ppl.columns}
|
||||||
|
|
||||||
|
to_aggregate = ~ppl.carrier.isin(exclude_carriers)
|
||||||
|
df = ppl[to_aggregate].copy()
|
||||||
|
|
||||||
|
if consider_efficiency_classes:
|
||||||
|
for c in df.carrier.unique():
|
||||||
|
df_c = df.query("carrier == @c")
|
||||||
|
low = df_c.efficiency.quantile(0.10)
|
||||||
|
high = df_c.efficiency.quantile(0.90)
|
||||||
|
if low < high:
|
||||||
|
labels = ["low", "medium", "high"]
|
||||||
|
suffix = pd.cut(
|
||||||
|
df_c.efficiency, bins=[0, low, high, 1], labels=labels
|
||||||
|
).astype(str)
|
||||||
|
df.update({"carrier": df_c.carrier + " " + suffix + " efficiency"})
|
||||||
|
|
||||||
|
grouper = ["bus", "carrier"]
|
||||||
|
weights = df.groupby(grouper).p_nom.transform(normed_or_uniform)
|
||||||
|
|
||||||
|
for k, v in strategies.items():
|
||||||
|
if v == "capacity_weighted_average":
|
||||||
|
df[k] = df[k] * weights
|
||||||
|
strategies[k] = pd.Series.sum
|
||||||
|
|
||||||
|
aggregated = df.groupby(grouper, as_index=False).agg(strategies)
|
||||||
|
aggregated.index = aggregated.bus + " " + aggregated.carrier
|
||||||
|
aggregated.build_year = aggregated.build_year.astype(int)
|
||||||
|
|
||||||
|
disaggregated = ppl[~to_aggregate][aggregated.columns].copy()
|
||||||
|
disaggregated.index = (
|
||||||
|
disaggregated.bus
|
||||||
|
+ " "
|
||||||
|
+ disaggregated.carrier
|
||||||
|
+ " "
|
||||||
|
+ disaggregated.index.astype(str)
|
||||||
|
)
|
||||||
|
|
||||||
|
return pd.concat([aggregated, disaggregated])
|
||||||
|
|
||||||
|
|
||||||
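A minimal sketch of the capacity-weighted aggregation used in ``load_and_aggregate_powerplants`` above, with an invented toy powerplant table (plant buses and numbers are made up): within each (bus, carrier) group the weights are the normalised p_nom from ``normed_or_uniform``, so averaged quantities such as efficiency are weighted by installed capacity.

import pandas as pd
from pypsa.clustering.spatial import normed_or_uniform

# toy powerplant table (values invented for illustration)
ppl = pd.DataFrame(
    {
        "bus": ["DE0 1", "DE0 1", "DE0 2"],
        "carrier": ["CCGT", "CCGT", "CCGT"],
        "p_nom": [400.0, 100.0, 250.0],
        "efficiency": [0.58, 0.50, 0.55],
    }
)

grouper = ["bus", "carrier"]
# p_nom shares within each (bus, carrier) group; falls back to uniform weights
weights = ppl.groupby(grouper).p_nom.transform(normed_or_uniform)

weighted = ppl.assign(efficiency=ppl.efficiency * weights)
agg = weighted.groupby(grouper, as_index=False).agg(
    {"p_nom": "sum", "efficiency": "sum"}
)
print(agg)  # DE0 1 CCGT: p_nom 500, efficiency 0.564 (capacity-weighted)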
def attach_load(
|
def attach_load(
|
||||||
n, regions, load, nuts3_shapes, gdp_pop_non_nuts3, countries, scaling=1.0
|
n: pypsa.Network,
|
||||||
):
|
load_fn: str,
|
||||||
substation_lv_i = n.buses.index[n.buses["substation_lv"]]
|
busmap_fn: str,
|
||||||
gdf_regions = gpd.read_file(regions).set_index("name").reindex(substation_lv_i)
|
scaling: float = 1.0,
|
||||||
opsd_load = pd.read_csv(load, index_col=0, parse_dates=True).filter(items=countries)
|
) -> None:
|
||||||
|
|
||||||
|
load = (
|
||||||
|
xr.open_dataarray(load_fn).to_dataframe().squeeze(axis=1).unstack(level="time")
|
||||||
|
)
|
||||||
|
|
||||||
|
# apply clustering busmap
|
||||||
|
busmap = pd.read_csv(busmap_fn, dtype=str).set_index("Bus").squeeze()
|
||||||
|
load = load.groupby(busmap).sum().T
|
||||||
|
|
||||||
logger.info(f"Load data scaled by factor {scaling}.")
|
logger.info(f"Load data scaled by factor {scaling}.")
|
||||||
opsd_load *= scaling
|
load *= scaling
|
||||||
|
|
||||||
nuts3 = gpd.read_file(nuts3_shapes).set_index("index")
|
n.madd("Load", load.columns, bus=load.columns, p_set=load) # carrier="electricity"
|
||||||
|
|
||||||
def upsample(cntry, group, gdp_pop_non_nuts3):
|
|
||||||
load = opsd_load[cntry]
|
|
||||||
|
|
||||||
if len(group) == 1:
|
|
||||||
return pd.DataFrame({group.index[0]: load})
|
|
||||||
nuts3_cntry = nuts3.loc[nuts3.country == cntry]
|
|
||||||
transfer = shapes_to_shapes(group, nuts3_cntry.geometry).T.tocsr()
|
|
||||||
gdp_n = pd.Series(
|
|
||||||
transfer.dot(nuts3_cntry["gdp"].fillna(1.0).values), index=group.index
|
|
||||||
)
|
|
||||||
pop_n = pd.Series(
|
|
||||||
transfer.dot(nuts3_cntry["pop"].fillna(1.0).values), index=group.index
|
|
||||||
)
|
|
||||||
|
|
||||||
# relative factors 0.6 and 0.4 have been determined from a linear
|
|
||||||
# regression on the country to continent load data
|
|
||||||
factors = normed(0.6 * normed(gdp_n) + 0.4 * normed(pop_n))
|
|
||||||
if cntry in ["UA", "MD"]:
|
|
||||||
# overwrite factor because nuts3 provides no data for UA+MD
|
|
||||||
gdp_pop_non_nuts3 = gpd.read_file(gdp_pop_non_nuts3).set_index("Bus")
|
|
||||||
gdp_pop_non_nuts3 = gdp_pop_non_nuts3.loc[
|
|
||||||
(gdp_pop_non_nuts3.country == cntry)
|
|
||||||
& (gdp_pop_non_nuts3.index.isin(substation_lv_i))
|
|
||||||
]
|
|
||||||
factors = normed(
|
|
||||||
0.6 * normed(gdp_pop_non_nuts3["gdp"])
|
|
||||||
+ 0.4 * normed(gdp_pop_non_nuts3["pop"])
|
|
||||||
)
|
|
||||||
return pd.DataFrame(
|
|
||||||
factors.values * load.values[:, np.newaxis],
|
|
||||||
index=load.index,
|
|
||||||
columns=factors.index,
|
|
||||||
)
|
|
||||||
|
|
||||||
load = pd.concat(
|
|
||||||
[
|
|
||||||
upsample(cntry, group, gdp_pop_non_nuts3)
|
|
||||||
for cntry, group in gdf_regions.geometry.groupby(gdf_regions.country)
|
|
||||||
],
|
|
||||||
axis=1,
|
|
||||||
)
|
|
||||||
|
|
||||||
n.madd(
|
|
||||||
"Load", substation_lv_i, bus=substation_lv_i, p_set=load
|
|
||||||
) # carrier="electricity"
|
|
||||||
|
|
||||||
|
|
||||||
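A hedged sketch of what the rewritten ``attach_load`` does with the busmap (the file contents and the two-bus mapping below are invented): the nodal demand profiles of the base network are summed onto the clustered buses before being attached as ``Load`` components.

import pandas as pd

# invented example: two base-network buses mapped onto one clustered bus
busmap = pd.Series({"bus A": "DE0 1", "bus B": "DE0 1"}, name="busmap")
busmap.index.name = "Bus"

# hourly demand per base bus (MW); columns are snapshots after the unstack
load = pd.DataFrame(
    {"t0": [10.0, 5.0], "t1": [12.0, 6.0]},
    index=pd.Index(["bus A", "bus B"], name="Bus"),
)

# group rows by their clustered bus and sum, then transpose to snapshots x buses
clustered = load.groupby(busmap).sum().T
print(clustered)  # one column "DE0 1" with 15 and 18 MW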
def update_transmission_costs(n, costs, length_factor=1.0):
|
def set_transmission_costs(
|
||||||
# TODO: line length factor of lines is applied to lines and links.
|
n: pypsa.Network,
|
||||||
# Separate the function to distinguish.
|
costs: pd.DataFrame,
|
||||||
|
line_length_factor: float = 1.0,
|
||||||
|
link_length_factor: float = 1.0,
|
||||||
|
) -> None:
|
||||||
|
|
||||||
n.lines["capital_cost"] = (
|
n.lines["capital_cost"] = (
|
||||||
n.lines["length"] * length_factor * costs.at["HVAC overhead", "capital_cost"]
|
n.lines["length"]
|
||||||
|
* line_length_factor
|
||||||
|
* costs.at["HVAC overhead", "capital_cost"]
|
||||||
)
|
)
|
||||||
|
|
||||||
if n.links.empty:
|
if n.links.empty:
|
||||||
@ -373,7 +438,7 @@ def update_transmission_costs(n, costs, length_factor=1.0):
|
|||||||
|
|
||||||
costs = (
|
costs = (
|
||||||
n.links.loc[dc_b, "length"]
|
n.links.loc[dc_b, "length"]
|
||||||
* length_factor
|
* link_length_factor
|
||||||
* (
|
* (
|
||||||
(1.0 - n.links.loc[dc_b, "underwater_fraction"])
|
(1.0 - n.links.loc[dc_b, "underwater_fraction"])
|
||||||
* costs.at["HVDC overhead", "capital_cost"]
|
* costs.at["HVDC overhead", "capital_cost"]
|
||||||
@ -386,13 +451,25 @@ def update_transmission_costs(n, costs, length_factor=1.0):
|
|||||||
|
|
||||||
|
|
||||||
def attach_wind_and_solar(
|
def attach_wind_and_solar(
|
||||||
n, costs, input_profiles, carriers, extendable_carriers, line_length_factor=1
|
n: pypsa.Network,
|
||||||
):
|
costs: pd.DataFrame,
|
||||||
|
input_profiles: str,
|
||||||
|
carriers: list | set,
|
||||||
|
extendable_carriers: list | set,
|
||||||
|
line_length_factor: float = 1.0,
|
||||||
|
landfall_lengths: dict = None,
|
||||||
|
) -> None:
|
||||||
add_missing_carriers(n, carriers)
|
add_missing_carriers(n, carriers)
|
||||||
|
|
||||||
|
if landfall_lengths is None:
|
||||||
|
landfall_lengths = {}
|
||||||
|
|
||||||
for car in carriers:
|
for car in carriers:
|
||||||
if car == "hydro":
|
if car == "hydro":
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
landfall_length = landfall_lengths.get(car, 0.0)
|
||||||
|
|
||||||
with xr.open_dataset(getattr(input_profiles, "profile_" + car)) as ds:
|
with xr.open_dataset(getattr(input_profiles, "profile_" + car)) as ds:
|
||||||
if ds.indexes["bus"].empty:
|
if ds.indexes["bus"].empty:
|
||||||
continue
|
continue
|
||||||
@@ -403,17 +480,15 @@ def attach_wind_and_solar(

             supcar = car.split("-", 2)[0]
             if supcar == "offwind":
-                underwater_fraction = ds["underwater_fraction"].to_pandas()
-                connection_cost = (
-                    line_length_factor
-                    * ds["average_distance"].to_pandas()
-                    * (
-                        underwater_fraction
-                        * costs.at[car + "-connection-submarine", "capital_cost"]
-                        + (1.0 - underwater_fraction)
-                        * costs.at[car + "-connection-underground", "capital_cost"]
-                    )
+                distance = ds["average_distance"].to_pandas()
+                submarine_cost = costs.at[car + "-connection-submarine", "capital_cost"]
+                underground_cost = costs.at[
+                    car + "-connection-underground", "capital_cost"
+                ]
+                connection_cost = line_length_factor * (
+                    distance * submarine_cost + landfall_length * underground_cost
                 )

                 capital_cost = (
                     costs.at["offwind", "capital_cost"]
                     + costs.at[car + "-station", "capital_cost"]
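As a worked example of the new connection-cost formula (all numbers are purely illustrative, not the actual cost assumptions from costs.csv): with a line length factor of 1.25, an average submarine distance of 40 km and a landfall length of 10 km, the per-MW grid connection cost is 1.25 * (40 * submarine + 10 * underground).

# illustrative numbers only; the real values come from costs.csv and the config
line_length_factor = 1.25
distance = 40.0          # km of submarine cable to shore
landfall_length = 10.0   # km of underground cable on land
submarine_cost = 2000.0     # EUR/MW/km (invented)
underground_cost = 1000.0   # EUR/MW/km (invented)

connection_cost = line_length_factor * (
    distance * submarine_cost + landfall_length * underground_cost
)
print(connection_cost)  # 1.25 * (80000 + 10000) = 112500 EUR/MW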
@ -435,7 +510,6 @@ def attach_wind_and_solar(
|
|||||||
carrier=car,
|
carrier=car,
|
||||||
p_nom_extendable=car in extendable_carriers["Generator"],
|
p_nom_extendable=car in extendable_carriers["Generator"],
|
||||||
p_nom_max=ds["p_nom_max"].to_pandas(),
|
p_nom_max=ds["p_nom_max"].to_pandas(),
|
||||||
weight=ds["weight"].to_pandas(),
|
|
||||||
marginal_cost=costs.at[supcar, "marginal_cost"],
|
marginal_cost=costs.at[supcar, "marginal_cost"],
|
||||||
capital_cost=capital_cost,
|
capital_cost=capital_cost,
|
||||||
efficiency=costs.at[supcar, "efficiency"],
|
efficiency=costs.at[supcar, "efficiency"],
|
||||||
@ -457,19 +531,7 @@ def attach_conventional_generators(
|
|||||||
):
|
):
|
||||||
carriers = list(set(conventional_carriers) | set(extendable_carriers["Generator"]))
|
carriers = list(set(conventional_carriers) | set(extendable_carriers["Generator"]))
|
||||||
|
|
||||||
# Replace carrier "natural gas" with the respective technology (OCGT or
|
ppl = ppl.query("carrier in @carriers")
|
||||||
# CCGT) to align with PyPSA names of "carriers" and avoid filtering "natural
|
|
||||||
# gas" powerplants in ppl.query("carrier in @carriers")
|
|
||||||
ppl.loc[ppl["carrier"] == "natural gas", "carrier"] = ppl.loc[
|
|
||||||
ppl["carrier"] == "natural gas", "technology"
|
|
||||||
]
|
|
||||||
|
|
||||||
ppl = (
|
|
||||||
ppl.query("carrier in @carriers")
|
|
||||||
.join(costs, on="carrier", rsuffix="_r")
|
|
||||||
.rename(index=lambda s: f"C{str(s)}")
|
|
||||||
)
|
|
||||||
ppl["efficiency"] = ppl.efficiency.fillna(ppl.efficiency_r)
|
|
||||||
|
|
||||||
# reduce carriers to those in power plant dataset
|
# reduce carriers to those in power plant dataset
|
||||||
carriers = list(set(carriers) & set(ppl.carrier.unique()))
|
carriers = list(set(carriers) & set(ppl.carrier.unique()))
|
||||||
@ -496,13 +558,11 @@ def attach_conventional_generators(
|
|||||||
fuel_price.columns = ppl.index
|
fuel_price.columns = ppl.index
|
||||||
marginal_cost = fuel_price.div(ppl.efficiency).add(ppl.carrier.map(costs.VOM))
|
marginal_cost = fuel_price.div(ppl.efficiency).add(ppl.carrier.map(costs.VOM))
|
||||||
else:
|
else:
|
||||||
marginal_cost = (
|
marginal_cost = ppl.marginal_cost
|
||||||
ppl.carrier.map(costs.VOM) + ppl.carrier.map(costs.fuel) / ppl.efficiency
|
|
||||||
)
|
|
||||||
|
|
||||||
# Define generators using modified ppl DataFrame
|
# Define generators using modified ppl DataFrame
|
||||||
caps = ppl.groupby("carrier").p_nom.sum().div(1e3).round(2)
|
caps = ppl.groupby("carrier").p_nom.sum().div(1e3).round(2)
|
||||||
logger.info(f"Adding {len(ppl)} generators with capacities [GW] \n{caps}")
|
logger.info(f"Adding {len(ppl)} generators with capacities [GW]pp \n{caps}")
|
||||||
|
|
||||||
n.madd(
|
n.madd(
|
||||||
"Generator",
|
"Generator",
|
||||||
@ -515,8 +575,8 @@ def attach_conventional_generators(
|
|||||||
efficiency=ppl.efficiency,
|
efficiency=ppl.efficiency,
|
||||||
marginal_cost=marginal_cost,
|
marginal_cost=marginal_cost,
|
||||||
capital_cost=ppl.capital_cost,
|
capital_cost=ppl.capital_cost,
|
||||||
build_year=ppl.datein.fillna(0).astype(int),
|
build_year=ppl.build_year,
|
||||||
lifetime=(ppl.dateout - ppl.datein).fillna(np.inf),
|
lifetime=ppl.lifetime,
|
||||||
**committable_attrs,
|
**committable_attrs,
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -546,14 +606,9 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **par
|
|||||||
add_missing_carriers(n, carriers)
|
add_missing_carriers(n, carriers)
|
||||||
add_co2_emissions(n, costs, carriers)
|
add_co2_emissions(n, costs, carriers)
|
||||||
|
|
||||||
ppl = (
|
ror = ppl.query('carrier == "ror"')
|
||||||
ppl.query('carrier == "hydro"')
|
phs = ppl.query('carrier == "PHS"')
|
||||||
.reset_index(drop=True)
|
hydro = ppl.query('carrier == "hydro"')
|
||||||
.rename(index=lambda s: f"{str(s)} hydro")
|
|
||||||
)
|
|
||||||
ror = ppl.query('technology == "Run-Of-River"')
|
|
||||||
phs = ppl.query('technology == "Pumped Storage"')
|
|
||||||
hydro = ppl.query('technology == "Reservoir"')
|
|
||||||
|
|
||||||
country = ppl["bus"].map(n.buses.country).rename("country")
|
country = ppl["bus"].map(n.buses.country).rename("country")
|
||||||
|
|
||||||
@ -618,7 +673,7 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **par
|
|||||||
if "hydro" in carriers and not hydro.empty:
|
if "hydro" in carriers and not hydro.empty:
|
||||||
hydro_max_hours = params.get("hydro_max_hours")
|
hydro_max_hours = params.get("hydro_max_hours")
|
||||||
|
|
||||||
assert hydro_max_hours is not None, "No path for hydro capacities given."
|
assert hydro_capacities is not None, "No path for hydro capacities given."
|
||||||
|
|
||||||
hydro_stats = pd.read_csv(
|
hydro_stats = pd.read_csv(
|
||||||
hydro_capacities, comment="#", na_values="-", index_col=0
|
hydro_capacities, comment="#", na_values="-", index_col=0
|
||||||
@ -626,7 +681,13 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **par
|
|||||||
e_target = hydro_stats["E_store[TWh]"].clip(lower=0.2) * 1e6
|
e_target = hydro_stats["E_store[TWh]"].clip(lower=0.2) * 1e6
|
||||||
e_installed = hydro.eval("p_nom * max_hours").groupby(hydro.country).sum()
|
e_installed = hydro.eval("p_nom * max_hours").groupby(hydro.country).sum()
|
||||||
e_missing = e_target - e_installed
|
e_missing = e_target - e_installed
|
||||||
missing_mh_i = hydro.query("max_hours.isnull()").index
|
missing_mh_i = hydro.query("max_hours.isnull() or max_hours == 0").index
|
||||||
|
# some countries may have missing storage capacity but only one plant
|
||||||
|
# which needs to be scaled to the target storage capacity
|
||||||
|
missing_mh_single_i = hydro.index[
|
||||||
|
~hydro.country.duplicated() & hydro.country.isin(e_missing.dropna().index)
|
||||||
|
]
|
||||||
|
missing_mh_i = missing_mh_i.union(missing_mh_single_i)
|
||||||
|
|
||||||
if hydro_max_hours == "energy_capacity_totals_by_country":
|
if hydro_max_hours == "energy_capacity_totals_by_country":
|
||||||
# watch out some p_nom values like IE's are totally underrepresented
|
# watch out some p_nom values like IE's are totally underrepresented
|
||||||
@ -649,7 +710,8 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **par
|
|||||||
f'Assuming max_hours=6 for hydro reservoirs in the countries: {", ".join(missing_countries)}'
|
f'Assuming max_hours=6 for hydro reservoirs in the countries: {", ".join(missing_countries)}'
|
||||||
)
|
)
|
||||||
hydro_max_hours = hydro.max_hours.where(
|
hydro_max_hours = hydro.max_hours.where(
|
||||||
hydro.max_hours > 0, hydro.country.map(max_hours_country)
|
(hydro.max_hours > 0) & ~hydro.index.isin(missing_mh_single_i),
|
||||||
|
hydro.country.map(max_hours_country),
|
||||||
).fillna(6)
|
).fillna(6)
|
||||||
|
|
||||||
if params.get("flatten_dispatch", False):
|
if params.get("flatten_dispatch", False):
|
||||||
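A small sketch of the hydro ``max_hours`` fallback above, with invented plant data and a simplified condition (the single-plant-per-country handling is omitted): plants whose max_hours is missing or zero are overwritten with the country-level estimate and finally filled with six hours.

import numpy as np
import pandas as pd

# invented reservoir plants; max_hours from powerplantmatching where available
hydro = pd.DataFrame(
    {
        "country": ["AT", "AT", "IE"],
        "p_nom": [100.0, 50.0, 30.0],
        "max_hours": [2000.0, np.nan, 0.0],
    },
    index=["p1", "p2", "p3"],
)

# country-level estimate, e.g. missing storage energy spread over capacity
max_hours_country = pd.Series({"AT": 1500.0, "IE": 800.0})  # invented values

keep_own_value = hydro.max_hours > 0
hydro_max_hours = hydro.max_hours.where(
    keep_own_value, hydro.country.map(max_hours_country)
).fillna(6)
print(hydro_max_hours)  # p1 keeps 2000 h, p2 -> 1500 h, p3 -> 800 h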
@ -775,64 +837,144 @@ def estimate_renewable_capacities(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def attach_line_rating(
|
def attach_storageunits(n, costs, extendable_carriers, max_hours):
|
||||||
n, rating, s_max_pu, correction_factor, max_voltage_difference, max_line_rating
|
carriers = extendable_carriers["StorageUnit"]
|
||||||
):
|
|
||||||
# TODO: Only considers overhead lines
|
n.madd("Carrier", carriers)
|
||||||
n.lines_t.s_max_pu = (rating / n.lines.s_nom[rating.columns]) * correction_factor
|
|
||||||
if max_voltage_difference:
|
buses_i = n.buses.index
|
||||||
x_pu = (
|
|
||||||
n.lines.type.map(n.line_types["x_per_length"])
|
lookup_store = {"H2": "electrolysis", "battery": "battery inverter"}
|
||||||
* n.lines.length
|
lookup_dispatch = {"H2": "fuel cell", "battery": "battery inverter"}
|
||||||
/ (n.lines.v_nom**2)
|
|
||||||
|
for carrier in carriers:
|
||||||
|
roundtrip_correction = 0.5 if carrier == "battery" else 1
|
||||||
|
|
||||||
|
n.madd(
|
||||||
|
"StorageUnit",
|
||||||
|
buses_i,
|
||||||
|
" " + carrier,
|
||||||
|
bus=buses_i,
|
||||||
|
carrier=carrier,
|
||||||
|
p_nom_extendable=True,
|
||||||
|
capital_cost=costs.at[carrier, "capital_cost"],
|
||||||
|
marginal_cost=costs.at[carrier, "marginal_cost"],
|
||||||
|
efficiency_store=costs.at[lookup_store[carrier], "efficiency"]
|
||||||
|
** roundtrip_correction,
|
||||||
|
efficiency_dispatch=costs.at[lookup_dispatch[carrier], "efficiency"]
|
||||||
|
** roundtrip_correction,
|
||||||
|
max_hours=max_hours[carrier],
|
||||||
|
cyclic_state_of_charge=True,
|
||||||
)
|
)
|
||||||
# need to clip here as cap values might be below 1
|
|
||||||
# -> would mean the line cannot be operated at actual given pessimistic ampacity
|
|
||||||
s_max_pu_cap = (
|
|
||||||
np.deg2rad(max_voltage_difference) / (x_pu * n.lines.s_nom)
|
|
||||||
).clip(lower=1)
|
|
||||||
n.lines_t.s_max_pu = n.lines_t.s_max_pu.clip(
|
|
||||||
lower=1, upper=s_max_pu_cap, axis=1
|
|
||||||
)
|
|
||||||
if max_line_rating:
|
|
||||||
n.lines_t.s_max_pu = n.lines_t.s_max_pu.clip(upper=max_line_rating)
|
|
||||||
n.lines_t.s_max_pu *= s_max_pu
|
|
||||||
|
|
||||||
|
|
||||||
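The ``roundtrip_correction`` used in ``attach_storageunits`` above splits a round-trip efficiency evenly between charging and discharging. A hedged numerical check (the 0.96 round-trip value is invented, not the costs.csv number):

# invented round-trip efficiency for a battery inverter
eta_roundtrip = 0.96

# exponent 0.5 assigns half of the losses to each direction
eta_store = eta_roundtrip**0.5
eta_dispatch = eta_roundtrip**0.5

assert abs(eta_store * eta_dispatch - eta_roundtrip) < 1e-12
print(eta_store)  # ~0.98 per direction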
def add_transmission_projects(n, transmission_projects):
|
def attach_stores(n, costs, extendable_carriers):
|
||||||
logger.info(f"Adding transmission projects to network.")
|
carriers = extendable_carriers["Store"]
|
||||||
for path in transmission_projects:
|
|
||||||
path = Path(path)
|
n.madd("Carrier", carriers)
|
||||||
df = pd.read_csv(path, index_col=0, dtype={"bus0": str, "bus1": str})
|
|
||||||
if df.empty:
|
buses_i = n.buses.index
|
||||||
continue
|
|
||||||
if "new_buses" in path.name:
|
if "H2" in carriers:
|
||||||
n.madd("Bus", df.index, **df)
|
h2_buses_i = n.madd("Bus", buses_i + " H2", carrier="H2", location=buses_i)
|
||||||
elif "new_lines" in path.name:
|
|
||||||
n.madd("Line", df.index, **df)
|
n.madd(
|
||||||
elif "new_links" in path.name:
|
"Store",
|
||||||
n.madd("Link", df.index, **df)
|
h2_buses_i,
|
||||||
elif "adjust_lines":
|
bus=h2_buses_i,
|
||||||
n.lines.update(df)
|
carrier="H2",
|
||||||
elif "adjust_links":
|
e_nom_extendable=True,
|
||||||
n.links.update(df)
|
e_cyclic=True,
|
||||||
|
capital_cost=costs.at["hydrogen storage underground", "capital_cost"],
|
||||||
|
)
|
||||||
|
|
||||||
|
n.madd(
|
||||||
|
"Link",
|
||||||
|
h2_buses_i + " Electrolysis",
|
||||||
|
bus0=buses_i,
|
||||||
|
bus1=h2_buses_i,
|
||||||
|
carrier="H2 electrolysis",
|
||||||
|
p_nom_extendable=True,
|
||||||
|
efficiency=costs.at["electrolysis", "efficiency"],
|
||||||
|
capital_cost=costs.at["electrolysis", "capital_cost"],
|
||||||
|
marginal_cost=costs.at["electrolysis", "marginal_cost"],
|
||||||
|
)
|
||||||
|
|
||||||
|
n.madd(
|
||||||
|
"Link",
|
||||||
|
h2_buses_i + " Fuel Cell",
|
||||||
|
bus0=h2_buses_i,
|
||||||
|
bus1=buses_i,
|
||||||
|
carrier="H2 fuel cell",
|
||||||
|
p_nom_extendable=True,
|
||||||
|
efficiency=costs.at["fuel cell", "efficiency"],
|
||||||
|
# NB: fixed cost is per MWel
|
||||||
|
capital_cost=costs.at["fuel cell", "capital_cost"]
|
||||||
|
* costs.at["fuel cell", "efficiency"],
|
||||||
|
marginal_cost=costs.at["fuel cell", "marginal_cost"],
|
||||||
|
)
|
||||||
|
|
||||||
|
if "battery" in carriers:
|
||||||
|
b_buses_i = n.madd(
|
||||||
|
"Bus", buses_i + " battery", carrier="battery", location=buses_i
|
||||||
|
)
|
||||||
|
|
||||||
|
n.madd(
|
||||||
|
"Store",
|
||||||
|
b_buses_i,
|
||||||
|
bus=b_buses_i,
|
||||||
|
carrier="battery",
|
||||||
|
e_cyclic=True,
|
||||||
|
e_nom_extendable=True,
|
||||||
|
capital_cost=costs.at["battery storage", "capital_cost"],
|
||||||
|
marginal_cost=costs.at["battery", "marginal_cost"],
|
||||||
|
)
|
||||||
|
|
||||||
|
n.madd("Carrier", ["battery charger", "battery discharger"])
|
||||||
|
|
||||||
|
n.madd(
|
||||||
|
"Link",
|
||||||
|
b_buses_i + " charger",
|
||||||
|
bus0=buses_i,
|
||||||
|
bus1=b_buses_i,
|
||||||
|
carrier="battery charger",
|
||||||
|
# the efficiencies are "round trip efficiencies"
|
||||||
|
efficiency=costs.at["battery inverter", "efficiency"] ** 0.5,
|
||||||
|
capital_cost=costs.at["battery inverter", "capital_cost"],
|
||||||
|
p_nom_extendable=True,
|
||||||
|
marginal_cost=costs.at["battery inverter", "marginal_cost"],
|
||||||
|
)
|
||||||
|
|
||||||
|
n.madd(
|
||||||
|
"Link",
|
||||||
|
b_buses_i + " discharger",
|
||||||
|
bus0=b_buses_i,
|
||||||
|
bus1=buses_i,
|
||||||
|
carrier="battery discharger",
|
||||||
|
efficiency=costs.at["battery inverter", "efficiency"] ** 0.5,
|
||||||
|
p_nom_extendable=True,
|
||||||
|
marginal_cost=costs.at["battery inverter", "marginal_cost"],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
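To make the contrast with the single-variable ``StorageUnit`` representation concrete, here is a hedged sketch (toy network, invented costs, not the script's exact calls) of the Store-plus-Links pattern that ``attach_stores`` builds: energy, charging and discharging capacity each get their own extendable component, hence three investment variables per storage.

import pypsa

n = pypsa.Network()
n.set_snapshots(range(2))
n.add("Bus", "DE0 1")
n.add("Bus", "DE0 1 battery", carrier="battery")

# energy capacity
n.add("Store", "DE0 1 battery", bus="DE0 1 battery", carrier="battery",
      e_nom_extendable=True, e_cyclic=True, capital_cost=10)
# charging and discharging capacity as two separate links
n.add("Link", "DE0 1 battery charger", bus0="DE0 1", bus1="DE0 1 battery",
      carrier="battery charger", p_nom_extendable=True,
      efficiency=0.96**0.5, capital_cost=20)
n.add("Link", "DE0 1 battery discharger", bus0="DE0 1 battery", bus1="DE0 1",
      carrier="battery discharger", p_nom_extendable=True,
      efficiency=0.96**0.5)

extendables = n.stores.e_nom_extendable.sum() + n.links.p_nom_extendable.sum()
print(extendables)  # 3 investment variables for one battery storage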
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
if "snakemake" not in globals():
|
if "snakemake" not in globals():
|
||||||
from _helpers import mock_snakemake
|
from _helpers import mock_snakemake
|
||||||
|
|
||||||
snakemake = mock_snakemake("add_electricity")
|
snakemake = mock_snakemake("add_electricity", clusters=100)
|
||||||
configure_logging(snakemake)
|
configure_logging(snakemake)
|
||||||
set_scenario_config(snakemake)
|
set_scenario_config(snakemake)
|
||||||
|
|
||||||
params = snakemake.params
|
params = snakemake.params
|
||||||
|
max_hours = params.electricity["max_hours"]
|
||||||
|
landfall_lengths = {
|
||||||
|
tech: settings["landfall_length"]
|
||||||
|
for tech, settings in params.renewable.items()
|
||||||
|
if "landfall_length" in settings.keys()
|
||||||
|
}
|
||||||
|
|
||||||
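With the configuration shown earlier (``landfall_length: 10`` for the offshore wind technologies), this comprehension would, for example, yield a dictionary along the following lines; the exact keys depend on which renewable carriers define a ``landfall_length``.

# example outcome for a config where only the offwind technologies
# define a landfall_length of 10 km
landfall_lengths = {
    "offwind-ac": 10,
    "offwind-dc": 10,
    "offwind-float": 10,
}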
n = pypsa.Network(snakemake.input.base_network)
|
n = pypsa.Network(snakemake.input.base_network)
|
||||||
|
|
||||||
if params["transmission_projects"]["enable"]:
|
|
||||||
add_transmission_projects(n, snakemake.input.transmission_projects)
|
|
||||||
|
|
||||||
time = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day)
|
time = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day)
|
||||||
n.set_snapshots(time)
|
n.set_snapshots(time)
|
||||||
|
|
||||||
@ -841,22 +983,31 @@ if __name__ == "__main__":
|
|||||||
costs = load_costs(
|
costs = load_costs(
|
||||||
snakemake.input.tech_costs,
|
snakemake.input.tech_costs,
|
||||||
params.costs,
|
params.costs,
|
||||||
params.electricity["max_hours"],
|
max_hours,
|
||||||
Nyears,
|
Nyears,
|
||||||
)
|
)
|
||||||
ppl = load_powerplants(snakemake.input.powerplants)
|
|
||||||
|
ppl = load_and_aggregate_powerplants(
|
||||||
|
snakemake.input.powerplants,
|
||||||
|
costs,
|
||||||
|
params.consider_efficiency_classes,
|
||||||
|
params.aggregation_strategies,
|
||||||
|
params.exclude_carriers,
|
||||||
|
)
|
||||||
|
|
||||||
attach_load(
|
attach_load(
|
||||||
n,
|
n,
|
||||||
snakemake.input.regions,
|
|
||||||
snakemake.input.load,
|
snakemake.input.load,
|
||||||
snakemake.input.nuts3_shapes,
|
snakemake.input.busmap,
|
||||||
snakemake.input.get("gdp_pop_non_nuts3"),
|
|
||||||
params.countries,
|
|
||||||
params.scaling_factor,
|
params.scaling_factor,
|
||||||
)
|
)
|
||||||
|
|
||||||
update_transmission_costs(n, costs, params.length_factor)
|
set_transmission_costs(
|
||||||
|
n,
|
||||||
|
costs,
|
||||||
|
params.line_length_factor,
|
||||||
|
params.link_length_factor,
|
||||||
|
)
|
||||||
|
|
||||||
renewable_carriers = set(params.electricity["renewable_carriers"])
|
renewable_carriers = set(params.electricity["renewable_carriers"])
|
||||||
extendable_carriers = params.electricity["extendable_carriers"]
|
extendable_carriers = params.electricity["extendable_carriers"]
|
||||||
@ -896,7 +1047,8 @@ if __name__ == "__main__":
|
|||||||
snakemake.input,
|
snakemake.input,
|
||||||
renewable_carriers,
|
renewable_carriers,
|
||||||
extendable_carriers,
|
extendable_carriers,
|
||||||
params.length_factor,
|
params.line_length_factor,
|
||||||
|
landfall_lengths,
|
||||||
)
|
)
|
||||||
|
|
||||||
if "hydro" in renewable_carriers:
|
if "hydro" in renewable_carriers:
|
||||||
@ -933,24 +1085,12 @@ if __name__ == "__main__":
|
|||||||
|
|
||||||
update_p_nom_max(n)
|
update_p_nom_max(n)
|
||||||
|
|
||||||
line_rating_config = snakemake.config["lines"]["dynamic_line_rating"]
|
attach_storageunits(n, costs, extendable_carriers, max_hours)
|
||||||
if line_rating_config["activate"]:
|
attach_stores(n, costs, extendable_carriers)
|
||||||
rating = xr.open_dataarray(snakemake.input.line_rating).to_pandas().transpose()
|
|
||||||
s_max_pu = snakemake.config["lines"]["s_max_pu"]
|
|
||||||
correction_factor = line_rating_config["correction_factor"]
|
|
||||||
max_voltage_difference = line_rating_config["max_voltage_difference"]
|
|
||||||
max_line_rating = line_rating_config["max_line_rating"]
|
|
||||||
|
|
||||||
attach_line_rating(
|
|
||||||
n,
|
|
||||||
rating,
|
|
||||||
s_max_pu,
|
|
||||||
correction_factor,
|
|
||||||
max_voltage_difference,
|
|
||||||
max_line_rating,
|
|
||||||
)
|
|
||||||
|
|
||||||
sanitize_carriers(n, snakemake.config)
|
sanitize_carriers(n, snakemake.config)
|
||||||
|
if "location" in n.buses:
|
||||||
|
sanitize_locations(n)
|
||||||
|
|
||||||
n.meta = snakemake.config
|
n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
|
||||||
n.export_to_netcdf(snakemake.output[0])
|
n.export_to_netcdf(snakemake.output[0])
|
||||||
|
@ -120,7 +120,7 @@ def add_existing_renewables(df_agg, costs):
|
|||||||
df_agg.at[name, "DateOut"] = (
|
df_agg.at[name, "DateOut"] = (
|
||||||
year + costs.at[cost_key, "lifetime"] - 1
|
year + costs.at[cost_key, "lifetime"] - 1
|
||||||
)
|
)
|
||||||
df_agg.at[name, "cluster_bus"] = node
|
df_agg.at[name, "bus"] = node
|
||||||
|
|
||||||
|
|
||||||
def add_power_capacities_installed_before_baseyear(n, grouping_years, costs, baseyear):
|
def add_power_capacities_installed_before_baseyear(n, grouping_years, costs, baseyear):
|
||||||
@ -135,7 +135,8 @@ def add_power_capacities_installed_before_baseyear(n, grouping_years, costs, bas
|
|||||||
baseyear : int
|
baseyear : int
|
||||||
"""
|
"""
|
||||||
logger.debug(
|
logger.debug(
|
||||||
f"Adding power capacities installed before {baseyear} from powerplants.csv"
|
f"Adding power capacities installed before {baseyear} from"
|
||||||
|
" powerplants_s_{clusters}.csv"
|
||||||
)
|
)
|
||||||
|
|
||||||
df_agg = pd.read_csv(snakemake.input.powerplants, index_col=0)
|
df_agg = pd.read_csv(snakemake.input.powerplants, index_col=0)
|
||||||
@ -184,19 +185,6 @@ def add_power_capacities_installed_before_baseyear(n, grouping_years, costs, bas
|
|||||||
)
|
)
|
||||||
df_agg.loc[biomass_i, "DateOut"] = df_agg.loc[biomass_i, "DateOut"].fillna(dateout)
|
df_agg.loc[biomass_i, "DateOut"] = df_agg.loc[biomass_i, "DateOut"].fillna(dateout)
|
||||||
|
|
||||||
# assign clustered bus
|
|
||||||
busmap_s = pd.read_csv(snakemake.input.busmap_s, index_col=0).squeeze()
|
|
||||||
busmap = pd.read_csv(snakemake.input.busmap, index_col=0).squeeze()
|
|
||||||
|
|
||||||
inv_busmap = {}
|
|
||||||
for k, v in busmap.items():
|
|
||||||
inv_busmap[v] = inv_busmap.get(v, []) + [k]
|
|
||||||
|
|
||||||
clustermaps = busmap_s.map(busmap)
|
|
||||||
clustermaps.index = clustermaps.index.astype(int)
|
|
||||||
|
|
||||||
df_agg["cluster_bus"] = df_agg.bus.map(clustermaps)
|
|
||||||
|
|
||||||
# include renewables in df_agg
|
# include renewables in df_agg
|
||||||
add_existing_renewables(df_agg, costs)
|
add_existing_renewables(df_agg, costs)
|
||||||
|
|
||||||
@ -225,14 +213,14 @@ def add_power_capacities_installed_before_baseyear(n, grouping_years, costs, bas
|
|||||||
|
|
||||||
df = df_agg.pivot_table(
|
df = df_agg.pivot_table(
|
||||||
index=["grouping_year", "Fueltype"],
|
index=["grouping_year", "Fueltype"],
|
||||||
columns="cluster_bus",
|
columns="bus",
|
||||||
values="Capacity",
|
values="Capacity",
|
||||||
aggfunc="sum",
|
aggfunc="sum",
|
||||||
)
|
)
|
||||||
|
|
||||||
lifetime = df_agg.pivot_table(
|
lifetime = df_agg.pivot_table(
|
||||||
index=["grouping_year", "Fueltype"],
|
index=["grouping_year", "Fueltype"],
|
||||||
columns="cluster_bus",
|
columns="bus",
|
||||||
values="lifetime",
|
values="lifetime",
|
||||||
aggfunc="mean", # currently taken mean for clustering lifetimes
|
aggfunc="mean", # currently taken mean for clustering lifetimes
|
||||||
)
|
)
|
||||||
@ -280,37 +268,6 @@ def add_power_capacities_installed_before_baseyear(n, grouping_years, costs, bas
|
|||||||
] = capacity.loc[already_build.str.replace(name_suffix, "")].values
|
] = capacity.loc[already_build.str.replace(name_suffix, "")].values
|
||||||
new_capacity = capacity.loc[new_build.str.replace(name_suffix, "")]
|
new_capacity = capacity.loc[new_build.str.replace(name_suffix, "")]
|
||||||
|
|
||||||
if "m" in snakemake.wildcards.clusters:
|
|
||||||
for ind in new_capacity.index:
|
|
||||||
# existing capacities are split evenly among regions in every country
|
|
||||||
inv_ind = list(inv_busmap[ind])
|
|
||||||
|
|
||||||
# for offshore the splitting only includes coastal regions
|
|
||||||
inv_ind = [
|
|
||||||
i for i in inv_ind if (i + name_suffix_by) in n.generators.index
|
|
||||||
]
|
|
||||||
|
|
||||||
p_max_pu = n.generators_t.p_max_pu[
|
|
||||||
[i + name_suffix_by for i in inv_ind]
|
|
||||||
]
|
|
||||||
p_max_pu.columns = [i + name_suffix for i in inv_ind]
|
|
||||||
|
|
||||||
n.madd(
|
|
||||||
"Generator",
|
|
||||||
[i + name_suffix for i in inv_ind],
|
|
||||||
bus=ind,
|
|
||||||
carrier=generator,
|
|
||||||
p_nom=new_capacity[ind]
|
|
||||||
/ len(inv_ind), # split among regions in a country
|
|
||||||
marginal_cost=marginal_cost,
|
|
||||||
capital_cost=capital_cost,
|
|
||||||
efficiency=costs.at[cost_key, "efficiency"],
|
|
||||||
p_max_pu=p_max_pu,
|
|
||||||
build_year=grouping_year,
|
|
||||||
lifetime=costs.at[cost_key, "lifetime"],
|
|
||||||
)
|
|
||||||
|
|
||||||
else:
|
|
||||||
p_max_pu = n.generators_t.p_max_pu[capacity.index + name_suffix_by]
|
p_max_pu = n.generators_t.p_max_pu[capacity.index + name_suffix_by]
|
||||||
|
|
||||||
if not new_build.empty:
|
if not new_build.empty:
|
||||||
@ -690,7 +647,6 @@ if __name__ == "__main__":
|
|||||||
snakemake = mock_snakemake(
|
snakemake = mock_snakemake(
|
||||||
"add_existing_baseyear",
|
"add_existing_baseyear",
|
||||||
configfiles="config/test/config.myopic.yaml",
|
configfiles="config/test/config.myopic.yaml",
|
||||||
simpl="",
|
|
||||||
clusters="5",
|
clusters="5",
|
||||||
ll="v1.5",
|
ll="v1.5",
|
||||||
opts="",
|
opts="",
|
||||||
|
@ -1,253 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
# SPDX-FileCopyrightText: : 2017-2024 The PyPSA-Eur Authors
|
|
||||||
#
|
|
||||||
# SPDX-License-Identifier: MIT
|
|
||||||
|
|
||||||
# coding: utf-8
|
|
||||||
"""
|
|
||||||
Adds extra extendable components to the clustered and simplified network.
|
|
||||||
|
|
||||||
Relevant Settings
|
|
||||||
-----------------
|
|
||||||
|
|
||||||
.. code:: yaml
|
|
||||||
|
|
||||||
costs:
|
|
||||||
year:
|
|
||||||
version:
|
|
||||||
dicountrate:
|
|
||||||
emission_prices:
|
|
||||||
|
|
||||||
electricity:
|
|
||||||
max_hours:
|
|
||||||
marginal_cost:
|
|
||||||
capital_cost:
|
|
||||||
extendable_carriers:
|
|
||||||
StorageUnit:
|
|
||||||
Store:
|
|
||||||
|
|
||||||
.. seealso::
|
|
||||||
Documentation of the configuration file ``config/config.yaml`` at :ref:`costs_cf`,
|
|
||||||
:ref:`electricity_cf`
|
|
||||||
|
|
||||||
Inputs
|
|
||||||
------
|
|
||||||
|
|
||||||
- ``resources/costs.csv``: The database of cost assumptions for all included technologies for specific years from various sources; e.g. discount rate, lifetime, investment (CAPEX), fixed operation and maintenance (FOM), variable operation and maintenance (VOM), fuel costs, efficiency, carbon-dioxide intensity.
|
|
||||||
|
|
||||||
Outputs
|
|
||||||
-------
|
|
||||||
|
|
||||||
- ``networks/elec_s{simpl}_{clusters}_ec.nc``:
|
|
||||||
|
|
||||||
|
|
||||||
Description
|
|
||||||
-----------
|
|
||||||
|
|
||||||
The rule :mod:`add_extra_components` attaches additional extendable components to the clustered and simplified network. These can be configured in the ``config/config.yaml`` at ``electricity: extendable_carriers:``. It processes ``networks/elec_s{simpl}_{clusters}.nc`` to build ``networks/elec_s{simpl}_{clusters}_ec.nc``, which in contrast to the former (depending on the configuration) contain with **zero** initial capacity
|
|
||||||
|
|
||||||
- ``StorageUnits`` of carrier 'H2' and/or 'battery'. If this option is chosen, every bus is given an extendable ``StorageUnit`` of the corresponding carrier. The energy and power capacities are linked through a parameter that specifies the energy capacity as maximum hours at full dispatch power and is configured in ``electricity: max_hours:``. This linkage leads to one investment variable per storage unit. The default ``max_hours`` lead to long-term hydrogen and short-term battery storage units.
|
|
||||||
|
|
||||||
- ``Stores`` of carrier 'H2' and/or 'battery' in combination with ``Links``. If this option is chosen, the script adds extra buses with corresponding carrier where energy ``Stores`` are attached and which are connected to the corresponding power buses via two links, one each for charging and discharging. This leads to three investment variables for the energy capacity, charging and discharging capacity of the storage unit.
|
|
||||||
"""
|
|
||||||
import logging
|
|
||||||
|
|
||||||
import numpy as np
|
|
||||||
import pandas as pd
|
|
||||||
import pypsa
|
|
||||||
from _helpers import configure_logging, set_scenario_config
|
|
||||||
from add_electricity import load_costs, sanitize_carriers, sanitize_locations
|
|
||||||
|
|
||||||
idx = pd.IndexSlice
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
def attach_storageunits(n, costs, extendable_carriers, max_hours):
|
|
||||||
carriers = extendable_carriers["StorageUnit"]
|
|
||||||
|
|
||||||
n.madd("Carrier", carriers)
|
|
||||||
|
|
||||||
buses_i = n.buses.index
|
|
||||||
|
|
||||||
lookup_store = {"H2": "electrolysis", "battery": "battery inverter"}
|
|
||||||
lookup_dispatch = {"H2": "fuel cell", "battery": "battery inverter"}
|
|
||||||
|
|
||||||
for carrier in carriers:
|
|
||||||
roundtrip_correction = 0.5 if carrier == "battery" else 1
|
|
||||||
|
|
||||||
n.madd(
|
|
||||||
"StorageUnit",
|
|
||||||
buses_i,
|
|
||||||
" " + carrier,
|
|
||||||
bus=buses_i,
|
|
||||||
carrier=carrier,
|
|
||||||
p_nom_extendable=True,
|
|
||||||
capital_cost=costs.at[carrier, "capital_cost"],
|
|
||||||
marginal_cost=costs.at[carrier, "marginal_cost"],
|
|
||||||
efficiency_store=costs.at[lookup_store[carrier], "efficiency"]
|
|
||||||
** roundtrip_correction,
|
|
||||||
efficiency_dispatch=costs.at[lookup_dispatch[carrier], "efficiency"]
|
|
||||||
** roundtrip_correction,
|
|
||||||
max_hours=max_hours[carrier],
|
|
||||||
cyclic_state_of_charge=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def attach_stores(n, costs, extendable_carriers):
|
|
||||||
carriers = extendable_carriers["Store"]
|
|
||||||
|
|
||||||
n.madd("Carrier", carriers)
|
|
||||||
|
|
||||||
buses_i = n.buses.index
|
|
||||||
|
|
||||||
if "H2" in carriers:
|
|
||||||
h2_buses_i = n.madd("Bus", buses_i + " H2", carrier="H2", location=buses_i)
|
|
||||||
|
|
||||||
n.madd(
|
|
||||||
"Store",
|
|
||||||
h2_buses_i,
|
|
||||||
bus=h2_buses_i,
|
|
||||||
carrier="H2",
|
|
||||||
e_nom_extendable=True,
|
|
||||||
e_cyclic=True,
|
|
||||||
capital_cost=costs.at["hydrogen storage underground", "capital_cost"],
|
|
||||||
)
|
|
||||||
|
|
||||||
n.madd(
|
|
||||||
"Link",
|
|
||||||
h2_buses_i + " Electrolysis",
|
|
||||||
bus0=buses_i,
|
|
||||||
bus1=h2_buses_i,
|
|
||||||
carrier="H2 electrolysis",
|
|
||||||
p_nom_extendable=True,
|
|
||||||
efficiency=costs.at["electrolysis", "efficiency"],
|
|
||||||
capital_cost=costs.at["electrolysis", "capital_cost"],
|
|
||||||
marginal_cost=costs.at["electrolysis", "marginal_cost"],
|
|
||||||
)
|
|
||||||
|
|
||||||
n.madd(
|
|
||||||
"Link",
|
|
||||||
h2_buses_i + " Fuel Cell",
|
|
||||||
bus0=h2_buses_i,
|
|
||||||
bus1=buses_i,
|
|
||||||
carrier="H2 fuel cell",
|
|
||||||
p_nom_extendable=True,
|
|
||||||
efficiency=costs.at["fuel cell", "efficiency"],
|
|
||||||
# NB: fixed cost is per MWel
|
|
||||||
capital_cost=costs.at["fuel cell", "capital_cost"]
|
|
||||||
* costs.at["fuel cell", "efficiency"],
|
|
||||||
marginal_cost=costs.at["fuel cell", "marginal_cost"],
|
|
||||||
)
|
|
||||||
|
|
||||||
if "battery" in carriers:
|
|
||||||
b_buses_i = n.madd(
|
|
||||||
"Bus", buses_i + " battery", carrier="battery", location=buses_i
|
|
||||||
)
|
|
||||||
|
|
||||||
n.madd(
|
|
||||||
"Store",
|
|
||||||
b_buses_i,
|
|
||||||
bus=b_buses_i,
|
|
||||||
carrier="battery",
|
|
||||||
e_cyclic=True,
|
|
||||||
e_nom_extendable=True,
|
|
||||||
capital_cost=costs.at["battery storage", "capital_cost"],
|
|
||||||
marginal_cost=costs.at["battery", "marginal_cost"],
|
|
||||||
)
|
|
||||||
|
|
||||||
n.madd("Carrier", ["battery charger", "battery discharger"])
|
|
||||||
|
|
||||||
n.madd(
|
|
||||||
"Link",
|
|
||||||
b_buses_i + " charger",
|
|
||||||
bus0=buses_i,
|
|
||||||
bus1=b_buses_i,
|
|
||||||
carrier="battery charger",
|
|
||||||
# the efficiencies are "round trip efficiencies"
|
|
||||||
efficiency=costs.at["battery inverter", "efficiency"] ** 0.5,
|
|
||||||
capital_cost=costs.at["battery inverter", "capital_cost"],
|
|
||||||
p_nom_extendable=True,
|
|
||||||
marginal_cost=costs.at["battery inverter", "marginal_cost"],
|
|
||||||
)
|
|
||||||
|
|
||||||
n.madd(
|
|
||||||
"Link",
|
|
||||||
b_buses_i + " discharger",
|
|
||||||
bus0=b_buses_i,
|
|
||||||
bus1=buses_i,
|
|
||||||
carrier="battery discharger",
|
|
||||||
efficiency=costs.at["battery inverter", "efficiency"] ** 0.5,
|
|
||||||
p_nom_extendable=True,
|
|
||||||
marginal_cost=costs.at["battery inverter", "marginal_cost"],
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def attach_hydrogen_pipelines(n, costs, extendable_carriers):
|
|
||||||
as_stores = extendable_carriers.get("Store", [])
|
|
||||||
|
|
||||||
if "H2 pipeline" not in extendable_carriers.get("Link", []):
|
|
||||||
return
|
|
||||||
|
|
||||||
assert "H2" in as_stores, (
|
|
||||||
"Attaching hydrogen pipelines requires hydrogen "
|
|
||||||
"storage to be modelled as Store-Link-Bus combination. See "
|
|
||||||
"`config.yaml` at `electricity: extendable_carriers: Store:`."
|
|
||||||
)
|
|
||||||
|
|
||||||
# determine bus pairs
|
|
||||||
attrs = ["bus0", "bus1", "length"]
|
|
||||||
candidates = pd.concat(
|
|
||||||
[n.lines[attrs], n.links.query('carrier=="DC"')[attrs]]
|
|
||||||
).reset_index(drop=True)
|
|
||||||
|
|
||||||
# remove bus pair duplicates regardless of order of bus0 and bus1
|
|
||||||
h2_links = candidates[
|
|
||||||
~pd.DataFrame(np.sort(candidates[["bus0", "bus1"]])).duplicated()
|
|
||||||
]
|
|
||||||
h2_links.index = h2_links.apply(lambda c: f"H2 pipeline {c.bus0}-{c.bus1}", axis=1)
|
|
||||||
|
|
||||||
# add pipelines
|
|
||||||
n.add("Carrier", "H2 pipeline")
|
|
||||||
|
|
||||||
n.madd(
|
|
||||||
"Link",
|
|
||||||
h2_links.index,
|
|
||||||
bus0=h2_links.bus0.values + " H2",
|
|
||||||
bus1=h2_links.bus1.values + " H2",
|
|
||||||
p_min_pu=-1,
|
|
||||||
p_nom_extendable=True,
|
|
||||||
length=h2_links.length.values,
|
|
||||||
capital_cost=costs.at["H2 pipeline", "capital_cost"] * h2_links.length,
|
|
||||||
efficiency=costs.at["H2 pipeline", "efficiency"],
|
|
||||||
carrier="H2 pipeline",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
if "snakemake" not in globals():
|
|
||||||
from _helpers import mock_snakemake
|
|
||||||
|
|
||||||
snakemake = mock_snakemake("add_extra_components", simpl="", clusters=5)
|
|
||||||
configure_logging(snakemake)
|
|
||||||
set_scenario_config(snakemake)
|
|
||||||
|
|
||||||
n = pypsa.Network(snakemake.input.network)
|
|
||||||
extendable_carriers = snakemake.params.extendable_carriers
|
|
||||||
max_hours = snakemake.params.max_hours
|
|
||||||
|
|
||||||
Nyears = n.snapshot_weightings.objective.sum() / 8760.0
|
|
||||||
costs = load_costs(
|
|
||||||
snakemake.input.tech_costs, snakemake.params.costs, max_hours, Nyears
|
|
||||||
)
|
|
||||||
|
|
||||||
attach_storageunits(n, costs, extendable_carriers, max_hours)
|
|
||||||
attach_stores(n, costs, extendable_carriers)
|
|
||||||
attach_hydrogen_pipelines(n, costs, extendable_carriers)
|
|
||||||
|
|
||||||
sanitize_carriers(n, snakemake.config)
|
|
||||||
if "location" in n.buses:
|
|
||||||
sanitize_locations(n)
|
|
||||||
|
|
||||||
n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
|
|
||||||
n.export_to_netcdf(snakemake.output[0])
|
|
106 scripts/add_transmission_projects_and_dlr.py Normal file
@ -0,0 +1,106 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# SPDX-FileCopyrightText: : 2017-2024 The PyPSA-Eur Authors
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: MIT
|
||||||
|
"""
|
||||||
|
Add transmission projects and DLR to the network.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
import pandas as pd
|
||||||
|
import pypsa
|
||||||
|
import xarray as xr
|
||||||
|
from _helpers import configure_logging, set_scenario_config
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def attach_transmission_projects(
|
||||||
|
n: pypsa.Network, transmission_projects: list[str]
|
||||||
|
) -> None:
|
||||||
|
logger.info("Adding transmission projects to network.")
|
||||||
|
for path in transmission_projects:
|
||||||
|
path = Path(path)
|
||||||
|
df = pd.read_csv(path, index_col=0, dtype={"bus0": str, "bus1": str})
|
||||||
|
if df.empty:
|
||||||
|
continue
|
||||||
|
if "new_buses" in path.name:
|
||||||
|
n.madd("Bus", df.index, **df)
|
||||||
|
elif "new_lines" in path.name:
|
||||||
|
n.madd("Line", df.index, **df)
|
||||||
|
elif "new_links" in path.name:
|
||||||
|
n.madd("Link", df.index, **df)
|
||||||
|
elif "adjust_lines" in path.name:
|
||||||
|
n.lines.update(df)
|
||||||
|
elif "adjust_links" in path.name:
|
||||||
|
n.links.update(df)
|
||||||
|
|
||||||
|
|
||||||
|
def attach_line_rating(
|
||||||
|
n: pypsa.Network,
|
||||||
|
rating: pd.DataFrame,
|
||||||
|
s_max_pu: float,
|
||||||
|
correction_factor: float,
|
||||||
|
max_voltage_difference: float | bool,
|
||||||
|
max_line_rating: float | bool,
|
||||||
|
) -> None:
|
||||||
|
logger.info("Attaching dynamic line rating to network.")
|
||||||
|
# TODO: Only considers overhead lines
|
||||||
|
n.lines_t.s_max_pu = (rating / n.lines.s_nom[rating.columns]) * correction_factor
|
||||||
|
if max_voltage_difference:
|
||||||
|
x_pu = (
|
||||||
|
n.lines.type.map(n.line_types["x_per_length"])
|
||||||
|
* n.lines.length
|
||||||
|
/ (n.lines.v_nom**2)
|
||||||
|
)
|
||||||
|
# need to clip here as cap values might be below 1
|
||||||
|
# -> would mean the line cannot be operated at actual given pessimistic ampacity
|
||||||
|
s_max_pu_cap = (
|
||||||
|
np.deg2rad(max_voltage_difference) / (x_pu * n.lines.s_nom)
|
||||||
|
).clip(lower=1)
|
||||||
|
n.lines_t.s_max_pu = n.lines_t.s_max_pu.clip(
|
||||||
|
lower=1, upper=s_max_pu_cap, axis=1
|
||||||
|
)
|
||||||
|
if max_line_rating:
|
||||||
|
n.lines_t.s_max_pu = n.lines_t.s_max_pu.clip(upper=max_line_rating)
|
||||||
|
n.lines_t.s_max_pu *= s_max_pu
|
||||||
|
|
||||||
|
|
||||||
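A hedged numerical illustration of the voltage-angle cap applied in ``attach_line_rating`` above (all line parameters are invented): the per-unit rating is limited so that the angle difference across a line stays below ``max_voltage_difference`` degrees.

import numpy as np

# invented line parameters, units following the PyPSA line type table
x_per_length = 0.3      # Ohm/km
length = 100.0          # km
v_nom = 380.0           # kV
s_nom = 1700.0          # MVA
max_voltage_difference = 30.0  # degrees

x_pu = x_per_length * length / v_nom**2
s_max_pu_cap = np.deg2rad(max_voltage_difference) / (x_pu * s_nom)
# ratings above this cap would violate the angle limit, so the dynamic
# rating time series is clipped at max(1, s_max_pu_cap)
print(max(1.0, s_max_pu_cap))  # roughly 1.48 for these numbers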
|
if __name__ == "__main__":
|
||||||
|
if "snakemake" not in globals():
|
||||||
|
from _helpers import mock_snakemake
|
||||||
|
|
||||||
|
snakemake = mock_snakemake("add_transmission_projects_and_dlr")
|
||||||
|
configure_logging(snakemake)
|
||||||
|
set_scenario_config(snakemake)
|
||||||
|
|
||||||
|
params = snakemake.params
|
||||||
|
|
||||||
|
n = pypsa.Network(snakemake.input.network)
|
||||||
|
|
||||||
|
if params["transmission_projects"]["enable"]:
|
||||||
|
|
||||||
|
attach_transmission_projects(n, snakemake.input.transmission_projects)
|
||||||
|
|
||||||
|
if params["dlr"]["activate"]:
|
||||||
|
|
||||||
|
rating = xr.open_dataarray(snakemake.input.dlr).to_pandas().transpose()
|
||||||
|
|
||||||
|
s_max_pu = params["s_max_pu"]
|
||||||
|
correction_factor = params["dlr"]["correction_factor"]
|
||||||
|
max_voltage_difference = params["dlr"]["max_voltage_difference"]
|
||||||
|
max_line_rating = params["dlr"]["max_line_rating"]
|
||||||
|
|
||||||
|
attach_line_rating(
|
||||||
|
n,
|
||||||
|
rating,
|
||||||
|
s_max_pu,
|
||||||
|
correction_factor,
|
||||||
|
max_voltage_difference,
|
||||||
|
max_line_rating,
|
||||||
|
)
|
||||||
|
|
||||||
|
n.export_to_netcdf(snakemake.output[0])
|
@ -344,7 +344,6 @@ if __name__ == "__main__":
|
|||||||
|
|
||||||
snakemake = mock_snakemake(
|
snakemake = mock_snakemake(
|
||||||
"build_biomass_potentials",
|
"build_biomass_potentials",
|
||||||
simpl="",
|
|
||||||
clusters="39",
|
clusters="39",
|
||||||
planning_horizons=2050,
|
planning_horizons=2050,
|
||||||
)
|
)
|
||||||
|
@ -136,7 +136,6 @@ if __name__ == "__main__":
|
|||||||
|
|
||||||
snakemake = mock_snakemake(
|
snakemake = mock_snakemake(
|
||||||
"build_cop_profiles",
|
"build_cop_profiles",
|
||||||
simpl="",
|
|
||||||
clusters=48,
|
clusters=48,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -17,11 +17,7 @@ if __name__ == "__main__":
|
|||||||
if "snakemake" not in globals():
|
if "snakemake" not in globals():
|
||||||
from _helpers import mock_snakemake
|
from _helpers import mock_snakemake
|
||||||
|
|
||||||
snakemake = mock_snakemake(
|
snakemake = mock_snakemake("build_clustered_population_layouts", clusters=48)
|
||||||
"build_clustered_population_layouts",
|
|
||||||
simpl="",
|
|
||||||
clusters=48,
|
|
||||||
)
|
|
||||||
|
|
||||||
set_scenario_config(snakemake)
|
set_scenario_config(snakemake)
|
||||||
|
|
||||||
|
@ -104,7 +104,6 @@ if __name__ == "__main__":
|
|||||||
|
|
||||||
snakemake = mock_snakemake(
|
snakemake = mock_snakemake(
|
||||||
"build_cop_profiles",
|
"build_cop_profiles",
|
||||||
simpl="",
|
|
||||||
clusters=48,
|
clusters=48,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -27,13 +27,13 @@ Inputs
|
|||||||
------
|
------
|
||||||
|
|
||||||
- ``resources/<run_name>/pop_layout_<scope>.nc``: Population layout (spatial population distribution).
|
- ``resources/<run_name>/pop_layout_<scope>.nc``: Population layout (spatial population distribution).
|
||||||
- ``resources/<run_name>/regions_onshore_elec_s<simpl>_<clusters>.geojson``: Onshore region shapes.
|
- ``resources/<run_name>/regions_onshore_base_s<simpl>_<clusters>.geojson``: Onshore region shapes.
|
||||||
- ``cutout``: Weather data cutout, as specified in config
|
- ``cutout``: Weather data cutout, as specified in config
|
||||||
|
|
||||||
Outputs
|
Outputs
|
||||||
-------
|
-------
|
||||||
|
|
||||||
- ``resources/daily_heat_demand_<scope>_elec_s<simpl>_<clusters>.nc``:
|
- ``resources/daily_heat_demand_<scope>_base_s<simpl>_<clusters>.nc``:
|
||||||
|
|
||||||
Relevant settings
|
Relevant settings
|
||||||
-----------------
|
-----------------
|
||||||
@ -58,7 +58,6 @@ if __name__ == "__main__":
         snakemake = mock_snakemake(
             "build_daily_heat_demands",
             scope="total",
-            simpl="",
             clusters=48,
         )
     set_scenario_config(snakemake)
@ -44,7 +44,6 @@ if __name__ == "__main__":
         snakemake = mock_snakemake(
             "build_district_heat_share",
-            simpl="",
             clusters=60,
             planning_horizons="2050",
         )
@ -201,7 +201,6 @@ if __name__ == "__main__":
         snakemake = mock_snakemake(
             "build_egs_potentials",
-            simpl="",
             clusters=37,
         )
127 scripts/build_electricity_demand_base.py Normal file
@ -0,0 +1,127 @@
# -*- coding: utf-8 -*-
# SPDX-FileCopyrightText: : 2017-2024 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: MIT
"""
Builds the electricity demand for base regions based on population and GDP.
"""

import logging
from itertools import product

import geopandas as gpd
import numpy as np
import pandas as pd
import pypsa
import scipy.sparse as sparse
import xarray as xr
from _helpers import configure_logging, set_scenario_config
from shapely.prepared import prep

logger = logging.getLogger(__name__)


def normed(s: pd.Series) -> pd.Series:
    return s / s.sum()


def shapes_to_shapes(orig: gpd.GeoSeries, dest: gpd.GeoSeries) -> sparse.lil_matrix:
    """
    Adopted from vresutils.transfer.Shapes2Shapes()
    """
    orig_prepped = list(map(prep, orig))
    transfer = sparse.lil_matrix((len(dest), len(orig)), dtype=float)

    for i, j in product(range(len(dest)), range(len(orig))):
        if orig_prepped[j].intersects(dest.iloc[i]):
            area = orig.iloc[j].intersection(dest.iloc[i]).area
            transfer[i, j] = area / dest.iloc[i].area

    return transfer


def upsample_load(
    n: pypsa.Network,
    regions_fn: str,
    load_fn: str,
    nuts3_fn: str,
    gdp_pop_non_nuts3_fn: str,
    distribution_key: dict[str, float],
) -> pd.DataFrame:
    substation_lv_i = n.buses.index[n.buses["substation_lv"]]
    gdf_regions = gpd.read_file(regions_fn).set_index("name").reindex(substation_lv_i)
    load = pd.read_csv(load_fn, index_col=0, parse_dates=True)

    nuts3 = gpd.read_file(nuts3_fn).set_index("index")

    gdp_weight = distribution_key.get("gdp", 0.6)
    pop_weight = distribution_key.get("pop", 0.4)

    data_arrays = []

    for cntry, group in gdf_regions.geometry.groupby(gdf_regions.country):

        load_ct = load[cntry]

        if cntry in ["UA", "MD"]:
            # separate handling because nuts3 provides no data for UA+MD
            gdp_pop_non_nuts3 = gpd.read_file(gdp_pop_non_nuts3_fn).set_index("Bus")
            gdp_pop_non_nuts3 = gdp_pop_non_nuts3.loc[
                (gdp_pop_non_nuts3.country == cntry)
                & (gdp_pop_non_nuts3.index.isin(substation_lv_i))
            ]
            factors = normed(
                gdp_weight * normed(gdp_pop_non_nuts3["gdp"])
                + pop_weight * normed(gdp_pop_non_nuts3["pop"])
            )

        elif len(group) == 1:
            factors = pd.Series(1.0, index=group.index)

        else:
            nuts3_cntry = nuts3.loc[nuts3.country == cntry]
            transfer = shapes_to_shapes(group, nuts3_cntry.geometry).T.tocsr()
            gdp_n = pd.Series(
                transfer.dot(nuts3_cntry["gdp"].fillna(1.0).values), index=group.index
            )
            pop_n = pd.Series(
                transfer.dot(nuts3_cntry["pop"].fillna(1.0).values), index=group.index
            )

            factors = normed(gdp_weight * normed(gdp_n) + pop_weight * normed(pop_n))

        data_arrays.append(
            xr.DataArray(
                factors.values * load_ct.values[:, np.newaxis],
                dims=["time", "bus"],
                coords={"time": load_ct.index.values, "bus": factors.index.values},
            )
        )

    return xr.concat(data_arrays, dim="bus")


if __name__ == "__main__":
    if "snakemake" not in globals():
        from _helpers import mock_snakemake

        snakemake = mock_snakemake("build_electricity_demand_base")
    configure_logging(snakemake)
    set_scenario_config(snakemake)

    params = snakemake.params

    n = pypsa.Network(snakemake.input.base_network)

    load = upsample_load(
        n,
        regions_fn=snakemake.input.regions,
        load_fn=snakemake.input.load,
        nuts3_fn=snakemake.input.nuts3,
        gdp_pop_non_nuts3_fn=snakemake.input.get("gdp_pop_non_nuts3"),
        distribution_key=params.distribution_key,
    )

    load.name = "electricity demand (MW)"
    comp = dict(zlib=True, complevel=9, least_significant_digit=5)
    load.to_netcdf(snakemake.output[0], encoding={load.name: comp})
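The new script above splits each country's national load across its base regions with a weighted GDP/population key. As a rough, self-contained sketch of that weighting (illustrative region names and numbers; the 0.6/0.4 split mirrors the defaults read from ``distribution_key``):

    import pandas as pd

    def normed(s: pd.Series) -> pd.Series:
        return s / s.sum()

    # illustrative regional GDP and population for one country
    gdp = pd.Series({"DE0 0": 900.0, "DE0 1": 600.0, "DE0 2": 300.0})
    pop = pd.Series({"DE0 0": 20.0, "DE0 1": 15.0, "DE0 2": 5.0})

    # same weighting idea as upsample_load: 60% GDP, 40% population, re-normalised
    factors = normed(0.6 * normed(gdp) + 0.4 * normed(pop))

    national_load = 60000.0  # MW in one hour, made-up value
    regional_load = factors * national_load  # sums back to 60000 MW

Each hourly national value is scaled by the per-region factors, so the regional columns sum back to the national time series.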
@ -14,11 +14,11 @@ Inputs:
 - Existing heating generators: `data/existing_heating_raw.csv` per country
 - Population layout: `resources/{run_name}/pop_layout_s<simpl>_<clusters>.csv`. Output of `scripts/build_clustered_population_layout.py`
 - Population layout with energy demands: `resources/<run_name>/pop_weighted_energy_totals_s<simpl>_<clusters>.csv`
-- District heating share: `resources/<run_name>/district_heat_share_elec_s<simpl>_<clusters>_<planning_horizons>.csv`
+- District heating share: `resources/<run_name>/district_heat_share_base_s<simpl>_<clusters>_<planning_horizons>.csv`

 Outputs:
 --------
-- Existing heat generation capacities distributed to nodes: `resources/{run_name}/existing_heating_distribution_elec_s{simpl}_{clusters}_{planning_horizons}.csv`
+- Existing heat generation capacities distributed to nodes: `resources/{run_name}/existing_heating_distribution_base_s_{clusters}_{planning_horizons}.csv`

 Relevant settings:
 ------------------
@ -154,7 +154,6 @@ if __name__ == "__main__":
         snakemake = mock_snakemake(
             "build_existing_heating_distribution",
-            simpl="",
             clusters=48,
             planning_horizons=2050,
         )
@ -141,7 +141,6 @@ if __name__ == "__main__":
         snakemake = mock_snakemake(
             "build_gas_input_locations",
-            simpl="",
             clusters="128",
         )
@ -42,11 +42,7 @@ def calc_gdp_pop(country, regions, gdp_non_nuts3, pop_non_nuts3):
     - gdp: A GeoDataFrame with the mean GDP p.c. values mapped to each bus.
     - pop: A GeoDataFrame with the summed POP values mapped to each bus.
     """
-    regions = (
-        regions.rename(columns={"name": "Bus"})
-        .drop(columns=["x", "y"])
-        .set_index("Bus")
-    )
+    regions = regions.rename(columns={"name": "Bus"}).set_index("Bus")
     regions = regions[regions.country == country]
     # Create a bounding box for UA, MD from region shape, including a buffer of 10000 metres
     bounding_box = (
47 scripts/build_hac_features.py Normal file
@ -0,0 +1,47 @@
# -*- coding: utf-8 -*-
# SPDX-FileCopyrightText: : 2024 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: MIT
"""
Aggregate all rastered cutout data to base regions Voronoi cells.
"""

import logging

import atlite
import geopandas as gpd
from _helpers import get_snapshots, set_scenario_config
from atlite.aggregate import aggregate_matrix
from dask.distributed import Client

logger = logging.getLogger(__name__)

if __name__ == "__main__":
    if "snakemake" not in globals():
        from _helpers import mock_snakemake

        snakemake = mock_snakemake("build_hac_features")
    set_scenario_config(snakemake)

    params = snakemake.params
    nprocesses = int(snakemake.threads)

    if nprocesses > 1:
        client = Client(n_workers=nprocesses, threads_per_worker=1)
    else:
        client = None

    time = get_snapshots(params.snapshots, params.drop_leap_day)

    cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time)

    regions = gpd.read_file(snakemake.input.regions).set_index("name")
    I = cutout.indicatormatrix(regions)  # noqa: E741

    ds = cutout.data[params.features].map(
        aggregate_matrix, matrix=I, index=regions.index
    )

    ds = ds.load(scheduler=client)

    ds.to_netcdf(snakemake.output[0])
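The aggregation in ``build_hac_features.py`` reduces to a sparse matrix product: the indicator matrix holds, per region, the overlap weight of every cutout grid cell, and multiplying it with a gridded field yields one value per region. A hedged illustration of that idea in plain numpy/scipy (not the atlite API itself):

    import numpy as np
    import scipy.sparse as sp

    # 2 regions x 4 grid cells; entry [i, j] is the weight of cell j in region i
    indicator = sp.csr_matrix(
        np.array([[1.0, 0.5, 0.0, 0.0], [0.0, 0.5, 1.0, 1.0]])
    )
    temperature = np.array([271.0, 273.0, 275.0, 277.0])  # one value per grid cell

    # weighted regional mean: divide each row's dot product by the region's total weight
    weights = np.asarray(indicator.sum(axis=1)).ravel()
    regional_mean = indicator.dot(temperature) / weights  # ~[271.7, 275.4]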
@ -22,12 +22,12 @@ Inputs
 ------

 - ``data/heat_load_profile_BDEW.csv``: Intraday heat profile for water and space heating demand for the residential and services sectors for weekends and weekdays.
-- ``resources/daily_heat_demand_total_elec_s<simpl>_<clusters>.nc``: Daily heat demand per cluster.
+- ``resources/daily_heat_demand_total_base_s<simpl>_<clusters>.nc``: Daily heat demand per cluster.

 Outputs
 -------

-- ``resources/hourly_heat_demand_total_elec_s<simpl>_<clusters>.nc``:
+- ``resources/hourly_heat_demand_total_base_s<simpl>_<clusters>.nc``:
 """

 from itertools import product
@ -43,7 +43,6 @@ if __name__ == "__main__":
         snakemake = mock_snakemake(
             "build_hourly_heat_demand",
             scope="total",
-            simpl="",
             clusters=5,
         )
     set_scenario_config(snakemake)
@ -8,13 +8,13 @@ Build spatial distribution of industries from Hotmaps database.
 Inputs
 -------

-- ``resources/regions_onshore_elec_s{simpl}_{clusters}.geojson``
-- ``resources/pop_layout_elec_s{simpl}_{clusters}.csv``
+- ``resources/regions_onshore_base_s_{clusters}.geojson``
+- ``resources/pop_layout_base_s_{clusters}.csv``

 Outputs
 -------

-- ``resources/industrial_distribution_key_elec_s{simpl}_{clusters}.csv``
+- ``resources/industrial_distribution_key_base_s_{clusters}.csv``

 Description
 -------
@ -388,7 +388,6 @@ if __name__ == "__main__":
         snakemake = mock_snakemake(
             "build_industrial_distribution_key",
-            simpl="",
             clusters=128,
         )
     configure_logging(snakemake)
@ -8,14 +8,14 @@ Build industrial energy demand per model region.
 Inputs
 ------

-- ``resources/industrial_energy_demand_today_elec_s{simpl}_{clusters}.csv``
+- ``resources/industrial_energy_demand_today_base_s_{clusters}.csv``
 - ``resources/industry_sector_ratios_{planning_horizons}.csv``
-- ``resources/industrial_production_elec_s{simpl}_{clusters}_{planning_horizons}.csv``
+- ``resources/industrial_production_base_s_{clusters}_{planning_horizons}.csv``

 Outputs
 -------

-- ``resources/industrial_energy_demand_elec_s{simpl}_{clusters}_{planning_horizons}.csv``
+- ``resources/industrial_energy_demand_base_s_{clusters}_{planning_horizons}.csv``

 Description
 -------
@ -45,7 +45,6 @@ if __name__ == "__main__":
         snakemake = mock_snakemake(
             "build_industrial_energy_demand_per_node",
-            simpl="",
             clusters=48,
             planning_horizons=2030,
         )
@ -8,19 +8,19 @@ Build industrial energy demand per model region.
 Inputs
 -------

-- ``resources/industrial_distribution_key_elec_s{simpl}_{clusters}.csv``
+- ``resources/industrial_distribution_key_base_s_{clusters}.csv``
 - ``resources/industrial_energy_demand_per_country_today.csv``

 Outputs
 -------

-- ``resources/industrial_energy_demand_per_node_today_elec_s{simpl}_{clusters}.csv``
+- ``resources/industrial_energy_demand_per_node_today_base_s_{clusters}.csv``

 Description
 -------

 This rule maps the industrial energy demand per country `industrial_energy_demand_per_country_today.csv` to each bus region.
-The energy demand per country is multiplied by the mapping value from the file ``industrial_distribution_key_elec_s{simpl}_{clusters}.csv`` between 0 and 1 to get the industrial energy demand per bus.
+The energy demand per country is multiplied by the mapping value from the file ``industrial_distribution_key_base_s_{clusters}.csv`` between 0 and 1 to get the industrial energy demand per bus.

 The unit of the energy demand is TWh/a.
 """
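The mapping described in the docstring above is a simple broadcast: a per-bus key that sums to one within each country, multiplied with the country's sectoral demand. A minimal sketch with made-up numbers:

    import pandas as pd

    # share of national industrial activity at each bus (sums to 1 per country)
    key = pd.Series({"DE0 0": 0.5, "DE0 1": 0.3, "DE0 2": 0.2})

    # national industrial energy demand today in TWh/a (illustrative values)
    demand_de = pd.Series({"electricity": 220.0, "methane": 160.0, "hydrogen": 0.0})

    # nodal demand: outer product of the key and the national demand
    nodal = pd.DataFrame(
        key.values[:, None] * demand_de.values[None, :],
        index=key.index,
        columns=demand_de.index,
    )
    # each column of `nodal` sums back to the national total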
@ -92,7 +92,6 @@ if __name__ == "__main__":
         snakemake = mock_snakemake(
             "build_industrial_energy_demand_per_node_today",
-            simpl="",
             clusters=48,
         )
     set_scenario_config(snakemake)
@ -8,13 +8,13 @@ Build industrial production per model region.
 Inputs
 -------

-- ``resources/industrial_distribution_key_elec_s{simpl}_{clusters}.csv``
+- ``resources/industrial_distribution_key_base_s_{clusters}.csv``
 - ``resources/industrial_production_per_country_tomorrow_{planning_horizons}.csv``

 Outputs
 -------

-- ``resources/industrial_production_per_node_elec_s{simpl}_{clusters}_{planning_horizons}.csv``
+- ``resources/industrial_production_per_node_base_s_{clusters}_{planning_horizons}.csv``

 Description
 -------
@ -87,11 +87,7 @@ if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake

-        snakemake = mock_snakemake(
-            "build_industrial_production_per_node",
-            simpl="",
-            clusters=48,
-        )
+        snakemake = mock_snakemake("build_industrial_production_per_node", clusters=48)
     set_scenario_config(snakemake)

     build_nodal_industrial_production()
@ -5,7 +5,7 @@

 # coding: utf-8
 """
-Adds dynamic line rating timeseries to the base network.
+Calculates dynamic line rating time series from base network.

 Relevant Settings
 -----------------
@ -14,11 +14,12 @@ Relevant Settings

     lines:
         cutout:
-        line_rating:
+        dynamic_line_rating:


 .. seealso::
     Documentation of the configuration file ``config.yaml`

 Inputs
 ------
@ -28,7 +29,7 @@ Inputs
 Outputs
 -------

-- ``resources/line_rating.nc``
+- ``resources/dlr.nc``


 Description
@ -50,6 +51,7 @@ With a heat balance considering the maximum temperature threshold of the transmi
 the maximal possible capacity factor "s_max_pu" for each transmission line at each time step is calculated.
 """

+import logging
 import re

 import atlite
@ -58,11 +60,14 @@ import numpy as np
 import pypsa
 import xarray as xr
 from _helpers import configure_logging, get_snapshots, set_scenario_config
+from dask.distributed import Client
 from shapely.geometry import LineString as Line
 from shapely.geometry import Point

+logger = logging.getLogger(__name__)
+

-def calculate_resistance(T, R_ref, T_ref=293, alpha=0.00403):
+def calculate_resistance(T, R_ref, T_ref: float | int = 293, alpha: float = 0.00403):
     """
     Calculates the resistance at other temperatures than the reference
     temperature.
@ -84,7 +89,12 @@ def calculate_resistance(T, R_ref, T_ref=293, alpha=0.00403):
     return R_ref * (1 + alpha * (T - T_ref))


-def calculate_line_rating(n, cutout):
+def calculate_line_rating(
+    n: pypsa.Network,
+    cutout: atlite.Cutout,
+    show_progress: bool = True,
+    dask_kwargs: dict = None,
+) -> xr.DataArray:
     """
     Calculates the maximal allowed power flow in each line for each time step
     considering the maximal temperature.
@ -97,6 +107,10 @@ def calculate_line_rating(n, cutout):
     -------
     xarray DataArray object with maximal power.
     """
+    if dask_kwargs is None:
+        dask_kwargs = {}
+
+    logger.info("Calculating dynamic line rating.")
     relevant_lines = n.lines[~n.lines["underground"]].copy()
     buses = relevant_lines[["bus0", "bus1"]].values
     x = n.buses.x
@ -120,7 +134,16 @@ def calculate_line_rating(n, cutout):
     relevant_lines["n_bundle"] = relevant_lines["n_bundle"].fillna(1)
     R *= relevant_lines["n_bundle"]
     R = calculate_resistance(T=353, R_ref=R)
-    Imax = cutout.line_rating(shapes, R, D=0.0218, Ts=353, epsilon=0.8, alpha=0.8)
+    Imax = cutout.line_rating(
+        shapes,
+        R,
+        D=0.0218,
+        Ts=353,
+        epsilon=0.8,
+        alpha=0.8,
+        show_progress=show_progress,
+        dask_kwargs=dask_kwargs,
+    )
     line_factor = relevant_lines.eval("v_nom * n_bundle * num_parallel") / 1e3  # in mW
     return xr.DataArray(
         data=np.sqrt(3) * Imax * line_factor.values.reshape(-1, 1),
@ -134,21 +157,23 @@ if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake

-        snakemake = mock_snakemake(
-            "build_line_rating",
-            network="elec",
-            simpl="",
-            clusters="5",
-            ll="v1.0",
-            opts="Co2L-4H",
-        )
+        snakemake = mock_snakemake("build_line_rating")
     configure_logging(snakemake)
     set_scenario_config(snakemake)

+    nprocesses = int(snakemake.threads)
+    show_progress = not snakemake.config["run"].get("disable_progressbar", True)
+    show_progress = show_progress and snakemake.config["atlite"]["show_progress"]
+    if nprocesses > 1:
+        client = Client(n_workers=nprocesses, threads_per_worker=1)
+    else:
+        client = None
+    dask_kwargs = {"scheduler": client}
+
     n = pypsa.Network(snakemake.input.base_network)
     time = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day)

     cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time)

-    da = calculate_line_rating(n, cutout)
+    da = calculate_line_rating(n, cutout, show_progress, dask_kwargs)
     da.to_netcdf(snakemake.output[0])
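For orientation, the two helpers in ``build_line_rating.py`` rest on two small relations: conductor resistance grows linearly with temperature, R(T) = R_ref * (1 + alpha * (T - T_ref)), and a current limit translates into a three-phase power rating via sqrt(3) * I_max * V. A short numeric sketch with made-up line parameters (not taken from the script's inputs):

    import numpy as np

    def calculate_resistance(T, R_ref, T_ref=293, alpha=0.00403):
        # linear temperature correction of the ohmic resistance
        return R_ref * (1 + alpha * (T - T_ref))

    R_20C = 0.06  # ohm/km at 293 K, illustrative value
    R_80C = calculate_resistance(T=353, R_ref=R_20C)  # ~0.0745 ohm/km

    i_max = 1.2  # kA, e.g. what a thermal model might allow
    v_nom = 380  # kV
    p_max = np.sqrt(3) * i_max * v_nom  # ~790 MW (kA * kV = MW)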
@ -16,7 +16,6 @@ if __name__ == "__main__":
         snakemake = mock_snakemake(
             "build_population_weighted_energy_totals",
             kind="heat",
-            simpl="",
             clusters=60,
         )
     set_scenario_config(snakemake)
@ -35,7 +35,7 @@ Inputs
 Outputs
 -------

-- ``resource/powerplants.csv``: A list of conventional power plants (i.e. neither wind nor solar) with fields for name, fuel type, technology, country, capacity in MW, duration, commissioning year, retrofit year, latitude, longitude, and dam information as documented in the `powerplantmatching README <https://github.com/PyPSA/powerplantmatching/blob/master/README.md>`_; additionally it includes information on the closest substation/bus in ``networks/base.nc``.
+- ``resource/powerplants_s_{clusters}.csv``: A list of conventional power plants (i.e. neither wind nor solar) with fields for name, fuel type, technology, country, capacity in MW, duration, commissioning year, retrofit year, latitude, longitude, and dam information as documented in the `powerplantmatching README <https://github.com/PyPSA/powerplantmatching/blob/master/README.md>`_; additionally it includes information on the closest substation/bus in ``networks/base_s_{clusters}.nc``.

 .. image:: img/powerplantmatching.png
     :scale: 30 %
@ -171,7 +171,7 @@ if __name__ == "__main__":
     configure_logging(snakemake)
     set_scenario_config(snakemake)

-    n = pypsa.Network(snakemake.input.base_network)
+    n = pypsa.Network(snakemake.input.network)
     countries = snakemake.params.countries

     ppl = (
@ -5,12 +5,11 @@
 #
 # SPDX-License-Identifier: MIT
 """
-Calculates for each network node the (i) installable capacity (based on land-
-use), (ii) the available generation time series (based on weather data), and
-(iii) the average distance from the node for onshore wind, AC-connected
-offshore wind, DC-connected offshore wind and solar PV generators. In addition
-for offshore wind it calculates the fraction of the grid connection which is
-under water.
+Calculates for each clustered region the (i) installable capacity (based on
+land-use from :mod:`determine_availability_matrix`), (ii) the available
+generation time series (based on weather data), and (iii) the average distance
+from the node for onshore wind, AC-connected offshore wind, DC-connected
+offshore wind and solar PV generators.

 .. note:: Hydroelectric profiles are built in script :mod:`build_hydro_profiles`.
@ -26,9 +25,8 @@ Relevant settings

     renewable:
         {technology}:
-            cutout: corine: luisa: grid_codes: distance: natura: max_depth: min_depth:
-            max_shore_distance: min_shore_distance: capacity_per_sqkm:
-            correction_factor: min_p_max_pu: clip_p_max_pu: resource:
+            cutout: capacity_per_sqkm: correction_factor: min_p_max_pu:
+            clip_p_max_pu: resource:

 .. seealso::
     Documentation of the configuration file ``config/config.yaml`` at
@ -37,40 +35,14 @@ Relevant settings
 Inputs
 ------

-- ``data/bundle/corine/g250_clc06_V18_5.tif``: `CORINE Land Cover (CLC)
-  <https://land.copernicus.eu/pan-european/corine-land-cover>`_ inventory on `44
-  classes <https://wiki.openstreetmap.org/wiki/Corine_Land_Cover#Tagging>`_ of
-  land use (e.g. forests, arable land, industrial, urban areas) at 100m
-  resolution.
-
-    .. image:: img/corine.png
-        :scale: 33 %
-
-- ``data/LUISA_basemap_020321_50m.tif``: `LUISA Base Map
-  <https://publications.jrc.ec.europa.eu/repository/handle/JRC124621>`_ land
-  coverage dataset at 50m resolution similar to CORINE. For codes in relation to
-  CORINE land cover, see `Annex 1 of the technical documentation
-  <https://publications.jrc.ec.europa.eu/repository/bitstream/JRC124621/technical_report_luisa_basemap_2018_v7_final.pdf>`_.
-
-- ``data/bundle/gebco/GEBCO_2014_2D.nc``: A `bathymetric
-  <https://en.wikipedia.org/wiki/Bathymetry>`_ data set with a global terrain
-  model for ocean and land at 15 arc-second intervals by the `General
-  Bathymetric Chart of the Oceans (GEBCO)
-  <https://www.gebco.net/data_and_products/gridded_bathymetry_data/>`_.
-
-    .. image:: img/gebco_2019_grid_image.jpg
-        :scale: 50 %
-
-    **Source:** `GEBCO
-    <https://www.gebco.net/data_and_products/images/gebco_2019_grid_image.jpg>`_
-
-- ``resources/natura.tiff``: confer :ref:`natura`
+- ``resources/availability_matrix_{clusters}_{technology}.nc``: see :mod:`determine_availability_matrix`
 - ``resources/offshore_shapes.geojson``: confer :ref:`shapes`
-- ``resources/regions_onshore.geojson``: (if not offshore wind), confer
+- ``resources/regions_onshore_base_s_{clusters}.geojson``: (if not offshore
+  wind), confer :ref:`busregions`
+- ``resources/regions_offshore_base_s_{clusters}.geojson``: (if offshore wind),
   :ref:`busregions`
-- ``resources/regions_offshore.geojson``: (if offshore wind), :ref:`busregions`
 - ``"cutouts/" + params["renewable"][{technology}]['cutout']``: :ref:`cutout`
-- ``networks/base.nc``: :ref:`base`
+- ``networks/_base_s_{clusters}.nc``: :ref:`base`

 Outputs
 -------
@ -80,21 +52,13 @@ Outputs
 =================== ========== =========================================================
 Field               Dimensions Description
 =================== ========== =========================================================
-profile             bus, time  the per unit hourly availability factors for each node
+profile             bus, time  the per unit hourly availability factors for each bus
 ------------------- ---------- ---------------------------------------------------------
-weight              bus        sum of the layout weighting for each node
-------------------- ---------- ---------------------------------------------------------
-p_nom_max           bus        maximal installable capacity at the node (in MW)
-------------------- ---------- ---------------------------------------------------------
-potential           y, x       layout of generator units at cutout grid cells inside the
-                               Voronoi cell (maximal installable capacity at each grid
-                               cell multiplied by capacity factor)
-------------------- ---------- ---------------------------------------------------------
-average_distance    bus        average distance of units in the Voronoi cell to the
-                               grid node (in km)
-------------------- ---------- ---------------------------------------------------------
-underwater_fraction bus        fraction of the average connection distance which is
-                               under water (only for offshore)
+p_nom_max           bus        maximal installable capacity at the bus (in MW)
+------------------- ---------- ---------------------------------------------------------
+average_distance    bus        average distance of units in the region to the
+                               grid bus for onshore technologies and to the shoreline
+                               for offshore technologies (in km)
 =================== ========== =========================================================

 - **profile**
@ -109,50 +73,28 @@ Outputs
         :scale: 33 %
         :align: center

-- **potential**
-
-    .. image:: img/potential_heatmap.png
-        :scale: 33 %
-        :align: center
-
 - **average_distance**

     .. image:: img/distance_hist.png
         :scale: 33 %
         :align: center

-- **underwater_fraction**
-
-    .. image:: img/underwater_hist.png
-        :scale: 33 %
-        :align: center
-
 Description
 -----------

 This script functions at two main spatial resolutions: the resolution of the
-network nodes and their `Voronoi cells
-<https://en.wikipedia.org/wiki/Voronoi_diagram>`_, and the resolution of the
-cutout grid cells for the weather data. Typically the weather data grid is finer
-than the network nodes, so we have to work out the distribution of generators
-across the grid cells within each Voronoi cell. This is done by taking account
-of a combination of the available land at each grid cell and the capacity factor
-there.
+clustered network regions, and the resolution of the cutout grid cells for the
+weather data. Typically the weather data grid is finer than the network regions,
+so we have to work out the distribution of generators across the grid cells
+within each region. This is done by taking account of a combination of the
+available land at each grid cell (computed in
+:mod:`determine_availability_matrix`) and the capacity factor there.

-First the script computes how much of the technology can be installed at each
-cutout grid cell and each node using the `atlite
-<https://github.com/pypsa/atlite>`_ library. This uses the CORINE land use data,
-LUISA land use data, Natura2000 nature reserves, GEBCO bathymetry data, and
-shipping lanes.
-
-    .. image:: img/eligibility.png
-        :scale: 50 %
-        :align: center
-
-To compute the layout of generators in each node's Voronoi cell, the installable
-potential in each grid cell is multiplied with the capacity factor at each grid
-cell. This is done since we assume more generators are installed at cells with a
-higher capacity factor.
+Based on the availability matrix, the script first computes how much of the
+technology can be installed at each cutout grid cell. To compute the layout of
+generators in each clustered region, the installable potential in each grid cell
+is multiplied with the capacity factor at each grid cell. This is done since we
+assume more generators are installed at cells with a higher capacity factor.

 .. image:: img/offwinddc-gridcell.png
     :scale: 50 %
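The revised description boils down to two array operations: a per-cell layout proportional to capacity factor times installable density, and a per-region ``p_nom_max`` obtained by summing the usable potential over the cells of each region. A hedged toy sketch of those two steps (tiny made-up grid, not the real cutout):

    import numpy as np
    import xarray as xr

    coords = [("y", [0, 1]), ("x", [0, 1, 2])]
    capacity_factor = xr.DataArray([[0.1, 0.2, 0.3], [0.2, 0.3, 0.4]], coords=coords)
    area = xr.DataArray(np.full((2, 3), 25.0), coords=coords)  # km2 per grid cell
    capacity_per_sqkm = 3.0  # MW/km2

    # usable share of each cell, per region ("bus")
    availability = xr.DataArray(
        [[[1.0, 0.5, 0.0], [0.0, 0.0, 0.0]], [[0.0, 0.5, 1.0], [1.0, 1.0, 1.0]]],
        coords=[("bus", ["A", "B"])] + coords,
    )

    # layout: more capacity sits in cells with higher capacity factors
    layout = capacity_factor * area * capacity_per_sqkm

    # maximal installable capacity per region, in MW
    p_nom_max = capacity_per_sqkm * (availability * area).sum(["y", "x"])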
@ -174,23 +116,17 @@ This layout is then used to compute the generation availability time series from
 the weather data cutout from ``atlite``.

 The maximal installable potential for the node (`p_nom_max`) is computed by
-adding up the installable potentials of the individual grid cells. If the model
-comes close to this limit, then the time series may slightly overestimate
-production since it is assumed the geographical distribution is proportional to
-capacity factor.
+adding up the installable potentials of the individual grid cells.
 """
-import functools
 import logging
 import time

 import atlite
 import geopandas as gpd
-import numpy as np
 import xarray as xr
 from _helpers import configure_logging, get_snapshots, set_scenario_config
+from build_shapes import _simplify_polys
 from dask.distributed import Client
-from pypsa.geo import haversine
-from shapely.geometry import LineString

 logger = logging.getLogger(__name__)
@ -199,15 +135,19 @@ if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake

-        snakemake = mock_snakemake("build_renewable_profiles", technology="offwind-dc")
+        snakemake = mock_snakemake(
+            "build_renewable_profiles", clusters=38, technology="offwind-ac"
+        )
     configure_logging(snakemake)
     set_scenario_config(snakemake)

     nprocesses = int(snakemake.threads)
     noprogress = snakemake.config["run"].get("disable_progressbar", True)
     noprogress = noprogress or not snakemake.config["atlite"]["show_progress"]
-    params = snakemake.params.renewable[snakemake.wildcards.technology]
+    technology = snakemake.wildcards.technology
+    params = snakemake.params.renewable[technology]
     resource = params["resource"]  # pv panel params / wind turbine params
+    resource["show_progress"] = not noprogress

     tech = next(t for t in ["panel", "turbine"] if t in resource)
     models = resource[tech]
|
|||||||
sns = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day)
|
sns = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day)
|
||||||
|
|
||||||
cutout = atlite.Cutout(snakemake.input.cutout).sel(time=sns)
|
cutout = atlite.Cutout(snakemake.input.cutout).sel(time=sns)
|
||||||
|
|
||||||
|
availability = xr.open_dataarray(snakemake.input.availability_matrix)
|
||||||
|
|
||||||
regions = gpd.read_file(snakemake.input.regions)
|
regions = gpd.read_file(snakemake.input.regions)
|
||||||
assert not regions.empty, (
|
assert not regions.empty, (
|
||||||
f"List of regions in {snakemake.input.regions} is empty, please "
|
f"List of regions in {snakemake.input.regions} is empty, please "
|
||||||
@ -236,186 +179,96 @@ if __name__ == "__main__":
     )
     # do not pull up, set_index does not work if geo dataframe is empty
     regions = regions.set_index("name").rename_axis("bus")
+    if snakemake.wildcards.technology.startswith("offwind"):
+        # for offshore regions, the shortest distance to the shoreline is used
+        offshore_regions = availability.coords["bus"].values
+        regions = regions.loc[offshore_regions]
+        regions = regions.map(lambda g: _simplify_polys(g, minarea=1)).set_crs(
+            regions.crs
+        )
+    else:
+        # for onshore regions, the representative point of the region is used
+        regions = regions.representative_point()
+    regions = regions.geometry.to_crs(3035)
     buses = regions.index

-    res = params.get("excluder_resolution", 100)
-    excluder = atlite.ExclusionContainer(crs=3035, res=res)
-
-    if params["natura"]:
-        excluder.add_raster(snakemake.input.natura, nodata=0, allow_no_overlap=True)
-
-    for dataset in ["corine", "luisa"]:
-        kwargs = {"nodata": 0} if dataset == "luisa" else {}
-        settings = params.get(dataset, {})
-        if not settings:
-            continue
-        if dataset == "luisa" and res > 50:
-            logger.info(
-                "LUISA data is available at 50m resolution, "
-                f"but coarser {res}m resolution is used."
-            )
-        if isinstance(settings, list):
-            settings = {"grid_codes": settings}
-        if "grid_codes" in settings:
-            codes = settings["grid_codes"]
-            excluder.add_raster(
-                snakemake.input[dataset], codes=codes, invert=True, crs=3035, **kwargs
-            )
-        if settings.get("distance", 0.0) > 0.0:
-            codes = settings["distance_grid_codes"]
-            buffer = settings["distance"]
-            excluder.add_raster(
-                snakemake.input[dataset], codes=codes, buffer=buffer, crs=3035, **kwargs
-            )
-
-    if params.get("ship_threshold"):
-        shipping_threshold = (
-            params["ship_threshold"] * 8760 * 6
-        )  # approximation because 6 years of data which is hourly collected
-        func = functools.partial(np.less, shipping_threshold)
-        excluder.add_raster(
-            snakemake.input.ship_density, codes=func, crs=4326, allow_no_overlap=True
-        )
-
-    if params.get("max_depth"):
-        # lambda not supported for atlite + multiprocessing
-        # use named function np.greater with partially frozen argument instead
-        # and exclude areas where: -max_depth > grid cell depth
-        func = functools.partial(np.greater, -params["max_depth"])
-        excluder.add_raster(snakemake.input.gebco, codes=func, crs=4326, nodata=-1000)
-
-    if params.get("min_depth"):
-        func = functools.partial(np.greater, -params["min_depth"])
-        excluder.add_raster(
-            snakemake.input.gebco, codes=func, crs=4326, nodata=-1000, invert=True
-        )
-
-    if "min_shore_distance" in params:
-        buffer = params["min_shore_distance"]
-        excluder.add_geometry(snakemake.input.country_shapes, buffer=buffer)
-
-    if "max_shore_distance" in params:
-        buffer = params["max_shore_distance"]
-        excluder.add_geometry(
-            snakemake.input.country_shapes, buffer=buffer, invert=True
-        )
-
-    logger.info("Calculate landuse availability...")
-    start = time.time()
-
-    kwargs = dict(nprocesses=nprocesses, disable_progressbar=noprogress)
-    availability = cutout.availabilitymatrix(regions, excluder, **kwargs)
-
-    duration = time.time() - start
-    logger.info(f"Completed landuse availability calculation ({duration:2.2f}s)")
-
-    # For Moldova and Ukraine: Overwrite parts not covered by Corine with
-    # externally determined available areas
-    if "availability_matrix_MD_UA" in snakemake.input.keys():
-        availability_MDUA = xr.open_dataarray(
-            snakemake.input["availability_matrix_MD_UA"]
-        )
-        availability.loc[availability_MDUA.coords] = availability_MDUA
-
     area = cutout.grid.to_crs(3035).area / 1e6
     area = xr.DataArray(
         area.values.reshape(cutout.shape), [cutout.coords["y"], cutout.coords["x"]]
     )

-    potential = capacity_per_sqkm * availability.sum("bus") * area
     func = getattr(cutout, resource.pop("method"))
     if client is not None:
         resource["dask_kwargs"] = {"scheduler": client}

-    logger.info("Calculate average capacity factor...")
+    logger.info(f"Calculate average capacity factor for technology {technology}...")
     start = time.time()

     capacity_factor = correction_factor * func(capacity_factor=True, **resource)
     layout = capacity_factor * area * capacity_per_sqkm

     duration = time.time() - start
-    logger.info(f"Completed average capacity factor calculation ({duration:2.2f}s)")
+    logger.info(
+        f"Completed average capacity factor calculation for technology {technology} ({duration:2.2f}s)"
+    )

     profiles = []
-    capacities = []
     for year, model in models.items():
         logger.info(
-            f"Calculate weighted capacity factor time series for model {model}..."
+            f"Calculate weighted capacity factor time series for model {model} for technology {technology}..."
         )
         start = time.time()

         resource[tech] = model

-        profile, capacity = func(
+        profile = func(
             matrix=availability.stack(spatial=["y", "x"]),
             layout=layout,
             index=buses,
             per_unit=True,
-            return_capacity=True,
+            return_capacity=False,
             **resource,
         )

         dim = {"year": [year]}
         profile = profile.expand_dims(dim)
-        capacity = capacity.expand_dims(dim)

         profiles.append(profile.rename("profile"))
-        capacities.append(capacity.rename("weight"))

         duration = time.time() - start
         logger.info(
-            f"Completed weighted capacity factor time series calculation for model {model} ({duration:2.2f}s)"
+            f"Completed weighted capacity factor time series calculation for model {model} for technology {technology} ({duration:2.2f}s)"
         )

     profiles = xr.merge(profiles)
-    capacities = xr.merge(capacities)

-    logger.info("Calculating maximal capacity per bus")
+    logger.info(f"Calculating maximal capacity per bus for technology {technology}")
     p_nom_max = capacity_per_sqkm * availability @ area

-    logger.info("Calculate average distances.")
+    logger.info(f"Calculate average distances for technology {technology}.")
     layoutmatrix = (layout * availability).stack(spatial=["y", "x"])

-    coords = cutout.grid[["x", "y"]]
-    bus_coords = regions[["x", "y"]]
+    coords = cutout.grid.representative_point().to_crs(3035)

     average_distance = []
-    centre_of_mass = []
     for bus in buses:
         row = layoutmatrix.sel(bus=bus).data
         nz_b = row != 0
         row = row[nz_b]
         co = coords[nz_b]
-        distances = haversine(bus_coords.loc[bus], co)
+        distances = co.distance(regions[bus]).div(1e3)  # km
         average_distance.append((distances * (row / row.sum())).sum())
-        centre_of_mass.append(co.values.T @ (row / row.sum()))

     average_distance = xr.DataArray(average_distance, [buses])
-    centre_of_mass = xr.DataArray(centre_of_mass, [buses, ("spatial", ["x", "y"])])

     ds = xr.merge(
         [
             correction_factor * profiles,
-            capacities,
             p_nom_max.rename("p_nom_max"),
-            potential.rename("potential"),
             average_distance.rename("average_distance"),
         ]
     )

-    if snakemake.wildcards.technology.startswith("offwind"):
-        logger.info("Calculate underwater fraction of connections.")
-        offshore_shape = gpd.read_file(snakemake.input["offshore_shapes"]).union_all()
-        underwater_fraction = []
-        for bus in buses:
-            p = centre_of_mass.sel(bus=bus).data
-            line = LineString([p, regions.loc[bus, ["x", "y"]]])
-            frac = line.intersection(offshore_shape).length / line.length
-            underwater_fraction.append(frac)
-
-        ds["underwater_fraction"] = xr.DataArray(underwater_fraction, [buses])
-
     # select only buses with some capacity and minimal capacity factor
     mean_profile = ds["profile"].mean("time")
     if "year" in ds.indexes:
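Among the per-bus quantities merged above, ``average_distance`` weights each grid cell's distance to the region by that cell's share of the layout. A compact sketch of the weighting for a single bus (plain numpy, made-up numbers):

    import numpy as np

    row = np.array([10.0, 30.0, 60.0])       # layout weight placed in each non-zero cell
    distances = np.array([5.0, 20.0, 50.0])  # km from each cell to the region

    average_distance = (distances * (row / row.sum())).sum()  # 36.5 km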
@ -1050,7 +1050,6 @@ if __name__ == "__main__":
         snakemake = mock_snakemake(
             "build_retro_cost",
-            simpl="",
             clusters=48,
             ll="v1.0",
             sector_opts="Co2L0-168H-T-H-B-I-solar3-dist1",
@ -74,9 +74,7 @@ if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake

-        snakemake = mock_snakemake(
-            "build_salt_cavern_potentials", simpl="", clusters="37"
-        )
+        snakemake = mock_snakemake("build_salt_cavern_potentials", clusters="37")

     set_scenario_config(snakemake)
@ -38,9 +38,7 @@ if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake

-        snakemake = mock_snakemake(
-            "build_sequestration_potentials", simpl="", clusters="128"
-        )
+        snakemake = mock_snakemake("build_sequestration_potentials", clusters="128")

     set_scenario_config(snakemake)
@ -17,11 +17,7 @@ if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake

-        snakemake = mock_snakemake(
-            "build_shipping_demand",
-            simpl="",
-            clusters=48,
-        )
+        snakemake = mock_snakemake("build_shipping_demand", clusters=48)
     set_scenario_config(snakemake)

     scope = gpd.read_file(snakemake.input.scope).geometry[0]
@ -26,13 +26,13 @@ Inputs
 ------

 - ``resources/<run_name/pop_layout_<scope>.nc``:
-- ``resources/<run_name/regions_onshore_elec_s<simpl>_<clusters>.geojson``:
+- ``resources/<run_name/regions_onshore_base_s<simpl>_<clusters>.geojson``:
 - ``cutout``: Weather data cutout, as specified in config

 Outputs
 -------

-- ``resources/solar_thermal_<scope>_elec_s<simpl>_<clusters>.nc``:
+- ``resources/solar_thermal_<scope>_base_s<simpl>_<clusters>.nc``:
 """

 import atlite
@ -46,11 +46,7 @@ if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake

-        snakemake = mock_snakemake(
-            "build_solar_thermal_profiles",
-            simpl="",
-            clusters=48,
-        )
+        snakemake = mock_snakemake("build_solar_thermal_profiles", clusters=48)
     set_scenario_config(snakemake)

     nprocesses = int(snakemake.threads)
@ -26,14 +26,14 @@ Inputs
 ------

 - ``resources/<run_name>/pop_layout_total.nc``:
-- ``resources/<run_name>/regions_onshore_elec_s<simpl>_<clusters>.geojson``:
+- ``resources/<run_name>/regions_onshore_base_s<simpl>_<clusters>.geojson``:
 - ``cutout``: Weather data cutout, as specified in config

 Outputs
 -------

-- ``resources/temp_soil_total_elec_s<simpl>_<clusters>.nc``:
-- ``resources/temp_air_total_elec_s<simpl>_<clusters>.nc`
+- ``resources/temp_soil_total_base_s<simpl>_<clusters>.nc``:
+- ``resources/temp_air_total_base_s<simpl>_<clusters>.nc`
 """

 import atlite
@ -49,7 +49,6 @@ if __name__ == "__main__":
         snakemake = mock_snakemake(
             "build_temperature_profiles",
-            simpl="",
             clusters=48,
         )
     set_scenario_config(snakemake)
@ -167,11 +167,7 @@ if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake

-        snakemake = mock_snakemake(
-            "build_transport_demand",
-            simpl="",
-            clusters=128,
-        )
+        snakemake = mock_snakemake("build_transport_demand", clusters=128)
     configure_logging(snakemake)
     set_scenario_config(snakemake)
@ -108,7 +108,7 @@ if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake

-        snakemake = mock_snakemake("cluster_gas_network", simpl="", clusters="37")
+        snakemake = mock_snakemake("cluster_gas_network", clusters="37")
     configure_logging(snakemake)
     set_scenario_config(snakemake)
@ -6,7 +6,7 @@
 # coding: utf-8
 """
 Creates networks clustered to ``{cluster}`` number of zones with aggregated
-buses, generators and transmission corridors.
+buses and transmission corridors.

 Relevant Settings
 -----------------
@ -32,30 +32,30 @@ Relevant Settings
|
|||||||
Inputs
|
Inputs
|
||||||
------
|
------
|
||||||
|
|
||||||
- ``resources/regions_onshore_elec_s{simpl}.geojson``: confer :ref:`simplify`
|
- ``resources/regions_onshore_base.geojson``: confer :ref:`simplify`
|
||||||
- ``resources/regions_offshore_elec_s{simpl}.geojson``: confer :ref:`simplify`
|
- ``resources/regions_offshore_base.geojson``: confer :ref:`simplify`
|
||||||
- ``resources/busmap_elec_s{simpl}.csv``: confer :ref:`simplify`
|
- ``resources/busmap_base_s.csv``: confer :ref:`simplify`
|
||||||
- ``networks/elec_s{simpl}.nc``: confer :ref:`simplify`
|
- ``networks/base.nc``: confer :ref:`simplify`
|
||||||
- ``data/custom_busmap_elec_s{simpl}_{clusters}_{base_network}.csv``: optional input
|
- ``data/custom_busmap_base_s_{clusters}_{base_network}.csv``: optional input
|
||||||
|
|
||||||
Outputs
|
Outputs
|
||||||
-------
|
-------
|
||||||
|
|
||||||
- ``resources/regions_onshore_elec_s{simpl}_{clusters}.geojson``:
|
- ``resources/regions_onshore_base_s_{clusters}.geojson``:
|
||||||
|
|
||||||
.. image:: img/regions_onshore_elec_s_X.png
|
.. image:: img/regions_onshore_base_s_X.png
|
||||||
:scale: 33 %
|
:scale: 33 %
|
||||||
|
|
||||||
- ``resources/regions_offshore_elec_s{simpl}_{clusters}.geojson``:
|
- ``resources/regions_offshore_base_s_{clusters}.geojson``:
|
||||||
|
|
||||||
.. image:: img/regions_offshore_elec_s_X.png
|
.. image:: img/regions_offshore_base_s_X.png
|
||||||
:scale: 33 %
|
:scale: 33 %
|
||||||
|
|
||||||
- ``resources/busmap_elec_s{simpl}_{clusters}.csv``: Mapping of buses from ``networks/elec_s{simpl}.nc`` to ``networks/elec_s{simpl}_{clusters}.nc``;
|
- ``resources/busmap_base_s_{clusters}.csv``: Mapping of buses from ``networks/base.nc`` to ``networks/base_s_{clusters}.nc``;
|
||||||
- ``resources/linemap_elec_s{simpl}_{clusters}.csv``: Mapping of lines from ``networks/elec_s{simpl}.nc`` to ``networks/elec_s{simpl}_{clusters}.nc``;
|
- ``resources/linemap_base_s_{clusters}.csv``: Mapping of lines from ``networks/base.nc`` to ``networks/base_s_{clusters}.nc``;
|
||||||
- ``networks/elec_s{simpl}_{clusters}.nc``:
|
- ``networks/base_s_{clusters}.nc``:
|
||||||
|
|
||||||
.. image:: img/elec_s_X.png
|
.. image:: img/base_s_X.png
|
||||||
:scale: 40 %
|
:scale: 40 %
|
||||||
|
|
||||||
Description
|
Description
|
||||||
@ -63,60 +63,33 @@ Description
|
|||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
**Why is clustering used both in** ``simplify_network`` **and** ``cluster_network`` **?**
|
|
||||||
|
|
||||||
Consider for example a network ``networks/elec_s100_50.nc`` in which
|
|
||||||
``simplify_network`` clusters the network to 100 buses and in a second
|
|
||||||
step ``cluster_network``` reduces it down to 50 buses.
|
|
||||||
|
|
||||||
In preliminary tests, it turns out, that the principal effect of
|
|
||||||
changing spatial resolution is actually only partially due to the
|
|
||||||
transmission network. It is more important to differentiate between
|
|
||||||
wind generators with higher capacity factors from those with lower
|
|
||||||
capacity factors, i.e. to have a higher spatial resolution in the
|
|
||||||
renewable generation than in the number of buses.
|
|
||||||
|
|
||||||
The two-step clustering allows to study this effect by looking at
|
|
||||||
networks like ``networks/elec_s100_50m.nc``. Note the additional
|
|
||||||
``m`` in the ``{cluster}`` wildcard. So in the example network
|
|
||||||
there are still up to 100 different wind generators.
|
|
||||||
|
|
||||||
In combination these two features allow you to study the spatial
|
|
||||||
resolution of the transmission network separately from the
|
|
||||||
spatial resolution of renewable generators.
|
|
||||||
|
|
||||||
**Is it possible to run the model without the** ``simplify_network`` **rule?**
|
**Is it possible to run the model without the** ``simplify_network`` **rule?**
|
||||||
|
|
||||||
No, the network clustering methods in the PyPSA module
|
No, the network clustering methods in the PyPSA module
|
||||||
`pypsa.clustering.spatial <https://github.com/PyPSA/PyPSA/blob/master/pypsa/clustering/spatial.py>`_
|
`pypsa.clustering.spatial <https://github.com/PyPSA/PyPSA/blob/master/pypsa/clustering/spatial.py>`_
|
||||||
do not work reliably with multiple voltage levels and transformers.
|
do not work reliably with multiple voltage levels and transformers.
|
||||||
|
|
||||||
.. tip::
|
|
||||||
The rule :mod:`cluster_networks` runs
|
|
||||||
for all ``scenario`` s in the configuration file
|
|
||||||
the rule :mod:`cluster_network`.
|
|
||||||
|
|
||||||
Exemplary unsolved network clustered to 512 nodes:
|
Exemplary unsolved network clustered to 512 nodes:
|
||||||
|
|
||||||
.. image:: img/elec_s_512.png
|
.. image:: img/base_s_512.png
|
||||||
:scale: 40 %
|
:scale: 40 %
|
||||||
:align: center
|
:align: center
|
||||||
|
|
||||||
Exemplary unsolved network clustered to 256 nodes:
|
Exemplary unsolved network clustered to 256 nodes:
|
||||||
|
|
||||||
.. image:: img/elec_s_256.png
|
.. image:: img/base_s_256.png
|
||||||
:scale: 40 %
|
:scale: 40 %
|
||||||
:align: center
|
:align: center
|
||||||
|
|
||||||
Exemplary unsolved network clustered to 128 nodes:
|
Exemplary unsolved network clustered to 128 nodes:
|
||||||
|
|
||||||
.. image:: img/elec_s_128.png
|
.. image:: img/base_s_128.png
|
||||||
:scale: 40 %
|
:scale: 40 %
|
||||||
:align: center
|
:align: center
|
||||||
|
|
||||||
Exemplary unsolved network clustered to 37 nodes:
|
Exemplary unsolved network clustered to 37 nodes:
|
||||||
|
|
||||||
.. image:: img/elec_s_37.png
|
.. image:: img/base_s_37.png
|
||||||
:scale: 40 %
|
:scale: 40 %
|
||||||
:align: center
|
:align: center
|
||||||
"""
|
"""
|
||||||
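As an illustration of the outputs listed above, a minimal sketch (run name and cluster count assumed) that inspects how many original buses each cluster absorbs from the busmap written by this rule:

    import pandas as pd

    # assumed path for a 37-cluster run; adjust to the actual resources directory
    busmap = pd.read_csv("resources/busmap_base_s_37.csv", index_col=0).squeeze()
    print(busmap.value_counts().head())  # original buses aggregated into each cluster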
@@ -127,13 +100,11 @@ from functools import reduce

 import geopandas as gpd
 import linopy
-import matplotlib.pyplot as plt
 import numpy as np
 import pandas as pd
 import pypsa
-import seaborn as sns
-from _helpers import configure_logging, set_scenario_config, update_p_nom_max
-from add_electricity import load_costs
+import xarray as xr
+from _helpers import configure_logging, set_scenario_config
 from base_network import append_bus_shapes
 from packaging.version import Version, parse
 from pypsa.clustering.spatial import (
@@ -142,6 +113,7 @@ from pypsa.clustering.spatial import (
     busmap_by_kmeans,
     get_clustering_from_busmap,
 )
+from scipy.sparse.csgraph import connected_components

 PD_GE_2_2 = parse(pd.__version__) >= Version("2.2")

@@ -154,79 +126,61 @@ def normed(x):
     return (x / x.sum()).fillna(0.0)


-def weighting_for_country(n, x):
-    conv_carriers = {"OCGT", "CCGT", "PHS", "hydro"}
-    gen = n.generators.loc[n.generators.carrier.isin(conv_carriers)].groupby(
-        "bus"
-    ).p_nom.sum().reindex(n.buses.index, fill_value=0.0) + n.storage_units.loc[
-        n.storage_units.carrier.isin(conv_carriers)
-    ].groupby(
-        "bus"
-    ).p_nom.sum().reindex(
-        n.buses.index, fill_value=0.0
-    )
-    load = n.loads_t.p_set.mean().groupby(n.loads.bus).sum()
-
-    b_i = x.index
-    g = normed(gen.reindex(b_i, fill_value=0))
-    l = normed(load.reindex(b_i, fill_value=0))
-
-    w = g + l
-    return (w * (100.0 / w.max())).clip(lower=1.0).astype(int)
-
-
-def get_feature_for_hac(n, buses_i=None, feature=None):
-    if buses_i is None:
-        buses_i = n.buses.index
-
-    if feature is None:
-        feature = "solar+onwind-time"
-
-    carriers = feature.split("-")[0].split("+")
-    if "offwind" in carriers:
-        carriers.remove("offwind")
-        carriers = np.append(
-            carriers, n.generators.carrier.filter(like="offwind").unique()
-        )
-
-    if feature.split("-")[1] == "cap":
-        feature_data = pd.DataFrame(index=buses_i, columns=carriers)
-        for carrier in carriers:
-            gen_i = n.generators.query("carrier == @carrier").index
-            attach = (
-                n.generators_t.p_max_pu[gen_i]
-                .mean()
-                .rename(index=n.generators.loc[gen_i].bus)
-            )
-            feature_data[carrier] = attach
-
-    if feature.split("-")[1] == "time":
-        feature_data = pd.DataFrame(columns=buses_i)
-        for carrier in carriers:
-            gen_i = n.generators.query("carrier == @carrier").index
-            attach = n.generators_t.p_max_pu[gen_i].rename(
-                columns=n.generators.loc[gen_i].bus
-            )
-            feature_data = pd.concat([feature_data, attach], axis=0)[buses_i]
-
-    feature_data = feature_data.T
-    # timestamp raises error in sklearn >= v1.2:
-    feature_data.columns = feature_data.columns.astype(str)
-
-    feature_data = feature_data.fillna(0)
-
-    return feature_data
-
-
-def distribute_clusters(n, n_clusters, focus_weights=None, solver_name="scip"):
+def weighting_for_country(df: pd.DataFrame, weights: pd.Series) -> pd.Series:
+    w = normed(weights.reindex(df.index, fill_value=0))
+    return (w * (100 / w.max())).clip(lower=1).astype(int)
+
+
+def get_feature_data_for_hac(fn: str) -> pd.DataFrame:
+    ds = xr.open_dataset(fn)
+    feature_data = (
+        pd.concat([ds[var].to_pandas() for var in ds.data_vars], axis=0).fillna(0.0).T
+    )
+    feature_data.columns = feature_data.columns.astype(str)
+    return feature_data
+
+
+def fix_country_assignment_for_hac(n: pypsa.Network) -> None:
+    # overwrite country of nodes that are disconnected from their country-topology
+    for country in n.buses.country.unique():
+        m = n[n.buses.country == country].copy()
+
+        _, labels = connected_components(m.adjacency_matrix(), directed=False)
+
+        component = pd.Series(labels, index=m.buses.index)
+        component_sizes = component.value_counts()
+
+        if len(component_sizes) > 1:
+            disconnected_bus = component[component == component_sizes.index[-1]].index[
+                0
+            ]
+
+            neighbor_bus = n.lines.query(
+                "bus0 == @disconnected_bus or bus1 == @disconnected_bus"
+            ).iloc[0][["bus0", "bus1"]]
+            new_country = list(set(n.buses.loc[neighbor_bus].country) - {country})[0]
+
+            logger.info(
+                f"overwriting country `{country}` of bus `{disconnected_bus}` "
+                f"to new country `{new_country}`, because it is disconnected "
+                "from its initial inter-country transmission grid."
+            )
+            n.buses.at[disconnected_bus, "country"] = new_country
+
+
+def distribute_n_clusters_to_countries(
+    n: pypsa.Network,
+    n_clusters: int,
+    cluster_weights: pd.Series,
+    focus_weights: dict | None = None,
+    solver_name: str = "scip",
+) -> pd.Series:
     """
     Determine the number of clusters per country.
     """
     L = (
-        n.loads_t.p_set.mean()
-        .groupby(n.loads.bus)
-        .sum()
-        .groupby([n.buses.country, n.buses.sub_network])
+        cluster_weights.groupby([n.buses.country, n.buses.sub_network])
         .sum()
         .pipe(normed)
     )
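The rewritten ``weighting_for_country`` now scales an arbitrary bus weighting (in this PR the mean load passed in from the main routine) instead of summing conventional capacity and load itself. A small worked example with made-up numbers:

    import pandas as pd

    # hypothetical mean loads for three buses of one country (MW)
    weights = pd.Series({"DE1 1": 5.0, "DE1 2": 1.0, "DE1 3": 0.0})
    w = (weights / weights.sum()).fillna(0.0)                # normed()
    print((w * (100 / w.max())).clip(lower=1).astype(int))   # -> 100, 20, 1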
@@ -277,92 +231,50 @@ def distribute_clusters(n, n_clusters, focus_weights=None, solver_name="scip"):


 def busmap_for_n_clusters(
-    n,
-    n_clusters,
-    solver_name,
-    focus_weights=None,
-    algorithm="kmeans",
-    feature=None,
+    n: pypsa.Network,
+    n_clusters_c: pd.Series,
+    cluster_weights: pd.Series,
+    algorithm: str = "kmeans",
+    features: pd.DataFrame | None = None,
     **algorithm_kwds,
-):
+) -> pd.Series:
+    if algorithm == "hac" and features is None:
+        raise ValueError("For HAC clustering, features must be provided.")
+
     if algorithm == "kmeans":
         algorithm_kwds.setdefault("n_init", 1000)
         algorithm_kwds.setdefault("max_iter", 30000)
         algorithm_kwds.setdefault("tol", 1e-6)
         algorithm_kwds.setdefault("random_state", 0)

-    def fix_country_assignment_for_hac(n):
-        from scipy.sparse import csgraph
-
-        # overwrite country of nodes that are disconnected from their country-topology
-        for country in n.buses.country.unique():
-            m = n[n.buses.country == country].copy()
-
-            _, labels = csgraph.connected_components(
-                m.adjacency_matrix(), directed=False
-            )
-
-            component = pd.Series(labels, index=m.buses.index)
-            component_sizes = component.value_counts()
-
-            if len(component_sizes) > 1:
-                disconnected_bus = component[
-                    component == component_sizes.index[-1]
-                ].index[0]
-
-                neighbor_bus = n.lines.query(
-                    "bus0 == @disconnected_bus or bus1 == @disconnected_bus"
-                ).iloc[0][["bus0", "bus1"]]
-                new_country = list(set(n.buses.loc[neighbor_bus].country) - {country})[
-                    0
-                ]
-
-                logger.info(
-                    f"overwriting country `{country}` of bus `{disconnected_bus}` "
-                    f"to new country `{new_country}`, because it is disconnected "
-                    "from its initial inter-country transmission grid."
-                )
-                n.buses.at[disconnected_bus, "country"] = new_country
-        return n
-
-    if algorithm == "hac":
-        feature = get_feature_for_hac(n, buses_i=n.buses.index, feature=feature)
-        n = fix_country_assignment_for_hac(n)
-
-    if (algorithm != "hac") and (feature is not None):
-        logger.warning(
-            f"Keyword argument feature is only valid for algorithm `hac`. "
-            f"Given feature `{feature}` will be ignored."
-        )
-
-    n.determine_network_topology()
-
-    n_clusters = distribute_clusters(
-        n, n_clusters, focus_weights=focus_weights, solver_name=solver_name
-    )
-
     def busmap_for_country(x):
         prefix = x.name[0] + x.name[1] + " "
-        logger.debug(f"Determining busmap for country {prefix[:-1]}")
+        logger.debug(
+            f"Determining busmap for country {prefix[:-1]} "
+            f"from {len(x)} buses to {n_clusters_c[x.name]}."
+        )
         if len(x) == 1:
             return pd.Series(prefix + "0", index=x.index)
-        weight = weighting_for_country(n, x)
+        weight = weighting_for_country(x, cluster_weights)

         if algorithm == "kmeans":
             return prefix + busmap_by_kmeans(
-                n, weight, n_clusters[x.name], buses_i=x.index, **algorithm_kwds
+                n, weight, n_clusters_c[x.name], buses_i=x.index, **algorithm_kwds
             )
         elif algorithm == "hac":
             return prefix + busmap_by_hac(
-                n, n_clusters[x.name], buses_i=x.index, feature=feature.loc[x.index]
+                n,
+                n_clusters_c[x.name],
+                buses_i=x.index,
+                feature=features.reindex(x.index, fill_value=0.0),
             )
         elif algorithm == "modularity":
             return prefix + busmap_by_greedy_modularity(
-                n, n_clusters[x.name], buses_i=x.index
+                n, n_clusters_c[x.name], buses_i=x.index
             )
         else:
             raise ValueError(
-                f"`algorithm` must be one of 'kmeans' or 'hac'. Is {algorithm}."
+                f"`algorithm` must be one of 'kmeans' or 'hac' or 'modularity'. Is {algorithm}."
             )

     compat_kws = dict(include_groups=False) if PD_GE_2_2 else {}
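For reference, the ``prefix`` built in ``busmap_for_country`` concatenates country code and sub-network id, so cluster labels stay country-scoped; a tiny sketch:

    # a bus of group ("DE", "0") assigned to cluster 3 gets the label "DE0 3"
    name = ("DE", "0")
    prefix = name[0] + name[1] + " "
    print(prefix + "3")  # DE0 3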
@@ -376,93 +288,61 @@ def busmap_for_n_clusters(


 def clustering_for_n_clusters(
-    n,
-    n_clusters,
-    custom_busmap=False,
-    aggregate_carriers=None,
-    line_length_factor=1.25,
-    aggregation_strategies=dict(),
-    solver_name="scip",
-    algorithm="hac",
-    feature=None,
-    extended_link_costs=0,
-    focus_weights=None,
-):
-    if not isinstance(custom_busmap, pd.Series):
-        busmap = busmap_for_n_clusters(
-            n, n_clusters, solver_name, focus_weights, algorithm, feature
-        )
-    else:
-        busmap = custom_busmap
+    n: pypsa.Network,
+    busmap: pd.Series,
+    line_length_factor: float = 1.25,
+    aggregation_strategies: dict | None = None,
+) -> pypsa.clustering.spatial.Clustering:
+    if aggregation_strategies is None:
+        aggregation_strategies = dict()

     line_strategies = aggregation_strategies.get("lines", dict())
-    generator_strategies = aggregation_strategies.get("generators", dict())
-    one_port_strategies = aggregation_strategies.get("one_ports", dict())
+    bus_strategies = aggregation_strategies.get("buses", dict())
+    bus_strategies.setdefault("substation_lv", lambda x: bool(x.sum()))
+    bus_strategies.setdefault("substation_off", lambda x: bool(x.sum()))

     clustering = get_clustering_from_busmap(
         n,
         busmap,
-        aggregate_generators_weighted=True,
-        aggregate_generators_carriers=aggregate_carriers,
-        aggregate_one_ports=["Load", "StorageUnit"],
         line_length_factor=line_length_factor,
+        bus_strategies=bus_strategies,
         line_strategies=line_strategies,
-        generator_strategies=generator_strategies,
-        one_port_strategies=one_port_strategies,
-        scale_link_capital_costs=False,
         custom_line_groupers=["build_year"],
     )

-    if not n.links.empty:
-        nc = clustering.network
-        nc.links["underwater_fraction"] = (
-            n.links.eval("underwater_fraction * length").div(nc.links.length).dropna()
-        )
-        nc.links["capital_cost"] = nc.links["capital_cost"].add(
-            (nc.links.length - n.links.length)
-            .clip(lower=0)
-            .mul(extended_link_costs)
-            .dropna(),
-            fill_value=0,
-        )
-
     return clustering


-def cluster_regions(busmaps, regions):
+def cluster_regions(
+    busmaps: tuple | list, regions: gpd.GeoDataFrame, with_country: bool = False
+) -> gpd.GeoDataFrame:
     """
     Cluster regions based on busmaps and save the results to a file and to the
     network.

     Parameters:
     - busmaps (list): A list of busmaps used for clustering.
-    - which (str): The type of regions to cluster.
+    - regions (gpd.GeoDataFrame): The regions to cluster.
+    - with_country (bool): Whether to keep country column.

     Returns:
     None
     """
     busmap = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0])
-    regions = regions.reindex(columns=["name", "geometry"]).set_index("name")
+    columns = ["name", "country", "geometry"] if with_country else ["name", "geometry"]
+    regions = regions.reindex(columns=columns).set_index("name")
     regions_c = regions.dissolve(busmap)
     regions_c.index.name = "name"
     return regions_c.reset_index()


-def plot_busmap_for_n_clusters(n, n_clusters, solver_name="scip", fn=None):
-    busmap = busmap_for_n_clusters(n, n_clusters, solver_name)
-    cs = busmap.unique()
-    cr = sns.color_palette("hls", len(cs))
-    n.plot(bus_colors=busmap.map(dict(zip(cs, cr))))
-    if fn is not None:
-        plt.savefig(fn, bbox_inches="tight")
-    del cs, cr
-
-
 if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake

-        snakemake = mock_snakemake("cluster_network", simpl="", clusters="40")
+        snakemake = mock_snakemake("cluster_network", clusters=60)
     configure_logging(snakemake)
     set_scenario_config(snakemake)

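``cluster_regions`` relies on ``GeoDataFrame.dissolve`` with the busmap as grouper; a self-contained sketch in which two toy regions merge into one cluster region:

    import geopandas as gpd
    import pandas as pd
    from shapely.geometry import box

    regions = gpd.GeoDataFrame(
        {"name": ["DE1 1", "DE1 2"], "geometry": [box(0, 0, 1, 1), box(1, 0, 2, 1)]}
    ).set_index("name")
    busmap = pd.Series({"DE1 1": "DE1 0", "DE1 2": "DE1 0"})
    print(regions.dissolve(busmap))  # a single merged polygon indexed "DE1 0"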
@ -470,43 +350,20 @@ if __name__ == "__main__":
|
|||||||
solver_name = snakemake.config["solving"]["solver"]["name"]
|
solver_name = snakemake.config["solving"]["solver"]["name"]
|
||||||
|
|
||||||
n = pypsa.Network(snakemake.input.network)
|
n = pypsa.Network(snakemake.input.network)
|
||||||
|
buses_prev, lines_prev, links_prev = len(n.buses), len(n.lines), len(n.links)
|
||||||
|
|
||||||
# remove integer outputs for compatibility with PyPSA v0.26.0
|
load = (
|
||||||
n.generators.drop("n_mod", axis=1, inplace=True, errors="ignore")
|
xr.open_dataarray(snakemake.input.load)
|
||||||
|
.mean(dim="time")
|
||||||
|
.to_pandas()
|
||||||
|
.reindex(n.buses.index, fill_value=0.0)
|
||||||
|
)
|
||||||
|
|
||||||
exclude_carriers = params.cluster_network["exclude_carriers"]
|
if snakemake.wildcards.clusters == "all":
|
||||||
aggregate_carriers = set(n.generators.carrier) - set(exclude_carriers)
|
|
||||||
conventional_carriers = set(params.conventional_carriers)
|
|
||||||
if snakemake.wildcards.clusters.endswith("m"):
|
|
||||||
n_clusters = int(snakemake.wildcards.clusters[:-1])
|
|
||||||
aggregate_carriers = conventional_carriers & aggregate_carriers
|
|
||||||
elif snakemake.wildcards.clusters.endswith("c"):
|
|
||||||
n_clusters = int(snakemake.wildcards.clusters[:-1])
|
|
||||||
aggregate_carriers = aggregate_carriers - conventional_carriers
|
|
||||||
elif snakemake.wildcards.clusters == "all":
|
|
||||||
n_clusters = len(n.buses)
|
n_clusters = len(n.buses)
|
||||||
else:
|
else:
|
||||||
n_clusters = int(snakemake.wildcards.clusters)
|
n_clusters = int(snakemake.wildcards.clusters)
|
||||||
|
|
||||||
if params.cluster_network.get("consider_efficiency_classes", False):
|
|
||||||
carriers = []
|
|
||||||
for c in aggregate_carriers:
|
|
||||||
gens = n.generators.query("carrier == @c")
|
|
||||||
low = gens.efficiency.quantile(0.10)
|
|
||||||
high = gens.efficiency.quantile(0.90)
|
|
||||||
if low >= high:
|
|
||||||
carriers += [c]
|
|
||||||
else:
|
|
||||||
labels = ["low", "medium", "high"]
|
|
||||||
suffix = pd.cut(
|
|
||||||
gens.efficiency, bins=[0, low, high, 1], labels=labels
|
|
||||||
).astype(str)
|
|
||||||
carriers += [f"{c} {label} efficiency" for label in labels]
|
|
||||||
n.generators.update(
|
|
||||||
{"carrier": gens.carrier + " " + suffix + " efficiency"}
|
|
||||||
)
|
|
||||||
aggregate_carriers = carriers
|
|
||||||
|
|
||||||
if n_clusters == len(n.buses):
|
if n_clusters == len(n.buses):
|
||||||
# Fast-path if no clustering is necessary
|
# Fast-path if no clustering is necessary
|
||||||
busmap = n.buses.index.to_series()
|
busmap = n.buses.index.to_series()
|
||||||
@ -515,13 +372,6 @@ if __name__ == "__main__":
|
|||||||
else:
|
else:
|
||||||
Nyears = n.snapshot_weightings.objective.sum() / 8760
|
Nyears = n.snapshot_weightings.objective.sum() / 8760
|
||||||
|
|
||||||
hvac_overhead_cost = load_costs(
|
|
||||||
snakemake.input.tech_costs,
|
|
||||||
params.costs,
|
|
||||||
params.max_hours,
|
|
||||||
Nyears,
|
|
||||||
).at["HVAC overhead", "capital_cost"]
|
|
||||||
|
|
||||||
custom_busmap = params.custom_busmap
|
custom_busmap = params.custom_busmap
|
||||||
if custom_busmap:
|
if custom_busmap:
|
||||||
custom_busmap = pd.read_csv(
|
custom_busmap = pd.read_csv(
|
||||||
@ -529,32 +379,42 @@ if __name__ == "__main__":
|
|||||||
).squeeze()
|
).squeeze()
|
||||||
custom_busmap.index = custom_busmap.index.astype(str)
|
custom_busmap.index = custom_busmap.index.astype(str)
|
||||||
logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}")
|
logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}")
|
||||||
|
busmap = custom_busmap
|
||||||
|
else:
|
||||||
|
algorithm = params.cluster_network["algorithm"]
|
||||||
|
features = None
|
||||||
|
if algorithm == "hac":
|
||||||
|
features = get_feature_data_for_hac(snakemake.input.hac_features)
|
||||||
|
fix_country_assignment_for_hac(n)
|
||||||
|
|
||||||
|
n.determine_network_topology()
|
||||||
|
|
||||||
|
n_clusters_c = distribute_n_clusters_to_countries(
|
||||||
|
n,
|
||||||
|
n_clusters,
|
||||||
|
load,
|
||||||
|
focus_weights=params.focus_weights,
|
||||||
|
solver_name=solver_name,
|
||||||
|
)
|
||||||
|
|
||||||
|
busmap = busmap_for_n_clusters(
|
||||||
|
n,
|
||||||
|
n_clusters_c,
|
||||||
|
cluster_weights=load,
|
||||||
|
algorithm=algorithm,
|
||||||
|
features=features,
|
||||||
|
)
|
||||||
|
|
||||||
clustering = clustering_for_n_clusters(
|
clustering = clustering_for_n_clusters(
|
||||||
n,
|
n,
|
||||||
n_clusters,
|
busmap,
|
||||||
custom_busmap,
|
line_length_factor=params.length_factor,
|
||||||
aggregate_carriers,
|
aggregation_strategies=params.aggregation_strategies,
|
||||||
params.length_factor,
|
|
||||||
params.aggregation_strategies,
|
|
||||||
solver_name,
|
|
||||||
params.cluster_network["algorithm"],
|
|
||||||
params.cluster_network["feature"],
|
|
||||||
hvac_overhead_cost,
|
|
||||||
params.focus_weights,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
nc = clustering.network
|
nc = clustering.network
|
||||||
update_p_nom_max(nc)
|
|
||||||
|
|
||||||
if params.cluster_network.get("consider_efficiency_classes"):
|
for attr in ["busmap", "linemap"]:
|
||||||
labels = [f" {label} efficiency" for label in ["low", "medium", "high"]]
|
|
||||||
nc.generators["carrier"] = nc.generators.carrier.replace(labels, "", regex=True)
|
|
||||||
|
|
||||||
for attr in (
|
|
||||||
"busmap",
|
|
||||||
"linemap",
|
|
||||||
): # also available: linemap_positive, linemap_negative
|
|
||||||
getattr(clustering, attr).to_csv(snakemake.output[attr])
|
getattr(clustering, attr).to_csv(snakemake.output[attr])
|
||||||
|
|
||||||
# nc.shapes = n.shapes.copy()
|
# nc.shapes = n.shapes.copy()
|
||||||
@ -566,3 +426,10 @@ if __name__ == "__main__":
|
|||||||
|
|
||||||
nc.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
|
nc.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
|
||||||
nc.export_to_netcdf(snakemake.output.network)
|
nc.export_to_netcdf(snakemake.output.network)
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f"Clustered network:\n"
|
||||||
|
f"Buses: {buses_prev} to {len(nc.buses)}\n"
|
||||||
|
f"Lines: {lines_prev} to {len(nc.lines)}\n"
|
||||||
|
f"Links: {links_prev} to {len(nc.links)}"
|
||||||
|
)
|
||||||
|
194 scripts/determine_availability_matrix.py (new file)
@@ -0,0 +1,194 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# SPDX-FileCopyrightText: : 2017-2024 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: MIT
"""
The script performs a land eligibility analysis of what share of land is
availability for developing the selected technology at each cutout grid cell.
The script uses the `atlite <https://github.com/pypsa/atlite>`_ library and
several GIS datasets like the CORINE land use data, LUISA land use data,
Natura2000 nature reserves, GEBCO bathymetry data, and shipping lanes.

Relevant settings
-----------------

.. code:: yaml

    atlite:
        nprocesses:

    renewable:
        {technology}:
            cutout: corine: luisa: grid_codes: distance: natura: max_depth:
            min_depth: max_shore_distance: min_shore_distance: resource:

.. seealso::
    Documentation of the configuration file ``config/config.yaml`` at
    :ref:`atlite_cf`, :ref:`renewable_cf`

Inputs
------

- ``data/bundle/corine/g250_clc06_V18_5.tif``: `CORINE Land Cover (CLC)
  <https://land.copernicus.eu/pan-european/corine-land-cover>`_ inventory on `44
  classes <https://wiki.openstreetmap.org/wiki/Corine_Land_Cover#Tagging>`_ of
  land use (e.g. forests, arable land, industrial, urban areas) at 100m
  resolution.

    .. image:: img/corine.png
        :scale: 33 %

- ``data/LUISA_basemap_020321_50m.tif``: `LUISA Base Map
  <https://publications.jrc.ec.europa.eu/repository/handle/JRC124621>`_ land
  coverage dataset at 50m resolution similar to CORINE. For codes in relation to
  CORINE land cover, see `Annex 1 of the technical documentation
  <https://publications.jrc.ec.europa.eu/repository/bitstream/JRC124621/technical_report_luisa_basemap_2018_v7_final.pdf>`_.

- ``data/bundle/gebco/GEBCO_2014_2D.nc``: A `bathymetric
  <https://en.wikipedia.org/wiki/Bathymetry>`_ data set with a global terrain
  model for ocean and land at 15 arc-second intervals by the `General
  Bathymetric Chart of the Oceans (GEBCO)
  <https://www.gebco.net/data_and_products/gridded_bathymetry_data/>`_.

    .. image:: img/gebco_2019_grid_image.jpg
        :scale: 50 %

    **Source:** `GEBCO
    <https://www.gebco.net/data_and_products/images/gebco_2019_grid_image.jpg>`_

- ``resources/natura.tiff``: confer :ref:`natura`
- ``resources/offshore_shapes.geojson``: confer :ref:`shapes`
- ``resources/regions_onshore_base_s_{clusters}.geojson``: (if not offshore
  wind), confer :ref:`busregions`
- ``resources/regions_offshore_base_s_{clusters}.geojson``: (if offshore wind),
  :ref:`busregions`
- ``"cutouts/" + params["renewable"][{technology}]['cutout']``: :ref:`cutout`
- ``networks/_base_s_{clusters}.nc``: :ref:`base`

Outputs
-------

- ``resources/availability_matrix_{clusters_{technology}.nc``
"""
import functools
import logging
import time

import atlite
import geopandas as gpd
import numpy as np
import xarray as xr
from _helpers import configure_logging, set_scenario_config

logger = logging.getLogger(__name__)


if __name__ == "__main__":
    if "snakemake" not in globals():
        from _helpers import mock_snakemake

        snakemake = mock_snakemake(
            "build_renewable_profiles", clusters=100, technology="onwind"
        )
    configure_logging(snakemake)
    set_scenario_config(snakemake)

    nprocesses = int(snakemake.threads)
    noprogress = snakemake.config["run"].get("disable_progressbar", True)
    noprogress = noprogress or not snakemake.config["atlite"]["show_progress"]
    technology = snakemake.wildcards.technology
    params = snakemake.params.renewable[technology]

    cutout = atlite.Cutout(snakemake.input.cutout)
    regions = gpd.read_file(snakemake.input.regions)
    assert not regions.empty, (
        f"List of regions in {snakemake.input.regions} is empty, please "
        "disable the corresponding renewable technology"
    )
    # do not pull up, set_index does not work if geo dataframe is empty
    regions = regions.set_index("name").rename_axis("bus")

    res = params.get("excluder_resolution", 100)
    excluder = atlite.ExclusionContainer(crs=3035, res=res)

    if params["natura"]:
        excluder.add_raster(snakemake.input.natura, nodata=0, allow_no_overlap=True)

    for dataset in ["corine", "luisa"]:
        kwargs = {"nodata": 0} if dataset == "luisa" else {}
        settings = params.get(dataset, {})
        if not settings:
            continue
        if dataset == "luisa" and res > 50:
            logger.info(
                "LUISA data is available at 50m resolution, "
                f"but coarser {res}m resolution is used."
            )
        if isinstance(settings, list):
            settings = {"grid_codes": settings}
        if "grid_codes" in settings:
            codes = settings["grid_codes"]
            excluder.add_raster(
                snakemake.input[dataset], codes=codes, invert=True, crs=3035, **kwargs
            )
        if settings.get("distance", 0.0) > 0.0:
            codes = settings["distance_grid_codes"]
            buffer = settings["distance"]
            excluder.add_raster(
                snakemake.input[dataset], codes=codes, buffer=buffer, crs=3035, **kwargs
            )

    if params.get("ship_threshold"):
        shipping_threshold = (
            params["ship_threshold"] * 8760 * 6
        )  # approximation because 6 years of data which is hourly collected
        func = functools.partial(np.less, shipping_threshold)
        excluder.add_raster(
            snakemake.input.ship_density, codes=func, crs=4326, allow_no_overlap=True
        )

    if params.get("max_depth"):
        # lambda not supported for atlite + multiprocessing
        # use named function np.greater with partially frozen argument instead
        # and exclude areas where: -max_depth > grid cell depth
        func = functools.partial(np.greater, -params["max_depth"])
        excluder.add_raster(snakemake.input.gebco, codes=func, crs=4326, nodata=-1000)

    if params.get("min_depth"):
        func = functools.partial(np.greater, -params["min_depth"])
        excluder.add_raster(
            snakemake.input.gebco, codes=func, crs=4326, nodata=-1000, invert=True
        )

    if "min_shore_distance" in params:
        buffer = params["min_shore_distance"]
        excluder.add_geometry(snakemake.input.country_shapes, buffer=buffer)

    if "max_shore_distance" in params:
        buffer = params["max_shore_distance"]
        excluder.add_geometry(
            snakemake.input.country_shapes, buffer=buffer, invert=True
        )

    logger.info(f"Calculate landuse availability for {technology}...")
    start = time.time()

    kwargs = dict(nprocesses=nprocesses, disable_progressbar=noprogress)
    availability = cutout.availabilitymatrix(regions, excluder, **kwargs)

    duration = time.time() - start
    logger.info(
        f"Completed landuse availability calculation for {technology} ({duration:2.2f}s)"
    )

    # For Moldova and Ukraine: Overwrite parts not covered by Corine with
    # externally determined available areas
    if "availability_matrix_MD_UA" in snakemake.input.keys():
        availability_MDUA = xr.open_dataarray(
            snakemake.input["availability_matrix_MD_UA"]
        )
        availability.loc[availability_MDUA.coords] = availability_MDUA

    availability.to_netcdf(snakemake.output[0])
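A note on the ``functools.partial`` pattern used throughout the new script: atlite evaluates exclusion criteria per raster cell and cannot pickle lambdas for multiprocessing, so the comparison argument is frozen instead. A minimal illustration (a 50 m depth limit assumed):

    import functools
    import numpy as np

    is_excluded = functools.partial(np.greater, -50)  # evaluates: -max_depth > cell value
    print(is_excluded(-120.0), is_excluded(-10.0))    # True False: the 120 m deep cell is excluded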
@@ -34,21 +34,21 @@ if __name__ == "__main__":
         from _helpers import mock_snakemake

         snakemake = mock_snakemake(
-            "determine_availability_matrix_MD_UA", technology="solar"
+            "determine_availability_matrix_MD_UA", clusters=100, technology="solar"
         )
     configure_logging(snakemake)
     set_scenario_config(snakemake)

     nprocesses = int(snakemake.threads)
     noprogress = not snakemake.config["atlite"].get("show_progress", True)
-    config = snakemake.config["renewable"][snakemake.wildcards.technology]
+    config = snakemake.params["renewable"][snakemake.wildcards.technology]

     cutout = atlite.Cutout(snakemake.input.cutout)
     regions = (
         gpd.read_file(snakemake.input.regions).set_index("name").rename_axis("bus")
     )
     # Limit to "UA" and "MD" regions
-    buses = regions.loc[regions["country"].isin(["UA", "MD"])].index.values
+    buses = regions.filter(regex="(UA|MD)", axis=0).index.values
     regions = regions.loc[buses]

     excluder = atlite.ExclusionContainer(crs=3035, res=100)
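The switch from the dropped ``country`` column to ``regions.filter(regex=...)`` works because bus region names start with the country code; a small sketch:

    import pandas as pd

    regions = pd.DataFrame(index=["UA1 0", "MD1 0", "DE1 0"])
    print(regions.filter(regex="(UA|MD)", axis=0).index.tolist())  # ['UA1 0', 'MD1 0']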
@@ -125,24 +125,24 @@ if __name__ == "__main__":
         time.sleep(1)
         excluder.add_geometry(pts_tmp_fn)

-    if "max_depth" in config:
+    if config.get("max_depth"):
         # lambda not supported for atlite + multiprocessing
         # use named function np.greater with partially frozen argument instead
         # and exclude areas where: -max_depth > grid cell depth
         func = functools.partial(np.greater, -config["max_depth"])
         excluder.add_raster(snakemake.input.gebco, codes=func, crs=4236, nodata=-1000)

-    if "min_shore_distance" in config:
+    if config.get("min_shore_distance"):
         buffer = config["min_shore_distance"]
         excluder.add_geometry(snakemake.input.country_shapes, buffer=buffer)

-    if "max_shore_distance" in config:
+    if config.get("max_shore_distance"):
         buffer = config["max_shore_distance"]
         excluder.add_geometry(
             snakemake.input.country_shapes, buffer=buffer, invert=True
         )

-    if "ship_threshold" in config:
+    if config.get("ship_threshold"):
         shipping_threshold = config["ship_threshold"] * 8760 * 6
         func = functools.partial(np.less, shipping_threshold)
         excluder.add_raster(
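The change from ``"key" in config`` to ``config.get("key")`` also skips keys that are present but unset; a quick illustration:

    config = {"max_depth": None}
    print("max_depth" in config)          # True  -> the old check would still enter the block
    print(bool(config.get("max_depth")))  # False -> the new check treats unset values as absent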
@@ -761,8 +761,7 @@ if __name__ == "__main__":
     networks_dict = {
         (cluster, ll, opt + sector_opt, planning_horizon): "results/"
         + snakemake.params.RDIR
-        + f"/postnetworks/elec_s{simpl}_{cluster}_l{ll}_{opt}_{sector_opt}_{planning_horizon}.nc"
-        for simpl in snakemake.params.scenario["simpl"]
+        + f"/postnetworks/base_s_{cluster}_l{ll}_{opt}_{sector_opt}_{planning_horizon}.nc"
         for cluster in snakemake.params.scenario["clusters"]
         for opt in snakemake.params.scenario["opts"]
         for sector_opt in snakemake.params.scenario["sector_opts"]

@@ -732,8 +732,7 @@ if __name__ == "__main__":
     networks_dict = {
         (clusters, lv, opts + sector_opts): "results/"
         + run
-        + f"postnetworks/elec_s{simpl}_{clusters}_l{lv}_{opts}_{sector_opts}_brownfield_all_years.nc"
-        for simpl in snakemake.config["scenario"]["simpl"]
+        + f"postnetworks/base_s_{clusters}_l{lv}_{opts}_{sector_opts}_brownfield_all_years.nc"
         for clusters in snakemake.config["scenario"]["clusters"]
         for opts in snakemake.config["scenario"]["opts"]
         for sector_opts in snakemake.config["scenario"]["sector_opts"]

@@ -229,7 +229,6 @@ if __name__ == "__main__":

         snakemake = mock_snakemake(
             "plot_gas_network",
-            simpl="",
             opts="",
             clusters="37",
             ll="v1.0",

@@ -256,7 +256,6 @@ if __name__ == "__main__":

         snakemake = mock_snakemake(
             "plot_hydrogen_network",
-            simpl="",
             opts="",
             clusters="37",
             ll="v1.0",

@@ -249,7 +249,6 @@ if __name__ == "__main__":

         snakemake = mock_snakemake(
             "plot_power_network",
-            simpl="",
             opts="",
             clusters="37",
             ll="v1.0",

@@ -176,7 +176,6 @@ if __name__ == "__main__":

         snakemake = mock_snakemake(
             "plot_power_network_perfect",
-            simpl="",
             opts="",
             clusters="37",
             ll="v1.0",

@@ -18,7 +18,6 @@ if __name__ == "__main__":

         snakemake = mock_snakemake(
             "plot_elec_statistics",
-            simpl="",
             opts="Ept-12h",
             clusters="37",
             ll="v1.0",

@@ -181,7 +181,6 @@ if __name__ == "__main__":

         snakemake = mock_snakemake(
             "plot_electricity_prices",
-            simpl="",
             opts="Ept-12h",
             clusters="37",
             ll="v1.0",

@@ -18,7 +18,6 @@ if __name__ == "__main__":

         snakemake = mock_snakemake(
             "plot_electricity_prices",
-            simpl="",
             opts="Ept-12h",
             clusters="37",
             ll="v1.0",

@@ -29,7 +29,6 @@ if __name__ == "__main__":

         snakemake = mock_snakemake(
             "plot_validation_electricity_production",
-            simpl="",
             opts="Ept",
             clusters="37c",
             ll="v1.0",
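With the ``{simpl}`` dimension gone from the scenario loops above, the post-network file names are assembled from the remaining wildcards only; an illustrative combination with made-up values:

    cluster, ll, opt, sector_opt, horizon = 37, "v1.0", "", "730H-T-H-B-I-A", 2050
    print(f"postnetworks/base_s_{cluster}_l{ll}_{opt}_{sector_opt}_{horizon}.nc")
    # postnetworks/base_s_37_lv1.0__730H-T-H-B-I-A_2050.nc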
@@ -41,12 +41,12 @@ Inputs
 ------

 - ``resources/costs.csv``: The database of cost assumptions for all included technologies for specific years from various sources; e.g. discount rate, lifetime, investment (CAPEX), fixed operation and maintenance (FOM), variable operation and maintenance (VOM), fuel costs, efficiency, carbon-dioxide intensity.
-- ``networks/elec_s{simpl}_{clusters}.nc``: confer :ref:`cluster`
+- ``networks/base_s_{clusters}.nc``: confer :ref:`cluster`

 Outputs
 -------

-- ``networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: Complete PyPSA network that will be handed to the ``solve_network`` rule.
+- ``networks/base_s_{clusters}_elec_l{ll}_{opts}.nc``: Complete PyPSA network that will be handed to the ``solve_network`` rule.

 Description
 -----------
@@ -68,7 +68,7 @@ from _helpers import (
     set_scenario_config,
     update_config_from_wildcards,
 )
-from add_electricity import load_costs, update_transmission_costs
+from add_electricity import load_costs, set_transmission_costs
 from pypsa.descriptors import expand_series

 idx = pd.IndexSlice
@@ -191,7 +191,7 @@ def set_transmission_limit(n, ll_type, factor, costs, Nyears=1):
         + n.links.loc[links_dc_b, "p_nom"] @ n.links.loc[links_dc_b, col]
     )

-    update_transmission_costs(n, costs)
+    set_transmission_costs(n, costs)

     if factor == "opt" or float(factor) > 1.0:
         n.lines["s_nom_min"] = lines_s_nom
@@ -325,7 +325,6 @@ if __name__ == "__main__":

         snakemake = mock_snakemake(
             "prepare_network",
-            simpl="",
             clusters="37",
             ll="v1.0",
             opts="Co2L-4H",

@@ -493,7 +493,6 @@ if __name__ == "__main__":

         snakemake = mock_snakemake(
             "prepare_perfect_foresight",
-            simpl="",
             opts="",
             clusters="37",
             ll="v1.5",
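In ``set_transmission_limit`` above, the ``@`` operator is a plain dot product, i.e. existing link capacity times length summed over all DC links; with made-up numbers:

    import pandas as pd

    p_nom = pd.Series([2000.0, 1000.0])   # MW of existing DC links
    length = pd.Series([300.0, 150.0])    # km
    print(p_nom @ length)                 # 750000.0 MW*km of existing transmission volume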
@@ -407,12 +407,20 @@ def create_network_topology(
     return topo


-# TODO merge issue with PyPSA-Eur
-def update_wind_solar_costs(n, costs):
+def update_wind_solar_costs(
+    n: pypsa.Network,
+    costs: pd.DataFrame,
+    line_length_factor: int | float = 1,
+    landfall_lengths: dict = None,
+) -> None:
     """
     Update costs for wind and solar generators added with pypsa-eur to those
     cost in the planning year.
     """
+
+    if landfall_lengths is None:
+        landfall_lengths = {}
+
     # NB: solar costs are also manipulated for rooftop
     # when distribution grid is inserted
     n.generators.loc[n.generators.carrier == "solar", "capital_cost"] = costs.at[
@@ -424,22 +432,9 @@ def update_wind_solar_costs(n, costs):
     ]

     # for offshore wind, need to calculated connection costs

-    # assign clustered bus
-    # map initial network -> simplified network
-    busmap_s = pd.read_csv(snakemake.input.busmap_s, index_col=0).squeeze()
-    busmap_s.index = busmap_s.index.astype(str)
-    busmap_s = busmap_s.astype(str)
-    # map simplified network -> clustered network
-    busmap = pd.read_csv(snakemake.input.busmap, index_col=0).squeeze()
-    busmap.index = busmap.index.astype(str)
-    busmap = busmap.astype(str)
-    # map initial network -> clustered network
-    clustermaps = busmap_s.map(busmap)
-
-    # code adapted from pypsa-eur/scripts/add_electricity.py
     for connection in ["dc", "ac", "float"]:
         tech = "offwind-" + connection
+        landfall_length = landfall_lengths.get(tech, 0.0)
         if tech not in n.generators.carrier.values:
             continue
         profile = snakemake.input["profile_offwind-" + connection]
@@ -449,30 +444,12 @@ def update_wind_solar_costs(n, costs):
         if "year" in ds.indexes:
             ds = ds.sel(year=ds.year.min(), drop=True)

-        underwater_fraction = ds["underwater_fraction"].to_pandas()
-        connection_cost = (
-            snakemake.params.length_factor
-            * ds["average_distance"].to_pandas()
-            * (
-                underwater_fraction
-                * costs.at[tech + "-connection-submarine", "fixed"]
-                + (1.0 - underwater_fraction)
-                * costs.at[tech + "-connection-underground", "fixed"]
-            )
-        )
-
-        # convert to aggregated clusters with weighting
-        weight = ds["weight"].to_pandas()
-
-        # e.g. clusters == 37m means that VRE generators are left
-        # at clustering of simplified network, but that they are
-        # connected to 37-node network
-        genmap = (
-            busmap_s if snakemake.wildcards.clusters[-1:] == "m" else clustermaps
-        )
-        connection_cost = (connection_cost * weight).groupby(
-            genmap
-        ).sum() / weight.groupby(genmap).sum()
+        distance = ds["average_distance"].to_pandas()
+        submarine_cost = costs.at[tech + "-connection-submarine", "fixed"]
+        underground_cost = costs.at[tech + "-connection-underground", "fixed"]
+        connection_cost = line_length_factor * (
+            distance * submarine_cost + landfall_length * underground_cost
+        )

         capital_cost = (
             costs.at["offwind", "fixed"]
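The new connection-cost term replaces the ``underwater_fraction`` weighting with an explicit split into a submarine stretch (``average_distance``) and an onshore landfall stretch of configurable length. A numeric sketch with assumed cost values:

    line_length_factor = 1.25
    distance = 40.0            # km offshore, from ds["average_distance"]
    landfall_length = 10.0     # km onshore, from the per-technology landfall_length setting
    submarine_cost = 2685.0    # EUR/MW/km, assumed value of the "-connection-submarine" cost
    underground_cost = 1342.0  # EUR/MW/km, assumed value of the "-connection-underground" cost

    connection_cost = line_length_factor * (
        distance * submarine_cost + landfall_length * underground_cost
    )
    print(connection_cost)  # 151025.0 EUR/MW added on top of the turbine capital cost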
@@ -613,10 +590,10 @@ def remove_non_electric_buses(n):
     n.buses = n.buses[n.buses.carrier.isin(["AC", "DC"])]


-def patch_electricity_network(n):
+def patch_electricity_network(n, costs, landfall_lengths):
     remove_elec_base_techs(n)
     remove_non_electric_buses(n)
-    update_wind_solar_costs(n, costs)
+    update_wind_solar_costs(n, costs, landfall_lengths=landfall_lengths)
     n.loads["carrier"] = "electricity"
     n.buses["location"] = n.buses.index
     n.buses["unit"] = "MWh_el"
@@ -1340,12 +1317,6 @@ def insert_electricity_distribution_grid(n, costs):
     # set existing solar to cost of utility cost rather the 50-50 rooftop-utility
     solar = n.generators.index[n.generators.carrier == "solar"]
     n.generators.loc[solar, "capital_cost"] = costs.at["solar-utility", "fixed"]
-    if snakemake.wildcards.clusters[-1:] == "m":
-        simplified_pop_layout = pd.read_csv(
-            snakemake.input.simplified_pop_layout, index_col=0
-        )
-        pop_solar = simplified_pop_layout.total.rename(index=lambda x: x + " solar")
-    else:
-        pop_solar = pop_layout.total.rename(index=lambda x: x + " solar")
+    pop_solar = pop_layout.total.rename(index=lambda x: x + " solar")

     # add max solar rooftop potential assuming 0.1 kW/m2 and 20 m2/person,
@@ -4621,7 +4592,6 @@ if __name__ == "__main__":

         snakemake = mock_snakemake(
             "prepare_sector_network",
-            simpl="",
             opts="",
             clusters="38",
             ll="vopt",
@@ -4658,12 +4628,17 @@ if __name__ == "__main__":
     )
     pop_weighted_energy_totals.update(pop_weighted_heat_totals)

+    landfall_lengths = {
+        tech: settings["landfall_length"]
+        for tech, settings in snakemake.params.renewable.items()
+        if "landfall_length" in settings.keys()
+    }
+    patch_electricity_network(n, costs, landfall_lengths)

     fn = snakemake.input.heating_efficiencies
     year = int(snakemake.params["energy_totals_year"])
     heating_efficiencies = pd.read_csv(fn, index_col=[1, 0]).loc[year]

-    patch_electricity_network(n)

     spatial = define_spatial(pop_layout.index, options)

     if snakemake.params.foresight in ["myopic", "perfect"]:
@@ -19,96 +19,74 @@ Relevant Settings
       cluster_network:
       aggregation_strategies:

-    costs:
-        year:
-        version:
-        fill_values:
-        marginal_cost:
-        capital_cost:
-
-    electricity:
-        max_hours:
-
-    lines:
-        length_factor:
-
     links:
         p_max_pu:

-    solving:
-        solver:
-            name:
-
 .. seealso::
     Documentation of the configuration file ``config/config.yaml`` at
-    :ref:`costs_cf`, :ref:`electricity_cf`, :ref:`renewable_cf`,
-    :ref:`lines_cf`, :ref:`links_cf`, :ref:`solving_cf`
+    :ref:`electricity_cf`, :ref:`renewable_cf`,
+    :ref:`lines_cf`, :ref:`links_cf`

 Inputs
 ------

-- ``resources/costs.csv``: The database of cost assumptions for all included technologies for specific years from various sources; e.g. discount rate, lifetime, investment (CAPEX), fixed operation and maintenance (FOM), variable operation and maintenance (VOM), fuel costs, efficiency, carbon-dioxide intensity.
 - ``resources/regions_onshore.geojson``: confer :ref:`busregions`
 - ``resources/regions_offshore.geojson``: confer :ref:`busregions`
-- ``networks/elec.nc``: confer :ref:`electricity`
+- ``networks/base.nc``

 Outputs
 -------

-- ``resources/regions_onshore_elec_s{simpl}.geojson``:
+- ``resources/regions_onshore_base.geojson``:

-    .. image:: img/regions_onshore_elec_s.png
+    .. image:: img/regions_onshore_base_s.png
         :scale: 33 %

-- ``resources/regions_offshore_elec_s{simpl}.geojson``:
+- ``resources/regions_offshore_base.geojson``:

-    .. image:: img/regions_offshore_elec_s .png
+    .. image:: img/regions_offshore_base_s .png
         :scale: 33 %

-- ``resources/busmap_elec_s{simpl}.csv``: Mapping of buses from ``networks/elec.nc`` to ``networks/elec_s{simpl}.nc``;
-- ``networks/elec_s{simpl}.nc``:
+- ``resources/busmap_base_s.csv``: Mapping of buses from ``networks/base.nc`` to ``networks/base_s.nc``;
+- ``networks/base.nc``:

-    .. image:: img/elec_s.png
+    .. image:: img/base_s.png
         :scale: 33 %

 Description
 -----------

-The rule :mod:`simplify_network` does up to four things:
+The rule :mod:`simplify_network` does up to three things:

 1. Create an equivalent transmission network in which all voltage levels are mapped to the 380 kV level by the function ``simplify_network(...)``.

-2. DC only sub-networks that are connected at only two buses to the AC network are reduced to a single representative link in the function ``simplify_links(...)``. The components attached to buses in between are moved to the nearest endpoint. The grid connection cost of offshore wind generators are added to the capital costs of the generator.
+2. DC only sub-networks that are connected at only two buses to the AC network are reduced to a single representative link in the function ``simplify_links(...)``.

-3. Stub lines and links, i.e. dead-ends of the network, are sequentially removed from the network in the function ``remove_stubs(...)``. Components are moved along.
+3. Stub lines and links, i.e. dead-ends of the network, are sequentially removed from the network in the function ``remove_stubs(...)``.

-4. Optionally, if an integer were provided for the wildcard ``{simpl}`` (e.g. ``networks/elec_s500.nc``), the network is clustered to this number of clusters with the routines from the ``cluster_network`` rule with the function ``cluster_network.cluster(...)``. This step is usually skipped!
 """

 import logging
 from functools import reduce
+from typing import Tuple

 import geopandas as gpd
 import numpy as np
 import pandas as pd
 import pypsa
 import scipy as sp
-from _helpers import configure_logging, set_scenario_config, update_p_nom_max
-from add_electricity import load_costs
+from _helpers import configure_logging, set_scenario_config
 from base_network import append_bus_shapes
-from cluster_network import cluster_regions, clustering_for_n_clusters
+from cluster_network import cluster_regions
-from pypsa.clustering.spatial import (
-    aggregateoneport,
-    busmap_by_stubs,
-    get_clustering_from_busmap,
-)
-from pypsa.io import import_components_from_dataframe, import_series_from_dataframe
+from pypsa.clustering.spatial import busmap_by_stubs, get_clustering_from_busmap
 from scipy.sparse.csgraph import connected_components, dijkstra

 logger = logging.getLogger(__name__)


-def simplify_network_to_380(n, linetype_380):
+def simplify_network_to_380(
+    n: pypsa.Network, linetype_380: str
+) -> Tuple[pypsa.Network, pd.Series]:
     """
     Fix all lines to a voltage level of 380 kV and remove all transformers.

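Taken together, the slimmed-down module now performs only the three steps listed in the description above. A rough sketch of the resulting control flow, assuming the functions defined in this script are importable and using hypothetical file and config values:

    import pypsa

    from simplify_network import (  # assumption: run from the scripts directory
        remove_stubs,
        simplify_links,
        simplify_network_to_380,
    )

    n = pypsa.Network("networks/base.nc")  # input named in the docstring above

    # Step 1: map all voltage levels to 380 kV (line type is a hypothetical config value).
    n, trafo_map = simplify_network_to_380(n, linetype_380="Al/St 240/40 4-bundle 380.0")

    # Step 2: fold multi-node DC link chains into single representative links.
    n, link_map = simplify_links(n, p_max_pu=1.0)

    # Step 3: remove dead-end stubs.
    n, stub_map = remove_stubs(n, {"remove_stubs_across_borders": True})

    busmaps = [trafo_map, link_map, stub_map]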
@@ -149,123 +127,7 @@ def simplify_network_to_380(n, linetype_380):
     return n, trafo_map


-def _prepare_connection_costs_per_link(n, costs, renewable_carriers, length_factor):
+def _remove_clustered_buses_and_branches(n: pypsa.Network, busmap: pd.Series) -> None:
-    if n.links.empty:
-        return {}
-
-    return {
-        tech: (
-            n.links.length
-            * length_factor
-            * (
-                n.links.underwater_fraction
-                * costs.at[tech + "-connection-submarine", "capital_cost"]
-                + (1.0 - n.links.underwater_fraction)
-                * costs.at[tech + "-connection-underground", "capital_cost"]
-            )
-        )
-        for tech in renewable_carriers
-        if tech.startswith("offwind")
-    }
-
-
-def _compute_connection_costs_to_bus(
-    n,
-    busmap,
-    costs,
-    renewable_carriers,
-    length_factor,
-    connection_costs_per_link=None,
-    buses=None,
-):
-    if connection_costs_per_link is None:
-        connection_costs_per_link = _prepare_connection_costs_per_link(
-            n, costs, renewable_carriers, length_factor
-        )
-
-    if buses is None:
-        buses = busmap.index[busmap.index != busmap.values]
-
-    connection_costs_to_bus = pd.DataFrame(index=buses)
-
-    for tech in connection_costs_per_link:
-        adj = n.adjacency_matrix(
-            weights=pd.concat(
-                dict(
-                    Link=connection_costs_per_link[tech].reindex(n.links.index),
-                    Line=pd.Series(0.0, n.lines.index),
-                )
-            )
-        )
-
-        costs_between_buses = dijkstra(
-            adj, directed=False, indices=n.buses.index.get_indexer(buses)
-        )
-        connection_costs_to_bus[tech] = costs_between_buses[
-            np.arange(len(buses)), n.buses.index.get_indexer(busmap.loc[buses])
-        ]
-
-    return connection_costs_to_bus
-
-
-def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus):
-    connection_costs = {}
-    for tech in connection_costs_to_bus:
-        tech_b = n.generators.carrier == tech
-        costs = (
-            n.generators.loc[tech_b, "bus"]
-            .map(connection_costs_to_bus[tech])
-            .loc[lambda s: s > 0]
-        )
-        if not costs.empty:
-            n.generators.loc[costs.index, "capital_cost"] += costs
-            logger.info(
-                "Displacing {} generator(s) and adding connection costs to capital_costs: {} ".format(
-                    tech,
-                    ", ".join(
-                        "{:.0f} Eur/MW/a for `{}`".format(d, b)
-                        for b, d in costs.items()
-                    ),
-                )
-            )
-            connection_costs[tech] = costs
-
-
-def _aggregate_and_move_components(
-    n,
-    busmap,
-    connection_costs_to_bus,
-    aggregate_one_ports={"Load", "StorageUnit"},
-    aggregation_strategies=dict(),
-    exclude_carriers=None,
-):
-    def replace_components(n, c, df, pnl):
-        n.mremove(c, n.df(c).index)
-
-        import_components_from_dataframe(n, df, c)
-        for attr, df in pnl.items():
-            if not df.empty:
-                import_series_from_dataframe(n, df, c, attr)
-
-    _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus)
-
-    generator_strategies = aggregation_strategies["generators"]
-
-    carriers = set(n.generators.carrier) - set(exclude_carriers)
-    generators, generators_pnl = aggregateoneport(
-        n,
-        busmap,
-        "Generator",
-        carriers=carriers,
-        custom_strategies=generator_strategies,
-    )
-
-    replace_components(n, "Generator", generators, generators_pnl)
-
-    for one_port in aggregate_one_ports:
-        df, pnl = aggregateoneport(n, busmap, component=one_port)
-        replace_components(n, one_port, df, pnl)
-
     buses_to_del = n.buses.index.difference(busmap)
     n.mremove("Bus", buses_to_del)
     for c in n.branch_components:
@@ -274,14 +136,8 @@ def _aggregate_and_move_components(


 def simplify_links(
-    n,
-    costs,
-    renewables,
-    length_factor,
-    p_max_pu,
-    exclude_carriers,
-    aggregation_strategies=dict(),
-):
+    n: pypsa.Network, p_max_pu: int | float
+) -> Tuple[pypsa.Network, pd.Series]:
     ## Complex multi-node links are folded into end-points
     logger.info("Simplifying connected link components")

@@ -343,13 +199,6 @@ def simplify_links(

     busmap = n.buses.index.to_series()

-    connection_costs_per_link = _prepare_connection_costs_per_link(
-        n, costs, renewables, length_factor
-    )
-    connection_costs_to_bus = pd.DataFrame(
-        0.0, index=n.buses.index, columns=list(connection_costs_per_link)
-    )
-
     node_corsica = find_closest_bus(
         n,
         x=9.44802,
@@ -375,15 +224,6 @@ def simplify_links(
             n.buses.loc[b, ["x", "y"]], n.buses.loc[buses[1:-1], ["x", "y"]]
         )
         busmap.loc[buses] = b[np.r_[0, m.argmin(axis=0), 1]]
-        connection_costs_to_bus.loc[buses] += _compute_connection_costs_to_bus(
-            n,
-            busmap,
-            costs,
-            renewables,
-            length_factor,
-            connection_costs_per_link,
-            buses,
-        )

     all_links = [i for _, i in sum(links, [])]

@@ -421,61 +261,41 @@ def simplify_links(
             params.setdefault(attr, default)
         n.links.loc[name] = pd.Series(params)

-        # n.add("Link", **params)
+        # n.add("Link", name, **params)

     logger.debug("Collecting all components using the busmap")

+    _remove_clustered_buses_and_branches(n, busmap)
+
     # Change carrier type of all added super_nodes to "AC"
     n.buses.loc[added_supernodes, "carrier"] = "AC"

-    _aggregate_and_move_components(
-        n,
-        busmap,
-        connection_costs_to_bus,
-        aggregation_strategies=aggregation_strategies,
-        exclude_carriers=exclude_carriers,
-    )
     return n, busmap


 def remove_stubs(
-    n,
-    costs,
-    renewable_carriers,
-    length_factor,
-    simplify_network,
-    aggregation_strategies=dict(),
-):
+    n: pypsa.Network, simplify_network: dict
+) -> Tuple[pypsa.Network, pd.Series]:
     logger.info("Removing stubs")

     across_borders = simplify_network["remove_stubs_across_borders"]
     matching_attrs = [] if across_borders else ["country"]
     busmap = busmap_by_stubs(n, matching_attrs)

-    connection_costs_to_bus = _compute_connection_costs_to_bus(
-        n, busmap, costs, renewable_carriers, length_factor
-    )
-
-    _aggregate_and_move_components(
-        n,
-        busmap,
-        connection_costs_to_bus,
-        aggregation_strategies=aggregation_strategies,
-        exclude_carriers=simplify_network["exclude_carriers"],
-    )
+    _remove_clustered_buses_and_branches(n, busmap)

     return n, busmap


-def aggregate_to_substations(n, aggregation_strategies=dict(), buses_i=None):
+def aggregate_to_substations(
+    n: pypsa.Network,
+    buses_i: pd.Index | list,
+    aggregation_strategies: dict | None = None,
+) -> Tuple[pypsa.Network, pd.Series]:
     # can be used to aggregate a selection of buses to electrically closest neighbors
-    # if no buses are given, nodes that are no substations or without offshore connection are aggregated
-    if buses_i is None:
-        logger.info(
-            "Aggregating buses that are no substations or have no valid offshore connection"
-        )
-        buses_i = list(set(n.buses.index) - set(n.generators.bus) - set(n.loads.bus))
+    logger.info("Aggregating buses to substations")
+    if aggregation_strategies is None:
+        aggregation_strategies = dict()

     weight = pd.concat(
         {
@@ -503,49 +323,21 @@ def aggregate_to_substations(n, aggregation_strategies=dict(), buses_i=None):
     busmap.loc[buses_i] = dist.idxmin(1)

     line_strategies = aggregation_strategies.get("lines", dict())
-    generator_strategies = aggregation_strategies.get("generators", dict())
-    one_port_strategies = aggregation_strategies.get("one_ports", dict())
+    bus_strategies = aggregation_strategies.get("buses", dict())
+    bus_strategies.setdefault("substation_lv", lambda x: bool(x.sum()))
+    bus_strategies.setdefault("substation_off", lambda x: bool(x.sum()))

     clustering = get_clustering_from_busmap(
         n,
         busmap,
-        aggregate_generators_weighted=True,
-        aggregate_generators_carriers=None,
-        aggregate_one_ports=["Load", "StorageUnit"],
         line_length_factor=1.0,
+        bus_strategies=bus_strategies,
         line_strategies=line_strategies,
-        generator_strategies=generator_strategies,
-        one_port_strategies=one_port_strategies,
-        scale_link_capital_costs=False,
     )
     return clustering.network, busmap


-def cluster(
-    n,
-    n_clusters,
-    focus_weights,
-    solver_name,
-    algorithm="hac",
-    feature=None,
-    aggregation_strategies=dict(),
-):
-    logger.info(f"Clustering to {n_clusters} buses")
-
-    clustering = clustering_for_n_clusters(
-        n,
-        n_clusters,
-        custom_busmap=False,
-        aggregation_strategies=aggregation_strategies,
-        solver_name=solver_name,
-        algorithm=algorithm,
-        feature=feature,
-        focus_weights=focus_weights,
-    )
-
-    return clustering.network, clustering.busmap
-
-
 def find_closest_bus(n, x, y, tol=2000):
     """
     Find the index of the closest bus to the given coordinates within a specified tolerance.
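The `bool(x.sum())` strategy registered for `substation_lv` and `substation_off` above acts as a logical OR over the buses merged into one cluster; a tiny illustration with hypothetical flags:

    import pandas as pd

    # Hypothetical substation flags of three buses that get aggregated into one cluster.
    flags = pd.Series([False, True, False])

    # The aggregation strategy set above: the clustered bus counts as a substation
    # as soon as any of its member buses does.
    or_aggregate = lambda x: bool(x.sum())
    print(or_aggregate(flags))  # True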
@@ -586,71 +378,28 @@ if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake

-        snakemake = mock_snakemake("simplify_network", simpl="", run="all")
+        snakemake = mock_snakemake("simplify_network")
     configure_logging(snakemake)
     set_scenario_config(snakemake)

     params = snakemake.params
-    solver_name = snakemake.config["solving"]["solver"]["name"]

     n = pypsa.Network(snakemake.input.network)
     Nyears = n.snapshot_weightings.objective.sum() / 8760
+    buses_prev, lines_prev, links_prev = len(n.buses), len(n.lines), len(n.links)

-    # remove integer outputs for compatibility with PyPSA v0.26.0
-    n.generators.drop("n_mod", axis=1, inplace=True, errors="ignore")
-
     linetype_380 = snakemake.config["lines"]["types"][380]
     n, trafo_map = simplify_network_to_380(n, linetype_380)

-    technology_costs = load_costs(
-        snakemake.input.tech_costs,
-        params.costs,
-        params.max_hours,
-        Nyears,
-    )
-
-    n, simplify_links_map = simplify_links(
-        n,
-        technology_costs,
-        params.renewable_carriers,
-        params.length_factor,
-        params.p_max_pu,
-        params.simplify_network["exclude_carriers"],
-        params.aggregation_strategies,
-    )
+    n, simplify_links_map = simplify_links(n, params.p_max_pu)

     busmaps = [trafo_map, simplify_links_map]

     if params.simplify_network["remove_stubs"]:
-        n, stub_map = remove_stubs(
-            n,
-            technology_costs,
-            params.renewable_carriers,
-            params.length_factor,
-            params.simplify_network,
-            aggregation_strategies=params.aggregation_strategies,
-        )
+        n, stub_map = remove_stubs(n, params.simplify_network)
         busmaps.append(stub_map)

-    if params.simplify_network["to_substations"]:
-        n, substation_map = aggregate_to_substations(n, params.aggregation_strategies)
-        busmaps.append(substation_map)
-
-    # treatment of outliers (nodes without a profile for considered carrier):
-    # all nodes that have no profile of the given carrier are being aggregated to closest neighbor
-    if params.simplify_network["algorithm"] == "hac":
-        carriers = params.simplify_network["feature"].split("-")[0].split("+")
-        for carrier in carriers:
-            buses_i = list(
-                set(n.buses.index) - set(n.generators.query("carrier == @carrier").bus)
-            )
-            logger.info(
-                f"clustering preparation (hac): aggregating {len(buses_i)} buses of type {carrier}."
-            )
-            n, busmap_hac = aggregate_to_substations(
-                n, params.aggregation_strategies, buses_i
-            )
-            busmaps.append(busmap_hac)
+    substations_i = n.buses.query("substation_lv or substation_off").index

     # some entries in n.buses are not updated in previous functions, therefore can be wrong. as they are not needed
     # and are lost when clustering (for example with the simpl wildcard), we remove them for consistency:

@@ -659,8 +408,6 @@ if __name__ == "__main__":
         "tags",
         "under_construction",
         "onshore_bus",
-        "substation_lv",
-        "substation_off",
         "geometry",
         "underground",
         "project_status",

@@ -668,30 +415,39 @@ if __name__ == "__main__":
     n.buses.drop(remove, axis=1, inplace=True, errors="ignore")
     n.lines.drop(remove, axis=1, errors="ignore", inplace=True)

-    if snakemake.wildcards.simpl:
-        # shapes = n.shapes
-        n, cluster_map = cluster(
-            n,
-            int(snakemake.wildcards.simpl),
-            params.focus_weights,
-            solver_name,
-            params.simplify_network["algorithm"],
-            params.simplify_network["feature"],
-            params.aggregation_strategies,
+    if params.simplify_network["to_substations"]:
+        n, substation_map = aggregate_to_substations(
+            n, substations_i, params.aggregation_strategies
         )
-        # n.shapes = shapes
-        busmaps.append(cluster_map)
+        busmaps.append(substation_map)

-    update_p_nom_max(n)
+    # all buses without shapes need to be clustered to their closest neighbor for HAC
+    if params.cluster_network["algorithm"] == "hac":
+        buses_i = list(n.buses.index.difference(n.shapes.idx))
+        logger.info(
+            "Preparing for HAC-Clustering. "
+            f"Aggregating {len(buses_i)} buses without Voronoi shapes to closest neighbor."
+        )
+        n, busmap_hac = aggregate_to_substations(
+            n, buses_i, params.aggregation_strategies
+        )
+        busmaps.append(busmap_hac)

     busmap_s = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0])
     busmap_s.to_csv(snakemake.output.busmap)

     for which in ["regions_onshore", "regions_offshore"]:
         regions = gpd.read_file(snakemake.input[which])
-        clustered_regions = cluster_regions(busmaps, regions)
+        clustered_regions = cluster_regions(busmaps, regions, with_country=True)
         clustered_regions.to_file(snakemake.output[which])
         # append_bus_shapes(n, clustered_regions, type=which.split("_")[1])

     n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
     n.export_to_netcdf(snakemake.output.network)
+
+    logger.info(
+        f"Simplified network:\n"
+        f"Buses: {buses_prev} to {len(n.buses)}\n"
+        f"Lines: {lines_prev} to {len(n.lines)}\n"
+        f"Links: {links_prev} to {len(n.links)}"
+    )
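The unchanged `reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0])` call composes the per-step busmaps into a single mapping from original to final buses; a toy example with hypothetical bus names:

    from functools import reduce

    import pandas as pd

    # Hypothetical busmaps: the first step merges raw buses, the second merges the result further.
    step1 = pd.Series({"bus1": "busA", "bus2": "busA", "bus3": "busB"})
    step2 = pd.Series({"busA": "clusterX", "busB": "clusterY"})

    busmap_s = reduce(lambda x, y: x.map(y), [step2], step1)
    print(busmap_s.to_dict())  # {'bus1': 'clusterX', 'bus2': 'clusterX', 'bus3': 'clusterY'}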
@@ -51,13 +51,6 @@ logger = logging.getLogger(__name__)
 pypsa.pf.logger.setLevel(logging.WARNING)


-def add_land_use_constraint(n, planning_horizons, config):
-    if "m" in snakemake.wildcards.clusters:
-        _add_land_use_constraint_m(n, planning_horizons, config)
-    else:
-        _add_land_use_constraint(n)
-
-
 def add_land_use_constraint_perfect(n):
     """
     Add global constraints for tech capacity limit.

@@ -121,7 +114,7 @@ def add_land_use_constraint_perfect(n):
     return n


-def _add_land_use_constraint(n):
+def add_land_use_constraint(n):
     # warning: this will miss existing offwind which is not classed AC-DC and has carrier 'offwind'

     for carrier in [

@@ -159,58 +152,6 @@ def _add_land_use_constraint(n):
     n.generators["p_nom_max"] = n.generators["p_nom_max"].clip(lower=0)


-def _add_land_use_constraint_m(n, planning_horizons, config):
-    # if generators clustering is lower than network clustering, land_use accounting is at generators clusters
-
-    grouping_years = config["existing_capacities"]["grouping_years_power"]
-    current_horizon = snakemake.wildcards.planning_horizons
-
-    for carrier in [
-        "solar",
-        "solar rooftop",
-        "solar-hsat",
-        "onwind",
-        "offwind-ac",
-        "offwind-dc",
-    ]:
-
-        existing = n.generators.loc[n.generators.carrier == carrier, "p_nom"]
-        ind = list(
-            {i.split(sep=" ")[0] + " " + i.split(sep=" ")[1] for i in existing.index}
-        )
-
-        previous_years = [
-            str(y)
-            for y in set(planning_horizons + grouping_years)
-            if y < int(snakemake.wildcards.planning_horizons)
-        ]
-
-        for p_year in previous_years:
-            ind2 = [
-                i for i in ind if i + " " + carrier + "-" + p_year in existing.index
-            ]
-            sel_current = [i + " " + carrier + "-" + current_horizon for i in ind2]
-            sel_p_year = [i + " " + carrier + "-" + p_year for i in ind2]
-            n.generators.loc[sel_current, "p_nom_max"] -= existing.loc[
-                sel_p_year
-            ].rename(lambda x: x[:-4] + current_horizon)
-
-    # check if existing capacities are larger than technical potential
-    existing_large = n.generators[
-        n.generators["p_nom_min"] > n.generators["p_nom_max"]
-    ].index
-    if len(existing_large):
-        logger.warning(
-            f"Existing capacities larger than technical potential for {existing_large},\
-                adjust technical potential to existing capacities"
-        )
-        n.generators.loc[existing_large, "p_nom_max"] = n.generators.loc[
-            existing_large, "p_nom_min"
-        ]
-
-    n.generators["p_nom_max"] = n.generators["p_nom_max"].clip(lower=0)
-
-
 def add_solar_potential_constraints(n, config):
     """
     Add constraint to make sure the sum capacity of all solar technologies (fixed, tracking, ets. ) is below the region potential.
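The removed `_add_land_use_constraint_m` helper identified the spatial cluster of an existing generator from the first two tokens of its name before subtracting already-built capacity from the technical potential of the current horizon. For illustration, with hypothetical generator names of the form "<bus> <carrier>-<build year>":

    # Hypothetical generator names of already-built capacity.
    existing_index = ["DE1 0 solar-2020", "DE1 1 solar-2020", "DE1 0 solar-2025"]

    # Same token logic as in the removed helper: the first two tokens identify the bus.
    clusters = {" ".join(name.split(" ")[:2]) for name in existing_index}
    print(sorted(clusters))  # ['DE1 0', 'DE1 1']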
@@ -246,26 +187,6 @@ def add_solar_potential_constraints(n, config):
             lambda x: (x * factor) if carrier in x.name else x, axis=1
         )

-    if "m" in snakemake.wildcards.clusters:
-        location = pd.Series(
-            [" ".join(i.split(" ")[:2]) for i in n.generators.index],
-            index=n.generators.index,
-        )
-        ggrouper = pd.Series(
-            n.generators.loc[solar].index.rename("bus").map(location),
-            index=n.generators.loc[solar].index,
-        ).to_xarray()
-        rhs = (
-            n.generators.loc[solar_today, "p_nom_max"]
-            .groupby(n.generators.loc[solar_today].index.rename("bus").map(location))
-            .sum()
-            - n.generators.loc[solar_hsat, "p_nom_opt"]
-            .groupby(n.generators.loc[solar_hsat].index.rename("bus").map(location))
-            .sum()
-            * land_use_factors["solar-hsat"]
-        ).clip(lower=0)
-
-    else:
     location = pd.Series(n.buses.index, index=n.buses.index)
     ggrouper = n.generators.loc[solar].bus
     rhs = (

@@ -515,7 +436,7 @@ def prepare_network(
         n.snapshot_weightings[:] = 8760.0 / nhours

     if foresight == "myopic":
-        add_land_use_constraint(n, planning_horizons, config)
+        add_land_use_constraint(n)

     if foresight == "perfect":
         n = add_land_use_constraint_perfect(n)
@@ -1134,7 +1055,6 @@ if __name__ == "__main__":
         snakemake = mock_snakemake(
             "solve_sector_network_perfect",
             configfiles="../config/test/config.perfect.yaml",
-            simpl="",
             opts="",
             clusters="5",
             ll="v1.0",
@@ -29,7 +29,6 @@ if __name__ == "__main__":
         snakemake = mock_snakemake(
             "solve_operations_network",
             configfiles="test/config.electricity.yaml",
-            simpl="",
             opts="",
             clusters="5",
             ll="v1.5",
@@ -20,17 +20,17 @@ Relevant Settings
 Inputs
 ------

-- ``networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: the network whose
+- ``networks/base_s_{clusters}_elec_l{ll}_{opts}.nc``: the network whose
   snapshots are to be aggregated
-- ``resources/hourly_heat_demand_total_elec_s{simpl}_{clusters}.nc``: the total
+- ``resources/hourly_heat_demand_total_base_s_{clusters}.nc``: the total
   hourly heat demand
-- ``resources/solar_thermal_total_elec_s{simpl}_{clusters}.nc``: the total
+- ``resources/solar_thermal_total_base_s_{clusters}.nc``: the total
   hourly solar thermal generation

 Outputs
 -------

-- ``snapshot_weightings_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.csv``
+- ``snapshot_weightings_base_s_{clusters}_elec_l{ll}_{opts}.csv``

 Description
 -----------

@@ -63,7 +63,6 @@ if __name__ == "__main__":
         snakemake = mock_snakemake(
             "time_aggregation",
             configfiles="test/config.overnight.yaml",
-            simpl="",
             opts="",
             clusters="37",
             ll="v1.0",
test.sh
2
test.sh
@ -7,7 +7,7 @@ set -x && \
|
|||||||
snakemake -call solve_elec_networks --configfile config/test/config.electricity.yaml --rerun-triggers=mtime && \
|
snakemake -call solve_elec_networks --configfile config/test/config.electricity.yaml --rerun-triggers=mtime && \
|
||||||
snakemake -call all --configfile config/test/config.overnight.yaml --rerun-triggers=mtime && \
|
snakemake -call all --configfile config/test/config.overnight.yaml --rerun-triggers=mtime && \
|
||||||
snakemake -call all --configfile config/test/config.myopic.yaml --rerun-triggers=mtime && \
|
snakemake -call all --configfile config/test/config.myopic.yaml --rerun-triggers=mtime && \
|
||||||
snakemake -call all --configfile config/test/config.perfect.yaml --rerun-triggers=mtime && \
|
snakemake -call make_summary_perfect --configfile config/test/config.perfect.yaml --rerun-triggers=mtime && \
|
||||||
snakemake -call all --configfile config/test/config.scenarios.yaml --rerun-triggers=mtime -n && \
|
snakemake -call all --configfile config/test/config.scenarios.yaml --rerun-triggers=mtime -n && \
|
||||||
|
|
||||||
set +x
|
set +x
|
||||||