Merge branch 'master' into validation
commit 07224751e5

.github/ISSUE_TEMPLATE/config.yml
@@ -3,3 +3,6 @@ contact_links:
  - name: PyPSA Mailing List
    url: https://groups.google.com/forum/#!forum/pypsa
    about: Please ask and answer general usage questions here.
  - name: Stackoverflow
    url: https://stackoverflow.com/questions/tagged/pypsa
    about: Please ask and answer code-related questions here.
@@ -30,7 +30,7 @@ repos:

# Find common spelling mistakes in comments and docstrings
- repo: https://github.com/codespell-project/codespell
-  rev: v2.2.4
+  rev: v2.2.5
  hooks:
  - id: codespell
    args: ['--ignore-regex="(\b[A-Z]+\b)"', '--ignore-words-list=fom,appartment,bage,ore,setis,tabacco,berfore'] # Ignore capital case words, e.g. country codes

@@ -39,7 +39,7 @@ repos:

# Make docstrings PEP 257 compliant
- repo: https://github.com/PyCQA/docformatter
-  rev: v1.6.3
+  rev: v1.7.3
  hooks:
  - id: docformatter
    args: ["--in-place", "--make-summary-multi-line", "--pre-summary-newline"]

@@ -67,7 +67,7 @@ repos:

# Do YAML formatting (before the linter checks it for misses)
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
-  rev: v2.8.0
+  rev: v2.9.0
  hooks:
  - id: pretty-format-yaml
    args: [--autofix, --indent, "2", --preserve-quotes]

@@ -87,6 +87,6 @@ repos:

# Check for FSFE REUSE compliance (licensing)
- repo: https://github.com/fsfe/reuse-tool
-  rev: v1.1.2
+  rev: v2.0.0
  hooks:
  - id: reuse
@@ -4,8 +4,14 @@

version: 2

build:
  os: ubuntu-22.04
  tools:
    python: "3.11"
  apt_packages:
  - graphviz

python:
  version: 3.8
  install:
  - requirements: doc/requirements.txt
-  system_packages: true
+  system_packages: false
@@ -11,6 +11,7 @@ SPDX-License-Identifier: CC-BY-4.0

[![Zenodo PyPSA-Eur-Sec](https://zenodo.org/badge/DOI/10.5281/zenodo.3938042.svg)](https://doi.org/10.5281/zenodo.3938042)
[![Snakemake](https://img.shields.io/badge/snakemake-≥5.0.0-brightgreen.svg?style=flat)](https://snakemake.readthedocs.io)
[![REUSE status](https://api.reuse.software/badge/github.com/pypsa/pypsa-eur)](https://api.reuse.software/info/github.com/pypsa/pypsa-eur)
[![Stack Exchange questions](https://img.shields.io/stackexchange/stackoverflow/t/pypsa)](https://stackoverflow.com/questions/tagged/pypsa)

# PyPSA-Eur: A Sector-Coupled Open Optimisation Model of the European Energy System

@@ -90,6 +91,14 @@ to 50-200 nodes.

Already-built versions of the model can be found in the accompanying [Zenodo
repository](https://doi.org/10.5281/zenodo.3601881).

# Contributing and Support
We strongly welcome anyone interested in contributing to this project. If you have any ideas, suggestions or encounter problems, feel invited to file issues or make pull requests on GitHub.
- In case of code-related **questions**, please post on [Stack Overflow](https://stackoverflow.com/questions/tagged/pypsa).
- For non-programming related and more general questions, please refer to the [mailing list](https://groups.google.com/group/pypsa).
- To **discuss** with other PyPSA users, organise projects, share news, and get in touch with the community, you can use the [Discord server](https://discord.com/invite/AnuJBk23FU).
- For **bugs and feature requests**, please use the [PyPSA-Eur GitHub Issues page](https://github.com/PyPSA/pypsa-eur/issues).

# Licence

The code in PyPSA-Eur is released as free software under the
@@ -471,6 +471,8 @@ sector:
  dac: true
  co2_vent: false
  allam_cycle: false
  hydrogen_fuel_cell: true
  hydrogen_turbine: false
  SMR: true
  regional_co2_sequestration_potential:
    enable: false # enable regionally resolved geological co2 storage potential

@@ -623,10 +625,10 @@ clustering:

solving:
  #tmpdir: "path/to/tmp"
  options:
    formulation: kirchhoff
    clip_p_max_pu: 1.e-2
    linearized_unit_commitment: true
    load_shedding: false
    transmission_losses: 0
    noisy_costs: true
    skip_iterations: true
    track_iterations: false

@@ -905,6 +907,7 @@ plotting:
    H2 pipeline: '#f081dc'
    H2 pipeline retrofitted: '#ba99b5'
    H2 Fuel Cell: '#c251ae'
    H2 turbine: '#991f83'
    H2 Electrolysis: '#ff29d9'
    # ammonia
    NH3: '#46caf0'
@@ -31,6 +31,14 @@ snapshots:
  end: "2013-03-08"

electricity:
  co2limit: 100.e+6

  extendable_carriers:
    Generator: [OCGT]
    StorageUnit: [battery]
    Store: [H2]
    Link: [H2 pipeline]

  renewable_carriers: [solar, onwind, offwind-ac, offwind-dc]

atlite:
@@ -28,6 +28,14 @@ snapshots:
  end: "2013-03-08"

electricity:
  co2limit: 100.e+6

  extendable_carriers:
    Generator: [OCGT]
    StorageUnit: [battery]
    Store: [H2]
    Link: [H2 pipeline]

  renewable_carriers: [solar, onwind, offwind-ac, offwind-dc]

atlite:
@@ -36,6 +36,7 @@ sys.path.insert(0, os.path.abspath("../scripts"))
extensions = [
    #'sphinx.ext.autodoc',
    #'sphinx.ext.autosummary',
    "myst_parser",
    "sphinx.ext.autosectionlabel",
    "sphinx.ext.intersphinx",
    "sphinx.ext.todo",
@@ -1,7 +1,7 @@
,Unit,Values,Description
options,,,
-- formulation,--,"Any of {'angles', 'kirchhoff', 'cycles', 'ptdf'}","Specifies which variant of linearized power flow formulations to use in the optimisation problem. Recommended is 'kirchhoff'. Explained in `this article <https://arxiv.org/abs/1704.01881>`_."
-- load_shedding,bool/float,"{'true','false', float}","Add generators with very high marginal cost to simulate load shedding and avoid problem infeasibilities. If load shedding is a float, it denotes the marginal cost in EUR/kWh."
-- transmission_losses,int,"[0-9]","Add piecewise linear approximation of transmission losses based on n tangents. Defaults to 0, which means losses are ignored."
-- noisy_costs,bool,"{'true','false'}","Add random noise to marginal cost of generators by :math:`\mathcal{U}(0.009,0.011)` and capital cost of lines and links by :math:`\mathcal{U}(0.09,0.11)`."
-- min_iterations,--,int,"Minimum number of solving iterations in between which resistance and reactance (``x/r``) are updated for branches according to ``s_nom_opt`` of the previous run."
-- max_iterations,--,int,"Maximum number of solving iterations in between which resistance and reactance (``x/r``) are updated for branches according to ``s_nom_opt`` of the previous run."
@@ -31,7 +31,9 @@ PyPSA-Eur: A Sector-Coupled Open Optimisation Model of the European Energy Syste
   :target: https://api.reuse.software/info/github.com/pypsa/pypsa-eur
   :alt: REUSE

.. image:: https://img.shields.io/stackexchange/stackoverflow/t/pypsa
   :target: https://stackoverflow.com/questions/tagged/pypsa
   :alt: Stackoverflow

PyPSA-Eur is an open model dataset of the European energy system at the
transmission network level that covers the full ENTSO-E area. It covers demand

@@ -274,4 +276,5 @@ The included ``.nc`` files are PyPSA network files which can be imported with Py
   licenses
   limitations
   contributing
   support
   publications
@@ -39,7 +39,7 @@ The environment can be installed and activated using

.. code:: bash

-    .../pypsa-eur % mamba create -f envs/environment.yaml
+    .../pypsa-eur % mamba env create -f envs/environment.yaml

    .../pypsa-eur % mamba activate pypsa-eur
@@ -10,6 +10,7 @@ Release Notes
Upcoming Release
================

* A ``params:`` section in rule definitions is added to track changed settings in ``config.yaml``. The goal is to automatically re-execute rules whose parameters have changed. See `Non-file parameters for rules <https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules>`_ in the Snakemake documentation.
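  A condensed sketch of this pattern, pieced together from the rules and scripts touched in this commit (not a verbatim excerpt of any single file): the rule declares the configuration values it depends on under ``params:``, and the script reads them via ``snakemake.params`` instead of ``snakemake.config``.

  .. code:: python

      # rules/*.smk — declare the settings the rule depends on
      rule build_bus_regions:
          params:
              countries=config["countries"],
          input:
              country_shapes=RESOURCES + "country_shapes.geojson",
          script:
              "../scripts/build_bus_regions.py"  # script path shown for illustration


      # scripts/build_bus_regions.py — read the declared value; Snakemake now
      # re-runs the rule whenever this entry changes in config.yaml
      countries = snakemake.params.countries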
* **Important:** The configuration files are now located in the ``config`` directory. This applies to ``config.default.yaml``, ``config.yaml`` as well as the test configuration files, which are now located in ``config/test``. Config files that are still in the root directory will be ignored.

@@ -17,7 +18,17 @@ Upcoming Release

* Renamed the PyPSA-Eur script ``build_load_data`` to ``build_electricity_demand`` and ``retrieve_load_data`` to ``retrieve_electricity_demand``.

* Fix the readthedocs documentation build.

* Add plain hydrogen turbine as additional re-electrification option besides
  hydrogen fuel cell. Add switches for both re-electrification options under
  ``sector: hydrogen_turbine:`` and ``sector: hydrogen_fuel_cell:``.

* Remove ``vresutils`` dependency.

* Add option to include a piecewise linear approximation of transmission losses,
  e.g. by setting ``solving: options: transmission_losses: 2`` for an
  approximation with two tangents.

PyPSA-Eur 0.8.0 (18th March 2023)
=================================
@@ -2,12 +2,13 @@
#
# SPDX-License-Identifier: CC0-1.0

setuptools
sphinx
sphinx_book_theme
sphinxcontrib-bibtex
myst-parser  # recommonmark is deprecated, https://stackoverflow.com/a/71660856/13573820

pypsa
vresutils>=0.3.1
powerplantmatching>=0.5.5
atlite>=0.2.9
dask[distributed]
@@ -133,12 +133,12 @@ The coefficient of performance (COP) of air- and ground-sourced heat pumps depen
For the sink water temperature Tsink we assume 55 °C [`Config <https://github.com/PyPSA/pypsa-eur-sec/blob/3daff49c9999ba7ca7534df4e587e1d516044fc3/config.default.yaml#L207>`_ file]. For the time- and location-dependent source temperatures Tsource, we rely on the `ERA5 <https://doi.org/10.1002/qj.3803>`_ reanalysis weather data. The temperature differences are converted into COP time series using results from a regression analysis performed in the study by `Staffell et al. <https://pubs.rsc.org/en/content/articlelanding/2012/EE/c2ee22653g>`_. For air-sourced heat pumps (ASHP), we use the function:

.. math::
-   COP (\Delta T) = 6.81 + 0.121\Delta T + 0.000630\Delta T^2
+   COP (\Delta T) = 6.81 - 0.121\Delta T + 0.000630\Delta T^2

for ground-sourced heat pumps (GSHP), we use the function:

.. math::
-   COP(\Delta T) = 8.77 + 0.150\Delta T + 0.000734\Delta T^2
+   COP(\Delta T) = 8.77 - 0.150\Delta T + 0.000734\Delta T^2
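The corrected signs give the physically expected behaviour: the COP falls as the temperature lift :math:`\Delta T = T_{sink} - T_{source}` grows. A minimal standalone sketch of these regressions (mirroring ``coefficient_of_performance`` in ``build_cop_profiles.py``; written here only for illustration):

.. code:: python

    def coefficient_of_performance(delta_T, source="air"):
        """
        Staffell et al. (2012) regressions for heat pump COP as a function of
        the temperature lift delta_T = T_sink - T_source.
        """
        if source == "air":
            return 6.81 - 0.121 * delta_T + 0.000630 * delta_T**2
        elif source == "soil":
            return 8.77 - 0.150 * delta_T + 0.000734 * delta_T**2
        else:
            raise NotImplementedError("'source' must be 'air' or 'soil'")

    # e.g. an air-sourced heat pump lifting heat from 5 °C to the assumed 55 °C sink:
    print(coefficient_of_performance(55 - 5, source="air"))  # ≈ 2.3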
**Resistive heaters**

doc/support.rst (new file)
@@ -0,0 +1,14 @@
..
  SPDX-FileCopyrightText: 2019-2023 The PyPSA-Eur Authors

  SPDX-License-Identifier: CC-BY-4.0

#######################
Support
#######################

* In case of code-related **questions**, please post on `Stack Overflow <https://stackoverflow.com/questions/tagged/pypsa>`_.
* For non-programming related and more general questions, please refer to the `mailing list <https://groups.google.com/group/pypsa>`_.
* To **discuss** with other PyPSA users, organise projects, share news, and get in touch with the community, you can use the `Discord server <https://discord.gg/AnuJBk23FU>`_.
* For **bugs and feature requests**, please use the `issue tracker <https://github.com/PyPSA/pypsa-eur/issues>`_.
* We strongly welcome anyone interested in providing **contributions** to this project. If you have any ideas, suggestions or encounter problems, feel invited to file issues or make pull requests on `GitHub <https://github.com/PyPSA/PyPSA>`_. For further information on how to contribute, please refer to :ref:`contributing`.
@@ -226,7 +226,7 @@ dependencies:
  - nspr=4.35
  - nss=3.88
  - numexpr=2.8.3
-  - numpy=1.23.5
+  - numpy=1.24
  - openjdk=17.0.3
  - openjpeg=2.5.0
  - openpyxl=3.1.0

@@ -378,4 +378,3 @@ dependencies:
  - highspy==1.5.0.dev0
  - pybind11==2.10.3
  - tsam==2.2.2
  - vresutils==0.3.1
@@ -10,7 +10,7 @@ dependencies:
  - python>=3.8
  - pip

-  - pypsa>=0.21.3
+  - pypsa>=0.23
  - atlite>=0.2.9
  - dask

@@ -25,7 +25,7 @@ dependencies:
  - pytables
  - lxml
  - powerplantmatching>=0.5.5
-  - numpy<1.24
+  - numpy
  - pandas>=1.4
  - geopandas>=0.11.0
  - xarray

@@ -55,5 +55,4 @@ dependencies:
  - rasterio!=1.2.10

  - pip:
    - vresutils>=0.3.1
    - tsam>=1.1.0
@@ -19,6 +19,10 @@ if config["enable"].get("prepare_links_p_nom", False):


rule build_electricity_demand:
    params:
        snapshots=config["snapshots"],
        countries=config["countries"],
        load=config["load"],
    input:
        ancient("data/load_raw.csv"),
    output:

@@ -34,6 +38,10 @@ rule build_electricity_demand:


rule build_powerplants:
    params:
        powerplants_filter=config["electricity"]["powerplants_filter"],
        custom_powerplants=config["electricity"]["custom_powerplants"],
        countries=config["countries"],
    input:
        base_network=RESOURCES + "networks/base.nc",
        custom_powerplants="data/custom_powerplants.csv",

@@ -79,6 +87,8 @@ rule base_network:


rule build_shapes:
    params:
        countries=config["countries"],
    input:
        naturalearth=ancient("data/bundle/naturalearth/ne_10m_admin_0_countries.shp"),
        eez=ancient("data/bundle/eez/World_EEZ_v8_2014.shp"),

@@ -104,6 +114,8 @@ rule build_shapes:


rule build_bus_regions:
    params:
        countries=config["countries"],
    input:
        country_shapes=RESOURCES + "country_shapes.geojson",
        offshore_shapes=RESOURCES + "offshore_shapes.geojson",

@@ -125,6 +137,9 @@ rule build_bus_regions:

if config["enable"].get("build_cutout", False):

    rule build_cutout:
        params:
            snapshots=config["snapshots"],
            cutouts=config["atlite"]["cutouts"],
        input:
            regions_onshore=RESOURCES + "regions_onshore.geojson",
            regions_offshore=RESOURCES + "regions_offshore.geojson",

@@ -186,6 +201,8 @@ rule build_ship_raster:


rule build_renewable_profiles:
    params:
        renewable=config["renewable"],
    input:
        base_network=RESOURCES + "networks/base.nc",
        corine=ancient("data/bundle/corine/g250_clc06_V18_5.tif"),

@@ -251,6 +268,9 @@ rule build_monthly_prices:
        "../scripts/build_monthly_prices.py"


rule build_hydro_profile:
    params:
        hydro=config["renewable"]["hydro"],
        countries=config["countries"],
    input:
        country_shapes=RESOURCES + "country_shapes.geojson",
        eia_hydro_generation="data/eia_hydro_annual_generation.csv",

@@ -268,6 +288,14 @@ rule build_hydro_profile:


rule add_electricity:
    params:
        length_factor=config["lines"]["length_factor"],
        scaling_factor=config["load"]["scaling_factor"],
        countries=config["countries"],
        renewable=config["renewable"],
        electricity=config["electricity"],
        conventional=config.get("conventional", {}),
        costs=config["costs"],
    input:
        **{
            f"profile_{tech}": RESOURCES + f"profile_{tech}.nc"

@@ -304,6 +332,15 @@ rule add_electricity:


rule simplify_network:
    params:
        simplify_network=config["clustering"]["simplify_network"],
        aggregation_strategies=config["clustering"].get("aggregation_strategies", {}),
        focus_weights=config.get("focus_weights", None),
        renewable_carriers=config["electricity"]["renewable_carriers"],
        max_hours=config["electricity"]["max_hours"],
        length_factor=config["lines"]["length_factor"],
        p_max_pu=config["links"].get("p_max_pu", 1.0),
        costs=config["costs"],
    input:
        network=RESOURCES + "networks/elec.nc",
        tech_costs=COSTS,

@@ -329,6 +366,16 @@ rule simplify_network:


rule cluster_network:
    params:
        cluster_network=config["clustering"]["cluster_network"],
        aggregation_strategies=config["clustering"].get("aggregation_strategies", {}),
        custom_busmap=config["enable"].get("custom_busmap", False),
        focus_weights=config.get("focus_weights", None),
        renewable_carriers=config["electricity"]["renewable_carriers"],
        conventional_carriers=config["electricity"].get("conventional_carriers", []),
        max_hours=config["electricity"]["max_hours"],
        length_factor=config["lines"]["length_factor"],
        costs=config["costs"],
    input:
        network=RESOURCES + "networks/elec_s{simpl}.nc",
        regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}.geojson",

@@ -360,6 +407,10 @@ rule cluster_network:


rule add_extra_components:
    params:
        extendable_carriers=config["electricity"]["extendable_carriers"],
        max_hours=config["electricity"]["max_hours"],
        costs=config["costs"],
    input:
        network=RESOURCES + "networks/elec_s{simpl}_{clusters}.nc",
        tech_costs=COSTS,

@@ -379,6 +430,14 @@ rule add_extra_components:


rule prepare_network:
    params:
        links=config["links"],
        lines=config["lines"],
        co2base=config["electricity"]["co2base"],
        co2limit=config["electricity"]["co2limit"],
        gaslimit=config["electricity"].get("gaslimit"),
        max_hours=config["electricity"]["max_hours"],
        costs=config["costs"],
    input:
        RESOURCES + "networks/elec_s{simpl}_{clusters}_ec.nc",
        tech_costs=COSTS,
@@ -140,6 +140,8 @@ if not (config["sector"]["gas_network"] or config["sector"]["H2_retrofit"]):


rule build_heat_demands:
    params:
        snapshots=config["snapshots"],
    input:
        pop_layout=RESOURCES + "pop_layout_{scope}.nc",
        regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",

@@ -160,6 +162,8 @@ rule build_heat_demands:


rule build_temperature_profiles:
    params:
        snapshots=config["snapshots"],
    input:
        pop_layout=RESOURCES + "pop_layout_{scope}.nc",
        regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",

@@ -181,6 +185,8 @@ rule build_temperature_profiles:


rule build_cop_profiles:
    params:
        heat_pump_sink_T=config["sector"]["heat_pump_sink_T"],
    input:
        temp_soil_total=RESOURCES + "temp_soil_total_elec_s{simpl}_{clusters}.nc",
        temp_soil_rural=RESOURCES + "temp_soil_rural_elec_s{simpl}_{clusters}.nc",

@@ -208,6 +214,9 @@ rule build_cop_profiles:


rule build_solar_thermal_profiles:
    params:
        snapshots=config["snapshots"],
        solar_thermal=config["solar_thermal"],
    input:
        pop_layout=RESOURCES + "pop_layout_{scope}.nc",
        regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",

@@ -228,6 +237,9 @@ rule build_solar_thermal_profiles:


rule build_energy_totals:
    params:
        countries=config["countries"],
        energy=config["energy"],
    input:
        nuts3_shapes=RESOURCES + "nuts3_shapes.geojson",
        co2="data/eea/UNFCCC_v23.csv",

@@ -253,6 +265,8 @@ rule build_energy_totals:


rule build_biomass_potentials:
    params:
        biomass=config["biomass"],
    input:
        enspreso_biomass=HTTP.remote(
            "https://cidportal.jrc.ec.europa.eu/ftp/jrc-opendata/ENSPRESO/ENSPRESO_BIOMASS.xlsx",

@@ -315,6 +329,10 @@ if not config["sector"]["biomass_transport"]:

if config["sector"]["regional_co2_sequestration_potential"]["enable"]:

    rule build_sequestration_potentials:
        params:
            sequestration_potential=config["sector"][
                "regional_co2_sequestration_potential"
            ],
        input:
            sequestration_potential=HTTP.remote(
                "https://raw.githubusercontent.com/ericzhou571/Co2Storage/main/resources/complete_map_2020_unit_Mt.geojson",

@@ -368,6 +386,8 @@ rule build_salt_cavern_potentials:


rule build_ammonia_production:
    params:
        countries=config["countries"],
    input:
        usgs="data/myb1-2017-nitro.xls",
    output:

@@ -386,6 +406,9 @@ rule build_ammonia_production:


rule build_industry_sector_ratios:
    params:
        industry=config["industry"],
        ammonia=config["sector"].get("ammonia", False),
    input:
        ammonia_production=RESOURCES + "ammonia_production.csv",
        idees="data/jrc-idees-2015",

@@ -405,6 +428,9 @@ rule build_industry_sector_ratios:


rule build_industrial_production_per_country:
    params:
        industry=config["industry"],
        countries=config["countries"],
    input:
        ammonia_production=RESOURCES + "ammonia_production.csv",
        jrc="data/jrc-idees-2015",

@@ -426,6 +452,8 @@ rule build_industrial_production_per_country:


rule build_industrial_production_per_country_tomorrow:
    params:
        industry=config["industry"],
    input:
        industrial_production_per_country=RESOURCES
        + "industrial_production_per_country.csv",

@@ -450,6 +478,9 @@ rule build_industrial_production_per_country_tomorrow:


rule build_industrial_distribution_key:
    params:
        hotmaps_locate_missing=config["industry"].get("hotmaps_locate_missing", False),
        countries=config["countries"],
    input:
        regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
        clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv",

@@ -524,6 +555,9 @@ rule build_industrial_energy_demand_per_node:


rule build_industrial_energy_demand_per_country_today:
    params:
        countries=config["countries"],
        industry=config["industry"],
    input:
        jrc="data/jrc-idees-2015",
        ammonia_production=RESOURCES + "ammonia_production.csv",

@@ -570,6 +604,9 @@ rule build_industrial_energy_demand_per_node_today:

if config["sector"]["retrofitting"]["retro_endogen"]:

    rule build_retro_cost:
        params:
            retrofitting=config["sector"]["retrofitting"],
            countries=config["countries"],
        input:
            building_stock="data/retro/data_building_stock.csv",
            data_tabula="data/retro/tabula-calculator-calcsetbuilding.csv",

@@ -640,6 +677,9 @@ rule build_shipping_demand:


rule build_transport_demand:
    params:
        snapshots=config["snapshots"],
        sector=config["sector"],
    input:
        clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv",
        pop_weighted_energy_totals=RESOURCES

@@ -666,6 +706,18 @@ rule build_transport_demand:

rule prepare_sector_network:
    params:
        co2_budget=config["co2_budget"],
        conventional_carriers=config["existing_capacities"]["conventional_carriers"],
        foresight=config["foresight"],
        costs=config["costs"],
        sector=config["sector"],
        industry=config["industry"],
        pypsa_eur=config["pypsa_eur"],
        length_factor=config["lines"]["length_factor"],
        planning_horizons=config["scenario"]["planning_horizons"],
        countries=config["countries"],
        emissions_scope=config["energy"]["emissions"],
        eurostat_report_year=config["energy"]["eurostat_report_year"],
        RDIR=RDIR,
    input:
        **build_retro_cost_output,
@@ -9,6 +9,9 @@ localrules:


rule plot_network:
    params:
        foresight=config["foresight"],
        plotting=config["plotting"],
    input:
        overrides="data/override_component_attrs",
        network=RESULTS

@@ -67,6 +70,10 @@ rule copy_conda_env:

rule make_summary:
    params:
        foresight=config["foresight"],
        costs=config["costs"],
        snapshots=config["snapshots"],
        scenario=config["scenario"],
        RDIR=RDIR,
    input:
        overrides="data/override_component_attrs",

@@ -114,6 +121,10 @@ rule make_summary:

rule plot_summary:
    params:
        countries=config["countries"],
        planning_horizons=config["scenario"]["planning_horizons"],
        sector_opts=config["scenario"]["sector_opts"],
        plotting=config["plotting"],
        RDIR=RDIR,
    input:
        costs=RESULTS + "csvs/costs.csv",
@@ -19,6 +19,8 @@ if config["enable"].get("retrieve_databundle", True):
        datafiles.extend(["natura/Natura2000_end2015.shp", "GEBCO_2014_2D.nc"])

    rule retrieve_databundle:
        params:
            tutorial=config["tutorial"],
        output:
            expand("data/bundle/{file}", file=datafiles),
        log:
@@ -4,6 +4,13 @@


rule solve_network:
    params:
        solving=config["solving"],
        foresight=config["foresight"],
        planning_horizons=config["scenario"]["planning_horizons"],
        co2_sequestration_potential=config["sector"].get(
            "co2_sequestration_potential", 200
        ),
    input:
        unit_commitment_params="data/unit_commitment.csv",
        network=RESOURCES + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",

@@ -15,8 +22,6 @@ rule solve_network:
        ),
        python=LOGS
        + "solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_python.log",
        memory=LOGS
        + "solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_memory.log",
    benchmark:
        BENCHMARKS + "solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}"
    threads: 4

@@ -31,6 +36,8 @@ rule solve_network:


rule solve_operations_network:
    params:
        options=config["solving"]["options"],
    input:
        network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
    output:

@@ -42,8 +49,6 @@ rule solve_operations_network:
        ),
        python=LOGS
        + "solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_python.log",
        memory=LOGS
        + "solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_memory.log",
    benchmark:
        (
            BENCHMARKS
@@ -4,6 +4,11 @@


rule add_existing_baseyear:
    params:
        baseyear=config["scenario"]["planning_horizons"][0],
        sector=config["sector"],
        existing_capacities=config["existing_capacities"],
        costs=config["costs"],
    input:
        overrides="data/override_component_attrs",
        network=RESULTS

@@ -42,6 +47,10 @@ rule add_existing_baseyear:


rule add_brownfield:
    params:
        H2_retrofit=config["sector"]["H2_retrofit"],
        H2_retrofit_capacity_per_CH4=config["sector"]["H2_retrofit_capacity_per_CH4"],
        threshold_capacity=config["existing_capacities"]["threshold_capacity"],
    input:
        overrides="data/override_component_attrs",
        network=RESULTS

@@ -74,6 +83,13 @@ ruleorder: add_existing_baseyear > add_brownfield


rule solve_sector_network_myopic:
    params:
        solving=config["solving"],
        foresight=config["foresight"],
        planning_horizons=config["scenario"]["planning_horizons"],
        co2_sequestration_potential=config["sector"].get(
            "co2_sequestration_potential", 200
        ),
    input:
        overrides="data/override_component_attrs",
        network=RESULTS

@@ -90,8 +106,6 @@ rule solve_sector_network_myopic:
        + "elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_solver.log",
        python=LOGS
        + "elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_python.log",
        memory=LOGS
        + "elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_memory.log",
    threads: 4
    resources:
        mem_mb=config["solving"]["mem"],
@@ -4,6 +4,13 @@


rule solve_sector_network:
    params:
        solving=config["solving"],
        foresight=config["foresight"],
        planning_horizons=config["scenario"]["planning_horizons"],
        co2_sequestration_potential=config["sector"].get(
            "co2_sequestration_potential", 200
        ),
    input:
        overrides="data/override_component_attrs",
        network=RESULTS

@@ -21,8 +28,6 @@ rule solve_sector_network:
        + "elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_solver.log",
        python=LOGS
        + "elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_python.log",
        memory=LOGS
        + "elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_memory.log",
    threads: config["solving"]["solver"].get("threads", 4)
    resources:
        mem_mb=config["solving"]["mem"],
@@ -82,7 +82,7 @@ def load_network(import_name=None, custom_components=None):
        As in pypsa.Network(import_name)
    custom_components : dict
        Dictionary listing custom components.
-       For using ``snakemake.config['override_components']``
+       For using ``snakemake.params['override_components']``
        in ``config/config.yaml`` define:

        .. code:: yaml

@@ -385,10 +385,11 @@ def mock_snakemake(rulename, configfiles=[], **wildcards):


def override_component_attrs(directory):
    """Tell PyPSA that links can have multiple outputs by
    overriding the component_attrs. This can be done for
    as many buses as you need with format busi for i = 2,3,4,5,....
    See https://pypsa.org/doc/components.html#link-with-multiple-outputs-or-inputs
    """
    Tell PyPSA that links can have multiple outputs by overriding the
    component_attrs. This can be done for as many buses as you need with format
    busi for i = 2,3,4,5,.... See https://pypsa.org/doc/components.html#link-
    with-multiple-outputs-or-inputs.

    Parameters
    ----------
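For reference, the script hunks further below consume this helper exactly as sketched here (a condensed reproduction of calls that appear verbatim later in this diff):

.. code:: python

    # build the component_attrs overrides from the CSVs in data/override_component_attrs
    overrides = override_component_attrs(snakemake.input.overrides)
    # networks loaded with these overrides accept multi-output links (bus2, bus3, ...)
    n = pypsa.Network(snakemake.input.network, override_component_attrs=overrides)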
@@ -49,7 +49,7 @@ def add_brownfield(n, n_p, year):
        )
    ]

-   threshold = snakemake.config["existing_capacities"]["threshold_capacity"]
+   threshold = snakemake.params.threshold_capacity

    if not chp_heat.empty:
        threshold_chp_heat = (

@@ -87,7 +87,7 @@ def add_brownfield(n, n_p, year):

    # deal with gas network
    pipe_carrier = ["gas pipeline"]
-   if snakemake.config["sector"]["H2_retrofit"]:
+   if snakemake.params.H2_retrofit:
        # drop capacities of previous year to avoid duplicating
        to_drop = n.links.carrier.isin(pipe_carrier) & (n.links.build_year != year)
        n.mremove("Link", n.links.loc[to_drop].index)

@@ -98,7 +98,7 @@ def add_brownfield(n, n_p, year):
            & (n.links.build_year != year)
        ].index
        gas_pipes_i = n.links[n.links.carrier.isin(pipe_carrier)].index
-       CH4_per_H2 = 1 / snakemake.config["sector"]["H2_retrofit_capacity_per_CH4"]
+       CH4_per_H2 = 1 / snakemake.params.H2_retrofit_capacity_per_CH4
        fr = "H2 pipeline retrofitted"
        to = "gas pipeline"
        # today's pipe capacity
@@ -85,16 +85,18 @@ It further adds extendable ``generators`` with **zero** capacity for
"""

import logging
from itertools import product

import geopandas as gpd
import numpy as np
import pandas as pd
import powerplantmatching as pm
import pypsa
import scipy.sparse as sparse
import xarray as xr
from _helpers import configure_logging, update_p_nom_max
from powerplantmatching.export import map_country_bus
from vresutils import transfer as vtransfer
from shapely.prepared import prep

idx = pd.IndexSlice

@@ -135,7 +137,7 @@ def _add_missing_carriers_from_costs(n, costs, carriers):
    n.import_components_from_dataframe(emissions, "Carrier")


-def load_costs(tech_costs, config, elec_config, Nyears=1.0):
+def load_costs(tech_costs, config, max_hours, Nyears=1.0):
    # set all asset costs and other parameters
    costs = pd.read_csv(tech_costs, index_col=[0, 1]).sort_index()

@@ -177,7 +179,6 @@ def load_costs(tech_costs, config, elec_config, Nyears=1.0):
        dict(capital_cost=capital_cost, marginal_cost=0.0, co2_emissions=0.0)
    )

    max_hours = elec_config["max_hours"]
    costs.loc["battery"] = costs_for_storage(
        costs.loc["battery storage"],
        costs.loc["battery inverter"],

@@ -215,6 +216,21 @@ def load_powerplants(ppl_fn):
    )


def shapes_to_shapes(orig, dest):
    """
    Adopted from vresutils.transfer.Shapes2Shapes()
    """
    orig_prepped = list(map(prep, orig))
    transfer = sparse.lil_matrix((len(dest), len(orig)), dtype=float)

    for i, j in product(range(len(dest)), range(len(orig))):
        if orig_prepped[j].intersects(dest[i]):
            area = orig[j].intersection(dest[i]).area
            transfer[i, j] = area / dest[i].area

    return transfer
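The ``shapes_to_shapes`` helper above replaces ``vresutils.transfer.Shapes2Shapes``. A small illustration of what it returns, with toy geometries invented for the example (it assumes the function as defined above is in scope):

.. code:: python

    import geopandas as gpd
    from shapely.geometry import box

    # two unit "origin" cells and one "destination" region overlapping both halves
    orig = gpd.GeoSeries([box(0, 0, 1, 1), box(1, 0, 2, 1)])
    dest = gpd.GeoSeries([box(0.5, 0, 1.5, 1)])

    # sparse |dest| x |orig| matrix of intersection areas, normalised by the
    # destination area — here each origin cell covers half of the destination
    transfer = shapes_to_shapes(orig, dest)
    print(transfer.toarray())  # [[0.5 0.5]]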


def attach_load(n, regions, load, nuts3_shapes, countries, scaling=1.0):
    substation_lv_i = n.buses.index[n.buses["substation_lv"]]
    regions = gpd.read_file(regions).set_index("name").reindex(substation_lv_i)

@@ -231,9 +247,7 @@ def attach_load(n, regions, load, nuts3_shapes, countries, scaling=1.0):
            return pd.DataFrame({group.index[0]: l})
        else:
            nuts3_cntry = nuts3.loc[nuts3.country == cntry]
            transfer = vtransfer.Shapes2Shapes(
                group, nuts3_cntry.geometry, normed=False
            ).T.tocsr()
            transfer = shapes_to_shapes(group, nuts3_cntry.geometry).T.tocsr()
            gdp_n = pd.Series(
                transfer.dot(nuts3_cntry["gdp"].fillna(1.0).values), index=group.index
            )

@@ -356,7 +370,7 @@ def attach_conventional_generators(
    ppl,
    conventional_carriers,
    extendable_carriers,
-   conventional_config,
+   conventional_params,
    conventional_inputs,
):
    carriers = set(conventional_carriers) | set(extendable_carriers["Generator"])

@@ -401,17 +415,19 @@ def attach_conventional_generators(
        lifetime=(ppl.dateout - ppl.datein).fillna(np.inf),
    )

-   for carrier in conventional_config:
+   for carrier in conventional_params:
        # Generators with technology affected
        idx = n.generators.query("carrier == @carrier").index

        for attr in list(set(conventional_config[carrier]) & set(n.generators)):
            values = conventional_config[carrier][attr]
        for attr in list(set(conventional_params[carrier]) & set(n.generators)):
            values = conventional_params[carrier][attr]

            if f"conventional_{carrier}_{attr}" in conventional_inputs:
                # Values affecting generators of technology k country-specific
                # First map generator buses to countries; then map countries to p_max_pu
                values = pd.read_csv(values, index_col=0).iloc[:, 0]
                values = pd.read_csv(
                    snakemake.input[f"conventional_{carrier}_{attr}"], index_col=0
                ).iloc[:, 0]
                bus_values = n.buses.country.map(values)
                n.generators[attr].update(
                    n.generators.loc[idx].bus.map(bus_values).dropna()

@@ -421,7 +437,7 @@ def attach_conventional_generators(
            n.generators.loc[idx, attr] = values


-def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **config):
+def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **params):
    _add_missing_carriers_from_costs(n, costs, carriers)

    ppl = (

@@ -476,9 +492,9 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **con
    )

    if "PHS" in carriers and not phs.empty:
-       # fill missing max hours to config value and
+       # fill missing max hours to params value and
        # assume no natural inflow due to lack of data
-       max_hours = config.get("PHS_max_hours", 6)
+       max_hours = params.get("PHS_max_hours", 6)
        phs = phs.replace({"max_hours": {0: max_hours}})
        n.madd(
            "StorageUnit",

@@ -494,7 +510,7 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **con
    )

    if "hydro" in carriers and not hydro.empty:
-       hydro_max_hours = config.get("hydro_max_hours")
+       hydro_max_hours = params.get("hydro_max_hours")

        assert hydro_max_hours is not None, "No path for hydro capacities given."

@@ -644,16 +660,7 @@ def attach_OPSD_renewables(n, tech_map):
    n.generators.p_nom_min.update(gens.bus.map(caps).dropna())


def estimate_renewable_capacities(n, config):
    year = config["electricity"]["estimate_renewable_capacities"]["year"]
    tech_map = config["electricity"]["estimate_renewable_capacities"][
        "technology_mapping"
    ]
    countries = config["countries"]
    expansion_limit = config["electricity"]["estimate_renewable_capacities"][
        "expansion_limit"
    ]

def estimate_renewable_capacities(n, year, tech_map, expansion_limit, countries):
    if not len(countries) or not len(tech_map):
        return

@@ -716,48 +723,33 @@ if __name__ == "__main__":
        snakemake = mock_snakemake("add_electricity")
    configure_logging(snakemake)

    params = snakemake.params

    n = pypsa.Network(snakemake.input.base_network)
    Nyears = n.snapshot_weightings.objective.sum() / 8760.0

    costs = load_costs(
        snakemake.input.tech_costs,
        snakemake.config["costs"],
        snakemake.config["electricity"],
        params.costs,
        params.electricity["max_hours"],
        Nyears,
    )
    ppl = load_powerplants(snakemake.input.powerplants)

    if "renewable_carriers" in snakemake.config["electricity"]:
        renewable_carriers = set(snakemake.config["electricity"]["renewable_carriers"])
    else:
        logger.warning(
            "Missing key `renewable_carriers` under config entry `electricity`. "
            "In future versions, this will raise an error. "
            "Falling back to carriers listed under `renewable`."
        )
        renewable_carriers = snakemake.config["renewable"]

    extendable_carriers = snakemake.config["electricity"]["extendable_carriers"]
    if not (set(renewable_carriers) & set(extendable_carriers["Generator"])):
        logger.warning(
            "No renewables found in config entry `extendable_carriers`. "
            "In future versions, these have to be explicitly listed. "
            "Falling back to all renewables."
        )

    conventional_carriers = snakemake.config["electricity"]["conventional_carriers"]

    attach_load(
        n,
        snakemake.input.regions,
        snakemake.input.load,
        snakemake.input.nuts3_shapes,
        snakemake.config["countries"],
        snakemake.config["load"]["scaling_factor"],
        params.countries,
        params.scaling_factor,
    )

-   update_transmission_costs(n, costs, snakemake.config["lines"]["length_factor"])
+   update_transmission_costs(n, costs, params.length_factor)

    renewable_carriers = set(params.electricity["renewable_carriers"])
    extendable_carriers = params.electricity["extendable_carriers"]
    conventional_carriers = params.electricity["conventional_carriers"]
    conventional_inputs = {
        k: v for k, v in snakemake.input.items() if k.startswith("conventional_")
    }

@@ -774,7 +766,7 @@ if __name__ == "__main__":
        ppl,
        conventional_carriers,
        extendable_carriers,
-       snakemake.config.get("conventional", {}),
+       params.conventional,
        conventional_inputs,
    )

@@ -784,67 +776,32 @@ if __name__ == "__main__":
        snakemake.input,
        renewable_carriers,
        extendable_carriers,
-       snakemake.config["lines"]["length_factor"],
+       params.length_factor,
    )

    if "hydro" in renewable_carriers:
-       conf = snakemake.config["renewable"]["hydro"]
+       para = params.renewable["hydro"]
        attach_hydro(
            n,
            costs,
            ppl,
            snakemake.input.profile_hydro,
            snakemake.input.hydro_capacities,
            conf.pop("carriers", []),
            **conf,
            para.pop("carriers", []),
            **para,
        )

    if "estimate_renewable_capacities" not in snakemake.config["electricity"]:
        logger.warning(
            "Missing key `estimate_renewable_capacities` under config entry `electricity`. "
            "In future versions, this will raise an error. "
            "Falling back to whether ``estimate_renewable_capacities_from_capacity_stats`` is in the config."
        )
        if (
            "estimate_renewable_capacities_from_capacity_stats"
            in snakemake.config["electricity"]
        ):
            estimate_renewable_caps = {
                "enable": True,
                **snakemake.config["electricity"][
                    "estimate_renewable_capacities_from_capacity_stats"
                ],
            }
        else:
            estimate_renewable_caps = {"enable": False}
    else:
        estimate_renewable_caps = snakemake.config["electricity"][
            "estimate_renewable_capacities"
        ]
        if "enable" not in estimate_renewable_caps:
            logger.warning(
                "Missing key `enable` under config entry `estimate_renewable_capacities`. "
                "In future versions, this will raise an error. Falling back to False."
            )
            estimate_renewable_caps = {"enable": False}
        if "from_opsd" not in estimate_renewable_caps:
            logger.warning(
                "Missing key `from_opsd` under config entry `estimate_renewable_capacities`. "
                "In future versions, this will raise an error. "
                "Falling back to whether `renewable_capacities_from_opsd` is non-empty."
            )
            from_opsd = bool(
                snakemake.config["electricity"].get("renewable_capacities_from_opsd", False)
            )
            estimate_renewable_caps["from_opsd"] = from_opsd

    estimate_renewable_caps = params.electricity["estimate_renewable_capacities"]
    if estimate_renewable_caps["enable"]:
        tech_map = estimate_renewable_caps["technology_mapping"]
        expansion_limit = estimate_renewable_caps["expansion_limit"]
        year = estimate_renewable_caps["year"]

        if estimate_renewable_caps["from_opsd"]:
            tech_map = snakemake.config["electricity"]["estimate_renewable_capacities"][
                "technology_mapping"
            ]
            attach_OPSD_renewables(n, tech_map)
        estimate_renewable_capacities(n, snakemake.config)
        estimate_renewable_capacities(
            n, year, tech_map, expansion_limit, params.countries
        )

    update_p_nom_max(n)
@@ -157,7 +157,7 @@ def add_power_capacities_installed_before_baseyear(n, grouping_years, costs, bas
    # Fill missing DateOut
    dateout = (
        df_agg.loc[biomass_i, "DateIn"]
-       + snakemake.config["costs"]["fill_values"]["lifetime"]
+       + snakemake.params.costs["fill_values"]["lifetime"]
    )
    df_agg.loc[biomass_i, "DateOut"] = df_agg.loc[biomass_i, "DateOut"].fillna(dateout)

@@ -218,7 +218,7 @@ def add_power_capacities_installed_before_baseyear(n, grouping_years, costs, bas
            capacity = df.loc[grouping_year, generator]
            capacity = capacity[~capacity.isna()]
            capacity = capacity[
-               capacity > snakemake.config["existing_capacities"]["threshold_capacity"]
+               capacity > snakemake.params.existing_capacities["threshold_capacity"]
            ]
            suffix = "-ac" if generator == "offwind" else ""
            name_suffix = f" {generator}{suffix}-{grouping_year}"

@@ -582,7 +582,7 @@ def add_heating_capacities_installed_before_baseyear(
        )

        # delete links with capacities below threshold
-       threshold = snakemake.config["existing_capacities"]["threshold_capacity"]
+       threshold = snakemake.params.existing_capacities["threshold_capacity"]
        n.mremove(
            "Link",
            [

@@ -612,10 +612,10 @@ if __name__ == "__main__":

    update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts)

-   options = snakemake.config["sector"]
+   options = snakemake.params.sector
    opts = snakemake.wildcards.sector_opts.split("-")

-   baseyear = snakemake.config["scenario"]["planning_horizons"][0]
+   baseyear = snakemake.params.baseyear

    overrides = override_component_attrs(snakemake.input.overrides)
    n = pypsa.Network(snakemake.input.network, override_component_attrs=overrides)

@@ -626,14 +626,12 @@ if __name__ == "__main__":
    Nyears = n.snapshot_weightings.generators.sum() / 8760.0
    costs = prepare_costs(
        snakemake.input.costs,
-       snakemake.config["costs"],
+       snakemake.params.costs,
        Nyears,
    )

    grouping_years_power = snakemake.config["existing_capacities"][
        "grouping_years_power"
    ]
    grouping_years_heat = snakemake.config["existing_capacities"]["grouping_years_heat"]
    grouping_years_power = snakemake.params.existing_capacities["grouping_years_power"]
    grouping_years_heat = snakemake.params.existing_capacities["grouping_years_heat"]
    add_power_capacities_installed_before_baseyear(
        n, grouping_years_power, costs, baseyear
    )

@@ -650,7 +648,7 @@ if __name__ == "__main__":
        .to_pandas()
        .reindex(index=n.snapshots)
    )
-   default_lifetime = snakemake.config["costs"]["fill_values"]["lifetime"]
+   default_lifetime = snakemake.params.costs["fill_values"]["lifetime"]
    add_heating_capacities_installed_before_baseyear(
        n,
        baseyear,
@@ -67,9 +67,8 @@ idx = pd.IndexSlice

logger = logging.getLogger(__name__)


def attach_storageunits(n, costs, elec_opts):
    carriers = elec_opts["extendable_carriers"]["StorageUnit"]
    max_hours = elec_opts["max_hours"]
def attach_storageunits(n, costs, extendable_carriers, max_hours):
    carriers = extendable_carriers["StorageUnit"]

    _add_missing_carriers_from_costs(n, costs, carriers)

@@ -99,8 +98,8 @@ def attach_storageunits(n, costs, elec_opts):
    )


def attach_stores(n, costs, elec_opts):
    carriers = elec_opts["extendable_carriers"]["Store"]
def attach_stores(n, costs, extendable_carriers):
    carriers = extendable_carriers["Store"]

    _add_missing_carriers_from_costs(n, costs, carriers)

@@ -187,11 +186,10 @@ def attach_stores(n, costs, elec_opts):
    )


def attach_hydrogen_pipelines(n, costs, elec_opts):
    ext_carriers = elec_opts["extendable_carriers"]
    as_stores = ext_carriers.get("Store", [])
def attach_hydrogen_pipelines(n, costs, extendable_carriers):
    as_stores = extendable_carriers.get("Store", [])

-   if "H2 pipeline" not in ext_carriers.get("Link", []):
+   if "H2 pipeline" not in extendable_carriers.get("Link", []):
        return

    assert "H2" in as_stores, (

@@ -235,16 +233,17 @@ if __name__ == "__main__":
    configure_logging(snakemake)

    n = pypsa.Network(snakemake.input.network)
    elec_config = snakemake.config["electricity"]
    extendable_carriers = snakemake.params.extendable_carriers
    max_hours = snakemake.params.max_hours

    Nyears = n.snapshot_weightings.objective.sum() / 8760.0
    costs = load_costs(
-       snakemake.input.tech_costs, snakemake.config["costs"], elec_config, Nyears
+       snakemake.input.tech_costs, snakemake.params.costs, max_hours, Nyears
    )

    attach_storageunits(n, costs, elec_config)
    attach_stores(n, costs, elec_config)
    attach_hydrogen_pipelines(n, costs, elec_config)
    attach_storageunits(n, costs, extendable_carriers, max_hours)
    attach_stores(n, costs, extendable_carriers)
    attach_hydrogen_pipelines(n, costs, extendable_carriers)

    add_nice_carrier_names(n, snakemake.config)
@@ -30,7 +30,7 @@ if __name__ == "__main__":
    ammonia.index = cc.convert(ammonia.index, to="iso2")

    years = [str(i) for i in range(2013, 2018)]
-   countries = ammonia.index.intersection(snakemake.config["countries"])
+   countries = ammonia.index.intersection(snakemake.params.countries)
    ammonia = ammonia.loc[countries, years].astype(float)

    # convert from ktonN to ktonNH3
@@ -210,9 +210,9 @@ if __name__ == "__main__":

        snakemake = mock_snakemake("build_biomass_potentials", simpl="", clusters="5")

    config = snakemake.config["biomass"]
    year = config["year"]
    scenario = config["scenario"]
    params = snakemake.params.biomass
    year = params["year"]
    scenario = params["scenario"]

    enspreso = enspreso_biomass_potentials(year, scenario)

@@ -228,7 +228,7 @@ if __name__ == "__main__":

    df.to_csv(snakemake.output.biomass_potentials_all)

-   grouper = {v: k for k, vv in config["classes"].items() for v in vv}
+   grouper = {v: k for k, vv in params["classes"].items() for v in vv}
    df = df.groupby(grouper, axis=1).sum()

    df *= 1e6  # TWh/a to MWh/a
@@ -116,7 +116,7 @@ if __name__ == "__main__":
        snakemake = mock_snakemake("build_bus_regions")
    configure_logging(snakemake)

-   countries = snakemake.config["countries"]
+   countries = snakemake.params.countries

    n = pypsa.Network(snakemake.input.base_network)
@@ -39,7 +39,7 @@ if __name__ == "__main__":
        for source in ["air", "soil"]:
            source_T = xr.open_dataarray(snakemake.input[f"temp_{source}_{area}"])

-           delta_T = snakemake.config["sector"]["heat_pump_sink_T"] - source_T
+           delta_T = snakemake.params.heat_pump_sink_T - source_T

            cop = coefficient_of_performance(delta_T, source)
@@ -106,9 +106,9 @@ if __name__ == "__main__":
        snakemake = mock_snakemake("build_cutout", cutout="europe-2013-era5")
    configure_logging(snakemake)

-   cutout_params = snakemake.config["atlite"]["cutouts"][snakemake.wildcards.cutout]
+   cutout_params = snakemake.params.cutouts[snakemake.wildcards.cutout]

-   snapshots = pd.date_range(freq="h", **snakemake.config["snapshots"])
+   snapshots = pd.date_range(freq="h", **snakemake.params.snapshots)
    time = [snapshots[0], snapshots[-1]]
    cutout_params["time"] = slice(*cutout_params.get("time", time))
@@ -291,16 +291,16 @@ if __name__ == "__main__":

    configure_logging(snakemake)

    powerstatistics = snakemake.config["load"]["power_statistics"]
    interpolate_limit = snakemake.config["load"]["interpolate_limit"]
    countries = snakemake.config["countries"]
    snapshots = pd.date_range(freq="h", **snakemake.config["snapshots"])
    powerstatistics = snakemake.params.load["power_statistics"]
    interpolate_limit = snakemake.params.load["interpolate_limit"]
    countries = snakemake.params.countries
    snapshots = pd.date_range(freq="h", **snakemake.params.snapshots)
    years = slice(snapshots[0], snapshots[-1])
-   time_shift = snakemake.config["load"]["time_shift_for_large_gaps"]
+   time_shift = snakemake.params.load["time_shift_for_large_gaps"]

    load = load_timeseries(snakemake.input[0], years, countries, powerstatistics)

-   if snakemake.config["load"]["manual_adjustments"]:
+   if snakemake.params.load["manual_adjustments"]:
        load = manual_adjustment(load, snakemake.input[0], powerstatistics)

    logger.info(f"Linearly interpolate gaps of size {interpolate_limit} and less.")
@@ -737,16 +737,16 @@ if __name__ == "__main__":

    logging.basicConfig(level=snakemake.config["logging"]["level"])

-   config = snakemake.config["energy"]
+   params = snakemake.params.energy

    nuts3 = gpd.read_file(snakemake.input.nuts3_shapes).set_index("index")
    population = nuts3["pop"].groupby(nuts3.country).sum()

-   countries = snakemake.config["countries"]
+   countries = snakemake.params.countries
    idees_countries = pd.Index(countries).intersection(eu28)

    data_year = config["energy_totals_year"]
    report_year = snakemake.config["energy"]["eurostat_report_year"]
    data_year = params["energy_totals_year"]
    report_year = snakemake.params.energy["eurostat_report_year"]
    input_eurostat = snakemake.input.eurostat
    eurostat = build_eurostat(input_eurostat, countries, report_year, data_year)
    swiss = build_swiss(data_year)

@@ -755,8 +755,8 @@ if __name__ == "__main__":
    energy = build_energy_totals(countries, eurostat, swiss, idees)
    energy.to_csv(snakemake.output.energy_name)

    base_year_emissions = config["base_emissions_year"]
    emissions_scope = snakemake.config["energy"]["emissions"]
    base_year_emissions = params["base_emissions_year"]
    emissions_scope = snakemake.params.energy["emissions"]
    eea_co2 = build_eea_co2(snakemake.input.co2, base_year_emissions, emissions_scope)
    eurostat_co2 = build_eurostat_co2(
        input_eurostat, countries, report_year, base_year_emissions
@@ -27,7 +27,7 @@ if __name__ == "__main__":
        cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
        client = Client(cluster, asynchronous=True)

-   time = pd.date_range(freq="h", **snakemake.config["snapshots"])
+   time = pd.date_range(freq="h", **snakemake.params.snapshots)
    cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time)

    clustered_regions = (
@@ -130,10 +130,10 @@ if __name__ == "__main__":
        snakemake = mock_snakemake("build_hydro_profile")
    configure_logging(snakemake)

-   config_hydro = snakemake.config["renewable"]["hydro"]
+   params_hydro = snakemake.params.hydro
    cutout = atlite.Cutout(snakemake.input.cutout)

-   countries = snakemake.config["countries"]
+   countries = snakemake.params.countries
    country_shapes = (
        gpd.read_file(snakemake.input.country_shapes)
        .set_index("name")["geometry"]

@@ -151,7 +151,7 @@ if __name__ == "__main__":
        normalize_using_yearly=eia_stats,
    )

    if "clip_min_inflow" in config_hydro:
        inflow = inflow.where(inflow > config_hydro["clip_min_inflow"], 0)
    if "clip_min_inflow" in params_hydro:
        inflow = inflow.where(inflow > params_hydro["clip_min_inflow"], 0)

    inflow.to_netcdf(snakemake.output[0])
@@ -73,7 +73,7 @@ def prepare_hotmaps_database(regions):

    df[["srid", "coordinates"]] = df.geom.str.split(";", expand=True)

-   if snakemake.config["industry"].get("hotmaps_locate_missing", False):
+   if snakemake.params.hotmaps_locate_missing:
        df = locate_missing_industrial_sites(df)

    # remove those sites without valid locations

@@ -143,7 +143,7 @@ if __name__ == "__main__":

    logging.basicConfig(level=snakemake.config["logging"]["level"])

-   countries = snakemake.config["countries"]
+   countries = snakemake.params.countries

    regions = gpd.read_file(snakemake.input.regions_onshore).set_index("name")
@@ -101,8 +101,8 @@ def add_ammonia_energy_demand(demand):

    def get_ammonia_by_fuel(x):
        fuels = {
            "gas": config["MWh_CH4_per_tNH3_SMR"],
            "electricity": config["MWh_elec_per_tNH3_SMR"],
            "gas": params["MWh_CH4_per_tNH3_SMR"],
            "electricity": params["MWh_elec_per_tNH3_SMR"],
        }

        return pd.Series({k: x * v for k, v in fuels.items()})

@@ -112,7 +112,7 @@ def add_ammonia_energy_demand(demand):
        index=demand.index, fill_value=0.0
    )

-   ammonia = pd.DataFrame({"ammonia": ammonia * config["MWh_NH3_per_tNH3"]}).T
+   ammonia = pd.DataFrame({"ammonia": ammonia * params["MWh_NH3_per_tNH3"]}).T

    demand["Ammonia"] = ammonia.unstack().reindex(index=demand.index, fill_value=0.0)

@@ -178,9 +178,9 @@ if __name__ == "__main__":

        snakemake = mock_snakemake("build_industrial_energy_demand_per_country_today")

    config = snakemake.config["industry"]
    year = config.get("reference_year", 2015)
    countries = pd.Index(snakemake.config["countries"])
    params = snakemake.params.industry
    year = params.get("reference_year", 2015)
    countries = pd.Index(snakemake.params.countries)

    demand = industrial_energy_demand(countries.intersection(eu28), year)
@ -264,9 +264,9 @@ def separate_basic_chemicals(demand, year):

# assume HVC, methanol, chlorine production proportional to non-ammonia basic chemicals
distribution_key = demand["Basic chemicals"] / demand["Basic chemicals"].sum()
demand["HVC"] = config["HVC_production_today"] * 1e3 * distribution_key
demand["Chlorine"] = config["chlorine_production_today"] * 1e3 * distribution_key
demand["Methanol"] = config["methanol_production_today"] * 1e3 * distribution_key
demand["HVC"] = params["HVC_production_today"] * 1e3 * distribution_key
demand["Chlorine"] = params["chlorine_production_today"] * 1e3 * distribution_key
demand["Methanol"] = params["methanol_production_today"] * 1e3 * distribution_key

demand.drop(columns=["Basic chemicals"], inplace=True)

@ -279,11 +279,11 @@ if __name__ == "__main__":

logging.basicConfig(level=snakemake.config["logging"]["level"])

countries = snakemake.config["countries"]
countries = snakemake.params.countries

year = snakemake.config["industry"]["reference_year"]
year = snakemake.params.industry["reference_year"]

config = snakemake.config["industry"]
params = snakemake.params.industry

jrc_dir = snakemake.input.jrc
eurostat_dir = snakemake.input.eurostat
@ -15,7 +15,7 @@ if __name__ == "__main__":

snakemake = mock_snakemake("build_industrial_production_per_country_tomorrow")

config = snakemake.config["industry"]
params = snakemake.params.industry

investment_year = int(snakemake.wildcards.planning_horizons)

@ -25,8 +25,8 @@ if __name__ == "__main__":
keys = ["Integrated steelworks", "Electric arc"]
total_steel = production[keys].sum(axis=1)

st_primary_fraction = get(config["St_primary_fraction"], investment_year)
dri_fraction = get(config["DRI_fraction"], investment_year)
st_primary_fraction = get(params["St_primary_fraction"], investment_year)
dri_fraction = get(params["DRI_fraction"], investment_year)
int_steel = production["Integrated steelworks"].sum()
fraction_persistent_primary = st_primary_fraction * total_steel.sum() / int_steel

@ -51,7 +51,7 @@ if __name__ == "__main__":
key_pri = "Aluminium - primary production"
key_sec = "Aluminium - secondary production"

al_primary_fraction = get(config["Al_primary_fraction"], investment_year)
al_primary_fraction = get(params["Al_primary_fraction"], investment_year)
fraction_persistent_primary = (
al_primary_fraction * total_aluminium.sum() / production[key_pri].sum()
)
@ -60,15 +60,15 @@ if __name__ == "__main__":
production[key_sec] = total_aluminium - production[key_pri]

production["HVC (mechanical recycling)"] = (
get(config["HVC_mechanical_recycling_fraction"], investment_year)
get(params["HVC_mechanical_recycling_fraction"], investment_year)
* production["HVC"]
)
production["HVC (chemical recycling)"] = (
get(config["HVC_chemical_recycling_fraction"], investment_year)
get(params["HVC_chemical_recycling_fraction"], investment_year)
* production["HVC"]
)

production["HVC"] *= get(config["HVC_primary_fraction"], investment_year)
production["HVC"] *= get(params["HVC_primary_fraction"], investment_year)

fn = snakemake.output.industrial_production_per_country_tomorrow
production.to_csv(fn, float_format="%.2f")
@ -185,10 +185,10 @@ def iron_and_steel():
df[sector] = df["Electric arc"]

# add H2 consumption for DRI at 1.7 MWh H2 /ton steel
df.at["hydrogen", sector] = config["H2_DRI"]
df.at["hydrogen", sector] = params["H2_DRI"]

# add electricity consumption in DRI shaft (0.322 MWh/tSl)
df.at["elec", sector] += config["elec_DRI"]
df.at["elec", sector] += params["elec_DRI"]

## Integrated steelworks
# could be used in combination with CCS)
@ -383,19 +383,19 @@ def chemicals_industry():
assert s_emi.index[0] == sector

# convert from MtHVC/a to ktHVC/a
s_out = config["HVC_production_today"] * 1e3
s_out = params["HVC_production_today"] * 1e3

# tCO2/t material
df.loc["process emission", sector] += (
s_emi["Process emissions"]
- config["petrochemical_process_emissions"] * 1e3
- config["NH3_process_emissions"] * 1e3
- params["petrochemical_process_emissions"] * 1e3
- params["NH3_process_emissions"] * 1e3
) / s_out

# emissions originating from feedstock, could be non-fossil origin
# tCO2/t material
df.loc["process emission from feedstock", sector] += (
config["petrochemical_process_emissions"] * 1e3
params["petrochemical_process_emissions"] * 1e3
) / s_out

# convert from ktoe/a to GWh/a
@ -405,18 +405,18 @@ def chemicals_industry():
# subtract ammonia energy demand (in ktNH3/a)
ammonia = pd.read_csv(snakemake.input.ammonia_production, index_col=0)
ammonia_total = ammonia.loc[ammonia.index.intersection(eu28), str(year)].sum()
df.loc["methane", sector] -= ammonia_total * config["MWh_CH4_per_tNH3_SMR"]
df.loc["elec", sector] -= ammonia_total * config["MWh_elec_per_tNH3_SMR"]
df.loc["methane", sector] -= ammonia_total * params["MWh_CH4_per_tNH3_SMR"]
df.loc["elec", sector] -= ammonia_total * params["MWh_elec_per_tNH3_SMR"]

# subtract chlorine demand
chlorine_total = config["chlorine_production_today"]
df.loc["hydrogen", sector] -= chlorine_total * config["MWh_H2_per_tCl"]
df.loc["elec", sector] -= chlorine_total * config["MWh_elec_per_tCl"]
chlorine_total = params["chlorine_production_today"]
df.loc["hydrogen", sector] -= chlorine_total * params["MWh_H2_per_tCl"]
df.loc["elec", sector] -= chlorine_total * params["MWh_elec_per_tCl"]

# subtract methanol demand
methanol_total = config["methanol_production_today"]
df.loc["methane", sector] -= methanol_total * config["MWh_CH4_per_tMeOH"]
df.loc["elec", sector] -= methanol_total * config["MWh_elec_per_tMeOH"]
methanol_total = params["methanol_production_today"]
df.loc["methane", sector] -= methanol_total * params["MWh_CH4_per_tMeOH"]
df.loc["elec", sector] -= methanol_total * params["MWh_elec_per_tMeOH"]

# MWh/t material
df.loc[sources, sector] = df.loc[sources, sector] / s_out
@ -427,37 +427,37 @@ def chemicals_industry():

sector = "HVC (mechanical recycling)"
df[sector] = 0.0
df.loc["elec", sector] = config["MWh_elec_per_tHVC_mechanical_recycling"]
df.loc["elec", sector] = params["MWh_elec_per_tHVC_mechanical_recycling"]

# HVC chemical recycling

sector = "HVC (chemical recycling)"
df[sector] = 0.0
df.loc["elec", sector] = config["MWh_elec_per_tHVC_chemical_recycling"]
df.loc["elec", sector] = params["MWh_elec_per_tHVC_chemical_recycling"]

# Ammonia

sector = "Ammonia"
df[sector] = 0.0
if snakemake.config["sector"].get("ammonia", False):
df.loc["ammonia", sector] = config["MWh_NH3_per_tNH3"]
if snakemake.params.ammonia:
df.loc["ammonia", sector] = params["MWh_NH3_per_tNH3"]
else:
df.loc["hydrogen", sector] = config["MWh_H2_per_tNH3_electrolysis"]
df.loc["elec", sector] = config["MWh_elec_per_tNH3_electrolysis"]
df.loc["hydrogen", sector] = params["MWh_H2_per_tNH3_electrolysis"]
df.loc["elec", sector] = params["MWh_elec_per_tNH3_electrolysis"]

# Chlorine

sector = "Chlorine"
df[sector] = 0.0
df.loc["hydrogen", sector] = config["MWh_H2_per_tCl"]
df.loc["elec", sector] = config["MWh_elec_per_tCl"]
df.loc["hydrogen", sector] = params["MWh_H2_per_tCl"]
df.loc["elec", sector] = params["MWh_elec_per_tCl"]

# Methanol

sector = "Methanol"
df[sector] = 0.0
df.loc["methane", sector] = config["MWh_CH4_per_tMeOH"]
df.loc["elec", sector] = config["MWh_elec_per_tMeOH"]
df.loc["methane", sector] = params["MWh_CH4_per_tMeOH"]
df.loc["elec", sector] = params["MWh_elec_per_tMeOH"]

# Other chemicals

@ -1465,10 +1465,10 @@ if __name__ == "__main__":

snakemake = mock_snakemake("build_industry_sector_ratios")

# TODO make config option
# TODO make params option
year = 2015

config = snakemake.config["industry"]
params = snakemake.params.industry

df = pd.concat(
[
@ -115,7 +115,7 @@ if __name__ == "__main__":
configure_logging(snakemake)

n = pypsa.Network(snakemake.input.base_network)
countries = snakemake.config["countries"]
countries = snakemake.params.countries

ppl = (
pm.powerplants(from_url=True)
@ -134,12 +134,12 @@ if __name__ == "__main__":
ppl = ppl.query('not (Country in @available_countries and Fueltype == "Bioenergy")')
ppl = pd.concat([ppl, opsd])

ppl_query = snakemake.config["electricity"]["powerplants_filter"]
ppl_query = snakemake.params.powerplants_filter
if isinstance(ppl_query, str):
ppl.query(ppl_query, inplace=True)

# add carriers from own powerplant files:
custom_ppl_query = snakemake.config["electricity"]["custom_powerplants"]
custom_ppl_query = snakemake.params.custom_powerplants
ppl = add_custom_powerplants(
ppl, snakemake.input.custom_powerplants, custom_ppl_query
)
@ -64,7 +64,7 @@ Inputs
- ``resources/offshore_shapes.geojson``: confer :ref:`shapes`
- ``resources/regions_onshore.geojson``: (if not offshore wind), confer :ref:`busregions`
- ``resources/regions_offshore.geojson``: (if offshore wind), :ref:`busregions`
- ``"cutouts/" + config["renewable"][{technology}]['cutout']``: :ref:`cutout`
- ``"cutouts/" + params["renewable"][{technology}]['cutout']``: :ref:`cutout`
- ``networks/base.nc``: :ref:`base`

Outputs
@ -204,14 +204,14 @@ if __name__ == "__main__":

nprocesses = int(snakemake.threads)
noprogress = snakemake.config["run"].get("disable_progressbar", True)
config = snakemake.config["renewable"][snakemake.wildcards.technology]
resource = config["resource"] # pv panel config / wind turbine config
correction_factor = config.get("correction_factor", 1.0)
capacity_per_sqkm = config["capacity_per_sqkm"]
p_nom_max_meth = config.get("potential", "conservative")
params = snakemake.params.renewable[snakemake.wildcards.technology]
resource = params["resource"] # pv panel params / wind turbine params
correction_factor = params.get("correction_factor", 1.0)
capacity_per_sqkm = params["capacity_per_sqkm"]
p_nom_max_meth = params.get("potential", "conservative")

if isinstance(config.get("corine", {}), list):
config["corine"] = {"grid_codes": config["corine"]}
if isinstance(params.get("corine", {}), list):
params["corine"] = {"grid_codes": params["corine"]}

if correction_factor != 1.0:
logger.info(f"correction_factor is set as {correction_factor}")
@ -229,13 +229,13 @@ if __name__ == "__main__":
regions = regions.set_index("name").rename_axis("bus")
buses = regions.index

res = config.get("excluder_resolution", 100)
res = params.get("excluder_resolution", 100)
excluder = atlite.ExclusionContainer(crs=3035, res=res)

if config["natura"]:
if params["natura"]:
excluder.add_raster(snakemake.input.natura, nodata=0, allow_no_overlap=True)

corine = config.get("corine", {})
corine = params.get("corine", {})
if "grid_codes" in corine:
codes = corine["grid_codes"]
excluder.add_raster(snakemake.input.corine, codes=codes, invert=True, crs=3035)
@ -246,28 +246,28 @@ if __name__ == "__main__":
snakemake.input.corine, codes=codes, buffer=buffer, crs=3035
)

# if "ship_threshold" in config:
# shipping_threshold = (
# config["ship_threshold"] * 8760 * 6
# ) # approximation because 6 years of data which is hourly collected
# func = functools.partial(np.less, shipping_threshold)
# excluder.add_raster(
# snakemake.input.ship_density, codes=func, crs=4326, allow_no_overlap=True
# )
if "ship_threshold" in params:
shipping_threshold = (
params["ship_threshold"] * 8760 * 6
) # approximation because 6 years of data which is hourly collected
func = functools.partial(np.less, shipping_threshold)
excluder.add_raster(
snakemake.input.ship_density, codes=func, crs=4326, allow_no_overlap=True
)

if config.get("max_depth"):
if params.get("max_depth"):
# lambda not supported for atlite + multiprocessing
# use named function np.greater with partially frozen argument instead
# and exclude areas where: -max_depth > grid cell depth
func = functools.partial(np.greater, -config["max_depth"])
func = functools.partial(np.greater, -params["max_depth"])
excluder.add_raster(snakemake.input.gebco, codes=func, crs=4326, nodata=-1000)

if "min_shore_distance" in config:
buffer = config["min_shore_distance"]
if "min_shore_distance" in params:
buffer = params["min_shore_distance"]
excluder.add_geometry(snakemake.input.country_shapes, buffer=buffer)

if "max_shore_distance" in config:
buffer = config["max_shore_distance"]
if "max_shore_distance" in params:
buffer = params["max_shore_distance"]
excluder.add_geometry(
snakemake.input.country_shapes, buffer=buffer, invert=True
)
@ -358,13 +358,13 @@ if __name__ == "__main__":
# select only buses with some capacity and minimal capacity factor
ds = ds.sel(
bus=(
(ds["profile"].mean("time") > config.get("min_p_max_pu", 0.0))
& (ds["p_nom_max"] > config.get("min_p_nom_max", 0.0))
(ds["profile"].mean("time") > params.get("min_p_max_pu", 0.0))
& (ds["p_nom_max"] > params.get("min_p_nom_max", 0.0))
)
)

if "clip_p_max_pu" in config:
min_p_max_pu = config["clip_p_max_pu"]
if "clip_p_max_pu" in params:
min_p_max_pu = params["clip_p_max_pu"]
ds["profile"] = ds["profile"].where(ds["profile"] >= min_p_max_pu, 0)

ds.to_netcdf(snakemake.output.profile)
@ -305,7 +305,7 @@ def prepare_building_stock_data():
u_values.set_index(["country_code", "subsector", "bage", "type"], inplace=True)

# only take in config.yaml specified countries into account
countries = snakemake.config["countries"]
countries = snakemake.params.countries
area_tot = area_tot.loc[countries]

return u_values, country_iso_dic, countries, area_tot, area
@ -513,7 +513,7 @@ def prepare_cost_retro(country_iso_dic):

def prepare_temperature_data():
"""
returns the temperature dependent data for each country:
Returns the temperature dependent data for each country:

d_heat : length of heating season pd.Series(index=countries) [days/year]
on those days, daily average temperature is below
@ -621,7 +621,7 @@ def calculate_costs(u_values, l, cost_retro, window_assumptions):

def calculate_new_u(u_values, l, l_weight, window_assumptions, k=0.035):
"""
calculate U-values after building retrofitting, depending on the old
Calculate U-values after building retrofitting, depending on the old
U-values (u_values). This is for simple insulation measuers, adding an
additional layer of insulation.

@ -682,7 +682,7 @@ def map_tabula_to_hotmaps(df_tabula, df_hotmaps, column_prefix):

def get_solar_gains_per_year(window_area):
"""
returns solar heat gains during heating season in [kWh/a] depending on the
Returns solar heat gains during heating season in [kWh/a] depending on the
window area [m^2] of the building, assuming a equal distributed window
orientation (east, south, north, west)
"""
@ -698,8 +698,8 @@ def get_solar_gains_per_year(window_area):

def map_to_lstrength(l_strength, df):
"""
renames column names from a pandas dataframe to map tabula retrofitting
strengths [2 = moderate, 3 = ambitious] to l_strength
Renames column names from a pandas dataframe to map tabula retrofitting
strengths [2 = moderate, 3 = ambitious] to l_strength.
"""
middle = len(l_strength) // 2
map_to_l = pd.MultiIndex.from_arrays(
@ -718,7 +718,7 @@ def map_to_lstrength(l_strength, df):

def calculate_heat_losses(u_values, data_tabula, l_strength, temperature_factor):
"""
calculates total annual heat losses Q_ht for different insulation
Calculates total annual heat losses Q_ht for different insulation
thicknesses (l_strength), depending on current insulation state (u_values),
standard building topologies and air ventilation from TABULA (data_tabula)
and the accumulated difference between internal and external temperature
@ -840,7 +840,7 @@ def calculate_heat_losses(u_values, data_tabula, l_strength, temperature_factor)

def calculate_heat_gains(data_tabula, heat_transfer_perm2, d_heat):
"""
calculates heat gains Q_gain [W/m^2], which consititure from gains by:
Calculates heat gains Q_gain [W/m^2], which consititure from gains by:

(1) solar radiation (2) internal heat gains
"""
@ -885,7 +885,7 @@ def calculate_space_heat_savings(
u_values, data_tabula, l_strength, temperature_factor, d_heat
):
"""
calculates space heat savings (dE_space [per unit of unrefurbished state])
Calculates space heat savings (dE_space [per unit of unrefurbished state])
through retrofitting of the thermal envelope by additional insulation
material (l_strength[m])
"""
@ -1040,7 +1040,7 @@ if __name__ == "__main__":

# ******** config *********************************************************

retro_opts = snakemake.config["sector"]["retrofitting"]
retro_opts = snakemake.params.retrofitting
interest_rate = retro_opts["interest_rate"]
annualise_cost = retro_opts["annualise_cost"] # annualise the investment costs
tax_weighting = retro_opts[
@ -41,7 +41,7 @@ if __name__ == "__main__":
"build_sequestration_potentials", simpl="", clusters="181"
)

cf = snakemake.config["sector"]["regional_co2_sequestration_potential"]
cf = snakemake.params.sequestration_potential

gdf = gpd.read_file(snakemake.input.sequestration_potential[0])
@ -234,6 +234,7 @@ def nuts3(country_shapes, nuts3, nuts3pop, nuts3gdp, ch_cantons, ch_popgdp):
manual = gpd.GeoDataFrame(
[["BA1", "BA", 3871.0], ["RS1", "RS", 7210.0], ["AL1", "AL", 2893.0]],
columns=["NUTS_ID", "country", "pop"],
geometry=gpd.GeoSeries(),
)
manual["geometry"] = manual["country"].map(country_shapes)
manual = manual.dropna()
@ -254,13 +255,11 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_shapes")
configure_logging(snakemake)

country_shapes = countries(
snakemake.input.naturalearth, snakemake.config["countries"]
)
country_shapes = countries(snakemake.input.naturalearth, snakemake.params.countries)
country_shapes.reset_index().to_file(snakemake.output.country_shapes)

offshore_shapes = eez(
country_shapes, snakemake.input.eez, snakemake.config["countries"]
country_shapes, snakemake.input.eez, snakemake.params.countries
)
offshore_shapes.reset_index().to_file(snakemake.output.offshore_shapes)
@ -27,9 +27,9 @@ if __name__ == "__main__":
cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
client = Client(cluster, asynchronous=True)

config = snakemake.config["solar_thermal"]
config = snakemake.params.solar_thermal

time = pd.date_range(freq="h", **snakemake.config["snapshots"])
time = pd.date_range(freq="h", **snakemake.params.snapshots)
cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time)

clustered_regions = (
@ -27,7 +27,7 @@ if __name__ == "__main__":
cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
client = Client(cluster, asynchronous=True)

time = pd.date_range(freq="h", **snakemake.config["snapshots"])
time = pd.date_range(freq="h", **snakemake.params.snapshots)
cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time)

clustered_regions = (
@ -175,9 +175,9 @@ if __name__ == "__main__":
snakemake.input.pop_weighted_energy_totals, index_col=0
)

options = snakemake.config["sector"]
options = snakemake.params.sector

snapshots = pd.date_range(freq="h", **snakemake.config["snapshots"], tz="UTC")
snapshots = pd.date_range(freq="h", **snakemake.params.snapshots, tz="UTC")

nyears = len(snapshots) / 8760
@ -186,7 +186,7 @@ def get_feature_for_hac(n, buses_i=None, feature=None):
if "offwind" in carriers:
carriers.remove("offwind")
carriers = np.append(
carriers, network.generators.carrier.filter(like="offwind").unique()
carriers, n.generators.carrier.filter(like="offwind").unique()
)

if feature.split("-")[1] == "cap":
@ -424,7 +424,10 @@ def clustering_for_n_clusters(
n.links.eval("underwater_fraction * length").div(nc.links.length).dropna()
)
nc.links["capital_cost"] = nc.links["capital_cost"].add(
(nc.links.length - n.links.length).clip(lower=0).mul(extended_link_costs),
(nc.links.length - n.links.length)
.clip(lower=0)
.mul(extended_link_costs)
.dropna(),
fill_value=0,
)

@ -460,34 +463,20 @@ if __name__ == "__main__":
snakemake = mock_snakemake("cluster_network", simpl="", clusters="37c")
configure_logging(snakemake)

params = snakemake.params
solver_name = snakemake.config["solving"]["solver"]["name"]

n = pypsa.Network(snakemake.input.network)

focus_weights = snakemake.config.get("focus_weights", None)

renewable_carriers = pd.Index(
[
tech
for tech in n.generators.carrier.unique()
if tech in snakemake.config["renewable"]
]
)

exclude_carriers = snakemake.config["clustering"]["cluster_network"].get(
"exclude_carriers", []
)
exclude_carriers = params.cluster_network["exclude_carriers"]
aggregate_carriers = set(n.generators.carrier) - set(exclude_carriers)
conventional_carriers = set(params.conventional_carriers)
if snakemake.wildcards.clusters.endswith("m"):
n_clusters = int(snakemake.wildcards.clusters[:-1])
conventional = set(
snakemake.config["electricity"].get("conventional_carriers", [])
)
aggregate_carriers = conventional.intersection(aggregate_carriers)
aggregate_carriers = params.conventional_carriers & aggregate_carriers
elif snakemake.wildcards.clusters.endswith("c"):
n_clusters = int(snakemake.wildcards.clusters[:-1])
conventional = set(
snakemake.config["electricity"].get("conventional_carriers", [])
)
aggregate_carriers = aggregate_carriers - conventional
aggregate_carriers = aggregate_carriers - conventional_carriers
elif snakemake.wildcards.clusters == "all":
n_clusters = len(n.buses)
else:
@ -501,13 +490,12 @@ if __name__ == "__main__":
n, busmap, linemap, linemap, pd.Series(dtype="O")
)
else:
line_length_factor = snakemake.config["lines"]["length_factor"]
Nyears = n.snapshot_weightings.objective.sum() / 8760

hvac_overhead_cost = load_costs(
snakemake.input.tech_costs,
snakemake.config["costs"],
snakemake.config["electricity"],
params.costs,
params.max_hours,
Nyears,
).at["HVAC overhead", "capital_cost"]

@ -518,16 +506,16 @@ if __name__ == "__main__":
).all() or x.isnull().all(), "The `potential` configuration option must agree for all renewable carriers, for now!"
return v

aggregation_strategies = snakemake.config["clustering"].get(
"aggregation_strategies", {}
)
# translate str entries of aggregation_strategies to pd.Series functions:
aggregation_strategies = {
p: {k: getattr(pd.Series, v) for k, v in aggregation_strategies[p].items()}
for p in aggregation_strategies.keys()
p: {
k: getattr(pd.Series, v)
for k, v in params.aggregation_strategies[p].items()
}
for p in params.aggregation_strategies.keys()
}

custom_busmap = snakemake.config["enable"].get("custom_busmap", False)
custom_busmap = params.custom_busmap
if custom_busmap:
custom_busmap = pd.read_csv(
snakemake.input.custom_busmap, index_col=0, squeeze=True
@ -535,21 +523,18 @@ if __name__ == "__main__":
custom_busmap.index = custom_busmap.index.astype(str)
logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}")

cluster_config = snakemake.config.get("clustering", {}).get(
"cluster_network", {}
)
clustering = clustering_for_n_clusters(
n,
n_clusters,
custom_busmap,
aggregate_carriers,
line_length_factor,
aggregation_strategies,
snakemake.config["solving"]["solver"]["name"],
cluster_config.get("algorithm", "hac"),
cluster_config.get("feature", "solar+onwind-time"),
params.length_factor,
params.aggregation_strategies,
solver_name,
params.cluster_network["algorithm"],
params.cluster_network["feature"],
hvac_overhead_cost,
focus_weights,
params.focus_weights,
)

update_p_nom_max(clustering.network)
@ -198,7 +198,7 @@ def calculate_costs(n, label, costs):


def calculate_cumulative_cost():
planning_horizons = snakemake.config["scenario"]["planning_horizons"]
planning_horizons = snakemake.params.scenario["planning_horizons"]

cumulative_cost = pd.DataFrame(
index=df["costs"].sum().index,
@ -688,19 +688,19 @@ if __name__ == "__main__":
(cluster, ll, opt + sector_opt, planning_horizon): "results/"
+ snakemake.params.RDIR
+ f"/postnetworks/elec_s{simpl}_{cluster}_l{ll}_{opt}_{sector_opt}_{planning_horizon}.nc"
for simpl in snakemake.config["scenario"]["simpl"]
for cluster in snakemake.config["scenario"]["clusters"]
for opt in snakemake.config["scenario"]["opts"]
for sector_opt in snakemake.config["scenario"]["sector_opts"]
for ll in snakemake.config["scenario"]["ll"]
for planning_horizon in snakemake.config["scenario"]["planning_horizons"]
for simpl in snakemake.params.scenario["simpl"]
for cluster in snakemake.params.scenario["clusters"]
for opt in snakemake.params.scenario["opts"]
for sector_opt in snakemake.params.scenario["sector_opts"]
for ll in snakemake.params.scenario["ll"]
for planning_horizon in snakemake.params.scenario["planning_horizons"]
}

Nyears = len(pd.date_range(freq="h", **snakemake.config["snapshots"])) / 8760
Nyears = len(pd.date_range(freq="h", **snakemake.params.snapshots)) / 8760

costs_db = prepare_costs(
snakemake.input.costs,
snakemake.config["costs"],
snakemake.params.costs,
Nyears,
)

@ -710,7 +710,7 @@ if __name__ == "__main__":

to_csv(df)

if snakemake.config["foresight"] == "myopic":
if snakemake.params.foresight == "myopic":
cumulative_cost = calculate_cumulative_cost()
cumulative_cost.to_csv(
"results/" + snakemake.params.RDIR + "/csvs/cumulative_cost.csv"
@ -70,7 +70,7 @@ def plot_map(
transmission=False,
with_legend=True,
):
tech_colors = snakemake.config["plotting"]["tech_colors"]
tech_colors = snakemake.params.plotting["tech_colors"]

n = network.copy()
assign_location(n)
@ -116,9 +116,7 @@ def plot_map(
costs = costs.stack() # .sort_index()

# hack because impossible to drop buses...
eu_location = snakemake.config["plotting"].get(
"eu_node_location", dict(x=-5.5, y=46)
)
eu_location = snakemake.params.plotting.get("eu_node_location", dict(x=-5.5, y=46))
n.buses.loc["EU gas", "x"] = eu_location["x"]
n.buses.loc["EU gas", "y"] = eu_location["y"]

@ -315,7 +313,7 @@ def plot_h2_map(network, regions):
h2_new = n.links[n.links.carrier == "H2 pipeline"]
h2_retro = n.links[n.links.carrier == "H2 pipeline retrofitted"]

if snakemake.config["foresight"] == "myopic":
if snakemake.params.foresight == "myopic":
# sum capacitiy for pipelines from different investment periods
h2_new = group_pipes(h2_new)

@ -558,7 +556,7 @@ def plot_ch4_map(network):
link_widths_used = max_usage / linewidth_factor
link_widths_used[max_usage < line_lower_threshold] = 0.0

tech_colors = snakemake.config["plotting"]["tech_colors"]
tech_colors = snakemake.params.plotting["tech_colors"]

pipe_colors = {
"gas pipeline": "#f08080",
@ -700,7 +698,7 @@ def plot_map_without(network):

# hack because impossible to drop buses...
if "EU gas" in n.buses.index:
eu_location = snakemake.config["plotting"].get(
eu_location = snakemake.params.plotting.get(
"eu_node_location", dict(x=-5.5, y=46)
)
n.buses.loc["EU gas", "x"] = eu_location["x"]
@ -876,7 +874,7 @@ def plot_series(network, carrier="AC", name="test"):
stacked=True,
linewidth=0.0,
color=[
snakemake.config["plotting"]["tech_colors"][i.replace(suffix, "")]
snakemake.params.plotting["tech_colors"][i.replace(suffix, "")]
for i in new_columns
],
)
@ -937,7 +935,7 @@ if __name__ == "__main__":

regions = gpd.read_file(snakemake.input.regions).set_index("name")

map_opts = snakemake.config["plotting"]["map"]
map_opts = snakemake.params.plotting["map"]

if map_opts["boundaries"] is None:
map_opts["boundaries"] = regions.total_bounds[[0, 2, 1, 3]] + [-1, 1, -1, 1]
@ -142,10 +142,10 @@ def plot_costs():

df = df.groupby(df.index.map(rename_techs)).sum()

to_drop = df.index[df.max(axis=1) < snakemake.config["plotting"]["costs_threshold"]]
to_drop = df.index[df.max(axis=1) < snakemake.params.plotting["costs_threshold"]]

logger.info(
f"Dropping technology with costs below {snakemake.config['plotting']['costs_threshold']} EUR billion per year"
f"Dropping technology with costs below {snakemake.params['plotting']['costs_threshold']} EUR billion per year"
)
logger.debug(df.loc[to_drop])

@ -165,7 +165,7 @@ def plot_costs():
kind="bar",
ax=ax,
stacked=True,
color=[snakemake.config["plotting"]["tech_colors"][i] for i in new_index],
color=[snakemake.params.plotting["tech_colors"][i] for i in new_index],
)

handles, labels = ax.get_legend_handles_labels()
@ -173,7 +173,7 @@ def plot_costs():
handles.reverse()
labels.reverse()

ax.set_ylim([0, snakemake.config["plotting"]["costs_max"]])
ax.set_ylim([0, snakemake.params.plotting["costs_max"]])

ax.set_ylabel("System Cost [EUR billion per year]")

@ -201,11 +201,11 @@ def plot_energy():
df = df.groupby(df.index.map(rename_techs)).sum()

to_drop = df.index[
df.abs().max(axis=1) < snakemake.config["plotting"]["energy_threshold"]
df.abs().max(axis=1) < snakemake.params.plotting["energy_threshold"]
]

logger.info(
f"Dropping all technology with energy consumption or production below {snakemake.config['plotting']['energy_threshold']} TWh/a"
f"Dropping all technology with energy consumption or production below {snakemake.params['plotting']['energy_threshold']} TWh/a"
)
logger.debug(df.loc[to_drop])

@ -227,7 +227,7 @@ def plot_energy():
kind="bar",
ax=ax,
stacked=True,
color=[snakemake.config["plotting"]["tech_colors"][i] for i in new_index],
color=[snakemake.params.plotting["tech_colors"][i] for i in new_index],
)

handles, labels = ax.get_legend_handles_labels()
@ -237,8 +237,8 @@ def plot_energy():

ax.set_ylim(
[
snakemake.config["plotting"]["energy_min"],
snakemake.config["plotting"]["energy_max"],
snakemake.params.plotting["energy_min"],
snakemake.params.plotting["energy_max"],
]
)

@ -287,7 +287,7 @@ def plot_balances():
df = df.groupby(df.index.map(rename_techs)).sum()

to_drop = df.index[
df.abs().max(axis=1) < snakemake.config["plotting"]["energy_threshold"] / 10
df.abs().max(axis=1) < snakemake.params.plotting["energy_threshold"] / 10
]

if v[0] in co2_carriers:
@ -296,7 +296,7 @@ def plot_balances():
units = "TWh/a"

logger.debug(
f"Dropping technology energy balance smaller than {snakemake.config['plotting']['energy_threshold']/10} {units}"
f"Dropping technology energy balance smaller than {snakemake.params['plotting']['energy_threshold']/10} {units}"
)
logger.debug(df.loc[to_drop])

@ -317,7 +317,7 @@ def plot_balances():
kind="bar",
ax=ax,
stacked=True,
color=[snakemake.config["plotting"]["tech_colors"][i] for i in new_index],
color=[snakemake.params.plotting["tech_colors"][i] for i in new_index],
)

handles, labels = ax.get_legend_handles_labels()
@ -455,10 +455,10 @@ def plot_carbon_budget_distribution(input_eurostat):
ax1 = plt.subplot(gs1[0, 0])
ax1.set_ylabel("CO$_2$ emissions (Gt per year)", fontsize=22)
ax1.set_ylim([0, 5])
ax1.set_xlim([1990, snakemake.config["scenario"]["planning_horizons"][-1] + 1])
ax1.set_xlim([1990, snakemake.params.planning_horizons[-1] + 1])

path_cb = "results/" + snakemake.params.RDIR + "/csvs/"
countries = snakemake.config["countries"]
countries = snakemake.params.countries
e_1990 = co2_emissions_year(countries, input_eurostat, opts, year=1990)
CO2_CAP = pd.read_csv(path_cb + "carbon_budget_distribution.csv", index_col=0)

@ -555,7 +555,7 @@ if __name__ == "__main__":

plot_balances()

for sector_opts in snakemake.config["scenario"]["sector_opts"]:
for sector_opts in snakemake.params.sector_opts:
opts = sector_opts.split("-")
for o in opts:
if "cb" in o:
@ -266,12 +266,12 @@ if __name__ == "__main__":
Nyears = n.snapshot_weightings.objective.sum() / 8760.0
costs = load_costs(
snakemake.input.tech_costs,
snakemake.config["costs"],
snakemake.config["electricity"],
snakemake.params.costs,
snakemake.params.max_hours,
Nyears,
)

set_line_s_max_pu(n, snakemake.config["lines"]["s_max_pu"])
set_line_s_max_pu(n, snakemake.params.lines["s_max_pu"])

for o in opts:
m = re.match(r"^\d+h$", o, re.IGNORECASE)
@ -290,11 +290,11 @@ if __name__ == "__main__":
if "Co2L" in o:
m = re.findall("[0-9]*\.?[0-9]+$", o)
if len(m) > 0:
co2limit = float(m[0]) * snakemake.config["electricity"]["co2base"]
co2limit = float(m[0]) * snakemake.params.co2base
add_co2limit(n, co2limit, Nyears)
logger.info("Setting CO2 limit according to wildcard value.")
else:
add_co2limit(n, snakemake.config["electricity"]["co2limit"], Nyears)
add_co2limit(n, snakemake.params.co2limit, Nyears)
logger.info("Setting CO2 limit according to config value.")
break

@ -306,7 +306,7 @@ if __name__ == "__main__":
add_gaslimit(n, limit, Nyears)
logger.info("Setting gas usage limit according to wildcard value.")
else:
add_gaslimit(n, snakemake.config["electricity"].get("gaslimit"), Nyears)
add_gaslimit(n, snakemake.params.gaslimit, Nyears)
logger.info("Setting gas usage limit according to config value.")
break

@ -335,7 +335,7 @@ if __name__ == "__main__":
add_emission_prices(n, dict(co2=float(m[0])))
else:
logger.info("Setting emission prices according to config value.")
add_emission_prices(n, snakemake.config["costs"]["emission_prices"])
add_emission_prices(n, snakemake.params.costs["emission_prices"])
break
if "ept" in o:
logger.info("Setting time dependent emission prices according spot market price")
@ -346,8 +346,8 @@ if __name__ == "__main__":

set_line_nom_max(
n,
s_nom_max_set=snakemake.config["lines"].get("s_nom_max,", np.inf),
p_nom_max_set=snakemake.config["links"].get("p_nom_max,", np.inf),
s_nom_max_set=snakemake.params.lines.get("s_nom_max,", np.inf),
p_nom_max_set=snakemake.params.links.get("p_nom_max,", np.inf),
)

if "ATK" in opts:
@ -22,13 +22,13 @@ from _helpers import (
override_component_attrs,
update_config_with_sector_opts,
)
from add_electricity import calculate_annuity
from build_energy_totals import build_co2_totals, build_eea_co2, build_eurostat_co2
from networkx.algorithms import complement
from networkx.algorithms.connectivity.edge_augmentation import k_edge_augmentation
from pypsa.geo import haversine_pts
from pypsa.io import import_components_from_dataframe
from scipy.stats import beta
from vresutils.costdata import annuity

logger = logging.getLogger(__name__)

@ -200,12 +200,12 @@ def co2_emissions_year(
"""
Calculate CO2 emissions in one specific year (e.g. 1990 or 2018).
"""
emissions_scope = snakemake.config["energy"]["emissions"]
emissions_scope = snakemake.params.energy["emissions"]
eea_co2 = build_eea_co2(snakemake.input.co2, year, emissions_scope)

# TODO: read Eurostat data from year > 2014
# this only affects the estimation of CO2 emissions for BA, RS, AL, ME, MK
report_year = snakemake.config["energy"]["eurostat_report_year"]
report_year = snakemake.params.energy["eurostat_report_year"]
if year > 2014:
eurostat_co2 = build_eurostat_co2(
input_eurostat, countries, report_year, year=2014
@ -241,7 +241,7 @@ def build_carbon_budget(o, input_eurostat, fn, emissions_scope, report_year):
carbon_budget = float(o[o.find("cb") + 2 : o.find("ex")])
r = float(o[o.find("ex") + 2 :])

countries = snakemake.config["countries"]
countries = snakemake.params.countries

e_1990 = co2_emissions_year(
countries, input_eurostat, opts, emissions_scope, report_year, year=1990
@ -252,7 +252,7 @@ def build_carbon_budget(o, input_eurostat, fn, emissions_scope, report_year):
countries, input_eurostat, opts, emissions_scope, report_year, year=2018
)

planning_horizons = snakemake.config["scenario"]["planning_horizons"]
planning_horizons = snakemake.params.planning_horizons
t_0 = planning_horizons[0]

if "be" in o:
@ -391,7 +391,7 @@ def update_wind_solar_costs(n, costs):
with xr.open_dataset(profile) as ds:
underwater_fraction = ds["underwater_fraction"].to_pandas()
connection_cost = (
snakemake.config["lines"]["length_factor"]
snakemake.params.length_factor
* ds["average_distance"].to_pandas()
* (
underwater_fraction
@ -483,8 +483,8 @@ def remove_elec_base_techs(n):
batteries and H2) from base electricity-only network, since they're added
here differently using links.
"""
for c in n.iterate_components(snakemake.config["pypsa_eur"]):
to_keep = snakemake.config["pypsa_eur"][c.name]
for c in n.iterate_components(snakemake.params.pypsa_eur):
to_keep = snakemake.params.pypsa_eur[c.name]
to_remove = pd.Index(c.df.carrier.unique()).symmetric_difference(to_keep)
if to_remove.empty:
continue
@ -674,7 +674,7 @@ def add_dac(n, costs):
def add_co2limit(n, nyears=1.0, limit=0.0):
logger.info(f"Adding CO2 budget limit as per unit of 1990 levels of {limit}")

countries = snakemake.config["countries"]
countries = snakemake.params.countries

sectors = emission_sectors_from_opts(opts)

@ -727,7 +727,7 @@ def cycling_shift(df, steps=1):
return df


def prepare_costs(cost_file, config, nyears):
def prepare_costs(cost_file, params, nyears):
# set all asset costs and other parameters
costs = pd.read_csv(cost_file, index_col=[0, 1]).sort_index()

@ -739,10 +739,10 @@ def prepare_costs(cost_file, config, nyears):
costs.loc[:, "value"].unstack(level=1).groupby("technology").sum(min_count=1)
)

costs = costs.fillna(config["fill_values"])
costs = costs.fillna(params["fill_values"])

def annuity_factor(v):
return annuity(v["lifetime"], v["discount rate"]) + v["FOM"] / 100
return calculate_annuity(v["lifetime"], v["discount rate"]) + v["FOM"] / 100

costs["fixed"] = [
annuity_factor(v) * v["investment"] * nyears for i, v in costs.iterrows()
@ -787,7 +787,7 @@ def add_ammonia(n, costs):

nodes = pop_layout.index

cf_industry = snakemake.config["industry"]
cf_industry = snakemake.params.industry

n.add("Carrier", "NH3")

@ -851,7 +851,7 @@ def add_wave(n, wave_cost_factor):
capacity = pd.Series({"Attenuator": 750, "F2HB": 1000, "MultiPA": 600})

# in EUR/MW
annuity_factor = annuity(25, 0.07) + 0.03
annuity_factor = calculate_annuity(25, 0.07) + 0.03
costs = (
1e6
* wave_cost_factor
@ -1067,6 +1067,9 @@ def add_storage_and_grids(n, costs):
lifetime=costs.at["electrolysis", "lifetime"],
)

if options["hydrogen_fuel_cell"]:
logger.info("Adding hydrogen fuel cell for re-electrification.")

n.madd(
"Link",
nodes + " H2 Fuel Cell",
@ -1080,10 +1083,33 @@ def add_storage_and_grids(n, costs):
lifetime=costs.at["fuel cell", "lifetime"],
)

cavern_types = snakemake.config["sector"]["hydrogen_underground_storage_locations"]
if options["hydrogen_turbine"]:
logger.info(
"Adding hydrogen turbine for re-electrification. Assuming OCGT technology costs."
)
# TODO: perhaps replace with hydrogen-specific technology assumptions.

n.madd(
"Link",
nodes + " H2 turbine",
bus0=nodes + " H2",
bus1=nodes,
p_nom_extendable=True,
carrier="H2 turbine",
efficiency=costs.at["OCGT", "efficiency"],
capital_cost=costs.at["OCGT", "fixed"]
* costs.at["OCGT", "efficiency"], # NB: fixed cost is per MWel
lifetime=costs.at["OCGT", "lifetime"],
)

cavern_types = snakemake.params.sector["hydrogen_underground_storage_locations"]
h2_caverns = pd.read_csv(snakemake.input.h2_cavern, index_col=0)

if not h2_caverns.empty and options["hydrogen_underground_storage"]:
if (
not h2_caverns.empty
and options["hydrogen_underground_storage"]
and set(cavern_types).intersection(h2_caverns.columns)
):
h2_caverns = h2_caverns[cavern_types].sum(axis=1)

# only use sites with at least 2 TWh potential
@ -3248,7 +3274,7 @@ if __name__ == "__main__":

update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts)

options = snakemake.config["sector"]
options = snakemake.params.sector

opts = snakemake.wildcards.sector_opts.split("-")

@ -3263,7 +3289,7 @@ if __name__ == "__main__":

costs = prepare_costs(
snakemake.input.costs,
snakemake.config["costs"],
snakemake.params.costs,
nyears,
)

@ -3275,10 +3301,10 @@ if __name__ == "__main__":

spatial = define_spatial(pop_layout.index, options)

if snakemake.config["foresight"] == "myopic":
if snakemake.params.foresight == "myopic":
add_lifetime_wind_solar(n, costs)

conventional = snakemake.config["existing_capacities"]["conventional_carriers"]
conventional = snakemake.params.conventional_carriers
for carrier in conventional:
add_carrier_buses(n, carrier)

@ -3347,15 +3373,15 @@ if __name__ == "__main__":
n = set_temporal_aggregation(n, opts, solver_name)

limit_type = "config"
limit = get(snakemake.config["co2_budget"], investment_year)
limit = get(snakemake.params.co2_budget, investment_year)
for o in opts:
if "cb" not in o:
continue
limit_type = "carbon budget"
fn = "results/" + snakemake.params.RDIR + "/csvs/carbon_budget_distribution.csv"
if not os.path.exists(fn):
emissions_scope = snakemake.config["energy"]["emissions"]
report_year = snakemake.config["energy"]["eurostat_report_year"]
emissions_scope = snakemake.params.emissions_scope
report_year = snakemake.params.eurostat_report_year
build_carbon_budget(
o, snakemake.input.eurostat, fn, emissions_scope, report_year
)
@ -3390,8 +3416,8 @@ if __name__ == "__main__":
if options["electricity_grid_connection"]:
add_electricity_grid_connection(n, costs)

first_year_myopic = (snakemake.config["foresight"] == "myopic") and (
snakemake.config["scenario"]["planning_horizons"][0] == investment_year
first_year_myopic = (snakemake.params.foresight == "myopic") and (
snakemake.params.planning_horizons[0] == investment_year
)

if options.get("cluster_heat_buses", False) and not first_year_myopic:
@ -53,14 +53,13 @@ if __name__ == "__main__":
snakemake
) # TODO Make logging compatible with progressbar (see PR #102)

if snakemake.config["tutorial"]:
if snakemake.params.tutorial:
url = "https://zenodo.org/record/3517921/files/pypsa-eur-tutorial-data-bundle.tar.xz"
else:
url = "https://zenodo.org/record/3517935/files/pypsa-eur-data-bundle.tar.xz"

# Save locations
tarball_fn = Path(f"{rootpath}/bundle.tar.xz")
to_fn = Path(f"{rootpath}/data")
to_fn = Path(rootpath) / Path(snakemake.output[0]).parent.parent

logger.info(f"Downloading databundle from '{url}'.")
disable_progress = snakemake.config["run"].get("disable_progressbar", False)
@ -29,7 +29,7 @@ if __name__ == "__main__":

# Save locations
zip_fn = Path(f"{rootpath}/IGGIELGN.zip")
to_fn = Path(f"{rootpath}/data/gas_network/scigrid-gas")
to_fn = Path(rootpath) / Path(snakemake.output[0]).parent.parent

logger.info(f"Downloading databundle from '{url}'.")
disable_progress = snakemake.config["run"].get("disable_progressbar", False)
@ -10,23 +10,25 @@ import logging

logger = logging.getLogger(__name__)

import os
import sys
import tarfile
from pathlib import Path

# Add pypsa-eur scripts to path for import of _helpers
sys.path.insert(0, os.getcwd() + "/../pypsa-eur/scripts")

from _helpers import configure_logging, progress_retrieve

if __name__ == "__main__":
if "snakemake" not in globals():
from _helpers import mock_snakemake

snakemake = mock_snakemake("retrieve_databundle")
rootpath = ".."
else:
rootpath = "."
configure_logging(snakemake)

url = "https://zenodo.org/record/5824485/files/pypsa-eur-sec-data-bundle.tar.gz"

tarball_fn = Path("sector-bundle.tar.gz")
to_fn = Path("data")
tarball_fn = Path(f"{rootpath}/sector-bundle.tar.gz")
to_fn = Path(rootpath) / Path(snakemake.output[0]).parent.parent

logger.info(f"Downloading databundle from '{url}'.")
disable_progress = snakemake.config["run"].get("disable_progressbar", False)
@ -149,17 +149,17 @@ def simplify_network_to_380(n):
|
||||
return n, trafo_map
|
||||
|
||||
|
||||
def _prepare_connection_costs_per_link(n, costs, config):
|
||||
def _prepare_connection_costs_per_link(n, costs, renewable_carriers, length_factor):
|
||||
if n.links.empty:
|
||||
return {}
|
||||
|
||||
connection_costs_per_link = {}
|
||||
|
||||
for tech in config["renewable"]:
|
||||
for tech in renewable_carriers:
|
||||
if tech.startswith("offwind"):
|
||||
connection_costs_per_link[tech] = (
|
||||
n.links.length
|
||||
* config["lines"]["length_factor"]
|
||||
* length_factor
|
||||
* (
|
||||
n.links.underwater_fraction
|
||||
* costs.at[tech + "-connection-submarine", "capital_cost"]
|
||||
@ -172,10 +172,18 @@ def _prepare_connection_costs_per_link(n, costs, config):
|
||||
|
||||
|
||||
def _compute_connection_costs_to_bus(
|
||||
n, busmap, costs, config, connection_costs_per_link=None, buses=None
|
||||
n,
|
||||
busmap,
|
||||
costs,
|
||||
renewable_carriers,
|
||||
length_factor,
|
||||
connection_costs_per_link=None,
|
||||
buses=None,
|
||||
):
|
||||
if connection_costs_per_link is None:
|
||||
connection_costs_per_link = _prepare_connection_costs_per_link(n, costs, config)
|
||||
connection_costs_per_link = _prepare_connection_costs_per_link(
|
||||
n, costs, renewable_carriers, length_factor
|
||||
)
|
||||
|
||||
if buses is None:
|
||||
buses = busmap.index[busmap.index != busmap.values]
|
||||
@ -265,7 +273,16 @@ def _aggregate_and_move_components(
|
||||
n.mremove(c, df.index[df.bus0.isin(buses_to_del) | df.bus1.isin(buses_to_del)])
|
||||
|
||||
|
||||
def simplify_links(n, costs, config, output, aggregation_strategies=dict()):
|
||||
def simplify_links(
|
||||
n,
|
||||
costs,
|
||||
renewables,
|
||||
length_factor,
|
||||
p_max_pu,
|
||||
exclude_carriers,
|
||||
output,
|
||||
aggregation_strategies=dict(),
|
||||
):
|
||||
## Complex multi-node links are folded into end-points
|
||||
logger.info("Simplifying connected link components")
|
||||
|
||||
@ -315,7 +332,9 @@ def simplify_links(n, costs, config, output, aggregation_strategies=dict()):
|
||||
|
||||
busmap = n.buses.index.to_series()
|
||||
|
||||
connection_costs_per_link = _prepare_connection_costs_per_link(n, costs, config)
|
||||
connection_costs_per_link = _prepare_connection_costs_per_link(
|
||||
n, costs, renewables, length_factor
|
||||
)
|
||||
connection_costs_to_bus = pd.DataFrame(
|
||||
0.0, index=n.buses.index, columns=list(connection_costs_per_link)
|
||||
)
|
||||
@ -333,12 +352,17 @@ def simplify_links(n, costs, config, output, aggregation_strategies=dict()):
|
||||
)
|
||||
busmap.loc[buses] = b[np.r_[0, m.argmin(axis=0), 1]]
|
||||
connection_costs_to_bus.loc[buses] += _compute_connection_costs_to_bus(
|
||||
n, busmap, costs, config, connection_costs_per_link, buses
|
||||
n,
|
||||
busmap,
|
||||
costs,
|
||||
renewables,
|
||||
length_factor,
|
||||
connection_costs_per_link,
|
||||
buses,
|
||||
)
|
||||
|
||||
all_links = [i for _, i in sum(links, [])]
|
||||
|
||||
p_max_pu = config["links"].get("p_max_pu", 1.0)
|
||||
lengths = n.links.loc[all_links, "length"]
|
||||
name = lengths.idxmax() + "+{}".format(len(links) - 1)
|
||||
params = dict(
|
||||
@ -377,10 +401,6 @@ def simplify_links(n, costs, config, output, aggregation_strategies=dict()):
|
||||
|
||||
logger.debug("Collecting all components using the busmap")
|
||||
|
||||
exclude_carriers = config["clustering"]["simplify_network"].get(
|
||||
"exclude_carriers", []
|
||||
)
|
||||
|
||||
_aggregate_and_move_components(
|
||||
n,
|
||||
busmap,
|
||||
@ -392,19 +412,23 @@ def simplify_links(n, costs, config, output, aggregation_strategies=dict()):
|
||||
return n, busmap
|
||||
|
||||
|
||||
def remove_stubs(n, costs, config, output, aggregation_strategies=dict()):
|
||||
def remove_stubs(
|
||||
n,
|
||||
costs,
|
||||
renewable_carriers,
|
||||
length_factor,
|
||||
simplify_network,
|
||||
output,
|
||||
aggregation_strategies=dict(),
|
||||
):
|
||||
logger.info("Removing stubs")
|
||||
|
||||
across_borders = config["clustering"]["simplify_network"].get(
|
||||
"remove_stubs_across_borders", True
|
||||
)
|
||||
across_borders = simplify_network["remove_stubs_across_borders"]
|
||||
matching_attrs = [] if across_borders else ["country"]
|
||||
busmap = busmap_by_stubs(n, matching_attrs)
|
||||
|
||||
connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap, costs, config)
|
||||
|
||||
exclude_carriers = config["clustering"]["simplify_network"].get(
|
||||
"exclude_carriers", []
|
||||
connection_costs_to_bus = _compute_connection_costs_to_bus(
|
||||
n, busmap, costs, renewable_carriers, length_factor
|
||||
)
|
||||
|
||||
_aggregate_and_move_components(
|
||||
@ -413,7 +437,7 @@ def remove_stubs(n, costs, config, output, aggregation_strategies=dict()):
|
||||
connection_costs_to_bus,
|
||||
output,
|
||||
aggregation_strategies=aggregation_strategies,
|
||||
exclude_carriers=exclude_carriers,
|
||||
exclude_carriers=simplify_network["exclude_carriers"],
|
||||
)
|
||||
|
||||
return n, busmap
|
||||
@ -473,26 +497,22 @@ def aggregate_to_substations(n, aggregation_strategies=dict(), buses_i=None):
|
||||
|
||||
|
||||
def cluster(
    n, n_clusters, config, algorithm="hac", feature=None, aggregation_strategies=dict()
    n,
    n_clusters,
    focus_weights,
    solver_name,
    algorithm="hac",
    feature=None,
    aggregation_strategies=dict(),
):
    logger.info(f"Clustering to {n_clusters} buses")

    focus_weights = config.get("focus_weights", None)

    renewable_carriers = pd.Index(
        [
            tech
            for tech in n.generators.carrier.unique()
            if tech.split("-", 2)[0] in config["renewable"]
        ]
    )

    clustering = clustering_for_n_clusters(
        n,
        n_clusters,
        custom_busmap=False,
        aggregation_strategies=aggregation_strategies,
        solver_name=config["solving"]["solver"]["name"],
        solver_name=solver_name,
        algorithm=algorithm,
        feature=feature,
        focus_weights=focus_weights,
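The `renewable_carriers` context lines above keep only those generator carriers whose base name (the part before any dash suffix) appears among the configured renewables. A self-contained sketch with toy stand-ins for `n.generators.carrier` and `config["renewable"]`:

```python
# Toy data; the carrier names and config keys are illustrative only.
import pandas as pd

renewable_config = {"onwind": {}, "solar": {}}          # stand-in for config["renewable"]
generator_carriers = pd.Series(["onwind", "solar", "solar-rooftop", "CCGT", "nuclear"])

renewable_carriers = pd.Index(
    [
        tech
        for tech in generator_carriers.unique()
        if tech.split("-", 2)[0] in renewable_config
    ]
)
print(list(renewable_carriers))  # ['onwind', 'solar', 'solar-rooftop']
```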
@ -508,67 +528,69 @@ if __name__ == "__main__":
        snakemake = mock_snakemake("simplify_network", simpl="")
    configure_logging(snakemake)

    n = pypsa.Network(snakemake.input.network)
    params = snakemake.params
    solver_name = snakemake.config["solving"]["solver"]["name"]

    n = pypsa.Network(snakemake.input.network)
    Nyears = n.snapshot_weightings.objective.sum() / 8760

    aggregation_strategies = snakemake.config["clustering"].get(
        "aggregation_strategies", {}
    )
    # translate str entries of aggregation_strategies to pd.Series functions:
    aggregation_strategies = {
        p: {k: getattr(pd.Series, v) for k, v in aggregation_strategies[p].items()}
        for p in aggregation_strategies.keys()
        p: {
            k: getattr(pd.Series, v)
            for k, v in params.aggregation_strategies[p].items()
        }
        for p in params.aggregation_strategies.keys()
    }

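The `getattr(pd.Series, v)` translation turns the strategy strings from the config into the corresponding pandas `Series` methods, so they can later be passed as aggregation functions. A minimal runnable example (component and column names are illustrative):

```python
# What the string-to-function translation does, on a toy strategies dict.
import pandas as pd

aggregation_strategies = {"generators": {"p_nom_max": "sum", "efficiency": "mean"}}

resolved = {
    component: {col: getattr(pd.Series, name) for col, name in strategies.items()}
    for component, strategies in aggregation_strategies.items()
}

s = pd.Series([1.0, 2.0, 3.0])
print(resolved["generators"]["p_nom_max"](s))   # 6.0  (pd.Series.sum)
print(resolved["generators"]["efficiency"](s))  # 2.0  (pd.Series.mean)
```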
    n, trafo_map = simplify_network_to_380(n)

    Nyears = n.snapshot_weightings.objective.sum() / 8760

    technology_costs = load_costs(
        snakemake.input.tech_costs,
        snakemake.config["costs"],
        snakemake.config["electricity"],
        params.costs,
        params.max_hours,
        Nyears,
    )

    n, simplify_links_map = simplify_links(
        n, technology_costs, snakemake.config, snakemake.output, aggregation_strategies
        n,
        technology_costs,
        params.renewable_carriers,
        params.length_factor,
        params.p_max_pu,
        params.simplify_network["exclude_carriers"],
        snakemake.output,
        aggregation_strategies,
    )

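`Nyears` (now computed right after loading the network) normalises the snapshot weightings to the number of modelled years, which `load_costs` uses to scale annualised costs. A small sketch of that arithmetic with toy weightings, independent of PyPSA:

```python
# Why Nyears = objective weightings / 8760: toy 3-hourly snapshots spanning one year.
import pandas as pd

snapshots = pd.date_range("2013-01-01", periods=2920, freq="3h")  # 2920 * 3 h = 8760 h
objective_weightings = pd.Series(3.0, index=snapshots)            # each snapshot represents 3 hours

Nyears = objective_weightings.sum() / 8760
print(Nyears)  # 1.0 -> costs are annualised for exactly one modelled year
```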
    busmaps = [trafo_map, simplify_links_map]

    cluster_config = snakemake.config["clustering"]["simplify_network"]
    if cluster_config.get("remove_stubs", True):
    if params.simplify_network["remove_stubs"]:
        n, stub_map = remove_stubs(
            n,
            technology_costs,
            snakemake.config,
            params.renewable_carriers,
            params.length_factor,
            params.simplify_network,
            snakemake.output,
            aggregation_strategies=aggregation_strategies,
        )
        busmaps.append(stub_map)

    if cluster_config.get("to_substations", False):
    if params.simplify_network["to_substations"]:
        n, substation_map = aggregate_to_substations(n, aggregation_strategies)
        busmaps.append(substation_map)

    # treatment of outliers (nodes without a profile for considered carrier):
    # all nodes that have no profile of the given carrier are being aggregated to closest neighbor
    if (
        snakemake.config.get("clustering", {})
        .get("cluster_network", {})
        .get("algorithm", "hac")
        == "hac"
        or cluster_config.get("algorithm", "hac") == "hac"
    ):
        carriers = (
            cluster_config.get("feature", "solar+onwind-time").split("-")[0].split("+")
        )
    if params.simplify_network["algorithm"] == "hac":
        carriers = params.simplify_network["feature"].split("-")[0].split("+")
        for carrier in carriers:
            buses_i = list(
                set(n.buses.index) - set(n.generators.query("carrier == @carrier").bus)
            )
            logger.info(
                f"clustering preparaton (hac): aggregating {len(buses_i)} buses of type {carrier}."
                f"clustering preparation (hac): aggregating {len(buses_i)} buses of type {carrier}."
            )
            n, busmap_hac = aggregate_to_substations(n, aggregation_strategies, buses_i)
            busmaps.append(busmap_hac)
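The HAC outlier treatment identifies buses that carry no generator of the considered carrier via a set difference on the bus index. A self-contained sketch with made-up buses and generators tables:

```python
# The outlier check above, with toy data (bus and carrier names are illustrative).
import pandas as pd

buses = pd.DataFrame(index=["DE0", "DE1", "FR0"])
generators = pd.DataFrame({"carrier": ["onwind", "solar"], "bus": ["DE0", "FR0"]})

carrier = "onwind"
buses_i = list(set(buses.index) - set(generators.query("carrier == @carrier").bus))
print(buses_i)  # ['DE1', 'FR0'] (order not guaranteed): buses without an onwind profile
```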
@ -577,9 +599,10 @@ if __name__ == "__main__":
        n, cluster_map = cluster(
            n,
            int(snakemake.wildcards.simpl),
            snakemake.config,
            cluster_config.get("algorithm", "hac"),
            cluster_config.get("feature", None),
            params.focus_weights,
            solver_name,
            params.simplify_network["algorithm"],
            params.simplify_network["feature"],
            aggregation_strategies,
        )
        busmaps.append(cluster_map)

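Each simplification step appends its busmap to `busmaps`; the final composition into one old-bus-to-new-bus mapping is not shown in this excerpt, but the usual pandas pattern for chaining such maps is a `reduce` over `Series.map`. An illustrative sketch with invented bus names:

```python
# How a chain of busmaps is typically composed into a single mapping (illustrative;
# the reduce step itself is not part of the excerpt above).
from functools import reduce
import pandas as pd

trafo_map = pd.Series({"bus220": "bus380", "bus380": "bus380", "busA": "busA"})
links_map = pd.Series({"bus380": "busA", "busA": "busA"})

busmaps = [trafo_map, links_map]
busmap = reduce(lambda x, y: x.map(y), busmaps)
print(busmap)  # bus220 -> busA, bus380 -> busA, busA -> busA
```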
@ -44,14 +44,14 @@ pypsa.pf.logger.setLevel(logging.WARNING)
from pypsa.descriptors import get_switchable_as_dense as get_as_dense


def add_land_use_constraint(n, config):
def add_land_use_constraint(n, planning_horizons, config):
    if "m" in snakemake.wildcards.clusters:
        _add_land_use_constraint_m(n, config)
        _add_land_use_constraint_m(n, planning_horizons, config)
    else:
        _add_land_use_constraint(n, config)
        _add_land_use_constraint(n)


def _add_land_use_constraint(n, config):
def _add_land_use_constraint(n):
    # warning: this will miss existing offwind which is not classed AC-DC and has carrier 'offwind'

    for carrier in ["solar", "onwind", "offwind-ac", "offwind-dc"]:
@ -80,10 +80,10 @@ def _add_land_use_constraint(n, config):
    n.generators.p_nom_max.clip(lower=0, inplace=True)


def _add_land_use_constraint_m(n, config):
def _add_land_use_constraint_m(n, planning_horizons, config):
    # if generators clustering is lower than network clustering, land_use accounting is at generators clusters

    planning_horizons = config["scenario"]["planning_horizons"]
    planning_horizons = param["planning_horizons"]
    grouping_years = config["existing_capacities"]["grouping_years"]
    current_horizon = snakemake.wildcards.planning_horizons

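For context on the `p_nom_max.clip(lower=0, ...)` line: the land-use constraint reduces the remaining extendable potential by capacity already built in earlier planning horizons and prevents it from going negative. The subtraction itself is outside this excerpt, so the following is only a sketch of the accounting idea on toy numbers, not the project's data model:

```python
# Illustrative land-use accounting: remaining potential = potential - existing, floored at 0.
import pandas as pd

p_nom_max = pd.Series({"DE0 solar-2030": 10.0, "DE1 solar-2030": 4.0})
existing = pd.Series({"DE0 solar-2030": 3.0, "DE1 solar-2030": 6.0})

p_nom_max = (p_nom_max - existing).clip(lower=0)
print(p_nom_max)  # DE0: 7.0 of potential left, DE1: 0.0 (already exhausted)
```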
@ -141,7 +141,15 @@ def add_co2_sequestration_limit(n, limit=200):
    )


def prepare_network(n, solve_opts=None, config=None):
def prepare_network(
    n,
    solve_opts=None,
    config=None,
    foresight=None,
    planning_horizons=None,
    co2_sequestration_potential=None,
):

    if snakemake.config["existing_capacities"]["unit_commitment"]:
        add_unit_commitment(n, snakemake.input.unit_commitment_params)

@ -194,11 +202,11 @@ def prepare_network(n, solve_opts=None, config=None):
        n.set_snapshots(n.snapshots[:nhours])
        n.snapshot_weightings[:] = 8760.0 / nhours

    if config["foresight"] == "myopic":
        add_land_use_constraint(n, config)
    if foresight == "myopic":
        add_land_use_constraint(n, planning_horizons, config)

    if n.stores.carrier.eq("co2 stored").any():
        limit = config["sector"].get("co2_sequestration_potential", 200)
        limit = co2_sequestration_potential
        add_co2_sequestration_limit(n, limit=limit)

    return n
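The `nhours` short-circuit in this hunk keeps only the first `nhours` snapshots but rescales their weightings to `8760.0 / nhours`, so the truncated horizon still represents a full year in annualised terms. A toy example of that rescaling, independent of PyPSA:

```python
# Snapshot truncation with weighting rescale: the weighted horizon still sums to 8760 h.
import pandas as pd

snapshots = pd.date_range("2013-01-01", periods=8760, freq="h")
weightings = pd.Series(1.0, index=snapshots)

nhours = 100
snapshots = snapshots[:nhours]
weightings = pd.Series(8760.0 / nhours, index=snapshots)

print(weightings.sum())  # 8760.0 -> annualised quantities keep their scale
```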
@ -606,16 +614,15 @@ def add_unit_commitment(n, fn):
        n.df(c).loc[gen_i, "committable"] = True


def solve_network(n, config, opts="", **kwargs):
    set_of_options = config["solving"]["solver"]["options"]
    solver_options = (
        config["solving"]["solver_options"][set_of_options] if set_of_options else {}
    )
    solver_name = config["solving"]["solver"]["name"]
    cf_solving = config["solving"]["options"]
def solve_network(n, config, solving, opts="", **kwargs):
    set_of_options = solving["solver"]["options"]
    solver_options = solving["solver_options"][set_of_options] if set_of_options else {}
    solver_name = solving["solver"]["name"]
    cf_solving = solving["options"]
    track_iterations = cf_solving.get("track_iterations", False)
    min_iterations = cf_solving.get("min_iterations", 4)
    max_iterations = cf_solving.get("max_iterations", 6)
    transmission_losses = cf_solving.get("transmission_losses", 0)
    linearized_unit_commitment = cf_solving.get("linearized_unit_commitment", False)

    # add to network for extra_functionality
@ -630,6 +637,7 @@ def solve_network(n, config, opts="", **kwargs):
    if skip_iterations:
        status, condition = n.optimize(
            solver_name=solver_name,
            transmission_losses=transmission_losses,
            extra_functionality=extra_functionality,
            linearized_unit_commitment=linearized_unit_commitment,
            **solver_options,
@ -642,6 +650,7 @@ def solve_network(n, config, opts="", **kwargs):
            min_iterations=min_iterations,
            max_iterations=max_iterations,
            linearized_unit_commitment=linearized_unit_commitment,
            transmission_losses=transmission_losses,
            extra_functionality=extra_functionality,
            **solver_options,
            **kwargs,
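After this change, `solve_network` reads all solver settings from the `solving` sub-config rather than the full `config`. The lookup logic can be exercised with a plain stand-in dictionary; the keys mirror the excerpt, but the values below are made up for illustration:

```python
# Minimal stand-in for the `solving` sub-config; values are invented.
solving = {
    "solver": {"name": "glpk", "options": "default"},
    "solver_options": {"default": {"mipgap": 0.01}},
    "options": {"skip_iterations": True, "transmission_losses": 0},
}

set_of_options = solving["solver"]["options"]
solver_options = solving["solver_options"][set_of_options] if set_of_options else {}
solver_name = solving["solver"]["name"]
cf_solving = solving["options"]

print(solver_name, solver_options, cf_solving.get("transmission_losses", 0))
# glpk {'mipgap': 0.01} 0
```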
@ -682,25 +691,32 @@ if __name__ == "__main__":
    if "sector_opts" in snakemake.wildcards.keys():
        opts += "-" + snakemake.wildcards.sector_opts
    opts = [o for o in opts.split("-") if o != ""]
    solve_opts = snakemake.config["solving"]["options"]
    solve_opts = snakemake.params.solving["options"]

    np.random.seed(solve_opts.get("seed", 123))

    fn = getattr(snakemake.log, "memory", None)

    if "overrides" in snakemake.input.keys():
        overrides = override_component_attrs(snakemake.input.overrides)
        n = pypsa.Network(snakemake.input.network, override_component_attrs=overrides)
    else:
        n = pypsa.Network(snakemake.input.network)

    n = prepare_network(n, solve_opts, config=snakemake.config)
    n = prepare_network(
        n,
        solve_opts,
        config=snakemake.config,
        foresight=snakemake.params.foresight,
        planning_horizons=snakemake.params.planning_horizons,
        co2_sequestration_potential=snakemake.params["co2_sequestration_potential"],
    )

    n = solve_network(
        n, config=snakemake.config, opts=opts, log_fn=snakemake.log.solver
        n,
        config=snakemake.config,
        solving=snakemake.params.solving,
        opts=opts,
        log_fn=snakemake.log.solver,
    )

    n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
    n.export_to_netcdf(snakemake.output[0])

    # logger.info("Maximum memory usage: {}".format(mem.mem_usage))

@ -17,7 +17,6 @@ from _helpers import (
    update_config_with_sector_opts,
)
from solve_network import prepare_network, solve_network
from vresutils.benchmark import memory_logger

logger = logging.getLogger(__name__)

@ -42,17 +41,13 @@ if __name__ == "__main__":

    opts = (snakemake.wildcards.opts + "-" + snakemake.wildcards.sector_opts).split("-")
    opts = [o for o in opts if o != ""]
    solve_opts = snakemake.config["solving"]["options"]
    solve_opts = snakemake.params.options

    np.random.seed(solve_opts.get("seed", 123))

    fn = getattr(snakemake.log, "memory", None)
    with memory_logger(filename=fn, interval=30.0) as mem:
    if "overrides" in snakemake.input:
        overrides = override_component_attrs(snakemake.input.overrides)
        n = pypsa.Network(
            snakemake.input.network, override_component_attrs=overrides
        )
        n = pypsa.Network(snakemake.input.network, override_component_attrs=overrides)
    else:
        n = pypsa.Network(snakemake.input.network)

@ -64,5 +59,3 @@ if __name__ == "__main__":

    n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
    n.export_to_netcdf(snakemake.output[0])

    logger.info("Maximum memory usage: {}".format(mem.mem_usage))
