Merge branch 'master' into misc/deprecations
This commit is contained in:
commit
e097aa48ae
1
.github/workflows/ci.yaml
vendored
1
.github/workflows/ci.yaml
vendored
@ -83,6 +83,7 @@ jobs:
|
|||||||
snakemake -call solve_elec_networks --configfile config/test/config.electricity.yaml --rerun-triggers=mtime
|
snakemake -call solve_elec_networks --configfile config/test/config.electricity.yaml --rerun-triggers=mtime
|
||||||
snakemake -call all --configfile config/test/config.overnight.yaml --rerun-triggers=mtime
|
snakemake -call all --configfile config/test/config.overnight.yaml --rerun-triggers=mtime
|
||||||
snakemake -call all --configfile config/test/config.myopic.yaml --rerun-triggers=mtime
|
snakemake -call all --configfile config/test/config.myopic.yaml --rerun-triggers=mtime
|
||||||
|
snakemake -call all --configfile config/test/config.perfect.yaml --rerun-triggers=mtime
|
||||||
|
|
||||||
- name: Upload artifacts
|
- name: Upload artifacts
|
||||||
uses: actions/upload-artifact@v3
|
uses: actions/upload-artifact@v3
|
||||||
|
16
.gitignore
vendored
16
.gitignore
vendored
@ -8,6 +8,7 @@ __pycache__
|
|||||||
*dconf
|
*dconf
|
||||||
gurobi.log
|
gurobi.log
|
||||||
.vscode
|
.vscode
|
||||||
|
*.orig
|
||||||
|
|
||||||
/bak
|
/bak
|
||||||
/resources
|
/resources
|
||||||
@ -28,23 +29,24 @@ dconf
|
|||||||
/data/links_p_nom.csv
|
/data/links_p_nom.csv
|
||||||
/data/*totals.csv
|
/data/*totals.csv
|
||||||
/data/biomass*
|
/data/biomass*
|
||||||
/data/emobility/
|
/data/bundle-sector/emobility/
|
||||||
/data/eea*
|
/data/bundle-sector/eea*
|
||||||
/data/jrc*
|
/data/bundle-sector/jrc*
|
||||||
/data/heating/
|
/data/heating/
|
||||||
/data/eurostat*
|
/data/bundle-sector/eurostat*
|
||||||
/data/odyssee/
|
/data/odyssee/
|
||||||
/data/transport_data.csv
|
/data/transport_data.csv
|
||||||
/data/switzerland*
|
/data/bundle-sector/switzerland*
|
||||||
/data/.nfs*
|
/data/.nfs*
|
||||||
/data/Industrial_Database.csv
|
/data/bundle-sector/Industrial_Database.csv
|
||||||
/data/retro/tabula-calculator-calcsetbuilding.csv
|
/data/retro/tabula-calculator-calcsetbuilding.csv
|
||||||
/data/nuts*
|
/data/bundle-sector/nuts*
|
||||||
data/gas_network/scigrid-gas/
|
data/gas_network/scigrid-gas/
|
||||||
data/costs_*.csv
|
data/costs_*.csv
|
||||||
|
|
||||||
dask-worker-space/
|
dask-worker-space/
|
||||||
publications.jrc.ec.europa.eu/
|
publications.jrc.ec.europa.eu/
|
||||||
|
d1gam3xoknrgr2.cloudfront.net/
|
||||||
|
|
||||||
*.org
|
*.org
|
||||||
|
|
||||||
|
@ -5,7 +5,7 @@ exclude: "^LICENSES"
|
|||||||
|
|
||||||
repos:
|
repos:
|
||||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||||
rev: v4.4.0
|
rev: v4.5.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: check-merge-conflict
|
- id: check-merge-conflict
|
||||||
- id: end-of-file-fixer
|
- id: end-of-file-fixer
|
||||||
@ -17,7 +17,7 @@ repos:
|
|||||||
|
|
||||||
# Sort package imports alphabetically
|
# Sort package imports alphabetically
|
||||||
- repo: https://github.com/PyCQA/isort
|
- repo: https://github.com/PyCQA/isort
|
||||||
rev: 5.12.0
|
rev: 5.13.2
|
||||||
hooks:
|
hooks:
|
||||||
- id: isort
|
- id: isort
|
||||||
args: ["--profile", "black", "--filter-files"]
|
args: ["--profile", "black", "--filter-files"]
|
||||||
@ -30,10 +30,10 @@ repos:
|
|||||||
|
|
||||||
# Find common spelling mistakes in comments and docstrings
|
# Find common spelling mistakes in comments and docstrings
|
||||||
- repo: https://github.com/codespell-project/codespell
|
- repo: https://github.com/codespell-project/codespell
|
||||||
rev: v2.2.5
|
rev: v2.2.6
|
||||||
hooks:
|
hooks:
|
||||||
- id: codespell
|
- id: codespell
|
||||||
args: ['--ignore-regex="(\b[A-Z]+\b)"', '--ignore-words-list=fom,appartment,bage,ore,setis,tabacco,berfore'] # Ignore capital case words, e.g. country codes
|
args: ['--ignore-regex="(\b[A-Z]+\b)"', '--ignore-words-list=fom,appartment,bage,ore,setis,tabacco,berfore,vor'] # Ignore capital case words, e.g. country codes
|
||||||
types_or: [python, rst, markdown]
|
types_or: [python, rst, markdown]
|
||||||
files: ^(scripts|doc)/
|
files: ^(scripts|doc)/
|
||||||
|
|
||||||
@ -45,13 +45,13 @@ repos:
|
|||||||
args: ["--in-place", "--make-summary-multi-line", "--pre-summary-newline"]
|
args: ["--in-place", "--make-summary-multi-line", "--pre-summary-newline"]
|
||||||
|
|
||||||
- repo: https://github.com/keewis/blackdoc
|
- repo: https://github.com/keewis/blackdoc
|
||||||
rev: v0.3.8
|
rev: v0.3.9
|
||||||
hooks:
|
hooks:
|
||||||
- id: blackdoc
|
- id: blackdoc
|
||||||
|
|
||||||
# Formatting with "black" coding style
|
# Formatting with "black" coding style
|
||||||
- repo: https://github.com/psf/black
|
- repo: https://github.com/psf/black
|
||||||
rev: 23.7.0
|
rev: 23.12.1
|
||||||
hooks:
|
hooks:
|
||||||
# Format Python files
|
# Format Python files
|
||||||
- id: black
|
- id: black
|
||||||
@ -67,14 +67,14 @@ repos:
|
|||||||
|
|
||||||
# Do YAML formatting (before the linter checks it for misses)
|
# Do YAML formatting (before the linter checks it for misses)
|
||||||
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
|
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
|
||||||
rev: v2.10.0
|
rev: v2.12.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: pretty-format-yaml
|
- id: pretty-format-yaml
|
||||||
args: [--autofix, --indent, "2", --preserve-quotes]
|
args: [--autofix, --indent, "2", --preserve-quotes]
|
||||||
|
|
||||||
# Format Snakemake rule / workflow files
|
# Format Snakemake rule / workflow files
|
||||||
- repo: https://github.com/snakemake/snakefmt
|
- repo: https://github.com/snakemake/snakefmt
|
||||||
rev: v0.8.4
|
rev: v0.8.5
|
||||||
hooks:
|
hooks:
|
||||||
- id: snakefmt
|
- id: snakefmt
|
||||||
|
|
||||||
|
@ -14,4 +14,3 @@ build:
|
|||||||
python:
|
python:
|
||||||
install:
|
install:
|
||||||
- requirements: doc/requirements.txt
|
- requirements: doc/requirements.txt
|
||||||
system_packages: false
|
|
||||||
|
11
.sync-send
Normal file
11
.sync-send
Normal file
@ -0,0 +1,11 @@
|
|||||||
|
# SPDX-FileCopyrightText: : 2021-2023 The PyPSA-Eur Authors
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: CC0-1.0
|
||||||
|
|
||||||
|
rules
|
||||||
|
scripts
|
||||||
|
config
|
||||||
|
config/test
|
||||||
|
envs
|
||||||
|
matplotlibrc
|
||||||
|
Snakefile
|
@ -1,21 +0,0 @@
|
|||||||
# SPDX-FileCopyrightText: : 2021-2023 The PyPSA-Eur Authors
|
|
||||||
#
|
|
||||||
# SPDX-License-Identifier: CC0-1.0
|
|
||||||
|
|
||||||
.snakemake
|
|
||||||
.git
|
|
||||||
.pytest_cache
|
|
||||||
.ipynb_checkpoints
|
|
||||||
.vscode
|
|
||||||
.DS_Store
|
|
||||||
__pycache__
|
|
||||||
*.pyc
|
|
||||||
*.pyo
|
|
||||||
*.ipynb
|
|
||||||
notebooks
|
|
||||||
doc
|
|
||||||
cutouts
|
|
||||||
data
|
|
||||||
benchmarks
|
|
||||||
*.nc
|
|
||||||
configs
|
|
@ -1,23 +0,0 @@
|
|||||||
# SPDX-FileCopyrightText: : 2021-2023 The PyPSA-Eur Authors
|
|
||||||
#
|
|
||||||
# SPDX-License-Identifier: CC0-1.0
|
|
||||||
|
|
||||||
.snakemake
|
|
||||||
.git
|
|
||||||
.pytest_cache
|
|
||||||
.ipynb_checkpoints
|
|
||||||
.vscode
|
|
||||||
.DS_Store
|
|
||||||
__pycache__
|
|
||||||
*.pyc
|
|
||||||
*.pyo
|
|
||||||
*.ipynb
|
|
||||||
notebooks
|
|
||||||
benchmarks
|
|
||||||
logs
|
|
||||||
resources*
|
|
||||||
results
|
|
||||||
networks*
|
|
||||||
cutouts
|
|
||||||
data/bundle
|
|
||||||
doc
|
|
@ -6,7 +6,7 @@ cff-version: 1.1.0
|
|||||||
message: "If you use this package, please cite it in the following way."
|
message: "If you use this package, please cite it in the following way."
|
||||||
title: "PyPSA-Eur: An open sector-coupled optimisation model of the European energy system"
|
title: "PyPSA-Eur: An open sector-coupled optimisation model of the European energy system"
|
||||||
repository: https://github.com/pypsa/pypsa-eur
|
repository: https://github.com/pypsa/pypsa-eur
|
||||||
version: 0.8.0
|
version: 0.8.1
|
||||||
license: MIT
|
license: MIT
|
||||||
authors:
|
authors:
|
||||||
- family-names: Brown
|
- family-names: Brown
|
||||||
|
25
README.md
25
README.md
@ -9,7 +9,7 @@ SPDX-License-Identifier: CC-BY-4.0
|
|||||||
![Size](https://img.shields.io/github/repo-size/pypsa/pypsa-eur)
|
![Size](https://img.shields.io/github/repo-size/pypsa/pypsa-eur)
|
||||||
[![Zenodo PyPSA-Eur](https://zenodo.org/badge/DOI/10.5281/zenodo.3520874.svg)](https://doi.org/10.5281/zenodo.3520874)
|
[![Zenodo PyPSA-Eur](https://zenodo.org/badge/DOI/10.5281/zenodo.3520874.svg)](https://doi.org/10.5281/zenodo.3520874)
|
||||||
[![Zenodo PyPSA-Eur-Sec](https://zenodo.org/badge/DOI/10.5281/zenodo.3938042.svg)](https://doi.org/10.5281/zenodo.3938042)
|
[![Zenodo PyPSA-Eur-Sec](https://zenodo.org/badge/DOI/10.5281/zenodo.3938042.svg)](https://doi.org/10.5281/zenodo.3938042)
|
||||||
[![Snakemake](https://img.shields.io/badge/snakemake-≥5.0.0-brightgreen.svg?style=flat)](https://snakemake.readthedocs.io)
|
[![Snakemake](https://img.shields.io/badge/snakemake-≥7.7.0-brightgreen.svg?style=flat)](https://snakemake.readthedocs.io)
|
||||||
[![REUSE status](https://api.reuse.software/badge/github.com/pypsa/pypsa-eur)](https://api.reuse.software/info/github.com/pypsa/pypsa-eur)
|
[![REUSE status](https://api.reuse.software/badge/github.com/pypsa/pypsa-eur)](https://api.reuse.software/info/github.com/pypsa/pypsa-eur)
|
||||||
[![Stack Exchange questions](https://img.shields.io/stackexchange/stackoverflow/t/pypsa)](https://stackoverflow.com/questions/tagged/pypsa)
|
[![Stack Exchange questions](https://img.shields.io/stackexchange/stackoverflow/t/pypsa)](https://stackoverflow.com/questions/tagged/pypsa)
|
||||||
|
|
||||||
@ -35,17 +35,18 @@ The model is designed to be imported into the open toolbox
|
|||||||
[PyPSA](https://github.com/PyPSA/PyPSA).
|
[PyPSA](https://github.com/PyPSA/PyPSA).
|
||||||
|
|
||||||
**WARNING**: PyPSA-Eur is under active development and has several
|
**WARNING**: PyPSA-Eur is under active development and has several
|
||||||
[limitations](https://pypsa-eur.readthedocs.io/en/latest/limitations.html)
|
[limitations](https://pypsa-eur.readthedocs.io/en/latest/limitations.html) which
|
||||||
which you should understand before using the model. The github repository
|
you should understand before using the model. The github repository
|
||||||
[issues](https://github.com/PyPSA/pypsa-eur/issues) collect known topics we are
|
[issues](https://github.com/PyPSA/pypsa-eur/issues) collect known topics we are
|
||||||
working on (please feel free to help or make suggestions). The
|
working on (please feel free to help or make suggestions). The
|
||||||
[documentation](https://pypsa-eur.readthedocs.io/) remains somewhat patchy. You
|
[documentation](https://pypsa-eur.readthedocs.io/) remains somewhat patchy. You
|
||||||
can find showcases of the model's capabilities in the preprint [Benefits of a
|
can find showcases of the model's capabilities in the Joule paper [The potential
|
||||||
Hydrogen Network in Europe](https://arxiv.org/abs/2207.05816), a [paper in Joule
|
role of a hydrogen network in
|
||||||
with a description of the industry sector](https://arxiv.org/abs/2109.09563), or
|
Europe](https://doi.org/10.1016/j.joule.2023.06.016), another [paper in Joule
|
||||||
in [a 2021 presentation at EMP-E](https://nworbmot.org/energy/brown-empe.pdf).
|
with a description of the industry
|
||||||
We cannot support this model if you choose to use it. We do not recommend to use
|
sector](https://doi.org/10.1016/j.joule.2022.04.016), or in [a 2021 presentation
|
||||||
the full resolution network model for simulations. At high granularity the
|
at EMP-E](https://nworbmot.org/energy/brown-empe.pdf). We do not recommend to
|
||||||
|
use the full resolution network model for simulations. At high granularity the
|
||||||
assignment of loads and generators to the nearest network node may not be a
|
assignment of loads and generators to the nearest network node may not be a
|
||||||
correct assumption, depending on the topology of the underlying distribution
|
correct assumption, depending on the topology of the underlying distribution
|
||||||
grid, and local grid bottlenecks may cause unrealistic load-shedding or
|
grid, and local grid bottlenecks may cause unrealistic load-shedding or
|
||||||
@ -60,9 +61,9 @@ The dataset consists of:
|
|||||||
|
|
||||||
- A grid model based on a modified [GridKit](https://github.com/bdw/GridKit)
|
- A grid model based on a modified [GridKit](https://github.com/bdw/GridKit)
|
||||||
extraction of the [ENTSO-E Transmission System
|
extraction of the [ENTSO-E Transmission System
|
||||||
Map](https://www.entsoe.eu/data/map/). The grid model contains 6763 lines
|
Map](https://www.entsoe.eu/data/map/). The grid model contains 7072 lines
|
||||||
(alternating current lines at and above 220kV voltage level and all high
|
(alternating current lines at and above 220kV voltage level and all high
|
||||||
voltage direct current lines) and 3642 substations.
|
voltage direct current lines) and 3803 substations.
|
||||||
- The open power plant database
|
- The open power plant database
|
||||||
[powerplantmatching](https://github.com/FRESNA/powerplantmatching).
|
[powerplantmatching](https://github.com/FRESNA/powerplantmatching).
|
||||||
- Electrical demand time series from the
|
- Electrical demand time series from the
|
||||||
@ -102,6 +103,6 @@ We strongly welcome anyone interested in contributing to this project. If you ha
|
|||||||
# Licence
|
# Licence
|
||||||
|
|
||||||
The code in PyPSA-Eur is released as free software under the
|
The code in PyPSA-Eur is released as free software under the
|
||||||
[MIT License](https://opensource.org/licenses/MIT), see `LICENSE.txt`.
|
[MIT License](https://opensource.org/licenses/MIT), see [`doc/licenses.rst`](doc/licenses.rst).
|
||||||
However, different licenses and terms of use may apply to the various
|
However, different licenses and terms of use may apply to the various
|
||||||
input data.
|
input data.
|
||||||
|
42
Snakefile
42
Snakefile
@ -40,7 +40,7 @@ localrules:
|
|||||||
|
|
||||||
wildcard_constraints:
|
wildcard_constraints:
|
||||||
simpl="[a-zA-Z0-9]*",
|
simpl="[a-zA-Z0-9]*",
|
||||||
clusters="[0-9]+m?|all",
|
clusters="[0-9]+(m|c)?|all",
|
||||||
ll="(v|c)([0-9\.]+|opt)",
|
ll="(v|c)([0-9\.]+|opt)",
|
||||||
opts="[-+a-zA-Z0-9\.]*",
|
opts="[-+a-zA-Z0-9\.]*",
|
||||||
sector_opts="[-+a-zA-Z0-9\.\s]*",
|
sector_opts="[-+a-zA-Z0-9\.\s]*",
|
||||||
@ -53,6 +53,7 @@ include: "rules/build_electricity.smk"
|
|||||||
include: "rules/build_sector.smk"
|
include: "rules/build_sector.smk"
|
||||||
include: "rules/solve_electricity.smk"
|
include: "rules/solve_electricity.smk"
|
||||||
include: "rules/postprocess.smk"
|
include: "rules/postprocess.smk"
|
||||||
|
include: "rules/validate.smk"
|
||||||
|
|
||||||
|
|
||||||
if config["foresight"] == "overnight":
|
if config["foresight"] == "overnight":
|
||||||
@ -65,13 +66,31 @@ if config["foresight"] == "myopic":
|
|||||||
include: "rules/solve_myopic.smk"
|
include: "rules/solve_myopic.smk"
|
||||||
|
|
||||||
|
|
||||||
|
if config["foresight"] == "perfect":
|
||||||
|
|
||||||
|
include: "rules/solve_perfect.smk"
|
||||||
|
|
||||||
|
|
||||||
|
rule all:
|
||||||
|
input:
|
||||||
|
RESULTS + "graphs/costs.pdf",
|
||||||
|
default_target: True
|
||||||
|
|
||||||
|
|
||||||
rule purge:
|
rule purge:
|
||||||
message:
|
|
||||||
"Purging generated resources, results and docs. Downloads are kept."
|
|
||||||
run:
|
run:
|
||||||
rmtree("resources/", ignore_errors=True)
|
import builtins
|
||||||
rmtree("results/", ignore_errors=True)
|
|
||||||
rmtree("doc/_build", ignore_errors=True)
|
do_purge = builtins.input(
|
||||||
|
"Do you really want to delete all generated resources, \nresults and docs (downloads are kept)? [y/N] "
|
||||||
|
)
|
||||||
|
if do_purge == "y":
|
||||||
|
rmtree("resources/", ignore_errors=True)
|
||||||
|
rmtree("results/", ignore_errors=True)
|
||||||
|
rmtree("doc/_build", ignore_errors=True)
|
||||||
|
print("Purging generated resources, results and docs. Downloads are kept.")
|
||||||
|
else:
|
||||||
|
raise Exception(f"Input {do_purge}. Aborting purge.")
|
||||||
|
|
||||||
|
|
||||||
rule dag:
|
rule dag:
|
||||||
@ -98,3 +117,14 @@ rule doc:
|
|||||||
directory("doc/_build"),
|
directory("doc/_build"),
|
||||||
shell:
|
shell:
|
||||||
"make -C doc html"
|
"make -C doc html"
|
||||||
|
|
||||||
|
|
||||||
|
rule sync:
|
||||||
|
params:
|
||||||
|
cluster=f"{config['remote']['ssh']}:{config['remote']['path']}",
|
||||||
|
shell:
|
||||||
|
"""
|
||||||
|
rsync -uvarh --ignore-missing-args --files-from=.sync-send . {params.cluster}
|
||||||
|
rsync -uvarh --no-g {params.cluster}/results . || echo "No results directory, skipping rsync"
|
||||||
|
rsync -uvarh --no-g {params.cluster}/logs . || echo "No logs directory, skipping rsync"
|
||||||
|
"""
|
||||||
|
@ -3,13 +3,21 @@
|
|||||||
# SPDX-License-Identifier: CC0-1.0
|
# SPDX-License-Identifier: CC0-1.0
|
||||||
|
|
||||||
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#top-level-configuration
|
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#top-level-configuration
|
||||||
version: 0.8.0
|
version: 0.8.1
|
||||||
tutorial: false
|
tutorial: false
|
||||||
|
|
||||||
logging:
|
logging:
|
||||||
level: INFO
|
level: INFO
|
||||||
format: '%(levelname)s:%(name)s:%(message)s'
|
format: '%(levelname)s:%(name)s:%(message)s'
|
||||||
|
|
||||||
|
private:
|
||||||
|
keys:
|
||||||
|
entsoe_api:
|
||||||
|
|
||||||
|
remote:
|
||||||
|
ssh: ""
|
||||||
|
path: ""
|
||||||
|
|
||||||
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run
|
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#run
|
||||||
run:
|
run:
|
||||||
name: ""
|
name: ""
|
||||||
@ -60,6 +68,7 @@ enable:
|
|||||||
retrieve_sector_databundle: true
|
retrieve_sector_databundle: true
|
||||||
retrieve_cost_data: true
|
retrieve_cost_data: true
|
||||||
build_cutout: false
|
build_cutout: false
|
||||||
|
retrieve_irena: false
|
||||||
retrieve_cutout: true
|
retrieve_cutout: true
|
||||||
build_natura_raster: false
|
build_natura_raster: false
|
||||||
retrieve_natura_raster: true
|
retrieve_natura_raster: true
|
||||||
@ -77,7 +86,7 @@ co2_budget:
|
|||||||
|
|
||||||
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#electricity
|
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#electricity
|
||||||
electricity:
|
electricity:
|
||||||
voltages: [220., 300., 380.]
|
voltages: [220., 300., 380., 500., 750.]
|
||||||
gaslimit: false
|
gaslimit: false
|
||||||
co2limit: 7.75e+7
|
co2limit: 7.75e+7
|
||||||
co2base: 1.487e+9
|
co2base: 1.487e+9
|
||||||
@ -126,14 +135,14 @@ atlite:
|
|||||||
# module: era5
|
# module: era5
|
||||||
europe-2013-era5:
|
europe-2013-era5:
|
||||||
module: era5 # in priority order
|
module: era5 # in priority order
|
||||||
x: [-12., 35.]
|
x: [-12., 42.]
|
||||||
y: [33., 72]
|
y: [33., 72]
|
||||||
dx: 0.3
|
dx: 0.3
|
||||||
dy: 0.3
|
dy: 0.3
|
||||||
time: ['2013', '2013']
|
time: ['2013', '2013']
|
||||||
europe-2013-sarah:
|
europe-2013-sarah:
|
||||||
module: [sarah, era5] # in priority order
|
module: [sarah, era5] # in priority order
|
||||||
x: [-12., 45.]
|
x: [-12., 42.]
|
||||||
y: [33., 65]
|
y: [33., 65]
|
||||||
dx: 0.2
|
dx: 0.2
|
||||||
dy: 0.2
|
dy: 0.2
|
||||||
@ -209,10 +218,14 @@ renewable:
|
|||||||
carriers: [ror, PHS, hydro]
|
carriers: [ror, PHS, hydro]
|
||||||
PHS_max_hours: 6
|
PHS_max_hours: 6
|
||||||
hydro_max_hours: "energy_capacity_totals_by_country" # one of energy_capacity_totals_by_country, estimate_by_large_installations or a float
|
hydro_max_hours: "energy_capacity_totals_by_country" # one of energy_capacity_totals_by_country, estimate_by_large_installations or a float
|
||||||
|
flatten_dispatch: false
|
||||||
|
flatten_dispatch_buffer: 0.2
|
||||||
clip_min_inflow: 1.0
|
clip_min_inflow: 1.0
|
||||||
|
|
||||||
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#conventional
|
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#conventional
|
||||||
conventional:
|
conventional:
|
||||||
|
unit_commitment: false
|
||||||
|
dynamic_fuel_price: false
|
||||||
nuclear:
|
nuclear:
|
||||||
p_max_pu: "data/nuclear_p_max_pu.csv" # float of file name
|
p_max_pu: "data/nuclear_p_max_pu.csv" # float of file name
|
||||||
|
|
||||||
@ -222,11 +235,20 @@ lines:
|
|||||||
220.: "Al/St 240/40 2-bundle 220.0"
|
220.: "Al/St 240/40 2-bundle 220.0"
|
||||||
300.: "Al/St 240/40 3-bundle 300.0"
|
300.: "Al/St 240/40 3-bundle 300.0"
|
||||||
380.: "Al/St 240/40 4-bundle 380.0"
|
380.: "Al/St 240/40 4-bundle 380.0"
|
||||||
|
500.: "Al/St 240/40 4-bundle 380.0"
|
||||||
|
750.: "Al/St 560/50 4-bundle 750.0"
|
||||||
s_max_pu: 0.7
|
s_max_pu: 0.7
|
||||||
s_nom_max: .inf
|
s_nom_max: .inf
|
||||||
max_extension: .inf
|
max_extension: .inf
|
||||||
length_factor: 1.25
|
length_factor: 1.25
|
||||||
|
reconnect_crimea: true
|
||||||
under_construction: 'zero' # 'zero': set capacity to zero, 'remove': remove, 'keep': with full capacity
|
under_construction: 'zero' # 'zero': set capacity to zero, 'remove': remove, 'keep': with full capacity
|
||||||
|
dynamic_line_rating:
|
||||||
|
activate: false
|
||||||
|
cutout: europe-2013-era5
|
||||||
|
correction_factor: 0.95
|
||||||
|
max_voltage_difference: false
|
||||||
|
max_line_rating: false
|
||||||
|
|
||||||
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#links
|
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#links
|
||||||
links:
|
links:
|
||||||
@ -430,10 +452,12 @@ sector:
|
|||||||
coal_cc: false
|
coal_cc: false
|
||||||
dac: true
|
dac: true
|
||||||
co2_vent: false
|
co2_vent: false
|
||||||
|
central_heat_vent: false
|
||||||
allam_cycle: false
|
allam_cycle: false
|
||||||
hydrogen_fuel_cell: true
|
hydrogen_fuel_cell: true
|
||||||
hydrogen_turbine: false
|
hydrogen_turbine: false
|
||||||
SMR: true
|
SMR: true
|
||||||
|
SMR_cc: true
|
||||||
regional_co2_sequestration_potential:
|
regional_co2_sequestration_potential:
|
||||||
enable: false
|
enable: false
|
||||||
attribute: 'conservative estimate Mt'
|
attribute: 'conservative estimate Mt'
|
||||||
@ -443,6 +467,7 @@ sector:
|
|||||||
years_of_storage: 25
|
years_of_storage: 25
|
||||||
co2_sequestration_potential: 200
|
co2_sequestration_potential: 200
|
||||||
co2_sequestration_cost: 10
|
co2_sequestration_cost: 10
|
||||||
|
co2_sequestration_lifetime: 50
|
||||||
co2_spatial: false
|
co2_spatial: false
|
||||||
co2network: false
|
co2network: false
|
||||||
cc_fraction: 0.9
|
cc_fraction: 0.9
|
||||||
@ -473,6 +498,20 @@ sector:
|
|||||||
OCGT: gas
|
OCGT: gas
|
||||||
biomass_to_liquid: false
|
biomass_to_liquid: false
|
||||||
biosng: false
|
biosng: false
|
||||||
|
limit_max_growth:
|
||||||
|
enable: false
|
||||||
|
# allowing 30% larger than max historic growth
|
||||||
|
factor: 1.3
|
||||||
|
max_growth: # unit GW
|
||||||
|
onwind: 16 # onshore max grow so far 16 GW in Europe https://www.iea.org/reports/renewables-2020/wind
|
||||||
|
solar: 28 # solar max grow so far 28 GW in Europe https://www.iea.org/reports/renewables-2020/solar-pv
|
||||||
|
offwind-ac: 35 # offshore max grow so far 3.5 GW in Europe https://windeurope.org/about-wind/statistics/offshore/european-offshore-wind-industry-key-trends-statistics-2019/
|
||||||
|
offwind-dc: 35
|
||||||
|
max_relative_growth:
|
||||||
|
onwind: 3
|
||||||
|
solar: 3
|
||||||
|
offwind-ac: 3
|
||||||
|
offwind-dc: 3
|
||||||
|
|
||||||
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#industry
|
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#industry
|
||||||
industry:
|
industry:
|
||||||
@ -525,11 +564,13 @@ industry:
|
|||||||
hotmaps_locate_missing: false
|
hotmaps_locate_missing: false
|
||||||
reference_year: 2015
|
reference_year: 2015
|
||||||
|
|
||||||
|
|
||||||
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#costs
|
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#costs
|
||||||
costs:
|
costs:
|
||||||
year: 2030
|
year: 2030
|
||||||
version: v0.6.0
|
version: v0.6.0
|
||||||
rooftop_share: 0.14 # based on the potentials, assuming (0.1 kW/m2 and 10 m2/person)
|
rooftop_share: 0.14 # based on the potentials, assuming (0.1 kW/m2 and 10 m2/person)
|
||||||
|
social_discountrate: 0.02
|
||||||
fill_values:
|
fill_values:
|
||||||
FOM: 0
|
FOM: 0
|
||||||
VOM: 0
|
VOM: 0
|
||||||
@ -557,6 +598,7 @@ costs:
|
|||||||
|
|
||||||
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#clustering
|
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#clustering
|
||||||
clustering:
|
clustering:
|
||||||
|
focus_weights: false
|
||||||
simplify_network:
|
simplify_network:
|
||||||
to_substations: false
|
to_substations: false
|
||||||
algorithm: kmeans # choose from: [hac, kmeans]
|
algorithm: kmeans # choose from: [hac, kmeans]
|
||||||
@ -568,16 +610,12 @@ clustering:
|
|||||||
algorithm: kmeans
|
algorithm: kmeans
|
||||||
feature: solar+onwind-time
|
feature: solar+onwind-time
|
||||||
exclude_carriers: []
|
exclude_carriers: []
|
||||||
|
consider_efficiency_classes: false
|
||||||
aggregation_strategies:
|
aggregation_strategies:
|
||||||
generators:
|
generators:
|
||||||
p_nom_max: sum
|
|
||||||
p_nom_min: sum
|
|
||||||
p_min_pu: mean
|
|
||||||
marginal_cost: mean
|
|
||||||
committable: any
|
committable: any
|
||||||
ramp_limit_up: max
|
ramp_limit_up: max
|
||||||
ramp_limit_down: max
|
ramp_limit_down: max
|
||||||
efficiency: mean
|
|
||||||
|
|
||||||
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#solving
|
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#solving
|
||||||
solving:
|
solving:
|
||||||
@ -585,13 +623,17 @@ solving:
|
|||||||
options:
|
options:
|
||||||
clip_p_max_pu: 1.e-2
|
clip_p_max_pu: 1.e-2
|
||||||
load_shedding: false
|
load_shedding: false
|
||||||
transmission_losses: 0
|
|
||||||
noisy_costs: true
|
noisy_costs: true
|
||||||
skip_iterations: true
|
skip_iterations: true
|
||||||
|
rolling_horizon: false
|
||||||
|
seed: 123
|
||||||
|
# options that go into the optimize function
|
||||||
track_iterations: false
|
track_iterations: false
|
||||||
min_iterations: 4
|
min_iterations: 4
|
||||||
max_iterations: 6
|
max_iterations: 6
|
||||||
seed: 123
|
transmission_losses: 0
|
||||||
|
linearized_unit_commitment: true
|
||||||
|
horizon: 365
|
||||||
|
|
||||||
solver:
|
solver:
|
||||||
name: gurobi
|
name: gurobi
|
||||||
@ -619,7 +661,6 @@ solving:
|
|||||||
AggFill: 0
|
AggFill: 0
|
||||||
PreDual: 0
|
PreDual: 0
|
||||||
GURO_PAR_BARDENSETHRESH: 200
|
GURO_PAR_BARDENSETHRESH: 200
|
||||||
seed: 10 # Consistent seed for all plattforms
|
|
||||||
gurobi-numeric-focus:
|
gurobi-numeric-focus:
|
||||||
name: gurobi
|
name: gurobi
|
||||||
NumericFocus: 3 # Favour numeric stability over speed
|
NumericFocus: 3 # Favour numeric stability over speed
|
||||||
@ -652,6 +693,7 @@ solving:
|
|||||||
glpk-default: {} # Used in CI
|
glpk-default: {} # Used in CI
|
||||||
|
|
||||||
mem: 30000 #memory in MB; 20 GB enough for 50+B+I+H2; 100 GB for 181+B+I+H2
|
mem: 30000 #memory in MB; 20 GB enough for 50+B+I+H2; 100 GB for 181+B+I+H2
|
||||||
|
walltime: "12:00:00"
|
||||||
|
|
||||||
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#plotting
|
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#plotting
|
||||||
plotting:
|
plotting:
|
||||||
@ -682,6 +724,9 @@ plotting:
|
|||||||
H2: "Hydrogen Storage"
|
H2: "Hydrogen Storage"
|
||||||
lines: "Transmission Lines"
|
lines: "Transmission Lines"
|
||||||
ror: "Run of River"
|
ror: "Run of River"
|
||||||
|
load: "Load Shedding"
|
||||||
|
ac: "AC"
|
||||||
|
dc: "DC"
|
||||||
|
|
||||||
tech_colors:
|
tech_colors:
|
||||||
# wind
|
# wind
|
||||||
@ -741,6 +786,7 @@ plotting:
|
|||||||
gas pipeline new: '#a87c62'
|
gas pipeline new: '#a87c62'
|
||||||
# oil
|
# oil
|
||||||
oil: '#c9c9c9'
|
oil: '#c9c9c9'
|
||||||
|
imported oil: '#a3a3a3'
|
||||||
oil boiler: '#adadad'
|
oil boiler: '#adadad'
|
||||||
residential rural oil boiler: '#a9a9a9'
|
residential rural oil boiler: '#a9a9a9'
|
||||||
services rural oil boiler: '#a5a5a5'
|
services rural oil boiler: '#a5a5a5'
|
||||||
@ -759,6 +805,7 @@ plotting:
|
|||||||
Coal: '#545454'
|
Coal: '#545454'
|
||||||
coal: '#545454'
|
coal: '#545454'
|
||||||
Coal marginal: '#545454'
|
Coal marginal: '#545454'
|
||||||
|
coal for industry: '#343434'
|
||||||
solid: '#545454'
|
solid: '#545454'
|
||||||
Lignite: '#826837'
|
Lignite: '#826837'
|
||||||
lignite: '#826837'
|
lignite: '#826837'
|
||||||
@ -835,6 +882,7 @@ plotting:
|
|||||||
services rural heat: '#ff9c9c'
|
services rural heat: '#ff9c9c'
|
||||||
central heat: '#cc1f1f'
|
central heat: '#cc1f1f'
|
||||||
urban central heat: '#d15959'
|
urban central heat: '#d15959'
|
||||||
|
urban central heat vent: '#a74747'
|
||||||
decentral heat: '#750606'
|
decentral heat: '#750606'
|
||||||
residential urban decentral heat: '#a33c3c'
|
residential urban decentral heat: '#a33c3c'
|
||||||
services urban decentral heat: '#cc1f1f'
|
services urban decentral heat: '#cc1f1f'
|
||||||
@ -872,6 +920,7 @@ plotting:
|
|||||||
H2 for shipping: "#ebaee0"
|
H2 for shipping: "#ebaee0"
|
||||||
H2: '#bf13a0'
|
H2: '#bf13a0'
|
||||||
hydrogen: '#bf13a0'
|
hydrogen: '#bf13a0'
|
||||||
|
retrofitted H2 boiler: '#e5a0d9'
|
||||||
SMR: '#870c71'
|
SMR: '#870c71'
|
||||||
SMR CC: '#4f1745'
|
SMR CC: '#4f1745'
|
||||||
H2 liquefaction: '#d647bd'
|
H2 liquefaction: '#d647bd'
|
||||||
@ -942,3 +991,4 @@ plotting:
|
|||||||
DC: "#8a1caf"
|
DC: "#8a1caf"
|
||||||
DC-DC: "#8a1caf"
|
DC-DC: "#8a1caf"
|
||||||
DC link: "#8a1caf"
|
DC link: "#8a1caf"
|
||||||
|
load: "#dd2e23"
|
||||||
|
43
config/config.entsoe-all.yaml
Normal file
43
config/config.entsoe-all.yaml
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
# SPDX-FileCopyrightText: 2017-2023 The PyPSA-Eur Authors
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: CC0-1.0
|
||||||
|
|
||||||
|
run:
|
||||||
|
name: "entsoe-all"
|
||||||
|
disable_progressbar: true
|
||||||
|
shared_resources: false
|
||||||
|
shared_cutouts: true
|
||||||
|
|
||||||
|
scenario:
|
||||||
|
simpl:
|
||||||
|
- ''
|
||||||
|
ll:
|
||||||
|
- vopt
|
||||||
|
clusters:
|
||||||
|
- 39
|
||||||
|
- 128
|
||||||
|
- 256
|
||||||
|
opts:
|
||||||
|
- ''
|
||||||
|
sector_opts:
|
||||||
|
- ''
|
||||||
|
planning_horizons:
|
||||||
|
- ''
|
||||||
|
|
||||||
|
# TODO add Turkey (TR)
|
||||||
|
countries: ['AL', 'AT', 'BA', 'BE', 'BG', 'CH', 'CZ', 'DE', 'DK', 'EE', 'ES', 'FI', 'FR', 'GB', 'GR', 'HR', 'HU', 'IE', 'IT', 'LT', 'LU', 'LV', 'ME', 'MD', 'MK', 'NL', 'NO', 'PL', 'PT', 'RO', 'RS', 'SE', 'SI', 'SK', 'UA']
|
||||||
|
|
||||||
|
electricity:
|
||||||
|
custom_powerplants: true
|
||||||
|
co2limit: 9.59e+7
|
||||||
|
co2base: 1.918e+9
|
||||||
|
|
||||||
|
lines:
|
||||||
|
reconnect_crimea: true
|
||||||
|
|
||||||
|
enable:
|
||||||
|
retrieve: true
|
||||||
|
retrieve_databundle: true
|
||||||
|
retrieve_sector_databundle: false
|
||||||
|
retrieve_cost_data: true
|
||||||
|
retrieve_cutout: true
|
43
config/config.perfect.yaml
Normal file
43
config/config.perfect.yaml
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: CC0-1.0
|
||||||
|
run:
|
||||||
|
name: "perfect"
|
||||||
|
|
||||||
|
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#foresight
|
||||||
|
foresight: perfect
|
||||||
|
|
||||||
|
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#scenario
|
||||||
|
# Wildcard docs in https://pypsa-eur.readthedocs.io/en/latest/wildcards.html
|
||||||
|
scenario:
|
||||||
|
simpl:
|
||||||
|
- ''
|
||||||
|
ll:
|
||||||
|
- v1.0
|
||||||
|
clusters:
|
||||||
|
- 37
|
||||||
|
opts:
|
||||||
|
- ''
|
||||||
|
sector_opts:
|
||||||
|
- 1p5-4380H-T-H-B-I-A-solar+p3-dist1
|
||||||
|
- 1p7-4380H-T-H-B-I-A-solar+p3-dist1
|
||||||
|
- 2p0-4380H-T-H-B-I-A-solar+p3-dist1
|
||||||
|
planning_horizons:
|
||||||
|
- 2020
|
||||||
|
- 2030
|
||||||
|
- 2040
|
||||||
|
- 2050
|
||||||
|
|
||||||
|
|
||||||
|
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#co2-budget
|
||||||
|
co2_budget:
|
||||||
|
# update of IPCC 6th AR compared to the 1.5SR. (discussed here: https://twitter.com/JoeriRogelj/status/1424743828339167233)
|
||||||
|
1p5: 34.2 # 25.7 # Budget in Gt CO2 for 1.5 for Europe, global 420 Gt, assuming per capita share
|
||||||
|
1p6: 43.259666 # 35 # Budget in Gt CO2 for 1.6 for Europe, global 580 Gt
|
||||||
|
1p7: 51.4 # 45 # Budget in Gt CO2 for 1.7 for Europe, global 800 Gt
|
||||||
|
2p0: 69.778 # 73.9 # Budget in Gt CO2 for 2 for Europe, global 1170 Gt
|
||||||
|
|
||||||
|
|
||||||
|
sector:
|
||||||
|
min_part_load_fischer_tropsch: 0
|
||||||
|
min_part_load_methanolisation: 0
|
98
config/config.validation.yaml
Normal file
98
config/config.validation.yaml
Normal file
@ -0,0 +1,98 @@
|
|||||||
|
# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: CC0-1.0
|
||||||
|
run:
|
||||||
|
name: "validation"
|
||||||
|
|
||||||
|
scenario:
|
||||||
|
ll:
|
||||||
|
- v1.0
|
||||||
|
clusters:
|
||||||
|
- 37
|
||||||
|
opts:
|
||||||
|
- 'Ept'
|
||||||
|
|
||||||
|
snapshots:
|
||||||
|
start: "2019-01-01"
|
||||||
|
end: "2020-01-01"
|
||||||
|
inclusive: 'left'
|
||||||
|
|
||||||
|
enable:
|
||||||
|
retrieve_cutout: false
|
||||||
|
|
||||||
|
electricity:
|
||||||
|
co2limit: 1e9
|
||||||
|
|
||||||
|
extendable_carriers:
|
||||||
|
Generator: []
|
||||||
|
StorageUnit: []
|
||||||
|
Store: []
|
||||||
|
Link: []
|
||||||
|
|
||||||
|
powerplants_filter: not (DateOut < 2019)
|
||||||
|
|
||||||
|
conventional_carriers: [nuclear, oil, OCGT, CCGT, coal, lignite, geothermal, biomass]
|
||||||
|
renewable_carriers: [solar, onwind, offwind-ac, offwind-dc, hydro]
|
||||||
|
|
||||||
|
estimate_renewable_capacities:
|
||||||
|
year: 2019
|
||||||
|
|
||||||
|
atlite:
|
||||||
|
default_cutout: europe-2019-era5
|
||||||
|
cutouts:
|
||||||
|
europe-2019-era5:
|
||||||
|
module: era5
|
||||||
|
x: [-12., 35.]
|
||||||
|
y: [33., 72]
|
||||||
|
dx: 0.3
|
||||||
|
dy: 0.3
|
||||||
|
time: ['2019', '2019']
|
||||||
|
|
||||||
|
renewable:
|
||||||
|
onwind:
|
||||||
|
cutout: europe-2019-era5
|
||||||
|
offwind-ac:
|
||||||
|
cutout: europe-2019-era5
|
||||||
|
offwind-dc:
|
||||||
|
cutout: europe-2019-era5
|
||||||
|
solar:
|
||||||
|
cutout: europe-2019-era5
|
||||||
|
hydro:
|
||||||
|
cutout: europe-2019-era5
|
||||||
|
flatten_dispatch: 0.01
|
||||||
|
|
||||||
|
conventional:
|
||||||
|
unit_commitment: false
|
||||||
|
dynamic_fuel_price: true
|
||||||
|
nuclear:
|
||||||
|
p_max_pu: "data/nuclear_p_max_pu.csv"
|
||||||
|
biomass:
|
||||||
|
p_max_pu: 0.65
|
||||||
|
|
||||||
|
load:
|
||||||
|
power_statistics: false
|
||||||
|
|
||||||
|
lines:
|
||||||
|
s_max_pu: 0.23
|
||||||
|
under_construction: 'remove'
|
||||||
|
|
||||||
|
links:
|
||||||
|
include_tyndp: false
|
||||||
|
|
||||||
|
costs:
|
||||||
|
year: 2020
|
||||||
|
emission_prices:
|
||||||
|
co2: 25
|
||||||
|
|
||||||
|
clustering:
|
||||||
|
simplify_network:
|
||||||
|
exclude_carriers: [oil, coal, lignite, OCGT, CCGT]
|
||||||
|
cluster_network:
|
||||||
|
consider_efficiency_classes: true
|
||||||
|
|
||||||
|
solving:
|
||||||
|
options:
|
||||||
|
load_shedding: true
|
||||||
|
rolling_horizon: false
|
||||||
|
horizon: 1000
|
||||||
|
overlap: 48
|
@ -60,6 +60,12 @@ renewable:
|
|||||||
clustering:
|
clustering:
|
||||||
exclude_carriers: ["OCGT", "offwind-ac", "coal"]
|
exclude_carriers: ["OCGT", "offwind-ac", "coal"]
|
||||||
|
|
||||||
|
lines:
|
||||||
|
dynamic_line_rating:
|
||||||
|
activate: true
|
||||||
|
cutout: be-03-2013-era5
|
||||||
|
max_line_rating: 1.3
|
||||||
|
|
||||||
|
|
||||||
solving:
|
solving:
|
||||||
solver:
|
solver:
|
||||||
|
@ -30,6 +30,9 @@ snapshots:
|
|||||||
start: "2013-03-01"
|
start: "2013-03-01"
|
||||||
end: "2013-03-08"
|
end: "2013-03-08"
|
||||||
|
|
||||||
|
sector:
|
||||||
|
central_heat_vent: true
|
||||||
|
|
||||||
electricity:
|
electricity:
|
||||||
co2limit: 100.e+6
|
co2limit: 100.e+6
|
||||||
|
|
||||||
|
92
config/test/config.perfect.yaml
Normal file
92
config/test/config.perfect.yaml
Normal file
@ -0,0 +1,92 @@
|
|||||||
|
# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: CC0-1.0
|
||||||
|
|
||||||
|
tutorial: true
|
||||||
|
|
||||||
|
run:
|
||||||
|
name: "test-sector-perfect"
|
||||||
|
disable_progressbar: true
|
||||||
|
shared_resources: true
|
||||||
|
shared_cutouts: true
|
||||||
|
|
||||||
|
foresight: perfect
|
||||||
|
|
||||||
|
scenario:
|
||||||
|
ll:
|
||||||
|
- v1.0
|
||||||
|
clusters:
|
||||||
|
- 5
|
||||||
|
sector_opts:
|
||||||
|
- 8760H-T-H-B-I-A-solar+p3-dist1
|
||||||
|
planning_horizons:
|
||||||
|
- 2030
|
||||||
|
- 2040
|
||||||
|
- 2050
|
||||||
|
|
||||||
|
countries: ['BE']
|
||||||
|
|
||||||
|
snapshots:
|
||||||
|
start: "2013-03-01"
|
||||||
|
end: "2013-03-08"
|
||||||
|
|
||||||
|
electricity:
|
||||||
|
co2limit: 100.e+6
|
||||||
|
|
||||||
|
extendable_carriers:
|
||||||
|
Generator: [OCGT]
|
||||||
|
StorageUnit: [battery]
|
||||||
|
Store: [H2]
|
||||||
|
Link: [H2 pipeline]
|
||||||
|
|
||||||
|
renewable_carriers: [solar, onwind, offwind-ac, offwind-dc]
|
||||||
|
|
||||||
|
sector:
|
||||||
|
min_part_load_fischer_tropsch: 0
|
||||||
|
min_part_load_methanolisation: 0
|
||||||
|
|
||||||
|
atlite:
|
||||||
|
default_cutout: be-03-2013-era5
|
||||||
|
cutouts:
|
||||||
|
be-03-2013-era5:
|
||||||
|
module: era5
|
||||||
|
x: [4., 15.]
|
||||||
|
y: [46., 56.]
|
||||||
|
time: ["2013-03-01", "2013-03-08"]
|
||||||
|
|
||||||
|
renewable:
|
||||||
|
onwind:
|
||||||
|
cutout: be-03-2013-era5
|
||||||
|
offwind-ac:
|
||||||
|
cutout: be-03-2013-era5
|
||||||
|
max_depth: false
|
||||||
|
offwind-dc:
|
||||||
|
cutout: be-03-2013-era5
|
||||||
|
max_depth: false
|
||||||
|
solar:
|
||||||
|
cutout: be-03-2013-era5
|
||||||
|
|
||||||
|
industry:
|
||||||
|
St_primary_fraction:
|
||||||
|
2020: 0.8
|
||||||
|
2030: 0.6
|
||||||
|
2040: 0.5
|
||||||
|
2050: 0.4
|
||||||
|
|
||||||
|
solving:
|
||||||
|
solver:
|
||||||
|
name: glpk
|
||||||
|
options: glpk-default
|
||||||
|
mem: 4000
|
||||||
|
|
||||||
|
plotting:
|
||||||
|
map:
|
||||||
|
boundaries:
|
||||||
|
eu_node_location:
|
||||||
|
x: -5.5
|
||||||
|
y: 46.
|
||||||
|
costs_max: 1000
|
||||||
|
costs_threshold: 0.0000001
|
||||||
|
energy_max:
|
||||||
|
energy_min:
|
||||||
|
energy_threshold: 0.000001
|
151
data/GDP_PPP_30arcsec_v3_mapped_default.csv
Normal file
151
data/GDP_PPP_30arcsec_v3_mapped_default.csv
Normal file
@ -0,0 +1,151 @@
|
|||||||
|
name,GDP_PPP,country
|
||||||
|
3140,632728.0438507323,MD
|
||||||
|
3139,806541.9318093687,MD
|
||||||
|
3142,1392454.6690911907,MD
|
||||||
|
3152,897871.2903553953,MD
|
||||||
|
3246,645554.8588933202,MD
|
||||||
|
7049,1150156.4449477682,MD
|
||||||
|
1924,162285.16792916053,UA
|
||||||
|
1970,751970.6071848695,UA
|
||||||
|
2974,368873.75840156944,UA
|
||||||
|
2977,294847.85539198935,UA
|
||||||
|
2979,197988.13680768458,UA
|
||||||
|
2980,301371.2491126519,UA
|
||||||
|
3031,56925.21878805953,UA
|
||||||
|
3032,139395.18279351242,UA
|
||||||
|
3033,145377.8061037629,UA
|
||||||
|
3035,52282.83655208812,UA
|
||||||
|
3036,497950.25890516065,UA
|
||||||
|
3037,1183293.1987702171,UA
|
||||||
|
3038,255005.98207636533,UA
|
||||||
|
3039,224711.50098325178,UA
|
||||||
|
3040,342959.943226467,UA
|
||||||
|
3044,69119.31486955672,UA
|
||||||
|
3045,246273.65986119965,UA
|
||||||
|
3047,146742.08407299497,UA
|
||||||
|
3049,107265.7028733467,UA
|
||||||
|
3050,1126147.985259493,UA
|
||||||
|
3051,69833.56303043803,UA
|
||||||
|
3052,67230.88206577855,UA
|
||||||
|
3053,27019.224685201345,UA
|
||||||
|
3054,260571.47337292184,UA
|
||||||
|
3055,88760.94152915622,UA
|
||||||
|
3056,101368.26196568517,UA
|
||||||
|
3058,55752.92329667119,UA
|
||||||
|
3059,89024.37880630122,UA
|
||||||
|
3062,358411.291265149,UA
|
||||||
|
3064,75081.64142862396,UA
|
||||||
|
3065,158101.42949135564,UA
|
||||||
|
3066,83763.89576442329,UA
|
||||||
|
3068,173474.51218344545,UA
|
||||||
|
3069,60327.01572375589,UA
|
||||||
|
3070,18073.687271955278,UA
|
||||||
|
3071,249069.43314695224,UA
|
||||||
|
3072,220707.35700825177,UA
|
||||||
|
3073,61342.30137462664,UA
|
||||||
|
3074,254235.98867635374,UA
|
||||||
|
3077,769558.9832370486,UA
|
||||||
|
3078,132674.2315809836,UA
|
||||||
|
3079,1388517.1478032232,UA
|
||||||
|
3080,1861003.8718246964,UA
|
||||||
|
3082,140123.73854745473,UA
|
||||||
|
3083,834887.5595419679,UA
|
||||||
|
3084,1910795.5590558557,UA
|
||||||
|
3086,93828.36549170096,UA
|
||||||
|
3088,347197.65113392205,UA
|
||||||
|
3089,3754718.141734592,UA
|
||||||
|
3090,521912.69768585655,UA
|
||||||
|
3093,232818.05269714879,UA
|
||||||
|
3095,435376.20361377904,UA
|
||||||
|
3099,345596.5288937008,UA
|
||||||
|
3100,175689.10947424968,UA
|
||||||
|
3105,538438.9311459162,UA
|
||||||
|
3107,88096.86032871014,UA
|
||||||
|
3108,79847.68447063807,UA
|
||||||
|
3109,348504.73449373,UA
|
||||||
|
3144,71657.0165675802,UA
|
||||||
|
3146,80342.05037424155,UA
|
||||||
|
3158,74465.12922576343,UA
|
||||||
|
3164,3102112.2672631275,UA
|
||||||
|
3165,65215.04081671433,UA
|
||||||
|
3166,413924.2225725632,UA
|
||||||
|
3167,135060.0056434935,UA
|
||||||
|
3168,54980.442979330146,UA
|
||||||
|
3170,29584.879122227037,UA
|
||||||
|
3171,142780.68163047134,UA
|
||||||
|
3172,40436.63814695243,UA
|
||||||
|
3173,1253342.1790126422,UA
|
||||||
|
3174,173842.03139155387,UA
|
||||||
|
3176,65699.76352408895,UA
|
||||||
|
3177,143591.75419817626,UA
|
||||||
|
3178,56434.04525832523,UA
|
||||||
|
3179,389996.1670051216,UA
|
||||||
|
3180,138452.84503524794,UA
|
||||||
|
3181,67402.59500436619,UA
|
||||||
|
3184,51204.293695376415,UA
|
||||||
|
3185,46867.82356528432,UA
|
||||||
|
3186,103892.35612417295,UA
|
||||||
|
3187,193668.91476930346,UA
|
||||||
|
3189,54584.176457692694,UA
|
||||||
|
3190,219077.64942830536,UA
|
||||||
|
3197,88516.52699983507,UA
|
||||||
|
3198,298166.8272673622,UA
|
||||||
|
3199,61334.952541812374,UA
|
||||||
|
3229,175692.61136747137,UA
|
||||||
|
3230,106722.62773321665,UA
|
||||||
|
3236,61542.06264321315,UA
|
||||||
|
3241,83752.90489164277,UA
|
||||||
|
4301,48419.52825967164,UA
|
||||||
|
4305,147759.74280349456,UA
|
||||||
|
4306,53156.905740992224,UA
|
||||||
|
4315,218025.78516351627,UA
|
||||||
|
4317,155240.40554731718,UA
|
||||||
|
4318,1342144.2459407183,UA
|
||||||
|
4319,91669.1449633853,UA
|
||||||
|
4321,85852.49282415409,UA
|
||||||
|
4347,67938.7698430624,UA
|
||||||
|
4357,20064.979012172935,UA
|
||||||
|
4360,47840.51245168512,UA
|
||||||
|
4361,55580.924388032574,UA
|
||||||
|
4362,165753.82588729708,UA
|
||||||
|
4363,46390.2448142152,UA
|
||||||
|
4365,96265.47592938849,UA
|
||||||
|
4366,272003.25510057947,UA
|
||||||
|
4367,80878.50229245829,UA
|
||||||
|
4370,330072.35444044066,UA
|
||||||
|
4371,7707066.181975477,UA
|
||||||
|
4373,2019766.7891575783,UA
|
||||||
|
4374,985354.331818515,UA
|
||||||
|
4377,230805.08833664874,UA
|
||||||
|
4382,125670.67125287943,UA
|
||||||
|
4383,46914.065511740075,UA
|
||||||
|
4384,48020.804310510954,UA
|
||||||
|
4385,55612.34707641123,UA
|
||||||
|
4387,74558.3475791577,UA
|
||||||
|
4388,245243.33449409154,UA
|
||||||
|
4389,95696.56767732685,UA
|
||||||
|
4391,251085.7523045193,UA
|
||||||
|
4401,66375.82996856027,UA
|
||||||
|
4403,111954.41038437477,UA
|
||||||
|
4405,46911.68560148837,UA
|
||||||
|
4408,150782.51691456966,UA
|
||||||
|
4409,112776.7399582134,UA
|
||||||
|
4410,153076.56860965435,UA
|
||||||
|
4412,192629.31238456024,UA
|
||||||
|
4413,181295.3120834606,UA
|
||||||
|
4414,995694.9413199169,UA
|
||||||
|
4416,157640.7868989174,UA
|
||||||
|
4418,77580.20674809469,UA
|
||||||
|
4420,122320.99275223716,UA
|
||||||
|
4424,184891.10924920067,UA
|
||||||
|
4425,84486.75974340564,UA
|
||||||
|
4431,50485.84380961137,UA
|
||||||
|
4435,231040.45446464577,UA
|
||||||
|
4436,81222.18707585508,UA
|
||||||
|
4438,114819.76472988473,UA
|
||||||
|
4439,76839.1052178896,UA
|
||||||
|
4440,135337.0313562152,UA
|
||||||
|
4441,49159.485269198034,UA
|
||||||
|
7031,42001.73757065917,UA
|
||||||
|
7059,159790.48382874,UA
|
||||||
|
7063,39599.10564971086,UA
|
|
195
data/costs.csv
195
data/costs.csv
@ -1,195 +0,0 @@
|
|||||||
technology,year,parameter,value,unit,source
|
|
||||||
solar-rooftop,2030,discount rate,0.04,per unit,standard for decentral
|
|
||||||
onwind,2030,lifetime,30,years,DEA https://ens.dk/en/our-services/projections-and-models/technology-data
|
|
||||||
offwind,2030,lifetime,30,years,DEA https://ens.dk/en/our-services/projections-and-models/technology-data
|
|
||||||
solar,2030,lifetime,25,years,IEA2010
|
|
||||||
solar-rooftop,2030,lifetime,25,years,IEA2010
|
|
||||||
solar-utility,2030,lifetime,25,years,IEA2010
|
|
||||||
PHS,2030,lifetime,80,years,IEA2010
|
|
||||||
hydro,2030,lifetime,80,years,IEA2010
|
|
||||||
ror,2030,lifetime,80,years,IEA2010
|
|
||||||
OCGT,2030,lifetime,30,years,IEA2010
|
|
||||||
nuclear,2030,lifetime,45,years,ECF2010 in DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
CCGT,2030,lifetime,30,years,IEA2010
|
|
||||||
coal,2030,lifetime,40,years,IEA2010
|
|
||||||
lignite,2030,lifetime,40,years,IEA2010
|
|
||||||
geothermal,2030,lifetime,40,years,IEA2010
|
|
||||||
biomass,2030,lifetime,30,years,ECF2010 in DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
oil,2030,lifetime,30,years,ECF2010 in DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
onwind,2030,investment,1040,EUR/kWel,DEA https://ens.dk/en/our-services/projections-and-models/technology-data
|
|
||||||
offwind,2030,investment,1640,EUR/kWel,DEA https://ens.dk/en/our-services/projections-and-models/technology-data
|
|
||||||
offwind-ac-station,2030,investment,250,EUR/kWel,DEA https://ens.dk/en/our-services/projections-and-models/technology-data
|
|
||||||
offwind-ac-connection-submarine,2030,investment,2685,EUR/MW/km,DEA https://ens.dk/en/our-services/projections-and-models/technology-data
|
|
||||||
offwind-ac-connection-underground,2030,investment,1342,EUR/MW/km,DEA https://ens.dk/en/our-services/projections-and-models/technology-data
|
|
||||||
offwind-dc-station,2030,investment,400,EUR/kWel,Haertel 2017; assuming one onshore and one offshore node + 13% learning reduction
|
|
||||||
offwind-dc-connection-submarine,2030,investment,2000,EUR/MW/km,DTU report based on Fig 34 of https://ec.europa.eu/energy/sites/ener/files/documents/2014_nsog_report.pdf
|
|
||||||
offwind-dc-connection-underground,2030,investment,1000,EUR/MW/km,Haertel 2017; average + 13% learning reduction
|
|
||||||
solar,2030,investment,600,EUR/kWel,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
biomass,2030,investment,2209,EUR/kWel,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
geothermal,2030,investment,3392,EUR/kWel,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
coal,2030,investment,1300,EUR/kWel,DIW DataDoc http://hdl.handle.net/10419/80348 PC (Advanced/SuperC)
|
|
||||||
lignite,2030,investment,1500,EUR/kWel,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
solar-rooftop,2030,investment,725,EUR/kWel,ETIP PV
|
|
||||||
solar-utility,2030,investment,425,EUR/kWel,ETIP PV
|
|
||||||
PHS,2030,investment,2000,EUR/kWel,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
hydro,2030,investment,2000,EUR/kWel,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
ror,2030,investment,3000,EUR/kWel,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
OCGT,2030,investment,400,EUR/kWel,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
nuclear,2030,investment,6000,EUR/kWel,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
CCGT,2030,investment,800,EUR/kWel,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
oil,2030,investment,400,EUR/kWel,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
onwind,2030,FOM,2.450549,%/year,DEA https://ens.dk/en/our-services/projections-and-models/technology-data
|
|
||||||
offwind,2030,FOM,2.304878,%/year,DEA https://ens.dk/en/our-services/projections-and-models/technology-data
|
|
||||||
solar,2030,FOM,4.166667,%/year,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
solar-rooftop,2030,FOM,2,%/year,ETIP PV
|
|
||||||
solar-utility,2030,FOM,3,%/year,ETIP PV
|
|
||||||
biomass,2030,FOM,4.526935,%/year,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
geothermal,2030,FOM,2.358491,%/year,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
coal,2030,FOM,1.923076,%/year,DIW DataDoc http://hdl.handle.net/10419/80348 PC (Advanced/SuperC)
|
|
||||||
lignite,2030,FOM,2.0,%/year,DIW DataDoc http://hdl.handle.net/10419/80348 PC (Advanced/SuperC)
|
|
||||||
oil,2030,FOM,1.5,%/year,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
PHS,2030,FOM,1,%/year,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
hydro,2030,FOM,1,%/year,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
ror,2030,FOM,2,%/year,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
CCGT,2030,FOM,2.5,%/year,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
OCGT,2030,FOM,3.75,%/year,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
onwind,2030,VOM,2.3,EUR/MWhel,DEA https://ens.dk/en/our-services/projections-and-models/technology-data
|
|
||||||
offwind,2030,VOM,2.7,EUR/MWhel,DEA https://ens.dk/en/our-services/projections-and-models/technology-data
|
|
||||||
solar,2030,VOM,0.01,EUR/MWhel,RES costs made up to fix curtailment order
|
|
||||||
coal,2030,VOM,6,EUR/MWhel,DIW DataDoc http://hdl.handle.net/10419/80348 PC (Advanced/SuperC)
|
|
||||||
lignite,2030,VOM,7,EUR/MWhel,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
CCGT,2030,VOM,4,EUR/MWhel,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
OCGT,2030,VOM,3,EUR/MWhel,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
nuclear,2030,VOM,8,EUR/MWhel,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
gas,2030,fuel,21.6,EUR/MWhth,IEA2011b
|
|
||||||
uranium,2030,fuel,3,EUR/MWhth,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
oil,2030,VOM,3,EUR/MWhel,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
nuclear,2030,fuel,3,EUR/MWhth,IEA2011b
|
|
||||||
biomass,2030,fuel,7,EUR/MWhth,IEA2011b
|
|
||||||
coal,2030,fuel,8.4,EUR/MWhth,IEA2011b
|
|
||||||
lignite,2030,fuel,2.9,EUR/MWhth,IEA2011b
|
|
||||||
oil,2030,fuel,50,EUR/MWhth,IEA WEM2017 97USD/boe = http://www.iea.org/media/weowebsite/2017/WEM_Documentation_WEO2017.pdf
|
|
||||||
PHS,2030,efficiency,0.75,per unit,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
hydro,2030,efficiency,0.9,per unit,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
ror,2030,efficiency,0.9,per unit,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
OCGT,2030,efficiency,0.39,per unit,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
CCGT,2030,efficiency,0.5,per unit,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
biomass,2030,efficiency,0.468,per unit,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
geothermal,2030,efficiency,0.239,per unit,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
nuclear,2030,efficiency,0.337,per unit,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
gas,2030,CO2 intensity,0.187,tCO2/MWth,https://www.eia.gov/environment/emissions/co2_vol_mass.php
|
|
||||||
coal,2030,efficiency,0.464,per unit,DIW DataDoc http://hdl.handle.net/10419/80348 PC (Advanced/SuperC)
|
|
||||||
lignite,2030,efficiency,0.447,per unit,DIW DataDoc http://hdl.handle.net/10419/80348
|
|
||||||
oil,2030,efficiency,0.393,per unit,DIW DataDoc http://hdl.handle.net/10419/80348 CT
|
|
||||||
coal,2030,CO2 intensity,0.354,tCO2/MWth,https://www.eia.gov/environment/emissions/co2_vol_mass.php
|
|
||||||
lignite,2030,CO2 intensity,0.334,tCO2/MWth,https://www.eia.gov/environment/emissions/co2_vol_mass.php
|
|
||||||
oil,2030,CO2 intensity,0.248,tCO2/MWth,https://www.eia.gov/environment/emissions/co2_vol_mass.php
|
|
||||||
geothermal,2030,CO2 intensity,0.026,tCO2/MWth,https://www.eia.gov/environment/emissions/co2_vol_mass.php
|
|
||||||
electrolysis,2030,investment,350,EUR/kWel,Palzer Thesis
|
|
||||||
electrolysis,2030,FOM,4,%/year,NREL http://www.nrel.gov/docs/fy09osti/45873.pdf; budischak2013
|
|
||||||
electrolysis,2030,lifetime,18,years,NREL http://www.nrel.gov/docs/fy09osti/45873.pdf; budischak2013
|
|
||||||
electrolysis,2030,efficiency,0.8,per unit,NREL http://www.nrel.gov/docs/fy09osti/45873.pdf; budischak2013
|
|
||||||
fuel cell,2030,investment,339,EUR/kWel,NREL http://www.nrel.gov/docs/fy09osti/45873.pdf; budischak2013
|
|
||||||
fuel cell,2030,FOM,3,%/year,NREL http://www.nrel.gov/docs/fy09osti/45873.pdf; budischak2013
|
|
||||||
fuel cell,2030,lifetime,20,years,NREL http://www.nrel.gov/docs/fy09osti/45873.pdf; budischak2013
|
|
||||||
fuel cell,2030,efficiency,0.58,per unit,NREL http://www.nrel.gov/docs/fy09osti/45873.pdf; budischak2013 conservative 2020
hydrogen storage,2030,investment,11.2,USD/kWh,budischak2013
hydrogen storage,2030,lifetime,20,years,budischak2013
hydrogen underground storage,2030,investment,0.5,EUR/kWh,maximum from https://www.nrel.gov/docs/fy10osti/46719.pdf
hydrogen underground storage,2030,lifetime,40,years,http://www.acatech.de/fileadmin/user_upload/Baumstruktur_nach_Website/Acatech/root/de/Publikationen/Materialien/ESYS_Technologiesteckbrief_Energiespeicher.pdf
H2 pipeline,2030,investment,267,EUR/MW/km,Welder et al https://doi.org/10.1016/j.ijhydene.2018.12.156
H2 pipeline,2030,lifetime,40,years,Krieg2012 http://juser.fz-juelich.de/record/136392/files/Energie%26Umwelt_144.pdf
H2 pipeline,2030,FOM,5,%/year,Krieg2012 http://juser.fz-juelich.de/record/136392/files/Energie%26Umwelt_144.pdf
H2 pipeline,2030,efficiency,0.98,per unit,Krieg2012 http://juser.fz-juelich.de/record/136392/files/Energie%26Umwelt_144.pdf
methanation,2030,investment,1000,EUR/kWH2,Schaber thesis
methanation,2030,lifetime,25,years,Schaber thesis
methanation,2030,FOM,3,%/year,Schaber thesis
methanation,2030,efficiency,0.6,per unit,Palzer; Breyer for DAC
helmeth,2030,investment,1000,EUR/kW,no source
helmeth,2030,lifetime,25,years,no source
helmeth,2030,FOM,3,%/year,no source
helmeth,2030,efficiency,0.8,per unit,HELMETH press release
DAC,2030,investment,250,EUR/(tCO2/a),Fasihi/Climeworks
DAC,2030,lifetime,30,years,Fasihi
DAC,2030,FOM,4,%/year,Fasihi
battery inverter,2030,investment,411,USD/kWel,budischak2013
battery inverter,2030,lifetime,20,years,budischak2013
battery inverter,2030,efficiency,0.9,per unit charge/discharge,budischak2013; Lund and Kempton (2008) http://dx.doi.org/10.1016/j.enpol.2008.06.007
battery inverter,2030,FOM,3,%/year,budischak2013
battery storage,2030,investment,192,USD/kWh,budischak2013
battery storage,2030,lifetime,15,years,budischak2013
decentral air-sourced heat pump,2030,investment,1050,EUR/kWth,HP; Palzer thesis
decentral air-sourced heat pump,2030,lifetime,20,years,HP; Palzer thesis
decentral air-sourced heat pump,2030,FOM,3.5,%/year,Palzer thesis
decentral air-sourced heat pump,2030,efficiency,3,per unit,default for costs
decentral air-sourced heat pump,2030,discount rate,0.04,per unit,Palzer thesis
decentral ground-sourced heat pump,2030,investment,1400,EUR/kWth,Palzer thesis
decentral ground-sourced heat pump,2030,lifetime,20,years,Palzer thesis
decentral ground-sourced heat pump,2030,FOM,3.5,%/year,Palzer thesis
decentral ground-sourced heat pump,2030,efficiency,4,per unit,default for costs
decentral ground-sourced heat pump,2030,discount rate,0.04,per unit,Palzer thesis
central air-sourced heat pump,2030,investment,700,EUR/kWth,Palzer thesis
central air-sourced heat pump,2030,lifetime,20,years,Palzer thesis
central air-sourced heat pump,2030,FOM,3.5,%/year,Palzer thesis
central air-sourced heat pump,2030,efficiency,3,per unit,default for costs
retrofitting I,2030,discount rate,0.04,per unit,Palzer thesis
retrofitting I,2030,lifetime,50,years,Palzer thesis
retrofitting I,2030,FOM,1,%/year,Palzer thesis
retrofitting I,2030,investment,50,EUR/m2/fraction reduction,Palzer thesis
retrofitting II,2030,discount rate,0.04,per unit,Palzer thesis
retrofitting II,2030,lifetime,50,years,Palzer thesis
retrofitting II,2030,FOM,1,%/year,Palzer thesis
retrofitting II,2030,investment,250,EUR/m2/fraction reduction,Palzer thesis
water tank charger,2030,efficiency,0.9,per unit,HP
water tank discharger,2030,efficiency,0.9,per unit,HP
decentral water tank storage,2030,investment,860,EUR/m3,IWES Interaktion
decentral water tank storage,2030,FOM,1,%/year,HP
decentral water tank storage,2030,lifetime,20,years,HP
decentral water tank storage,2030,discount rate,0.04,per unit,Palzer thesis
central water tank storage,2030,investment,30,EUR/m3,IWES Interaktion
central water tank storage,2030,FOM,1,%/year,HP
central water tank storage,2030,lifetime,40,years,HP
decentral resistive heater,2030,investment,100,EUR/kWhth,Schaber thesis
decentral resistive heater,2030,lifetime,20,years,Schaber thesis
decentral resistive heater,2030,FOM,2,%/year,Schaber thesis
decentral resistive heater,2030,efficiency,0.9,per unit,Schaber thesis
decentral resistive heater,2030,discount rate,0.04,per unit,Palzer thesis
central resistive heater,2030,investment,100,EUR/kWhth,Schaber thesis
central resistive heater,2030,lifetime,20,years,Schaber thesis
central resistive heater,2030,FOM,2,%/year,Schaber thesis
central resistive heater,2030,efficiency,0.9,per unit,Schaber thesis
decentral gas boiler,2030,investment,175,EUR/kWhth,Palzer thesis
decentral gas boiler,2030,lifetime,20,years,Palzer thesis
decentral gas boiler,2030,FOM,2,%/year,Palzer thesis
decentral gas boiler,2030,efficiency,0.9,per unit,Palzer thesis
decentral gas boiler,2030,discount rate,0.04,per unit,Palzer thesis
central gas boiler,2030,investment,63,EUR/kWhth,Palzer thesis
central gas boiler,2030,lifetime,22,years,Palzer thesis
central gas boiler,2030,FOM,1,%/year,Palzer thesis
central gas boiler,2030,efficiency,0.9,per unit,Palzer thesis
decentral CHP,2030,lifetime,25,years,HP
decentral CHP,2030,investment,1400,EUR/kWel,HP
decentral CHP,2030,FOM,3,%/year,HP
decentral CHP,2030,discount rate,0.04,per unit,Palzer thesis
central CHP,2030,lifetime,25,years,HP
central CHP,2030,investment,650,EUR/kWel,HP
central CHP,2030,FOM,3,%/year,HP
decentral solar thermal,2030,discount rate,0.04,per unit,Palzer thesis
decentral solar thermal,2030,FOM,1.3,%/year,HP
decentral solar thermal,2030,investment,270000,EUR/1000m2,HP
decentral solar thermal,2030,lifetime,20,years,HP
central solar thermal,2030,FOM,1.4,%/year,HP
central solar thermal,2030,investment,140000,EUR/1000m2,HP
central solar thermal,2030,lifetime,20,years,HP
HVAC overhead,2030,investment,400,EUR/MW/km,Hagspiel
HVAC overhead,2030,lifetime,40,years,Hagspiel
HVAC overhead,2030,FOM,2,%/year,Hagspiel
HVDC overhead,2030,investment,400,EUR/MW/km,Hagspiel
HVDC overhead,2030,lifetime,40,years,Hagspiel
HVDC overhead,2030,FOM,2,%/year,Hagspiel
HVDC submarine,2030,investment,2000,EUR/MW/km,DTU report based on Fig 34 of https://ec.europa.eu/energy/sites/ener/files/documents/2014_nsog_report.pdf
HVDC submarine,2030,lifetime,40,years,Hagspiel
HVDC submarine,2030,FOM,2,%/year,Hagspiel
HVDC inverter pair,2030,investment,150000,EUR/MW,Hagspiel
HVDC inverter pair,2030,lifetime,40,years,Hagspiel
HVDC inverter pair,2030,FOM,2,%/year,Hagspiel
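The rows above follow the pattern technology,year,parameter,value,unit,source. Investment, FOM, lifetime and, where given, a technology-specific discount rate are typically combined into a fixed annual cost via the standard annuity formula; a minimal Python sketch (function names are illustrative, not part of this repository):

```python
def annuity(lifetime_years, discount_rate):
    """Annuity factor that spreads an overnight investment over its lifetime."""
    if discount_rate == 0:
        return 1 / lifetime_years
    return discount_rate / (1 - (1 + discount_rate) ** -lifetime_years)


def annualised_cost(investment, fom_percent, lifetime_years, discount_rate):
    """Fixed annual cost per unit of capacity: annuitised investment plus FOM."""
    return investment * (annuity(lifetime_years, discount_rate) + fom_percent / 100)


# Example with the decentral gas boiler row above:
# 175 EUR/kWhth investment, 2 %/year FOM, 20 years, 4 % discount rate.
print(round(annualised_cost(175, 2, 20, 0.04), 2))  # 16.38 EUR per kW_th per year
```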
@ -1 +1,37 @@
Name,Fueltype,Technology,Set,Country,Capacity,Efficiency,Duration,Volume_Mm3,DamHeight_m,YearCommissioned,Retrofit,lat,lon,projectID,YearDecommissioning
,Name,Fueltype,Technology,Set,Country,Capacity,Efficiency,Duration,Volume_Mm3,DamHeight_m,StorageCapacity_MWh,DateIn,DateRetrofit,DateMothball,DateOut,lat,lon,EIC,projectID
1266,Khmelnitskiy,Nuclear,,PP,UA,1901.8916595755832,,0.0,0.0,0.0,0.0,1988.0,2005.0,,,50.3023,26.6466,[nan],"{'GEO': ['GEO3842'], 'GPD': ['WRI1005111'], 'CARMA': ['CARMA22000']}"
1268,Kaniv,Hydro,Reservoir,PP,UA,452.1656050955414,,0.0,0.0,0.0,0.0,1972.0,2003.0,,,49.76653,31.47165,[nan],"{'GEO': ['GEO43017'], 'GPD': ['WRI1005122'], 'CARMA': ['CARMA21140']}"
1269,Kahovska kakhovka,Hydro,Reservoir,PP,UA,352.45222929936307,,0.0,0.0,0.0,0.0,1955.0,1956.0,,,46.77858,33.36965,[nan],"{'GEO': ['GEO43018'], 'GPD': ['WRI1005118'], 'CARMA': ['CARMA20855']}"
1347,Kharkiv,Natural Gas,Steam Turbine,CHP,UA,494.94274967602314,,0.0,0.0,0.0,0.0,1979.0,1980.0,,,49.9719,36107,[nan],"{'GEO': ['GEO43027'], 'GPD': ['WRI1005126'], 'CARMA': ['CARMA21972']}"
1348,Kremenchuk,Hydro,Reservoir,PP,UA,617.0382165605096,,0.0,0.0,0.0,0.0,1959.0,1960.0,,,49.07759,33.2505,[nan],"{'GEO': ['GEO43019'], 'GPD': ['WRI1005121'], 'CARMA': ['CARMA23072']}"
1377,Krivorozhskaya,Hard Coal,Steam Turbine,PP,UA,2600.0164509342876,,0.0,0.0,0.0,0.0,1965.0,1992.0,,,47.5432,33.6583,[nan],"{'GEO': ['GEO42989'], 'GPD': ['WRI1005100'], 'CARMA': ['CARMA23176']}"
1407,Zmiyevskaya zmiivskaya,Hard Coal,Steam Turbine,PP,UA,2028.3816283884514,,0.0,0.0,0.0,0.0,1960.0,2005.0,,,49.5852,36.5231,[nan],"{'GEO': ['GEO42999'], 'GPD': ['WRI1005103'], 'CARMA': ['CARMA51042']}"
1408,Pridneprovskaya,Hard Coal,Steam Turbine,CHP,UA,1627.3152609570984,,0.0,0.0,0.0,0.0,1959.0,1966.0,,,48.4051,35.1131,[nan],"{'GEO': ['GEO42990'], 'GPD': ['WRI1005102'], 'CARMA': ['CARMA35874']}"
1409,Kurakhovskaya,Hard Coal,Steam Turbine,PP,UA,1371.0015824607397,,0.0,0.0,0.0,0.0,1972.0,2003.0,,,47.9944,37.24022,[nan],"{'GEO': ['GEO42994'], 'GPD': ['WRI1005104'], 'CARMA': ['CARMA23339']}"
1410,Dobrotvorsky,Hard Coal,Steam Turbine,PP,UA,553.1949895604868,,0.0,0.0,0.0,0.0,1960.0,1964.0,,,50.2133,24375,[nan],"{'GEO': ['GEO42992'], 'GPD': ['WRI1005096'], 'CARMA': ['CARMA10971']}"
1422,Zuyevskaya,Hard Coal,Steam Turbine,PP,UA,1147.87960333801,,0.0,0.0,0.0,0.0,1982.0,2007.0,,,48.0331,38.28615,[nan],"{'GEO': ['GEO42995'], 'GPD': ['WRI1005106'], 'CARMA': ['CARMA51083']}"
1423,Zaporozhye,Nuclear,,PP,UA,5705.67497872675,,0.0,0.0,0.0,0.0,1985.0,1996.0,,,47.5119,34.5863,[nan],"{'GEO': ['GEO6207'], 'GPD': ['WRI1005114'], 'CARMA': ['CARMA50875']}"
1424,Trypilska,Hard Coal,Steam Turbine,PP,UA,1659.5849686814602,,0.0,0.0,0.0,0.0,1969.0,1972.0,,,50.1344,30.7468,[nan],"{'GEO': ['GEO43000'], 'GPD': ['WRI1005099'], 'CARMA': ['CARMA46410']}"
1425,Tashlyk,Hydro,Pumped Storage,Store,UA,285.55968954109585,,0.0,0.0,0.0,0.0,2006.0,2007.0,,,47.7968,31.1811,[nan],"{'GEO': ['GEO43025'], 'GPD': ['WRI1005117'], 'CARMA': ['CARMA44696']}"
1426,Starobeshivska,Hard Coal,Steam Turbine,PP,UA,1636.5351774497733,,0.0,0.0,0.0,0.0,1961.0,1967.0,,,47.7997,38.00612,[nan],"{'GEO': ['GEO43003'], 'GPD': ['WRI1005105'], 'CARMA': ['CARMA43083']}"
1427,South,Nuclear,,PP,UA,2852.837489363375,,0.0,0.0,0.0,0.0,1983.0,1989.0,,,47812,31.22,[nan],"{'GEO': ['GEO5475'], 'GPD': ['WRI1005113'], 'CARMA': ['CARMA42555']}"
1428,Rovno rivne,Nuclear,,PP,UA,2695.931427448389,,0.0,0.0,0.0,0.0,1981.0,2006.0,,,51.3245,25.89744,[nan],"{'GEO': ['GEO5174'], 'GPD': ['WRI1005112'], 'CARMA': ['CARMA38114']}"
1429,Ladyzhinska,Hard Coal,Steam Turbine,PP,UA,1659.5849686814602,,0.0,0.0,0.0,0.0,1970.0,1971.0,,,48706,29.2202,[nan],"{'GEO': ['GEO42993'], 'GPD': ['WRI1005098'], 'CARMA': ['CARMA24024']}"
1430,Kiev,Hydro,Pumped Storage,PP,UA,635.8694635681177,,0.0,0.0,0.0,0.0,1964.0,1972.0,,,50.5998,30501,"[nan, nan]","{'GEO': ['GEO43024', 'GEO43023'], 'GPD': ['WRI1005123', 'WRI1005124'], 'CARMA': ['CARMA23516', 'CARMA23517']}"
2450,Cet chisinau,Natural Gas,,PP,MD,306.0,,0.0,0.0,0.0,0.0,,,,,47.027550000000005,28.8801,"[nan, nan]","{'GPD': ['WRI1002985', 'WRI1002984'], 'CARMA': ['CARMA8450', 'CARMA8451']}"
2460,Hydropower che costesti,Hydro,,PP,MD,16.0,,0.0,0.0,0.0,0.0,1978.0,,,,47.8381,27.2246,[nan],"{'GPD': ['WRI1002987'], 'CARMA': ['CARMA9496']}"
2465,Moldavskaya gres,Hard Coal,,PP,MD,2520.0,,0.0,0.0,0.0,0.0,,,,,46.6292,29.9407,[nan],"{'GPD': ['WRI1002989'], 'CARMA': ['CARMA28979']}"
2466,Hydropower dubasari,Hydro,,PP,MD,48.0,,0.0,0.0,0.0,0.0,,,,,47.2778,29123,[nan],"{'GPD': ['WRI1002988'], 'CARMA': ['CARMA11384']}"
2676,Cet nord balti,Natural Gas,,PP,MD,24.0,,0.0,0.0,0.0,0.0,,,,,47.7492,27.8938,[nan],"{'GPD': ['WRI1002986'], 'CARMA': ['CARMA3071']}"
2699,Dniprodzerzhynsk,Hydro,Reservoir,PP,UA,360.3503184713376,,0.0,0.0,0.0,0.0,1963.0,1964.0,,,48.5485,34.541015,[nan],"{'GEO': ['GEO43020'], 'GPD': ['WRI1005119']}"
2707,Burshtynska tes,Hard Coal,Steam Turbine,PP,UA,2212.779958241947,,0.0,0.0,0.0,0.0,1965.0,1984.0,,,49.21038,24.66654,[nan],"{'GEO': ['GEO42991'], 'GPD': ['WRI1005097']}"
2708,Danipro dnieper,Hydro,Reservoir,PP,UA,1484.8407643312103,,0.0,0.0,0.0,0.0,1932.0,1947.0,,,47.86944,35.08611,[nan],"{'GEO': ['GEO43016'], 'GPD': ['WRI1005120']}"
2709,Dniester,Hydro,Pumped Storage,Store,UA,612.7241020616891,,0.0,0.0,0.0,0.0,2009.0,2011.0,,,48.51361,27.47333,[nan],"{'GEO': ['GEO43022'], 'GPD': ['WRI1005116', 'WRI1005115']}"
2710,Kiev,Natural Gas,Steam Turbine,CHP,UA,458.2803237740955,,0.0,0.0,0.0,0.0,1982.0,1984.0,,,50532,30.6625,[nan],"{'GEO': ['GEO42998'], 'GPD': ['WRI1005125']}"
2712,Luganskaya,Hard Coal,Steam Turbine,PP,UA,1060.2903966575996,,0.0,0.0,0.0,0.0,1962.0,1969.0,,,48.74781,39.2624,[nan],"{'GEO': ['GEO42996'], 'GPD': ['WRI1005110']}"
2713,Slavyanskaya,Hard Coal,Steam Turbine,PP,UA,737.5933194139823,,0.0,0.0,0.0,0.0,1971.0,1971.0,,,48872,37.76567,[nan],"{'GEO': ['GEO43002'], 'GPD': ['WRI1005109']}"
2714,Vuhlehirska uglegorskaya,Hard Coal,Steam Turbine,PP,UA,3319.1699373629203,,0.0,0.0,0.0,0.0,1972.0,1977.0,,,48.4633,38.20328,[nan],"{'GEO': ['GEO43001'], 'GPD': ['WRI1005107']}"
2715,Zaporiska,Hard Coal,Steam Turbine,PP,UA,3319.1699373629203,,0.0,0.0,0.0,0.0,1972.0,1977.0,,,47.5089,34.6253,[nan],"{'GEO': ['GEO42988'], 'GPD': ['WRI1005101']}"
3678,Mironovskaya,Hard Coal,,PP,UA,815.0,,0.0,0.0,0.0,0.0,,,,,48.3407,38.4049,[nan],"{'GPD': ['WRI1005108'], 'CARMA': ['CARMA28679']}"
3679,Kramatorskaya,Hard Coal,,PP,UA,120.0,,0.0,0.0,0.0,0.0,1974.0,,,,48.7477,37.5723,[nan],"{'GPD': ['WRI1075856'], 'CARMA': ['CARMA54560']}"
3680,Chernihiv,Hard Coal,,PP,UA,200.0,,0.0,0.0,0.0,0.0,1968.0,,,,51455,31.2602,[nan],"{'GPD': ['WRI1075853'], 'CARMA': ['CARMA8190']}"
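The plant list above keeps powerplantmatching-style columns; the projectID field is a stringified dict of source IDs (GEO, GPD, CARMA). A small loading/filtering sketch ("ua_md_powerplants.csv" is a placeholder name for wherever this table lives; only column names visible in the header row above are used):

```python
import ast

import pandas as pd

# Load the plant table and turn the stringified projectID dicts into real dicts.
ppl = pd.read_csv("ua_md_powerplants.csv", index_col=0)
ppl["projectID"] = ppl["projectID"].apply(ast.literal_eval)  # "{'GEO': [...]}" -> dict

# Example: Ukrainian hydro plants, largest first.
ua_hydro = ppl.query("Country == 'UA' and Fueltype == 'Hydro'")
print(ua_hydro[["Name", "Technology", "Capacity"]].sort_values("Capacity", ascending=False))
```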
@ -1,50 +1,53 @@
https://www.eia.gov/international/data/world/electricity/electricity-generation?pd=2&p=000000000000000000000000000000g&u=1&f=A&v=mapbubble&a=-&i=none&vo=value&t=R&g=000000000000002&l=73-1028i008017kg6368g80a4k000e0ag00gg0004g8g0ho00g000400008&s=315532800000&e=1577836800000&ev=false&
|
https://www.eia.gov/international/data/world/electricity/electricity-generation?pd=2&p=000000000000000000000000000000g&u=1&f=A&v=mapbubble&a=-&i=none&vo=value&t=R&g=000000000000002&l=73-1028i008017kg6368g80a4k000e0ag00gg0004g8g0ho00g000400008&l=72-00000000000000000000000000080000000000000000000g&s=315532800000&e=1609459200000&ev=false&,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
Report generated on: 03-28-2022 11:20:48
|
Report generated on: 01-06-2023 21:17:46,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
"API","","1980","1981","1982","1983","1984","1985","1986","1987","1988","1989","1990","1991","1992","1993","1994","1995","1996","1997","1998","1999","2000","2001","2002","2003","2004","2005","2006","2007","2008","2009","2010","2011","2012","2013","2014","2015","2016","2017","2018","2019","2020"
|
API,,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021
|
||||||
"","hydroelectricity net generation (billion kWh)","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","",""
|
,hydroelectricity net generation (billion kWh),,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
"INTL.33-12-EURO-BKWH.A"," Europe","458.018","464.155","459.881","473.685","481.241","476.739","459.535","491.085","534.517","465.365","474.466","475.47","509.041","526.448","531.815","543.743","529.114164","543.845616","562.441501","569.308453","591.206662","587.371195","541.542535","506.19703","544.536443","545.176179","537.335934","540.934407","567.557921","564.244482","619.96477","543.05273","600.46622","631.86431","619.59229","615.53013","629.98906","562.59258","619.31106","610.62616","670.925"
|
INTL.33-12-EURO-BKWH.A, Europe,"458,018","464,155","459,881","473,685","481,241","476,739","459,535","491,085","534,517","465,365","474,466","475,47","509,041","526,448","531,815","543,743","529,114164","543,845616","562,491501","566,861453","588,644662","584,806195","539,051405","503,7067","542,112443","542,974669","535,006084","538,449707","565,143111","561,761402","617,547148","540,926277","598,055253","629,44709","617,111295","613,079848","627,720566217","560,362524","616,5081462","606,5997419","644,1106599","628,1390143"
|
||||||
"INTL.33-12-ALB-BKWH.A"," Albania","2.919","3.018","3.093","3.167","3.241","3.315","3.365","3.979","3.713","3.846","2.82","3.483","3.187","3.281","3.733","4.162","5.669","4.978","4.872","5.231","4.548","3.519","3.477","5.117","5.411","5.319","4.951","2.76","3.759","5.201","7.49133","4.09068","4.67775","6.88941","4.67676","5.83605","7.70418","4.47975","8.46648","5.15394","5.281"
|
INTL.33-12-ALB-BKWH.A, Albania,"2,919","3,018","3,093","3,167","3,241","3,315","3,365","3,979","3,713","3,846","2,82","3,483","3,187","3,281","3,733","4,162","5,669","4,978","4,872","5,231","4,548","3,519","3,477","5,117","5,411","5,319","4,951","2,76","3,759","5,201","7,49133","4,09068","4,67775","6,88941","4,67676","5,83605","7,70418","4,47975","8,46648","5,15394","5,281","8,891943"
|
||||||
"INTL.33-12-AUT-BKWH.A"," Austria","28.501","30.008","29.893","29.577","28.384","30.288","30.496","25.401","35.151","34.641","31.179","31.112","34.483","36.336","35.349","36.696","33.874","35.744","36.792","40.292","41.418","40.05","39.825","32.883","36.394","36.31","35.48","36.732","37.969","40.487","36.466","32.511","41.862","40.138","39.001","35.255","37.954","36.462","35.73","40.43655","45.344"
|
INTL.33-12-AUT-BKWH.A, Austria,"28,501","30,008","29,893","29,577","28,384","30,288","30,496","25,401","35,151","34,641","31,179","31,112","34,483","36,336","35,349","36,696","33,874","35,744","36,792","40,292","41,418","40,05","39,825","32,883","36,394","36,31","35,48","36,732","37,969","40,487","36,466","32,511","41,862","40,138","39,001","35,255","37,954","36,462","35,73","40,43655","41,9356096","38,75133"
|
||||||
"INTL.33-12-BEL-BKWH.A"," Belgium","0.274","0.377","0.325","0.331","0.348","0.282","0.339","0.425","0.354","0.3","0.263","0.226","0.338","0.252","0.342","0.335","0.237","0.30195","0.38511","0.338","0.455","0.437","0.356","0.245","0.314","0.285","0.355","0.385","0.406","0.325","0.298","0.193","0.353","0.376","0.289","0.314","0.367","0.268","0.311","0.108","1.29"
|
INTL.33-12-BEL-BKWH.A, Belgium,"0,274","0,377","0,325","0,331","0,348","0,282","0,339","0,425","0,354","0,3","0,263","0,226","0,338","0,252","0,342","0,335","0,237","0,30195","0,38511","0,338","0,455","0,437","0,356","0,245","0,314","0,285","0,355","0,385","0,406","0,325","0,298","0,193","0,353","0,376","0,289","0,314","0,367","0,268","0,3135","0,302","0,2669","0,3933"
|
||||||
"INTL.33-12-BIH-BKWH.A"," Bosnia and Herzegovina","--","--","--","--","--","--","--","--","--","--","--","--","3.374","2.343","3.424","3.607","5.104","4.608","4.511","5.477","5.043","5.129","5.215","4.456","5.919","5.938","5.798","3.961","4.818","6.177","7.946","4.343","4.173","7.164","5.876","5.495","5.585","3.7521","6.35382","6.02019","6.1"
|
INTL.33-12-BIH-BKWH.A, Bosnia and Herzegovina,--,--,--,--,--,--,--,--,--,--,--,--,"3,374","2,343","3,424","3,607","5,104","4,608","4,511","5,477","5,043","5,129","5,215","4,456","5,919","5,938","5,798","3,961","4,818","6,177","7,946","4,343","4,173","7,164","5,876","5,495","5,585","3,7521","6,35382","6,02019","4,58","6,722"
|
||||||
"INTL.33-12-BGR-BKWH.A"," Bulgaria","3.674","3.58","3.018","3.318","3.226","2.214","2.302","2.512","2.569","2.662","1.859","2.417","2.042","1.923","1.453","2.291","2.89","2.726","3.066","2.725","2.646","1.72","2.172","2.999","3.136","4.294","4.196","2.845","2.796","3.435","4.98168","2.84328","3.14622","3.99564","4.55598","5.59845","3.8412","2.79972","5.09553","3.34917","3.37"
|
INTL.33-12-BGR-BKWH.A, Bulgaria,"3,674","3,58","3,018","3,318","3,226","2,214","2,302","2,512","2,569","2,662","1,859","2,417","2,042","1,923","1,453","2,291","2,89","2,726","3,066","2,725","2,646","1,72","2,172","2,999","3,136","4,294","4,196","2,845","2,796","3,435","4,98168","2,84328","3,14622","3,99564","4,55598","5,59845","3,8412","2,79972","5,09553","2,929499","2,820398","4,819205"
|
||||||
"INTL.33-12-HRV-BKWH.A"," Croatia","--","--","--","--","--","--","--","--","--","--","--","--","4.298","4.302","4.881","5.212","7.156","5.234","5.403","6.524","5.794","6.482","5.311","4.827","6.888","6.27","5.94","4.194","5.164","6.663","9.035","4.983","4.789","8.536","8.917","6.327","6.784","5.255","7.62399","5.87268","3.4"
|
INTL.33-12-HRV-BKWH.A, Croatia,--,--,--,--,--,--,--,--,--,--,--,--,"4,298","4,302","4,881","5,212","7,156","5,234","5,403","6,524","5,794","6,482","5,311","4,827","6,888","6,27","5,94","4,194","5,164","6,663","9,035","4,983","4,789","8,536","8,917","6,327","6,784","5,255","7,62399","5,87268","5,6624","7,1277"
|
||||||
"INTL.33-12-CYP-BKWH.A"," Cyprus","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0"
|
INTL.33-12-CYP-BKWH.A, Cyprus,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
|
||||||
"INTL.33-12-CZE-BKWH.A"," Czech Republic","--","--","--","--","--","--","--","--","--","--","--","--","--","1.355","1.445","1.982","1.949","1.68201","1.382","1.664","1.7404","2.033","2.467","1.369","1.999","2.356","2.525","2.068","2.004","2.405","2.775","1.95","2.107","2.704","1.909","1.779","1.983","1.852","1.615","1.98792","3.4"
|
INTL.33-12-CZE-BKWH.A, Czechia,--,--,--,--,--,--,--,--,--,--,--,--,--,"1,355","1,445","1,982","1,949","1,68201","1,382","1,664","1,7404","2,033","2,467","1,369","1,999","2,356","2,525","2,068","2,004","2,405","2,775","1,95","2,107","2,704","1,909","1,779","1,983","1,852","1,615","1,98792","2,143884","2,40852"
|
||||||
"INTL.33-12-DNK-BKWH.A"," Denmark","0.03","0.031","0.028","0.036","0.028","0.027","0.029","0.029","0.032","0.027","0.027","0.026","0.028","0.027","0.033","0.03","0.019","0.019","0.02673","0.031","0.03","0.028","0.032","0.021","0.027","0.023","0.023","0.028","0.026","0.019","0.021","0.017","0.017","0.013","0.015","0.018","0.019","0.018","0.015","0.01584","0.02"
|
INTL.33-12-DNK-BKWH.A, Denmark,"0,03","0,031","0,028","0,036","0,028","0,027","0,029","0,029","0,032","0,027","0,027","0,026","0,028","0,027","0,033","0,03","0,019","0,019","0,02673","0,031","0,03","0,028","0,032","0,021","0,027","0,023","0,023","0,028","0,026","0,019","0,021","0,017","0,017","0,013","0,015","0,01803","0,01927","0,017871","0,0148621","0,0172171","0,017064","0,016295"
|
||||||
"INTL.33-12-EST-BKWH.A"," Estonia","--","--","--","--","--","--","--","--","--","--","--","--","0.001","0.001","0.003","0.002","0.002","0.003","0.004","0.004","0.005","0.007","0.006","0.013","0.022","0.022","0.014","0.021","0.028","0.032","0.027","0.03","0.042","0.026","0.027","0.027","0.035","0.026","0.015","0.01881","0.04"
|
INTL.33-12-EST-BKWH.A, Estonia,--,--,--,--,--,--,--,--,--,--,--,--,"0,001","0,001","0,003","0,002","0,002","0,003","0,004","0,004","0,005","0,007","0,006","0,013","0,022","0,022","0,014","0,021","0,028","0,032","0,027","0,029999","0,042","0,026","0,027","0,027","0,035","0,025999","0,0150003","0,0189999","0,03","0,0248"
|
||||||
"INTL.33-12-FRO-BKWH.A"," Faroe Islands","0.049","0.049","0.049","0.049","0.049","0.049","0.049","0.049","0.062","0.071","0.074","0.074","0.083","0.073","0.075","0.075","0.069564","0.075066","0.076501","0.069453","0.075262","0.075195","0.095535","0.08483","0.093443","0.097986","0.099934","0.103407","0.094921","0.091482","0.06676","0.092","0.099","0.091","0.121","0.132","0.105","0.11","0.107","0.102","0.11"
|
INTL.33-12-FRO-BKWH.A, Faroe Islands,"0,049","0,049","0,049","0,049","0,049","0,049","0,049","0,049","0,062","0,071","0,074","0,074","0,083","0,073","0,075","0,075","0,069564","0,075066","0,076501","0,069453","0,075262","0,075195","0,095535","0,08483","0,093443","0,097986","0,099934","0,103407","0,094921","0,091482","0,06676","0,092","0,099","0,091","0,121","0,132","0,105","0,11","0,107","0,102","0,11","0,11"
|
||||||
"INTL.33-12-FIN-BKWH.A"," Finland","10.115","13.518","12.958","13.445","13.115","12.211","12.266","13.658","13.229","12.9","10.75","13.065","14.956","13.341","11.669","12.796","11.742","12.11958","14.9","12.652","14.513","13.073","10.668","9.495","14.919","13.646","11.379","14.035","16.941","12.559","12.743","12.278","16.667","12.672","13.24","16.584","15.634","14.61","13.137","12.31461","15.56"
|
INTL.33-12-FIN-BKWH.A, Finland,"10,115","13,518","12,958","13,445","13,115","12,211","12,266","13,658","13,229","12,9","10,75","13,065","14,956","13,341","11,669","12,796","11,742","12,11958","14,9","12,652","14,513","13,073","10,668","9,495","14,919","13,646","11,379","14,035","16,941","12,559","12,743","12,278001","16,666998","12,672","13,240001","16,583999","15,634127","14,609473","13,1369998","12,2454823","15,883","15,766"
|
||||||
"INTL.33-12-CSK-BKWH.A"," Former Czechoslovakia","4.8","4.2","3.7","3.9","3.2","4.3","4","4.853","4.355","4.229","3.919","3.119","3.602","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--"
|
INTL.33-12-CSK-BKWH.A, Former Czechoslovakia,"4,8","4,2","3,7","3,9","3,2","4,3",4,"4,853","4,355","4,229","3,919","3,119","3,602",--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--
|
||||||
"INTL.33-12-SCG-BKWH.A"," Former Serbia and Montenegro","--","--","--","--","--","--","--","--","--","--","--","--","11.23","10.395","11.016","12.071","14.266","12.636","12.763","13.243","11.88","12.326","11.633","9.752","11.01","11.912","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--"
|
INTL.33-12-SCG-BKWH.A, Former Serbia and Montenegro,--,--,--,--,--,--,--,--,--,--,--,--,"11,23","10,395","11,016","12,071","14,266","12,636","12,763","13,243","11,88","12,326","11,633","9,752","11,01","11,912",--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--
|
||||||
"INTL.33-12-YUG-BKWH.A"," Former Yugoslavia","27.868","25.044","23.295","21.623","25.645","24.363","27.474","25.98","25.612","23.256","19.601","18.929","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--"
|
INTL.33-12-YUG-BKWH.A, Former Yugoslavia,"27,868","25,044","23,295","21,623","25,645","24,363","27,474","25,98","25,612","23,256","19,601","18,929",--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--
|
||||||
"INTL.33-12-FRA-BKWH.A"," France","68.253","70.358","68.6","67.515","64.01","60.248","60.953","68.623","73.952","45.744","52.796","56.277","68.313","64.3","78.057","72.196","64.43","63.151","61.479","71.832","66.466","73.888","59.992","58.567","59.276","50.965","55.741","57.029","63.017","56.428","61.945","45.184","59.099","71.042","62.993","54.876","60.094","49.389","64.485","56.98242","64.84"
|
INTL.33-12-FRA-BKWH.A, France,"68,253","70,358","68,6","67,515","64,01","60,248","60,953","68,623","73,952","45,744","52,796","56,277","68,313","64,3","78,057","72,196","64,43","63,151","61,479","71,832","66,466","73,888","59,992","58,567","59,276","50,965","55,741","57,029","63,017","56,428","61,945","45,184","59,099","71,042","62,993","54,876","60,094","49,389","64,485","56,913891","62,06191","58,856657"
|
||||||
"INTL.33-12-DEU-BKWH.A"," Germany","--","--","--","--","--","--","--","--","--","--","--","14.742","17.223","17.699","19.731","21.562","21.737","17.18343","17.044","19.451","21.515","22.506","22.893","19.071","20.866","19.442","19.808","20.957","20.239","18.841","20.678","17.323","21.331","22.66","19.31","18.664","20.214","19.985","17.815","19.86039","24.75"
|
INTL.33-12-DEU-BKWH.A, Germany,--,--,--,--,--,--,--,--,--,--,--,"14,742","17,223","17,699","19,731","21,562","21,737","17,18343","17,044","19,451","21,515","22,506","22,893","19,071","20,866","19,442","19,808","20,957","20,239","18,841","20,678","17,323","21,331","22,66","19,31","18,664","20,214","19,985","17,694","19,731","18,322","19,252"
|
||||||
"INTL.33-12-DDR-BKWH.A"," Germany, East","1.658","1.718","1.748","1.683","1.748","1.758","1.767","1.726","1.719","1.551","1.389","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--"
|
INTL.33-12-DDR-BKWH.A," Germany, East","1,658","1,718","1,748","1,683","1,748","1,758","1,767","1,726","1,719","1,551","1,389",--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--
|
||||||
"INTL.33-12-DEUW-BKWH.A"," Germany, West","17.125","17.889","17.694","16.713","16.434","15.354","16.526","18.36","18.128","16.482","15.769","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--"
|
INTL.33-12-DEUW-BKWH.A," Germany, West","17,125","17,889","17,694","16,713","16,434","15,354","16,526","18,36","18,128","16,482","15,769",--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--
|
||||||
"INTL.33-12-GIB-BKWH.A"," Gibraltar","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0"
|
INTL.33-12-GIB-BKWH.A, Gibraltar,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
|
||||||
"INTL.33-12-GRC-BKWH.A"," Greece","3.396","3.398","3.551","2.331","2.852","2.792","3.222","2.768","2.354","1.888","1.751","3.068","2.181","2.26","2.573","3.494","4.305","3.84318","3.68","4.546","3.656","2.076","2.772","4.718","4.625","4.967","5.806","2.565","3.279","5.32","7.431","3.998","4.387","6.337","4.464","5.782","5.543","3.962","5.035","3.9798","3.43"
|
INTL.33-12-GRC-BKWH.A, Greece,"3,396","3,398","3,551","2,331","2,852","2,792","3,222","2,768","2,354","1,888","1,751","3,068","2,181","2,26","2,573","3,494","4,305","3,84318","3,68","4,546","3,656","2,076","2,772","4,718","4,625","4,967","5,806","2,565","3,279","5,32","7,431","3,998","4,387","6,337","4,464","5,782","5,543","3,962","5,035","3,9798","3,343687","5,909225"
|
||||||
"INTL.33-12-HUN-BKWH.A"," Hungary","0.111","0.166","0.158","0.153","0.179","0.153","0.152","0.167","0.167","0.156","0.176","0.192","0.156","0.164","0.159","0.161","0.205","0.21384","0.15345","0.179","0.176","0.184","0.192","0.169","0.203","0.2","0.184","0.208","0.211","0.226","0.184","0.216","0.206","0.208","0.294","0.227","0.253","0.214","0.216","0.21681","0.24"
|
INTL.33-12-HUN-BKWH.A, Hungary,"0,111","0,166","0,158","0,153","0,179","0,153","0,152","0,167","0,167","0,156","0,176","0,192","0,156","0,164","0,159","0,161","0,205","0,21384","0,15345","0,179","0,176","0,184","0,192","0,169","0,203","0,2","0,184","0,208","0,211","0,226","0,184","0,215999","0,205999","0,207999","0,294001","0,226719","0,253308","0,213999","0,216","0,2129999","0,238","0,202379"
|
||||||
"INTL.33-12-ISL-BKWH.A"," Iceland","3.053","3.085","3.407","3.588","3.738","3.667","3.846","3.918","4.169","4.217","4.162","4.162","4.267","4.421","4.47","4.635","4.724","5.15493","5.565","5.987","6.292","6.512","6.907","7.017","7.063","6.949","7.22","8.31","12.303","12.156","12.51","12.382","12.214","12.747","12.554","13.541","13.092","13.892","13.679","13.32441","12.46"
|
INTL.33-12-ISL-BKWH.A, Iceland,"3,053","3,085","3,407","3,588","3,738","3,667","3,846","3,918","4,169","4,217","4,162","4,162","4,267","4,421","4,47","4,635","4,724","5,15493","5,565","5,987","6,292","6,512","6,907","7,017","7,063","6,949","7,22","8,31","12,303","12,156","12,509999","12,381999","12,213999","12,747001","12,554","13,541","13,091609","13,891929","13,679377","13,32911","12,9196201","13,5746171"
|
||||||
"INTL.33-12-IRL-BKWH.A"," Ireland","0.833","0.855","0.792","0.776","0.68","0.824","0.91","0.673","0.862","0.684","0.69","0.738","0.809","0.757","0.911","0.706","0.715","0.67122","0.907","0.838","0.838","0.59","0.903","0.592","0.624","0.625","0.717","0.66","0.959","0.893","0.593","0.699","0.795","0.593","0.701","0.798","0.674","0.685","0.687","0.87813","1.21"
|
INTL.33-12-IRL-BKWH.A, Ireland,"0,833","0,855","0,792","0,776","0,68","0,824","0,91","0,673","0,862","0,684","0,69","0,738","0,809","0,757","0,911","0,706","0,715","0,67122","0,907","0,838","0,838","0,59","0,903","0,592","0,624","0,625","0,717","0,66","0,959","0,893","0,593","0,699","0,795","0,593","0,701","0,798","0,674","0,685","0,687","0,87813","0,932656","0,750122"
|
||||||
"INTL.33-12-ITA-BKWH.A"," Italy","44.997","42.782","41.216","40.96","41.923","40.616","40.626","39.05","40.205","33.647","31.31","41.817","41.778","41.011","44.212","37.404","41.617","41.18697","40.808","44.911","43.763","46.343","39.125","33.303","41.915","35.706","36.624","32.488","41.207","48.647","50.506","45.36477","41.45625","52.24626","57.95955","45.08163","42.00768","35.83701","48.29913","45.31824","47.72"
|
INTL.33-12-ITA-BKWH.A, Italy,"44,997","42,782","41,216","40,96","41,923","40,616","40,626","39,05","40,205","33,647","31,31","41,817","41,778","41,011","44,212","37,404","41,617","41,18697","40,808","44,911","43,763","46,343","39,125","33,303","41,915","35,706","36,624","32,488","41,207","48,647","50,506","45,36477","41,45625","52,24626","57,95955","45,08163","42,00768","35,83701","48,29913","45,31824","47,551784","44,739"
|
||||||
"INTL.33-12-XKS-BKWH.A"," Kosovo","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","0.075","0.119","0.154","0.104","0.095","0.142","0.149","0.139","0.243","0.177","0.27027","0.2079","0.26"
|
INTL.33-12-XKS-BKWH.A, Kosovo,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,"0,075","0,119","0,154","0,104","0,095","0,142","0,149","0,139","0,243","0,177","0,27027","0,2079","0,262826","0,300635"
|
||||||
"INTL.33-12-LVA-BKWH.A"," Latvia","--","--","--","--","--","--","--","--","--","--","--","--","2.498","2.846","3.272","2.908","1.841","2.922","2.99","2.729","2.791","2.805","2.438","2.243","3.078","3.293","2.671","2.706","3.078","3.422","3.488","2.857","3.677","2.838","1.953","1.841","2.523","4.356","2.417","2.08692","2.59"
|
INTL.33-12-LVA-BKWH.A, Latvia,--,--,--,--,--,--,--,--,--,--,--,--,"2,498","2,846","3,272","2,908","1,841","2,922","2,99","2,729","2,791","2,805","2,438","2,243","3,078","3,293","2,671","2,706","3,078","3,422","3,487998","2,8568","3,677","2,838","1,953","1,841","2,522819","4,355513","2,4170639","2,0958919","2,5840101","2,6889293"
|
||||||
"INTL.33-12-LTU-BKWH.A"," Lithuania","--","--","--","--","--","--","--","--","--","--","--","--","0.308","0.389","0.447","0.369","0.323","0.291","0.413","0.409","0.336","0.322","0.35","0.323","0.417","0.446193","0.393","0.417","0.398","0.42","0.535","0.475","0.419","0.516","0.395","0.346","0.45","0.597","0.427","0.34254","1.06"
|
INTL.33-12-LTU-BKWH.A, Lithuania,--,--,--,--,--,--,--,--,--,--,--,--,"0,308","0,389","0,447","0,369","0,323","0,291","0,413","0,409","0,336","0,322","0,35","0,323","0,417","0,446193","0,393","0,417","0,398","0,42","0,535","0,475","0,419","0,516","0,395","0,346","0,45","0,597","0,427","0,34254","0,3006","0,3837"
|
||||||
"INTL.33-12-LUX-BKWH.A"," Luxembourg","0.086","0.095","0.084","0.083","0.088","0.071","0.084","0.101","0.097","0.072","0.07","0.083","0.069","0.066","0.117","0.087","0.059","0.082","0.114","0.084","0.119","0.117","0.098","0.078","0.103","0.093","0.11","0.116","0.131","0.105","0.104","0.061","0.095","0.114","0.104","0.095","0.111","0.082","0.089","0.10593","1.09"
|
INTL.33-12-LUX-BKWH.A, Luxembourg,"0,086","0,095","0,084","0,083","0,088","0,071","0,084","0,101","0,097","0,072","0,07","0,083","0,069","0,066","0,117","0,087","0,059","0,082","0,114","0,084","0,119","0,117","0,098","0,078","0,103","0,093","0,11","0,116","0,131","0,105","0,104","0,061","0,095","0,114","0,104","0,095","0,111","0,082","0,089","0,10593","0,091602","0,1068"
|
||||||
"INTL.33-12-MLT-BKWH.A"," Malta","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0"
|
INTL.33-12-MLT-BKWH.A, Malta,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
|
||||||
"INTL.33-12-MNE-BKWH.A"," Montenegro","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","1.733","1.271","1.524","2.05","2.723","1.192","1.462","2.479","1.734","1.476","1.825","1.014","2.09187","1.78","1.8"
|
INTL.33-12-MNE-BKWH.A, Montenegro,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,"1,733","1,271","1,524","2,05","2,723","1,192","1,462","2,479","1,734","1,476","1,825","1,014","1,693443","1,262781","0,867637","1,212652"
|
||||||
"INTL.33-12-NLD-BKWH.A"," Netherlands","0","0","0","0","0","0.003","0.003","0.001","0.002","0.037","0.119","0.079","0.119","0.091","0.1","0.087","0.079","0.09108","0.111","0.089","0.141","0.116","0.109","0.071","0.094","0.087","0.105","0.106","0.101","0.097","0.105","0.057","0.104","0.114","0.112","0.093","0.1","0.061","0.072","0.07326","0.05"
|
INTL.33-12-NLD-BKWH.A, Netherlands,0,0,0,0,0,"0,003","0,003","0,001","0,002","0,037","0,119","0,079","0,119","0,091","0,1","0,087","0,079","0,09108","0,111","0,089","0,141","0,116","0,109","0,071","0,094","0,087","0,105","0,106","0,101","0,097","0,105","0,057","0,104389","0,11431","0,112202","0,0927","0,100078","0,060759","0,0723481","0,074182","0,0462851","0,0838927"
|
||||||
"INTL.33-12-MKD-BKWH.A"," North Macedonia","--","--","--","--","--","--","--","--","--","--","--","--","0.817","0.517","0.696","0.793","0.842","0.891","1.072","1.375","1.158","0.62","0.749","1.36","1.467","1.477","1.634","1","0.832","1.257","2.407","1.419","1.031","1.568","1.195","1.846","1.878","1.099","1.773","1.15236","1.24"
|
INTL.33-12-MKD-BKWH.A, North Macedonia,--,--,--,--,--,--,--,--,--,--,--,--,"0,817","0,517","0,696","0,793","0,842","0,891","1,072","1,375","1,158","0,62","0,749","1,36","1,467","1,477","1,634",1,"0,832","1,257","2,407","1,419","1,031","1,568","1,195","1,846","1,878","1,099","1,773","1,15236","1,277144","1,451623"
|
||||||
"INTL.33-12-NOR-BKWH.A"," Norway","82.717","91.876","91.507","104.704","104.895","101.464","95.321","102.341","107.919","117.369","119.933","109.032","115.505","118.024","110.398","120.315","102.823","108.677","114.546","120.237","140.4","119.258","128.078","104.425","107.693","134.331","118.175","132.319","137.654","124.03","116.257","119.78","141.189","127.551","134.844","136.662","142.244","141.651","138.202","123.66288","141.69"
|
INTL.33-12-NOR-BKWH.A, Norway,"82,717","91,876","91,507","104,704","104,895","101,464","95,321","102,341","107,919","117,369","119,933","109,032","115,505","118,024","110,398","120,315","102,823","108,677","114,546","120,237","140,4","119,258","128,078","104,425","107,693","134,331","118,175","132,319","137,654","124,03","116,257","119,78","141,189","127,551","134,844","136,662","142,244","141,651","138,202","123,66288","141,69",144
|
||||||
"INTL.33-12-POL-BKWH.A"," Poland","2.326","2.116","1.528","1.658","1.394","1.833","1.534","1.644","1.775","1.593","1.403","1.411","1.492","1.473","1.716","1.868","1.912","1.941","2.286","2.133","2.085","2.302","2.256","1.654","2.06","2.179","2.022","2.328","2.13","2.351","2.9","2.313","2.02","2.421","2.165","1.814","2.117","2.552","1.949","1.93842","2.93"
|
INTL.33-12-POL-BKWH.A, Poland,"2,326","2,116","1,528","1,658","1,394","1,833","1,534","1,644","1,775","1,593","1,403","1,411","1,492","1,473","1,716","1,868","1,912","1,941","2,286","2,133","2,085","2,302","2,256","1,654","2,06","2,179","2,022","2,328","2,13","2,351","2,9","2,313","2,02","2,421","2,165","1,814","2,117","2,552","1,949","1,93842","2,118337","2,339192"
|
||||||
"INTL.33-12-PRT-BKWH.A"," Portugal","7.873","4.934","6.82","7.897","9.609","10.512","8.364","9.005","12.037","5.72","9.065","8.952","4.599","8.453","10.551","8.26","14.613","12.97395","12.853","7.213","11.21","13.894","7.722","15.566","9.77","4.684","10.892","9.991","6.73","8.201","15.954","11.423","5.589","13.652","15.471","8.615","15.608","5.79","12.316","8.6526","13.96"
|
INTL.33-12-PRT-BKWH.A, Portugal,"7,873","4,934","6,82","7,897","9,609","10,512","8,364","9,005","12,037","5,72","9,065","8,952","4,599","8,453","10,551","8,26","14,613","12,97395","12,853","7,213","11,21","13,894","7,722","15,566","9,77","4,684","10,892","9,991","6,73","8,201","15,954","11,423","5,589","13,652","15,471","8,615","15,608","5,79","12,316","8,6526","12,082581","11,846464"
|
||||||
"INTL.33-12-ROU-BKWH.A"," Romania","12.506","12.605","11.731","9.934","11.208","11.772","10.688","11.084","13.479","12.497","10.87","14.107","11.583","12.64","12.916","16.526","15.597","17.334","18.69","18.107","14.63","14.774","15.886","13.126","16.348","20.005","18.172","15.806","17.023","15.379","19.684","14.581","11.945","14.807","18.618","16.467","17.848","14.349","17.48736","15.65289","15.53"
|
INTL.33-12-ROU-BKWH.A, Romania,"12,506","12,605","11,731","9,934","11,208","11,772","10,688","11,084","13,479","12,497","10,87","14,107","11,583","12,64","12,916","16,526","15,597","17,334","18,69","18,107","14,63","14,774","15,886","13,126","16,348","20,005","18,172","15,806","17,023","15,379","19,684","14,581","11,945","14,807","18,618","16,467","17,848","14,349","17,48736","15,580622","15,381243","17,376933"
|
||||||
"INTL.33-12-SRB-BKWH.A"," Serbia","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","10.855","9.937","9.468","10.436","11.772","8.58","9.193","10.101","10.893","9.979","10.684","9.061","10.53261","10.07028","9.66"
|
INTL.33-12-SRB-BKWH.A, Serbia,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,--,"10,855","9,937","9,468","10,436","11,772","8,58","9,193","10,101","10,893","9,979","10,684","9,061","10,53261","9,457175","9,034496","11,284232"
|
||||||
"INTL.33-12-SVK-BKWH.A"," Slovakia","--","--","--","--","--","--","--","--","--","--","--","--","--","3.432","4.311","4.831","4.185","4.023","4.224","4.429","4.569","4.878","5.215","3.4452","4.059","4.592","4.355","4.406","4","4.324","5.184","3.211","3.687","4.329","3.762","3.701","4.302","4.321","3.506","4.27383","4.67"
|
INTL.33-12-SVK-BKWH.A, Slovakia,--,--,--,--,--,--,--,--,--,--,--,--,--,"3,432","4,311","4,831","4,185","4,023","4,224","4,429","4,569","4,878","5,215","3,4452","4,059","4,592","4,355","4,406",4,"4,324","5,184","3,211","3,687","4,329","3,762","3,701","4,302","4,321","3,506","4,27383","4,517","4,17"
|
||||||
"INTL.33-12-SVN-BKWH.A"," Slovenia","--","--","--","--","--","--","--","--","--","--","--","--","3.379","2.974","3.348","3.187","3.616","3.046","3.4","3.684","3.771","3.741","3.265","2.916","4.033","3.426","3.555","3.233","3.978","4.666","4.452","3.506","3.841","4.562","6.011","3.75","4.443","3.814","4.643","4.43421","5.24"
|
INTL.33-12-SVN-BKWH.A, Slovenia,--,--,--,--,--,--,--,--,--,--,--,--,"3,379","2,974","3,348","3,187","3,616","3,046","3,4","3,684","3,771","3,741","3,265","2,916","4,033","3,426","3,555","3,233","3,978","4,666","4,452","3,506","3,841","4,562","6,011","3,75","4,443","3,814","4,643","4,43421","4,93406","4,711944"
|
||||||
"INTL.33-12-ESP-BKWH.A"," Spain","29.16","21.64","25.99","26.696","31.088","30.895","26.105","27.016","34.76","19.046","25.16","27.01","18.731","24.133","27.898","22.881","39.404","34.43","33.665","22.634","29.274","40.617","22.691","40.643","31.359","18.209","25.699","27.036","23.13","26.147","41.576","30.07","20.192","36.45","38.815","27.656","35.77","18.007","33.743","24.23025","33.34"
|
INTL.33-12-ESP-BKWH.A, Spain,"29,16","21,64","25,99","26,696","31,088","30,895","26,105","27,016","34,76","19,046","25,16","27,01","18,731","24,133","27,898","22,881","39,404","34,43","33,665","22,634","29,274","40,617","22,691","40,643","31,359","18,209","25,699","27,036","23,13","26,147","41,576","30,07","20,192","36,45","38,815","27,656","35,77","18,007","33,743","24,23025","30,507","29,626"
|
||||||
"INTL.33-12-SWE-BKWH.A"," Sweden","58.133","59.006","54.369","62.801","67.106","70.095","60.134","70.95","69.016","70.911","71.778","62.603","73.588","73.905","58.508","67.421","51.2226","68.365","74.25","70.974","77.798","78.269","65.696","53.005","59.522","72.075","61.106","65.497","68.378","65.193","66.279","66.047","78.333","60.81","63.227","74.734","61.645","64.651","61.79","64.46583","71.6"
|
INTL.33-12-SWE-BKWH.A, Sweden,"58,133","59,006","54,369","62,801","67,106","70,095","60,134","70,95","69,016","70,911","71,778","62,603","73,588","73,905","58,508","67,421","51,2226","68,365","74,25","70,974","77,798","78,269","65,696","53,005","59,522","72,075","61,106","65,497","68,378","65,193","66,279","66,047","78,333","60,81","63,227","74,734","61,645","64,651","61,79","64,46583","71,6","71,086"
|
||||||
"INTL.33-12-CHE-BKWH.A"," Switzerland","32.481","35.13","35.974","35.069","29.871","31.731","32.576","34.328","35.437","29.477","29.497","31.756","32.373","35.416","38.678","34.817","28.458","33.70257","33.136","39.604","36.466","40.895","34.862","34.471","33.411","30.914","30.649","34.898","35.676","35.366","35.704","32.069","38.218","38.08","37.659","37.879","34.281","33.754","34.637","37.6596","40.62"
|
INTL.33-12-CHE-BKWH.A, Switzerland,"32,481","35,13","35,974","35,069","29,871","31,731","32,576","34,328","35,437","29,477","29,497","31,756","32,373","35,416","38,678","34,817","28,458","33,70257","33,136","37,104","33,854","38,29","32,323","31,948","30,938","28,664","28,273","32,362","33,214","32,833","33,261","29,906","35,783","35,628","35,122","35,378","31,984","31,47968","32,095881","35,156989","37,867647","36,964485"
|
||||||
"INTL.33-12-TUR-BKWH.A"," Turkey","11.159","12.308","13.81","11.13","13.19","11.822","11.637","18.314","28.447","17.61","22.917","22.456","26.302","33.611","30.28","35.186","40.07","39.41784","41.80671","34.33","30.57","23.77","33.346","34.977","45.623","39.165","43.802","35.492","32.937","35.598","51.423","51.155","56.669","58.225","39.75","65.856","66.686","57.824","59.49","87.99714","77.39"
|
INTL.33-12-TUR-BKWH.A, Turkey,"11,159","12,308","13,81","11,13","13,19","11,822","11,637","18,314","28,447","17,61","22,917","22,456","26,302","33,611","30,28","35,186","40,07","39,41784","41,80671","34,33","30,57","23,77","33,346","34,977","45,623","39,165","43,802","35,492","32,937","35,598","51,423001","51,154999","56,668998","58,225","39,750001","65,856","66,685883","57,823851","59,490211","88,2094218","78,094369","55,1755392"
|
||||||
"INTL.33-12-GBR-BKWH.A"," United Kingdom","3.921","4.369","4.543","4.548","3.992","4.08","4.767","4.13","4.915","4.732","5.119","4.534","5.329","4.237","5.043","4.79","3.359","4.127","5.067","5.283","5.035","4.015","4.74","3.195","4.795","4.873","4.547","5.026","5.094","5.178","3.566","5.655","5.286","4.667","5.832","6.246","5.342","5.836","5.189","5.89941","7.64"
|
INTL.33-12-GBR-BKWH.A, United Kingdom,"3,921","4,369","4,543","4,548","3,992","4,08","4,767","4,13","4,915","4,732","5,119","4,534","5,329","4,237","5,043","4,79","3,359","4,127","5,117","5,336","5,085","4,055","4,78787","3,22767","4,844","4,92149","4,59315","5,0773","5,14119","5,22792","3,59138","5,69175","5,30965","4,70147","5,8878","6,29727","5,370412217","5,88187","5,44327","5,84628","6,75391","5,0149"
|
||||||
|
, Eurasia,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
|
INTL.33-12-MDA-BKWH.A, Moldova,--,--,--,--,--,--,--,--,--,--,--,--,"0,255","0,371","0,275","0,321","0,362","0,378","0,387","0,363","0,392","0,359","0,348","0,358","0,35","0,359","0,365","0,354","0,385","0,354","0,403","0,348","0,266","0,311","0,317","0,265","0,228","0,282","0,27324","0,29799","0,276","0,316"
|
||||||
|
INTL.33-12-UKR-BKWH.A, Ukraine,--,--,--,--,--,--,--,--,--,--,--,--,"7,725","10,929","11,997","9,853","8,546","9,757","15,756","14,177","11,161","11,912","9,531","9,146","11,635","12,239","12,757","10,042","11,397","11,817","13,02","10,837","10,374","13,663","8,393","5,343","7,594","8,856","10,32372","6,5083","7,5638","10,3326"
|
||||||
|
|
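The hydro generation table above is a wide EIA export: two preamble lines, a header row of years, a units row, decimal commas inside quoted fields and "--" for missing values. A hedged pandas sketch for getting it into shape (the file name and parser options are assumptions about the layout shown here, not the exact call used by the workflow):

```python
import pandas as pd

eia = (
    pd.read_csv(
        "eia_hydro_annual_generation.csv",
        skiprows=[0, 1],      # URL line and "Report generated on" line
        header=0,             # the "API,,1980,...,2021" row
        index_col=1,          # country/area name
        na_values=["--"],
        decimal=",",          # values such as "458,018" use decimal commas
    )
    .drop(columns=["API"], errors="ignore")
    .iloc[1:]                 # drop the "hydroelectricity net generation" units row
)
eia.columns = eia.columns.astype(int)   # year columns 1980..2021
eia.index = eia.index.str.strip()       # names carry a leading space, e.g. " Albania"
```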
@ -1,34 +1,34 @@
Country/area,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018
|
Country/area,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022
|
||||||
Albania,,,,,,,,,,,,,,,,,,,
|
Albania,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
Austria,,,,,,,,,,,,,,,,,,,
|
Austria,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
Belgium,,,,,,,,,,31.5,196.5,196.5,381,707.7,707.7,712,712.2,877.2,1185.9
|
Belgium,,,,,,,,,,31.5,196.5,196.5,381.0,707.7,707.7,712.0,712.2,877.2,1185.9,1555.5,2261.8,2261.8,2261.8
|
||||||
Bosnia Herzg,,,,,,,,,,,,,,,,,,,
|
Bosnia Herzg,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
Bulgaria,,,,,,,,,,,,,,,,,,,
|
Bulgaria,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
Croatia,,,,,,,,,,,,,,,,,,,
|
Croatia,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
Czechia,,,,,,,,,,,,,,,,,,,
|
Czechia,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
Denmark,50,50,214,423.4,423.4,423.4,423.4,423.4,423.4,660.9,867.9,871.5,921.9,1271.1,1271.1,1271.1,1271.1,1263.8,1700.8
|
Denmark,49.95,49.95,213.95,423.35,423.35,423.35,423.35,423.35,423.35,660.85,867.85,871.45,921.85,1271.05,1271.05,1271.05,1271.05,1263.8,1700.8,1700.8,1700.8,2305.6,2305.6
|
||||||
Estonia,,,,,,,,,,,,,,,,,,,
|
Estonia,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
Finland,,,,,,,,,24,24,26.3,26.3,26.3,26.3,26.3,32,32,72.7,72.7
|
Finland,,,,,,,,,24.0,24.0,26.3,26.3,26.3,26.3,26.3,32.0,32.0,72.7,72.7,73.0,73.0,73.0,73.0
|
||||||
France,,,,,,,,,,,,,,,,,,2,2
|
France,,,,,,,,,,,,,,,,,,2.0,2.0,2.0,2.0,2.0,482.0
|
||||||
Germany,,,,,,,,,,35,80,188,268,508,994,3283,4132,5406,6396
|
Germany,,,,,,,,,,35.0,80.0,188.0,268.0,508.0,994.0,3283.0,4132.0,5406.0,6393.0,7555.0,7787.0,7787.0,8129.0
|
||||||
Greece,,,,,,,,,,,,,,,,,,,
|
Greece,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
Hungary,,,,,,,,,,,,,,,,,,,
|
Hungary,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
Ireland,,,,,25.2,25.2,25.2,25.2,25.2,25.2,25.2,25.2,25.2,25.2,25.2,25.2,25.2,25.2,25.2
|
Ireland,,,,,25.2,25.2,25.2,25.2,25.2,25.2,25.2,25.2,25.2,25.2,25.2,25.2,25.2,25.2,25.2,25.2,25.2,25.2,25.2
|
||||||
Italy,,,,,,,,,,,,,,,,,,,
|
Italy,,,,,,,,,,,,,,,,,,,,,,,30.0
|
||||||
Latvia,,,,,,,,,,,,,,,,,,,
|
Latvia,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
Lithuania,,,,,,,,,,,,,,,,,,,
|
Lithuania,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
Luxembourg,,,,,,,,,,,,,,,,,,,
|
Luxembourg,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
Montenegro,,,,,,,,,,,,,,,,,,,
|
Montenegro,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
Netherlands,,,,,,,108,108,228,228,228,228,228,228,228,357,957,957,957
|
Netherlands,,,,,,,108.0,108.0,228.0,228.0,228.0,228.0,228.0,228.0,228.0,357.0,957.0,957.0,957.0,957.0,2459.5,2459.5,2571.0
|
||||||
North Macedonia,,,,,,,,,,,,,,,,,,,
|
North Macedonia,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
Norway,,,,,,,,,,2.3,2.3,2.3,2.3,2.3,2.3,2.3,2.3,2.3,2.3
|
Norway,,,,,,,,,,2.3,2.3,2.3,2.3,2.3,2.3,2.3,2.3,2.3,2.3,2.3,2.3,6.3,66.3
|
||||||
Poland,,,,,,,,,,,,,,,,,,,
|
Poland,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
Portugal,,,,,,,,,,,,1.9,2,2,2,2,,,
|
Portugal,,,,,,,,,,,,1.86,2.0,2.0,2.0,2.0,,,,,25.0,25.0,25.0
|
||||||
Romania,,,,,,,,,,,,,,,,,,,
|
Romania,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
Serbia,,,,,,,,,,,,,,,,,,,
|
Serbia,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
Slovakia,,,,,,,,,,,,,,,,,,,
|
Slovakia,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
Slovenia,,,,,,,,,,,,,,,,,,,
|
Slovenia,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
Spain,,,,,,,,,,,,,,5,5,5,5,5,5
|
Spain,,,,,,,,,,,,,,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0,5.0
|
||||||
Sweden,13,22,22,22,22,22,22,131,133,163,163,163,163,212,213,213,203,203,203
|
Sweden,13.0,22.0,22.0,22.0,22.0,22.0,22.0,131.0,133.0,163.0,163.0,163.0,163.0,212.0,213.0,213.0,203.0,203.0,203.0,203.0,203.0,193.0,193.0
|
||||||
Switzerland,,,,,,,,,,,,,,,,,,,
|
Switzerland,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
UK,3.8,3.8,3.8,63.8,123.8,213.8,303.8,393.8,596.2,951.2,1341.5,1838.3,2995.5,3696,4501.3,5093.4,5293.4,6987.9,8216.5
|
UK,4.0,4.0,4.0,64.0,124.0,214.0,304.0,394.0,596.2,951.0,1341.0,1838.0,2995.0,3696.0,4501.0,5093.0,5293.0,6988.0,8181.0,9888.0,10383.0,11255.0,13928.0
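This capacity table and the two that follow are wide country-by-year matrices, apparently in MW. A small reshaping sketch from wide to long format ("capacity_wide.csv" and the output column names are assumptions, not tied to any rule in the workflow):

```python
import pandas as pd

# One row per country, one column per year; empty cells mean no capacity reported.
wide = pd.read_csv("capacity_wide.csv", index_col="Country/area")
long = wide.rename(columns=int).stack().reset_index()  # drops the empty cells
long.columns = ["country", "year", "capacity_mw"]

print(long.query("country == 'Denmark' and year >= 2018"))
```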
@ -1,34 +1,34 @@
Country/area,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018
|
Country/area,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022
|
||||||
Albania,,,,,,,,,,,,,,,,,,,
|
Albania,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
Austria,50,67,109,322,581,825.2,968.3,991.2,992,1001,1015.8,1106,1337.2,1674.5,2110.3,2488.7,2730,2886.7,3132.7
|
Austria,50.0,67.0,109.0,322.0,581.0,825.22,968.27,991.16,991.97,1000.99,1015.83,1105.97,1337.15,1674.54,2110.28,2488.73,2730.0,2886.7,3132.71,3224.12,3225.98,3407.81,3735.81
|
||||||
Belgium,14,26,31,67,96,167,212,276,324,576.5,715.5,872.5,989,1072.3,1236.3,1464,1657.8,1919.3,2074.8
|
Belgium,14.0,26.0,31.0,67.0,96.0,167.0,212.0,276.0,324.0,576.5,715.5,872.5,985.9,1061.3,1225.0,1469.3,1621.6,1902.2,2119.0,2308.0,2410.9,2686.6,2989.6
|
||||||
Bosnia Herzg,,,,,,,,,,,,0.3,0.3,0.3,0.3,0.3,0.3,0.3,50.9
|
Bosnia Herzg,,,,,,,,,,,,0.3,0.3,0.3,0.3,0.3,0.3,0.3,51.0,87.0,87.0,135.0,135.0
|
||||||
Bulgaria,,,,,1,8,27,30,114,333,488,541,677,683,699,699,699,698.4,698.9
|
Bulgaria,,,,,1.0,8.0,27.0,30.0,114.0,333.0,488.0,541.0,677.0,683.0,699.0,699.0,699.0,698.39,698.92,703.12,702.8,704.38,704.38
|
||||||
Croatia,,,,,6,6,17,17,17,70,79,130,180,254,339,418,483,576.1,586.3
|
Croatia,,,,,6.0,6.0,17.0,17.0,17.0,70.0,79.0,130.0,180.0,254.0,339.0,418.0,483.0,576.1,586.3,646.3,801.3,986.9,1042.9
|
||||||
Czechia,2,,6.4,10.6,16.5,22,43.5,113.8,150,193,213,213,258,262,278,281,282,308.2,316.2
|
Czechia,2.0,,6.4,10.6,16.5,22.0,43.5,113.8,150.0,193.0,213.0,213.0,258.0,262.0,278.0,281.0,282.0,308.21,316.2,339.41,339.42,339.41,339.41
|
||||||
Denmark,2340.1,2447.2,2680.6,2696.6,2700.4,2704.5,2712.3,2700.9,2739.5,2821.2,2934,3080.5,3240.1,3547.9,3615.4,3805.9,3974.5,4225.8,4419.8
|
Denmark,2340.07,2447.2,2680.58,2696.57,2700.36,2704.49,2712.35,2700.86,2739.52,2821.24,2933.98,3080.53,3240.09,3547.87,3615.35,3805.92,3974.09,4225.15,4421.86,4409.74,4566.23,4715.24,4782.24
|
||||||
Estonia,,,1,3,7,31,31,50,77,104,108,180,266,248,275,300,310,311.8,310
|
Estonia,,,1.0,3.0,7.0,31.0,31.0,50.0,77.0,104.0,108.0,180.0,266.0,248.0,275.0,300.0,310.0,311.8,310.0,316.0,317.0,315.0,315.0
|
||||||
Finland,38,39,43,52,82,82,86,110,119,123,170.7,172.7,230.7,420.7,600.7,973,1533,1971.3,1968.3
|
Finland,38.0,39.0,43.0,52.0,82.0,82.0,86.0,110.0,119.0,123.0,170.7,172.7,230.7,420.7,600.7,973.0,1533.0,1971.3,1968.3,2211.0,2513.0,3184.0,5541.0
|
||||||
France,38,66,138,218,358,690,1412,2223,3403,4582,5912,6758,7607.5,8156,9201.4,10298.2,11566.6,13497.4,14898.1
|
France,38.0,66.0,138.0,218.0,358.0,690.0,1412.0,2223.0,3403.0,4582.0,5912.0,6758.02,7607.5,8155.96,9201.42,10298.18,11566.56,13497.35,14898.14,16424.85,17512.0,18737.98,20637.98
|
||||||
Germany,6095,8754,12001,14381,16419,18248,20474,22116,22794,25697,26823,28524,30711,32969,37620,41297,45303,50174,52447
|
Germany,6095.0,8754.0,12001.0,14381.0,16419.0,18248.0,20474.0,22116.0,22794.0,25697.0,26823.0,28524.0,30711.0,32969.0,37620.0,41297.0,45303.0,50174.0,52328.0,53187.0,54414.0,56046.0,58165.0
|
||||||
Greece,226,270,287,371,470,491,749,846,1022,1171,1298,1640,1753,1809,1978,2091,2370,2624,2877.5
|
Greece,226.0,270.0,287.0,371.0,470.0,491.0,749.0,846.0,1022.0,1171.0,1298.0,1640.0,1753.0,1809.0,1978.0,2091.0,2370.0,2624.0,2877.5,3589.0,4119.25,4649.13,4879.13
|
||||||
Hungary,,1,1,3,3,17,33,61,134,203,293,331,325,329,329,329,329,329,329
|
Hungary,,1.0,1.0,3.0,3.0,17.0,33.0,61.0,134.0,203.0,293.0,331.0,325.0,329.0,329.0,329.0,329.0,329.0,329.0,323.0,323.0,324.0,324.0
|
||||||
Ireland,116.5,122.9,134.8,210.3,311.2,468.1,651.3,715.3,917.1,1226.1,1365.2,1559.4,1679.2,1983,2258.1,2426,2760.8,3292.8,3650.9
|
Ireland,116.5,122.9,134.8,210.3,311.2,468.1,651.3,715.3,917.1,1226.1,1365.2,1559.4,1679.15,1898.1,2258.05,2425.95,2776.45,3293.95,3648.65,4101.25,4281.5,4313.84,4593.84
|
||||||
Italy,363,664,780,874,1127,1635,1902,2702,3525,4879,5794,6918,8102,8542,8683,9137,9384,9736.6,10230.2
|
Italy,363.0,664.0,780.0,874.0,1127.0,1635.0,1902.0,2702.0,3525.0,4879.0,5794.0,6918.0,8102.0,8542.0,8683.0,9137.0,9384.0,9736.58,10230.25,10679.46,10870.62,11253.73,11749.73
|
||||||
Latvia,2,2,22,26,26,26,26,26,28,29,30,36,59,65.9,68.9,68.2,69.9,77.1,78.2
|
Latvia,2.0,2.0,22.0,26.0,26.0,26.0,26.0,26.0,28.0,29.0,30.0,36.0,59.0,65.89,68.92,68.17,69.91,77.11,78.17,78.07,78.07,77.13,136.13
|
||||||
Lithuania,,,,,1,1,31,47,54,98,133,202,275,279,288,436,509,518,533
|
Lithuania,,,,,1.0,1.0,31.0,47.0,54.0,98.0,133.0,202.0,275.0,279.0,288.0,436.0,509.0,518.0,533.0,534.0,540.0,671.0,814.0
|
||||||
Luxembourg,14,13.9,13.9,20.5,34.9,34.9,34.9,34.9,42.9,42.9,43.7,44.5,58.3,58.3,58.3,63.8,119.7,119.7,122.9
|
Luxembourg,14.0,13.9,13.9,20.5,34.9,34.9,34.9,34.9,42.92,42.93,43.73,44.53,58.33,58.33,58.34,63.79,119.69,119.69,122.89,135.79,152.74,136.44,165.44
|
||||||
Montenegro,,,,,,,,,,,,,,,,,,72,118
|
Montenegro,,,,,,,,,,,,,,,,,,72.0,72.0,118.0,118.0,118.0,118.0
|
||||||
Netherlands,447,486,672,905,1075,1224,1453,1641,1921,1994,2009,2088,2205,2485,2637,3034,3300,3245,3436
|
Netherlands,447.0,486.0,672.0,905.0,1075.0,1224.0,1453.0,1641.0,1921.0,1994.0,2009.0,2088.0,2205.0,2485.0,2637.0,3033.84,3300.12,3245.0,3436.11,3527.16,4188.38,5309.87,6176.0
|
||||||
North Macedonia,,,,,,,,,,,,,,,37,37,37,37,37
|
North Macedonia,,,,,,,,,,,,,,,37.0,37.0,37.0,37.0,37.0,37.0,37.0,37.0,37.0
|
||||||
Norway,13,13,97,97,152,265,284,348,395,420.7,422.7,509.7,702.7,815.7,856.7,864.7,880.7,1204.7,1708
|
Norway,13.0,13.0,97.0,97.0,152.0,265.0,284.0,348.0,395.0,420.7,422.7,509.7,702.7,815.7,856.7,864.7,880.7,1204.7,1707.7,2911.7,4027.7,5042.7,5067.7
|
||||||
Poland,4,19,32,35,40,121,172,306,526,709,1108,1800,2564,3429,3836,4886,5747,5759.4,5766.1
|
Poland,4.0,19.0,32.0,35.0,40.0,121.0,172.0,306.0,526.0,709.0,1108.0,1800.0,2564.0,3429.0,3836.0,4886.0,5747.0,5759.36,5766.08,5837.76,6298.25,6967.34,7987.34
|
||||||
Portugal,83,125,190,268,553,1064,1681,2201,2857,3326,3796,4254.4,4409.6,4607.9,4854.6,4934.8,5124.1,5124.1,5172.4
|
Portugal,83.0,125.0,190.0,268.0,553.0,1064.0,1681.0,2201.0,2857.0,3326.0,3796.0,4254.35,4409.55,4607.95,4854.56,4934.84,5124.1,5124.1,5172.36,5222.75,5097.26,5402.33,5430.33
|
||||||
Romania,,,,,,1,1,3,5,15,389,988,1822,2773,3244,3130,3025,3029.8,3032.3
|
Romania,,,,,,1.0,1.0,3.0,5.0,15.0,389.0,988.0,1822.0,2773.0,3244.0,3130.0,3025.0,3029.8,3032.26,3037.52,3012.53,3014.96,3014.96
|
||||||
Serbia,,,,,,,,,,,,,0.5,0.5,0.5,10.4,17,25,25
|
Serbia,,,,,,,,,,,,,0.5,0.5,0.5,10.4,17.0,25.0,227.0,398.0,398.0,398.0,398.0
|
||||||
Slovakia,,,,3,3,5,5,5,5,3,3,3,3,5,3,3,3,4,3
|
Slovakia,,,,3.0,3.0,5.0,5.0,5.0,5.0,3.0,3.0,3.0,3.0,5.0,3.0,3.0,3.0,4.0,3.0,4.0,4.0,4.0,4.0
|
||||||
Slovenia,,,,,,,,,,,,,,4,4,5,5,5,5.2
|
Slovenia,,,,,,,,,,,,,2.0,2.0,3.0,3.0,3.0,3.3,3.3,3.3,3.3,3.33,3.33
|
||||||
Spain,2206,3397,4891,5945,8317,9918,11722,14820,16555,19176,20693,21529,22789,22953,22920,22938,22985,23119.5,23400.1
|
Spain,2206.0,3397.0,4891.0,5945.0,8317.0,9918.0,11722.0,14820.0,16555.0,19176.0,20693.0,21529.0,22789.0,22953.0,22920.0,22938.0,22985.0,23119.48,23400.06,25585.08,26814.19,27902.65,29302.84
|
||||||
Sweden,196,273,335,395,453,500,563,692,956,1312,1854,2601,3443,3982,4875,5606,6232,6408,7097
|
Sweden,196.0,273.0,335.0,395.0,453.0,500.0,563.0,692.0,956.0,1312.0,1854.0,2601.0,3443.0,3982.0,4875.0,5606.0,6232.0,6408.0,7097.0,8478.0,9773.0,11923.0,14364.0
|
||||||
Switzerland,3,5,5,5,9,12,12,12,14,18,42,46,49,60,60,60,75,75,75
|
Switzerland,3.0,5.0,5.0,5.0,9.0,12.0,12.0,12.0,14.0,18.0,42.0,46.0,49.0,60.0,60.0,60.0,75.0,75.0,75.0,75.0,87.0,87.0,87.0
|
||||||
UK,408.2,489.2,530.2,678.2,809.2,1351.2,1651.2,2083.2,2849.8,3470.8,4079.8,4758,6035,7586.3,8572.7,9212.2,10832.3,12596.9,13553.9
|
UK,431.0,490.0,531.0,678.0,809.0,1351.0,1651.0,2083.0,2849.8,3468.0,4080.0,4758.0,6035.0,7586.0,8573.0,9212.0,10833.0,12597.0,13425.0,13999.0,14075.0,14492.0,14832.0
|
||||||
|
|
@ -1,34 +1,34 @@
|
|||||||
Country/area,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018
|
Country/area,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022
|
||||||
Albania,,0.1,0.2,0.2,0.2,0.2,0.2,0.2,0.2,0.3,0.4,0.6,0.7,0.8,0.9,1.1,1,1,1
|
Albania,,0.1,0.2,0.2,0.2,0.2,0.2,0.2,0.2,0.3,0.4,0.56,0.68,0.76,0.87,1.05,1.0,1.0,1.0,14.0,21.0,23.0,28.6
|
||||||
Austria,5,7,9,23,27,21,22.4,24.2,30.1,48.9,88.8,174.1,337.5,626,785.2,937.1,1096,1269,1437.6
|
Austria,5.0,7.0,9.0,23.0,27.0,18.49,19.61,21.42,27.0,45.56,85.27,169.88,333.09,620.78,779.76,931.56,1089.53,1262.01,1447.94,1694.4,2034.74,2773.91,3538.91
|
||||||
Belgium,,,1,1,1,2,2,20,62,386,1007,1979,2647,2902,3015.2,3131.7,3327,3616.2,3986.5
|
Belgium,,,1.0,1.0,1.0,2.0,2.0,20.0,62.0,386.0,1006.6,1978.6,2646.6,2901.6,3015.0,3131.6,3328.8,3620.6,4000.0,4636.6,5572.8,6012.4,6898.4
|
||||||
Bosnia Herzg,,,,0.1,0.2,0.3,0.3,0.3,0.3,0.3,0.3,0.3,0.3,1.3,7.2,8.2,14.1,16,18.2
|
Bosnia Herzg,,,,0.1,0.2,0.3,0.3,0.3,0.3,0.3,0.3,0.3,0.35,1.34,7.17,8.17,14.12,16.0,18.15,22.35,34.89,56.51,107.47
|
||||||
Bulgaria,,,,,,,,0,0.1,2,25,154,1013,1020,1026,1029,1028,1035.6,1032.7
|
Bulgaria,,,,,,,,0.03,0.1,2.0,25.0,154.0,921.99,1038.54,1028.92,1027.89,1029.89,1030.7,1033.06,1044.39,1100.21,1274.71,1948.36
|
||||||
Croatia,,,,,,,,,,0.3,0.3,0.3,4,19,33,47.8,55.8,60,67.7
|
Croatia,,,,,,,,,,0.3,0.3,0.3,4.0,19.0,33.0,47.8,55.8,60.0,67.7,84.8,108.5,138.3,182.3
|
||||||
Czechia,0.1,0.1,0.2,0.3,0.4,0.6,0.8,4,39.5,464.6,1727,1913,2022,2063.5,2067.4,2074.9,2067.9,2069.5,2075.1
|
Czechia,0.1,0.1,0.2,0.3,0.4,0.59,0.84,3.96,39.5,464.6,1727.0,1913.0,2022.0,2063.5,2067.4,2074.9,2067.9,2075.44,2081.05,2110.67,2171.96,2246.09,2627.09
|
||||||
Denmark,1,1,2,2,2,3,3,3,3,5,7,17,402,571,607,782.1,851,906.4,998
|
Denmark,1.0,1.0,2.0,2.0,2.0,3.0,3.0,3.0,3.0,5.0,7.0,17.0,402.0,571.0,607.0,782.11,850.95,906.35,998.0,1080.0,1304.29,1704.04,3122.04
|
||||||
Estonia,,,,,,,,,,0.1,0.1,0.2,0.4,1.5,3.3,6.5,10,15,31.9
|
Estonia,,,,,,,,,,0.1,0.1,0.2,0.38,1.5,3.34,6.5,10.0,15.0,31.9,120.6,207.67,394.77,534.77
|
||||||
Finland,2,3,3,3,4,4,5,5,6,6,7,7,8,9,11,17,39,82,140
|
Finland,2.0,3.0,3.0,3.0,4.0,4.0,5.0,5.0,6.0,6.0,7.0,7.0,8.0,9.0,11.0,17.0,39.0,82.0,140.0,222.0,318.0,425.0,590.6
|
||||||
France,7,7,8,9,11,13,15,26,80,277,1044,3003.6,4358.8,5277.3,6034.4,7137.5,7702.1,8610.4,9617
|
France,7.0,7.0,8.0,9.0,11.0,13.0,15.0,26.0,80.0,277.0,1044.0,3003.57,4358.75,5277.29,6034.42,7137.52,7702.08,8610.44,9638.88,10738.39,11812.2,14436.97,17036.97
|
||||||
Germany,114,195,260,435,1105,2056,2899,4170,6120,10564,18004,25914,34075,36708,37898,39222,40677,42291,45179
|
Germany,114.0,195.0,260.0,435.0,1105.0,2056.0,2899.0,4170.0,6120.0,10564.0,18004.0,25914.0,34075.0,36708.0,37898.0,39222.0,40677.0,42291.0,45156.0,48912.0,53669.0,59371.0,66662.0
|
||||||
Greece,,1,1,1,1,1,5,9,12,46,202,612,1536,2579,2596,2604,2604,2605.5,2651.6
|
Greece,,1.0,1.0,1.0,1.0,1.0,5.0,9.0,12.0,46.0,202.0,612.0,1536.0,2579.0,2596.0,2604.0,2604.0,2605.53,2651.57,2833.79,3287.72,4277.42,5557.42
|
||||||
Hungary,,,,,,,,0.4,1,1,2,4,12,35,89,172,235,344,726
|
Hungary,,,,,,,,0.4,1.0,1.0,2.0,4.0,12.0,35.0,89.0,172.0,235.0,344.0,728.0,1400.0,2131.0,2968.0,2988.0
|
||||||
Ireland,,,,,,,,,,0.6,0.7,0.8,0.9,1,1.6,2.4,5.9,15.7,24.2
|
Ireland,,,,,,,,,,,,,,,,,,,,,,,
|
||||||
Italy,19,20,22,26,31,34,45,110,483,1264,3592,13131,16785,18185,18594,18901,19283,19682.3,20107.6
|
Italy,19.0,20.0,22.0,26.0,31.0,34.0,45.0,110.0,483.0,1264.0,3592.0,13131.0,16785.0,18185.0,18594.0,18901.0,19283.0,19682.29,20107.59,20865.28,21650.04,22594.26,25076.56
|
||||||
Latvia,,,,,,,,,,,,,0.2,0.2,0.2,0.2,0.7,0.7,2
|
Latvia,,,,,,,,,,,,,,,,,0.69,0.69,1.96,3.3,5.1,7.16,56.16
|
||||||
Lithuania,,,,,,,,,0.1,0.1,0.1,0.3,7,68,69,69,70,73.8,82
|
Lithuania,,,,,,,,,0.1,0.1,0.1,0.3,7.0,68.0,69.0,69.0,70.0,70.08,72.0,73.0,80.0,84.0,397.0
|
||||||
Luxembourg,,0.2,1.6,14.2,23.6,23.6,23.7,23.9,24.6,26.4,29.5,40.7,74.7,95,109.9,116.3,121.9,128.1,130.6
|
Luxembourg,,0.16,1.59,14.17,23.56,23.58,23.7,23.93,24.56,26.36,29.45,40.67,74.65,95.02,109.93,116.27,121.9,128.1,130.62,159.74,186.64,277.16,319.16
|
||||||
Montenegro,,,,,,,0,0.2,0.4,0.4,0.6,0.8,0.9,1.1,2.1,2.7,3.1,3.4,3.4
|
Montenegro,,,,,,,,,,,,,,,,,,,,,2.57,2.57,22.2
|
||||||
Netherlands,13,21,26,46,50,51,53,54,59,69,90,149,369,746,1048,1515,2049,2903,4522
|
Netherlands,13.0,21.0,26.0,46.0,50.0,51.0,53.0,54.0,59.0,69.0,90.0,149.0,287.0,650.0,1007.0,1526.26,2135.02,2910.89,4608.0,7226.0,11108.43,14910.69,18848.69
|
||||||
North Macedonia,,,,,,,,,,,0,2,4,7,15,17,16.7,16.7,20.6
|
North Macedonia,,,,,,,,,,,,2.0,4.0,7.0,15.0,17.0,16.7,16.7,16.7,16.71,84.93,84.93,84.93
|
||||||
Norway,6,6,6,7,7,7,8,8,8.3,8.7,9.1,9.5,10,11,13,15,26.7,44.9,68.4
|
Norway,6.0,6.0,6.0,7.0,7.0,7.0,8.0,8.0,8.3,8.7,9.1,9.5,10.0,11.0,13.0,15.0,26.7,44.9,53.11,102.53,141.53,186.53,302.53
|
||||||
Poland,,,,,,,,,,,,1.1,1.3,2.4,27.2,107.8,187.2,287.1,562
|
Poland,,,,,,,,,,,,1.11,1.3,2.39,27.15,107.78,187.25,287.09,561.98,1539.26,3954.96,7415.52,11166.52
|
||||||
Portugal,1,1,1,2,2,2,3,24,59,115,134,172,238,296,415,447,512.8,579.2,667.4
|
Portugal,1.0,1.0,1.0,2.0,2.0,2.0,3.0,24.0,59.0,115.0,134.0,169.6,235.6,293.6,412.6,441.75,493.05,539.42,617.85,832.74,1010.07,1474.78,2364.78
|
||||||
Romania,,,,,,,,,0.1,0.1,0.1,1,41,761,1293,1326,1372,1374.1,1385.8
|
Romania,,,,,,,,,0.1,0.1,0.1,1.0,41.0,761.0,1293.0,1326.0,1372.0,1374.13,1385.82,1397.71,1382.54,1393.92,1413.92
|
||||||
Serbia,,,,,,0.1,0.2,0.4,0.9,1.2,1.3,1.5,3.1,4.7,6,9,11,10,10
|
Serbia,,,,,,0.1,0.2,0.4,0.9,1.2,1.3,1.5,3.1,4.7,6.0,9.0,11.0,10.0,11.0,11.0,11.5,11.94,11.94
|
||||||
Slovakia,,,,,,,,,,,19,496,513,533,533,533,533,528,472
|
Slovakia,,,,,,,,,,,19.0,496.0,513.0,533.0,533.0,533.0,533.0,528.0,472.0,590.0,535.0,537.0,537.0
|
||||||
Slovenia,,,0,0,0,0,0.2,0.6,1,4,12,57,142,187,223,238,233,246.8,221.3
|
Slovenia,1.0,1.0,,,,0.05,0.19,0.59,1.0,4.0,12.0,57.0,142.0,187.0,223.0,238.0,233.0,246.8,246.8,277.88,369.78,461.16,632.16
|
||||||
Spain,10,13,17,22,33,52,130,494,3384,3423,3873,4283,4569,4690,4697,4704,4713,4723,4763.5
|
Spain,1.0,3.0,6.0,10.0,19.0,37.0,113.0,476.0,3365.0,3403.0,3851.0,4260.0,4545.0,4665.0,4672.0,4677.0,4687.0,4696.0,4730.7,8772.02,10100.42,13678.4,18176.73
|
||||||
Sweden,3,3,3,4,4,4,5,6,8,9,11,12,24,43,60,104,153,402,492
|
Sweden,3.0,3.0,3.0,4.0,4.0,4.0,5.0,6.0,8.0,9.0,11.0,12.0,24.0,43.0,60.0,104.0,153.0,231.0,411.0,698.0,1090.0,1587.0,2587.0
|
||||||
Switzerland,16,18,20,22,24,28,30,37,49,79,125,223,437,756,1061,1394,1664,1906,2171
|
Switzerland,16.0,18.0,20.0,22.0,24.0,28.0,30.0,37.0,49.0,79.0,125.0,223.0,437.0,756.0,1061.0,1394.0,1664.0,1906.0,2173.0,2498.0,2973.0,3655.0,4339.92
|
||||||
UK,2,3,4,6,8,11,14,18,23,27,95,1000,1753,2937,5528,9601.2,11930.5,12781.8,13118.3
|
UK,2.0,3.0,4.0,6.0,8.0,11.0,14.0,18.0,23.0,27.0,95.0,1000.0,1753.0,2937.0,5528.0,9601.0,11914.0,12760.0,13059.0,13345.0,13579.0,13965.0,14660.0
|
||||||
|
|
@ -1,16 +1,16 @@
|
|||||||
country,factor
|
country,factor
|
||||||
BE,0.65
|
BE,0.796
|
||||||
BG,0.89
|
BG,0.894
|
||||||
CZ,0.82
|
CZ,0.827
|
||||||
FI,0.92
|
FI,0.936
|
||||||
FR,0.70
|
FR,0.71
|
||||||
DE,0.88
|
DE,0.871
|
||||||
HU,0.90
|
HU,0.913
|
||||||
NL,0.86
|
NL,0.868
|
||||||
RO,0.92
|
RO,0.909
|
||||||
SK,0.89
|
SK,0.9
|
||||||
SI,0.94
|
SI,0.913
|
||||||
ES,0.89
|
ES,0.897
|
||||||
SE,0.82
|
SE,0.851
|
||||||
CH,0.86
|
CH,0.87
|
||||||
GB,0.67
|
GB,0.656
|
||||||
|
|
8
data/unit_commitment.csv
Normal file
8
data/unit_commitment.csv
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
attribute,OCGT,CCGT,coal,lignite,nuclear
|
||||||
|
ramp_limit_up,1,1,1,1,0.3
|
||||||
|
ramp_limit_start_up,0.2,0.45,0.38,0.4,0.5
|
||||||
|
ramp_limit_shut_down,0.2,0.45,0.38,0.4,0.5
|
||||||
|
p_min_pu,0.2,0.45,0.325,0.4,0.5
|
||||||
|
min_up_time,,3,5,7,6
|
||||||
|
min_down_time,,2,6,6,10
|
||||||
|
start_up_cost,9.6,34.2,35.64,19.14,16.5
|
|
@ -82,7 +82,7 @@ author = "Tom Brown (KIT, TUB, FIAS), Jonas Hoersch (KIT, FIAS), Fabian Hofmann
|
|||||||
# The short X.Y version.
|
# The short X.Y version.
|
||||||
version = "0.8"
|
version = "0.8"
|
||||||
# The full version, including alpha/beta/rc tags.
|
# The full version, including alpha/beta/rc tags.
|
||||||
release = "0.8.0"
|
release = "0.8.1"
|
||||||
|
|
||||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||||
# for a list of supported languages.
|
# for a list of supported languages.
|
||||||
|
@ -1,15 +1,17 @@
|
|||||||
,Unit,Values,Description
|
,Unit,Values,Description
|
||||||
|
focus_weights,,,Optionally specify the focus weights for the clustering of countries. For instance: `DE: 0.8` will distribute 80% of all nodes to Germany and 20% to the rest of the countries.
|
||||||
simplify_network,,,
|
simplify_network,,,
|
||||||
-- to_substations,bool,"{'true','false'}","Aggregates all nodes without power injection (positive or negative, i.e. demand or generation) to electrically closest ones"
|
-- to_substations,bool,"{'true','false'}","Aggregates all nodes without power injection (positive or negative, i.e. demand or generation) to electrically closest ones"
|
||||||
-- algorithm,str,"One of {‘kmeans’, ‘hac’, ‘modularity‘}",
|
-- algorithm,str,"One of {‘kmeans’, ‘hac’, ‘modularity‘}",
|
||||||
-- feature,str,"Str in the format ‘carrier1+carrier2+...+carrierN-X’, where CarrierI can be from {‘solar’, ‘onwind’, ‘offwind’, ‘ror’} and X is one of {‘cap’, ‘time’}.",
|
-- feature,str,"Str in the format ‘carrier1+carrier2+...+carrierN-X’, where CarrierI can be from {‘solar’, ‘onwind’, ‘offwind’, ‘ror’} and X is one of {‘cap’, ‘time’}.",
|
||||||
-- exclude_carriers,list,"List of Str like [ 'solar', 'onwind'] or empty list []","List of carriers which will not be aggregated. If empty, all carriers will be aggregated."
|
-- exclude_carriers,list,"List of Str like [ 'solar', 'onwind'] or empty list []","List of carriers which will not be aggregated. If empty, all carriers will be aggregated."
|
||||||
-- remove stubs,bool,"true/false","Controls whether radial parts of the network should be recursively aggregated. Defaults to true."
|
-- remove stubs,bool,"{'true','false'}",Controls whether radial parts of the network should be recursively aggregated. Defaults to true.
|
||||||
-- remove_stubs_across_borders,bool,"true/false","Controls whether radial parts of the network should be recursively aggregated across borders. Defaults to true."
|
-- remove_stubs_across_borders,bool,"{'true','false'}",Controls whether radial parts of the network should be recursively aggregated across borders. Defaults to true.
|
||||||
cluster_network,,,
|
cluster_network,,,
|
||||||
-- algorithm,str,"One of {‘kmeans’, ‘hac’}",
|
-- algorithm,str,"One of {‘kmeans’, ‘hac’}",
|
||||||
-- feature,str,"Str in the format ‘carrier1+carrier2+...+carrierN-X’, where CarrierI can be from {‘solar’, ‘onwind’, ‘offwind’, ‘ror’} and X is one of {‘cap’, ‘time’}.",
|
-- feature,str,"Str in the format ‘carrier1+carrier2+...+carrierN-X’, where CarrierI can be from {‘solar’, ‘onwind’, ‘offwind’, ‘ror’} and X is one of {‘cap’, ‘time’}.",
|
||||||
-- exclude_carriers,list,"List of Str like [ 'solar', 'onwind'] or empty list []","List of carriers which will not be aggregated. If empty, all carriers will be aggregated."
|
-- exclude_carriers,list,"List of Str like [ 'solar', 'onwind'] or empty list []","List of carriers which will not be aggregated. If empty, all carriers will be aggregated."
|
||||||
|
-- consider_efficiency_classes,bool,"{'true','false'}","Aggregates each carrier into the top 10-quantile (high), the bottom 90-quantile (low), and everything in between (medium)."
|
||||||
aggregation_strategies,,,
|
aggregation_strategies,,,
|
||||||
-- generators,,,
|
-- generators,,,
|
||||||
-- -- {key},str,"{key} can be any of the component of the generator (str). It’s value can be any that can be converted to pandas.Series using getattr(). For example one of {min, max, sum}.","Aggregates the component according to the given strategy. For example, if sum, then all values within each cluster are summed to represent the new generator."
|
-- -- {key},str,"{key} can be any of the component of the generator (str). It’s value can be any that can be converted to pandas.Series using getattr(). For example one of {min, max, sum}.","Aggregates the component according to the given strategy. For example, if sum, then all values within each cluster are summed to represent the new generator."
|
||||||
|
|
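To illustrate how these entries fit together, a minimal configuration sketch is given below (values are illustrative, not defaults; the keys are assumed to sit under the ``clustering:`` section as described in the table above):

.. code:: yaml

    clustering:
      focus_weights:
        DE: 0.8             # 80% of all nodes to Germany, 20% to the rest
      simplify_network:
        to_substations: false
        remove_stubs: true
        remove_stubs_across_borders: true
      cluster_network:
        algorithm: kmeans
        feature: solar+onwind-time
        exclude_carriers: []
        consider_efficiency_classes: false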
@ -1,3 +1,5 @@
|
|||||||
,Unit,Values,Description
|
,Unit,Values,Description
|
||||||
{name},--,"string","For any carrier/technology overwrite attributes as listed below."
|
unit_commitment ,bool,"{true, false}","Allow the overwrite of ramp_limit_up, ramp_limit_start_up, ramp_limit_shut_down, p_min_pu, min_up_time, min_down_time, and start_up_cost of conventional generators. Refer to the CSV file ``data/unit_commitment.csv``."
|
||||||
-- {attribute},--,"string or float","For any attribute, can specify a float or reference to a file path to a CSV file giving floats for each country (2-letter code)."
|
dynamic_fuel_price ,bool,"{true, false}","Consider the monthly fluctuating fuel prices for each conventional generator. Refer to the CSV file ""data/validation/monthly_fuel_price.csv""."
|
||||||
|
{name},--,string,For any carrier/technology overwrite attributes as listed below.
|
||||||
|
-- {attribute},--,string or float,"For any attribute, can specify a float or reference to a file path to a CSV file giving floats for each country (2-letter code)."
|
||||||
|
|
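As a sketch, the two switches above could be set as follows (the ``conventional:`` section name and the values are assumptions to be checked against ``config/config.default.yaml``):

.. code:: yaml

    conventional:
      unit_commitment: true     # overwrite generator attributes from data/unit_commitment.csv
      dynamic_fuel_price: true  # use monthly fuel prices from data/validation/monthly_fuel_price.csv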
@ -5,6 +5,7 @@ retrieve_databundle,bool,"{true, false}","Switch to retrieve databundle from zen
|
|||||||
retrieve_sector_databundle,bool,"{true, false}","Switch to retrieve sector databundle from zenodo via the rule :mod:`retrieve_sector_databundle` or whether to keep a custom databundle located in the corresponding folder."
|
retrieve_sector_databundle,bool,"{true, false}","Switch to retrieve sector databundle from zenodo via the rule :mod:`retrieve_sector_databundle` or whether to keep a custom databundle located in the corresponding folder."
|
||||||
retrieve_cost_data,bool,"{true, false}","Switch to retrieve technology cost data from `technology-data repository <https://github.com/PyPSA/technology-data>`_."
|
retrieve_cost_data,bool,"{true, false}","Switch to retrieve technology cost data from `technology-data repository <https://github.com/PyPSA/technology-data>`_."
|
||||||
build_cutout,bool,"{true, false}","Switch to enable the building of cutouts via the rule :mod:`build_cutout`."
|
build_cutout,bool,"{true, false}","Switch to enable the building of cutouts via the rule :mod:`build_cutout`."
|
||||||
|
retrieve_irena,bool,"{true, false}",Switch to enable the retrieval of ``existing_capacities`` from IRENASTAT with :mod:`retrieve_irena`.
|
||||||
retrieve_cutout,bool,"{true, false}","Switch to enable the retrieval of cutouts from zenodo with :mod:`retrieve_cutout`."
|
retrieve_cutout,bool,"{true, false}","Switch to enable the retrieval of cutouts from zenodo with :mod:`retrieve_cutout`."
|
||||||
build_natura_raster,bool,"{true, false}","Switch to enable the creation of the raster ``natura.tiff`` via the rule :mod:`build_natura_raster`."
|
build_natura_raster,bool,"{true, false}","Switch to enable the creation of the raster ``natura.tiff`` via the rule :mod:`build_natura_raster`."
|
||||||
retrieve_natura_raster,bool,"{true, false}","Switch to enable the retrieval of ``natura.tiff`` from zenodo with :mod:`retrieve_natura_raster`."
|
retrieve_natura_raster,bool,"{true, false}","Switch to enable the retrieval of ``natura.tiff`` from zenodo with :mod:`retrieve_natura_raster`."
|
||||||
|
|
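A sketch of the corresponding ``enable:`` section including the new ``retrieve_irena`` switch (boolean values are illustrative, not defaults):

.. code:: yaml

    enable:
      retrieve_databundle: true
      retrieve_sector_databundle: true
      retrieve_cost_data: true
      build_cutout: false
      retrieve_irena: false
      retrieve_cutout: true
      build_natura_raster: false
      retrieve_natura_raster: true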
@ -1,6 +1,8 @@
|
|||||||
,Unit,Values,Description
|
,Unit,Values,Description
|
||||||
cutout,--,"Must be 'europe-2013-era5'","Specifies the directory where the relevant weather data is stored."
|
cutout,--,Must be 'europe-2013-era5',Specifies the directory where the relevant weather data is stored.
|
||||||
carriers,--,"Any subset of {'ror', 'PHS', 'hydro'}","Specifies the types of hydro power plants to build per-unit availability time series for. 'ror' stands for run-of-river plants, 'PHS' represents pumped-hydro storage, and 'hydro' stands for hydroelectric dams."
|
carriers,--,"Any subset of {'ror', 'PHS', 'hydro'}","Specifies the types of hydro power plants to build per-unit availability time series for. 'ror' stands for run-of-river plants, 'PHS' represents pumped-hydro storage, and 'hydro' stands for hydroelectric dams."
|
||||||
PHS_max_hours,h,float,"Maximum state of charge capacity of the pumped-hydro storage (PHS) in terms of hours at full output capacity ``p_nom``. Cf. `PyPSA documentation <https://pypsa.readthedocs.io/en/latest/components.html#storage-unit>`_."
|
PHS_max_hours,h,float,Maximum state of charge capacity of the pumped-hydro storage (PHS) in terms of hours at full output capacity ``p_nom``. Cf. `PyPSA documentation <https://pypsa.readthedocs.io/en/latest/components.html#storage-unit>`_.
|
||||||
hydro_max_hours,h,"Any of {float, 'energy_capacity_totals_by_country', 'estimate_by_large_installations'}","Maximum state of charge capacity of the pumped-hydro storage (PHS) in terms of hours at full output capacity ``p_nom`` or heuristically determined. Cf. `PyPSA documentation <https://pypsa.readthedocs.io/en/latest/components.html#storage-unit>`_."
|
hydro_max_hours,h,"Any of {float, 'energy_capacity_totals_by_country', 'estimate_by_large_installations'}",Maximum state of charge capacity of the pumped-hydro storage (PHS) in terms of hours at full output capacity ``p_nom`` or heuristically determined. Cf. `PyPSA documentation <https://pypsa.readthedocs.io/en/latest/components.html#storage-unit>`_.
|
||||||
|
-- flatten_dispatch,bool,"{true, false}",Consider an upper limit for the hydro dispatch. The limit is given by the average capacity factor plus the buffer given in ``flatten_dispatch_buffer``.
|
||||||
|
flatten_dispatch_buffer,--,float,"If ``flatten_dispatch`` is true, specify the value added above the average capacity factor."
|
||||||
clip_min_inflow,MW,float,"To avoid too small values in the inflow time series, values below this threshold are set to zero."
|
clip_min_inflow,MW,float,"To avoid too small values in the inflow time series, values below this threshold are set to zero."
|
||||||
|
|
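For orientation, the hydro settings above might be combined as in the following sketch (values are illustrative and the placement under ``renewable: hydro:`` is an assumption):

.. code:: yaml

    renewable:
      hydro:
        cutout: europe-2013-era5
        carriers: [ror, PHS, hydro]
        PHS_max_hours: 6
        hydro_max_hours: energy_capacity_totals_by_country
        flatten_dispatch: false
        flatten_dispatch_buffer: 0.2
        clip_min_inflow: 1.0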
@ -5,7 +5,7 @@
|
|||||||
"naturalearth/*",,,,,http://www.naturalearthdata.com/about/terms-of-use/
|
"naturalearth/*",,,,,http://www.naturalearthdata.com/about/terms-of-use/
|
||||||
"NUTS_2013 _60M_SH/*","x","x",,"x",https://ec.europa.eu/eurostat/web/gisco/geodata/reference-data/administrative-units-statistical-units
|
"NUTS_2013 _60M_SH/*","x","x",,"x",https://ec.europa.eu/eurostat/web/gisco/geodata/reference-data/administrative-units-statistical-units
|
||||||
"cantons.csv","x",,"x",,https://en.wikipedia.org/wiki/Data_codes_for_Switzerland
|
"cantons.csv","x",,"x",,https://en.wikipedia.org/wiki/Data_codes_for_Switzerland
|
||||||
"EIA_hydro_generation _2000_2014.csv","x",,,,https://www.eia.gov/about/copyrights_reuse.php
|
"eia_hydro_annual_generation.csv","x",,,,https://www.eia.gov/about/copyrights_reuse.php
|
||||||
"GEBCO_2014_2D.nc","x",,,,https://www.gebco.net/data_and_products/gridded_bathymetry_data/documents/gebco_2014_historic.pdf
|
"GEBCO_2014_2D.nc","x",,,,https://www.gebco.net/data_and_products/gridded_bathymetry_data/documents/gebco_2014_historic.pdf
|
||||||
"hydro_capacities.csv","x",,,,
|
"hydro_capacities.csv","x",,,,
|
||||||
"je-e-21.03.02.xls","x","x",,,https://www.bfs.admin.ch/bfs/en/home/fso/swiss-federal-statistical-office/terms-of-use.html
|
"je-e-21.03.02.xls","x","x",,,https://www.bfs.admin.ch/bfs/en/home/fso/swiss-federal-statistical-office/terms-of-use.html
|
||||||
|
|
@ -5,3 +5,10 @@ s_nom_max,MW,"float","Global upper limit for the maximum capacity of each extend
|
|||||||
max_extension,MW,"float","Upper limit for the extended capacity of each extendable line."
|
max_extension,MW,"float","Upper limit for the extended capacity of each extendable line."
|
||||||
length_factor,--,float,"Correction factor to account for the fact that buses are *not* connected by lines through air-line distance."
|
length_factor,--,float,"Correction factor to account for the fact that buses are *not* connected by lines through air-line distance."
|
||||||
under_construction,--,"One of {'zero': set capacity to zero, 'remove': remove completely, 'keep': keep with full capacity}","Specifies how to handle lines which are currently under construction."
|
under_construction,--,"One of {'zero': set capacity to zero, 'remove': remove completely, 'keep': keep with full capacity}","Specifies how to handle lines which are currently under construction."
|
||||||
|
reconnect_crimea,--,"true or false","Whether to reconnect Crimea to the Ukrainian grid"
|
||||||
|
dynamic_line_rating,,,
|
||||||
|
-- activate,bool,"true or false","Whether to take dynamic line rating into account"
|
||||||
|
-- cutout,--,"Should be a folder listed in the configuration ``atlite: cutouts:`` (e.g. 'europe-2013-era5') or reference an existing folder in the directory ``cutouts``. Source module must be ERA5.","Specifies the directory where the relevant weather data is stored."
|
||||||
|
-- correction_factor,--,"float","Factor to compensate for overestimation of wind speeds in hourly averaged wind data"
|
||||||
|
-- max_voltage_difference,deg,"float","Maximum voltage angle difference in degrees or 'false' to disable"
|
||||||
|
-- max_line_rating,--,"float","Maximum line rating relative to nominal capacity without DLR, e.g. 1.3 or 'false' to disable"
|
||||||
|
|
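A sketch of the new line options in context (values are illustrative, not defaults):

.. code:: yaml

    lines:
      reconnect_crimea: true
      dynamic_line_rating:
        activate: false
        cutout: europe-2013-era5
        correction_factor: 0.95
        max_voltage_difference: false
        max_line_rating: false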
@ -3,6 +3,7 @@ Trigger, Description, Definition, Status
|
|||||||
``nSEG``; e.g. ``4380SEG``, "Apply time series segmentation with `tsam <https://tsam.readthedocs.io/en/latest/index.html>`_ package to ``n`` adjacent snapshots of varying lengths based on capacity factors of varying renewables, hydro inflow and load.", ``prepare_network``: apply_time_segmentation(), In active use
|
``nSEG``; e.g. ``4380SEG``, "Apply time series segmentation with `tsam <https://tsam.readthedocs.io/en/latest/index.html>`_ package to ``n`` adjacent snapshots of varying lengths based on capacity factors of varying renewables, hydro inflow and load.", ``prepare_network``: apply_time_segmentation(), In active use
|
||||||
``Co2L``, Add an overall absolute carbon-dioxide emissions limit configured in ``electricity: co2limit``. If a float is appended an overall emission limit relative to the emission level given in ``electricity: co2base`` is added (e.g. ``Co2L0.05`` limits emissions to 5% of what is given in ``electricity: co2base``), ``prepare_network``: `add_co2limit() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L19>`_ and its `caller <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L154>`__, In active use
|
``Co2L``, Add an overall absolute carbon-dioxide emissions limit configured in ``electricity: co2limit``. If a float is appended an overall emission limit relative to the emission level given in ``electricity: co2base`` is added (e.g. ``Co2L0.05`` limits emissions to 5% of what is given in ``electricity: co2base``), ``prepare_network``: `add_co2limit() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L19>`_ and its `caller <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L154>`__, In active use
|
||||||
``Ep``, Add cost for a carbon-dioxide price configured in ``costs: emission_prices: co2`` to ``marginal_cost`` of generators (other emission types listed in ``network.carriers`` possible as well), ``prepare_network``: `add_emission_prices() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L24>`_ and its `caller <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L158>`__, In active use
|
``Ep``, Add cost for a carbon-dioxide price configured in ``costs: emission_prices: co2`` to ``marginal_cost`` of generators (other emission types listed in ``network.carriers`` possible as well), ``prepare_network``: `add_emission_prices() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L24>`_ and its `caller <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L158>`__, In active use
|
||||||
|
``Ept``, Add monthly cost for a carbon-dioxide price based on historical values built by the rule ``build_monthly_prices``, In active use
|
||||||
``CCL``, Add minimum and maximum levels of generator nominal capacity per carrier for individual countries. These can be specified in the file linked at ``electricity: agg_p_nom_limits`` in the configuration. File defaults to ``data/agg_p_nom_minmax.csv``., ``solve_network``, In active use
|
``CCL``, Add minimum and maximum levels of generator nominal capacity per carrier for individual countries. These can be specified in the file linked at ``electricity: agg_p_nom_limits`` in the configuration. File defaults to ``data/agg_p_nom_minmax.csv``., ``solve_network``, In active use
|
||||||
``EQ``, "Require each country or node to on average produce a minimal share of its total consumption itself. Example: ``EQ0.5c`` demands each country to produce on average at least 50% of its consumption; ``EQ0.5`` demands each node to produce on average at least 50% of its consumption.", ``solve_network``, In active use
|
``EQ``, "Require each country or node to on average produce a minimal share of its total consumption itself. Example: ``EQ0.5c`` demands each country to produce on average at least 50% of its consumption; ``EQ0.5`` demands each node to produce on average at least 50% of its consumption.", ``solve_network``, In active use
|
||||||
``ATK``, "Require each node to be autarkic. Example: ``ATK`` removes all lines and links. ``ATKc`` removes all cross-border lines and links.", ``prepare_network``, In active use
|
``ATK``, "Require each node to be autarkic. Example: ``ATK`` removes all lines and links. ``ATKc`` removes all cross-border lines and links.", ``prepare_network``, In active use
|
||||||
|
|
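These triggers are concatenated in the ``opts`` wildcard of the ``scenario:`` section, for example (an illustrative combination, not a recommendation):

.. code:: yaml

    scenario:
      opts:
      - Co2L-24H          # absolute CO2 cap combined with 24-hourly time resolution
      - Co2L0.05-24H-Ept  # 5% of base emissions plus historical monthly CO2 prices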
@ -79,6 +79,7 @@ allam_cycle,--,"{true, false}",Add option to include `Allam cycle gas power plan
|
|||||||
hydrogen_fuel_cell,--,"{true, false}",Add option to include hydrogen fuel cell for re-electrification. Assuming OCGT technology costs
|
hydrogen_fuel_cell,--,"{true, false}",Add option to include hydrogen fuel cell for re-electrification. Assuming OCGT technology costs
|
||||||
hydrogen_turbine,--,"{true, false}",Add option to include hydrogen turbine for re-electrification. Assuming OCGT technology costs
|
hydrogen_turbine,--,"{true, false}",Add option to include hydrogen turbine for re-electrification. Assuming OCGT technology costs
|
||||||
SMR,--,"{true, false}",Add option for transforming natural gas into hydrogen and CO2 using Steam Methane Reforming (SMR)
|
SMR,--,"{true, false}",Add option for transforming natural gas into hydrogen and CO2 using Steam Methane Reforming (SMR)
|
||||||
|
SMR CC,--,"{true, false}",Add option for transforming natural gas into hydrogen and CO2 using Steam Methane Reforming (SMR) and Carbon Capture (CC)
|
||||||
regional_co2 _sequestration_potential,,,
|
regional_co2 _sequestration_potential,,,
|
||||||
-- enable,--,"{true, false}",Add option for regionally-resolved geological carbon dioxide sequestration potentials based on `CO2StoP <https://setis.ec.europa.eu/european-co2-storage-database_en>`_.
|
-- enable,--,"{true, false}",Add option for regionally-resolved geological carbon dioxide sequestration potentials based on `CO2StoP <https://setis.ec.europa.eu/european-co2-storage-database_en>`_.
|
||||||
-- attribute,--,string,Name of the attribute for the sequestration potential
|
-- attribute,--,string,Name of the attribute for the sequestration potential
|
||||||
|
|
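For instance, the re-electrification and SMR switches above are toggled in the ``sector:`` section roughly as follows (values are illustrative; the exact spelling of the ``SMR CC`` key should be taken from ``config.default.yaml``):

.. code:: yaml

    sector:
      hydrogen_fuel_cell: true
      hydrogen_turbine: false
      SMR: true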
@ -1,17 +1,19 @@
|
|||||||
,Unit,Values,Description
|
,Unit,Values,Description
|
||||||
options,,,
|
options,,,
|
||||||
|
-- clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables' per-unit availability time series, values below this threshold are set to zero."
|
||||||
-- load_shedding,bool/float,"{'true','false', float}","Add generators with very high marginal cost to simulate load shedding and avoid problem infeasibilities. If load shedding is a float, it denotes the marginal cost in EUR/kWh."
|
-- load_shedding,bool/float,"{'true','false', float}","Add generators with very high marginal cost to simulate load shedding and avoid problem infeasibilities. If load shedding is a float, it denotes the marginal cost in EUR/kWh."
|
||||||
-- transmission_losses,int,"[0-9]","Add piecewise linear approximation of transmission losses based on n tangents. Defaults to 0, which means losses are ignored."
|
|
||||||
-- noisy_costs,bool,"{'true','false'}","Add random noise to marginal cost of generators by :math:`\mathcal{U}(0.009,0.011)` and capital cost of lines and links by :math:`\mathcal{U}(0.09,0.11)`."
|
-- noisy_costs,bool,"{'true','false'}","Add random noise to marginal cost of generators by :math:`\mathcal{U}(0.009,0.011)` and capital cost of lines and links by :math:`\mathcal{U}(0.09,0.11)`."
|
||||||
-- min_iterations,--,int,"Minimum number of solving iterations in between which resistance and reactence (``x/r``) are updated for branches according to ``s_nom_opt`` of the previous run."
|
|
||||||
-- max_iterations,--,int,"Maximum number of solving iterations in between which resistance and reactence (``x/r``) are updated for branches according to ``s_nom_opt`` of the previous run."
|
|
||||||
-- nhours,--,int,"Specifies the :math:`n` first snapshots to take into account. Must be less than the total number of snapshots. Rather recommended only for debugging."
|
|
||||||
-- clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero."
|
|
||||||
-- skip_iterations,bool,"{'true','false'}","Skip iterating, do not update impedances of branches. Defaults to true."
|
-- skip_iterations,bool,"{'true','false'}","Skip iterating, do not update impedances of branches. Defaults to true."
|
||||||
-- track_iterations,bool,"{'true','false'}","Flag whether to store the intermediate branch capacities and objective function values are recorded for each iteration in ``network.lines['s_nom_opt_X']`` (where ``X`` labels the iteration)"
|
-- rolling_horizon,bool,"{'true','false'}","Whether to optimize the network in a rolling horizon manner, where the snapshot range is split into slices of size `horizon` which are solved consecutively."
|
||||||
-- seed,--,int,"Random seed for increased deterministic behaviour."
|
-- seed,--,int,Random seed for increased deterministic behaviour.
|
||||||
|
-- track_iterations,bool,"{'true','false'}",Flag whether to store the intermediate branch capacities and objective function values for each iteration in ``network.lines['s_nom_opt_X']`` (where ``X`` labels the iteration).
|
||||||
|
-- min_iterations,--,int,Minimum number of solving iterations in between which resistance and reactance (``x/r``) are updated for branches according to ``s_nom_opt`` of the previous run.
|
||||||
|
-- max_iterations,--,int,Maximum number of solving iterations in between which resistance and reactance (``x/r``) are updated for branches according to ``s_nom_opt`` of the previous run.
|
||||||
|
-- transmission_losses,int,[0-9],"Add piecewise linear approximation of transmission losses based on n tangents. Defaults to 0, which means losses are ignored."
|
||||||
|
-- linearized_unit_commitment,bool,"{'true','false'}",Whether to optimise using the linearized unit commitment formulation.
|
||||||
|
-- horizon,--,int,Number of snapshots to consider in each iteration. Defaults to 100.
|
||||||
solver,,,
|
solver,,,
|
||||||
-- name,--,"One of {'gurobi', 'cplex', 'cbc', 'glpk', 'ipopt'}; potentially more possible","Solver to use for optimisation problems in the workflow; e.g. clustering and linear optimal power flow."
|
-- name,--,"One of {'gurobi', 'cplex', 'cbc', 'glpk', 'ipopt'}; potentially more possible",Solver to use for optimisation problems in the workflow; e.g. clustering and linear optimal power flow.
|
||||||
-- options,--,"Key listed under ``solver_options``.","Link to specific parameter settings."
|
-- options,--,Key listed under ``solver_options``.,Link to specific parameter settings.
|
||||||
solver_options,,"dict","Dictionaries with solver-specific parameter settings."
|
solver_options,,dict,Dictionaries with solver-specific parameter settings.
|
||||||
mem,MB,"int","Estimated maximum memory requirement for solving networks."
|
mem,MB,int,Estimated maximum memory requirement for solving networks.
|
||||||
|
|
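Putting several of these options together, a configuration sketch might look like the following (values are illustrative, not recommendations; solver option keys depend on the local setup):

.. code:: yaml

    solving:
      options:
        clip_p_max_pu: 0.01
        load_shedding: false
        transmission_losses: 2
        rolling_horizon: false
        horizon: 100
        linearized_unit_commitment: true
        skip_iterations: true
        seed: 123
      solver:
        name: gurobi
        options: gurobi-default   # assumed to reference a key listed under solver_options
      mem: 30000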
@ -1,6 +1,12 @@
|
|||||||
,Unit,Values,Description
|
,Unit,Values,Description
|
||||||
version,--,0.x.x,"Version of PyPSA-Eur. Descriptive only."
|
version,--,0.x.x,Version of PyPSA-Eur. Descriptive only.
|
||||||
tutorial,bool,"{true, false}","Switch to retrieve the tutorial data set instead of the full data set."
|
tutorial,bool,"{true, false}",Switch to retrieve the tutorial data set instead of the full data set.
|
||||||
logging,,,
|
logging,,,
|
||||||
-- level,--,"Any of {'INFO', 'WARNING', 'ERROR'}","Restrict console outputs to all infos, warnings or errors only"
|
-- level,--,"Any of {'INFO', 'WARNING', 'ERROR'}","Restrict console outputs to all infos, warnings or errors only"
|
||||||
-- format,--,"","Custom format for log messages. See `LogRecord <https://docs.python.org/3/library/logging.html#logging.LogRecord>`_ attributes."
|
-- format,--,,Custom format for log messages. See `LogRecord <https://docs.python.org/3/library/logging.html#logging.LogRecord>`_ attributes.
|
||||||
|
private,,,
|
||||||
|
-- keys,,,
|
||||||
|
-- -- entsoe_api,--,,Optionally specify the ENTSO-E API key. See the guidelines to get `ENTSO-E API key <https://transparency.entsoe.eu/content/static_content/Static%20content/web%20api/Guide.html>`_
|
||||||
|
remote,,,
|
||||||
|
-- ssh,--,,Optionally specify the SSH host of a remote cluster to be synchronized.
|
||||||
|
-- path,--,,Optionally specify the file path within the remote cluster to be synchronized.
|
||||||
|
|
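A sketch of the new ``private:`` and ``remote:`` entries (the host alias and path are hypothetical placeholders):

.. code:: yaml

    private:
      keys:
        entsoe_api:                  # optionally add your ENTSO-E API key here
    remote:
      ssh: my-cluster                # hypothetical SSH host alias
      path: /home/user/pypsa-eur     # hypothetical remote directory to synchronize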
@ -16,12 +16,13 @@ PyPSA-Eur has several configuration options which are documented in this section
|
|||||||
Top-level configuration
|
Top-level configuration
|
||||||
=======================
|
=======================
|
||||||
|
|
||||||
|
"Private" refers to local, machine-specific settings or data meant for personal use, not to be shared. "Remote" indicates the address of a server used for data exchange, often for clusters and data pushing/pulling.
|
||||||
|
|
||||||
.. literalinclude:: ../config/config.default.yaml
|
.. literalinclude:: ../config/config.default.yaml
|
||||||
:language: yaml
|
:language: yaml
|
||||||
:start-at: version:
|
:start-at: version:
|
||||||
:end-before: # docs
|
:end-before: # docs
|
||||||
|
|
||||||
|
|
||||||
.. csv-table::
|
.. csv-table::
|
||||||
:header-rows: 1
|
:header-rows: 1
|
||||||
:widths: 22,7,22,33
|
:widths: 22,7,22,33
|
||||||
|
@ -41,10 +41,10 @@ Perfect foresight scenarios
|
|||||||
|
|
||||||
.. warning::
|
.. warning::
|
||||||
|
|
||||||
Perfect foresight is currently under development and not yet implemented.
|
Perfect foresight is currently implemented as a first test version.
|
||||||
|
|
||||||
For running perfect foresight scenarios, in future versions you will be able to
|
For running perfect foresight scenarios, you can adjust the
|
||||||
set in the ``config/config.yaml``:
|
``config/config.perfect.yaml``:
|
||||||
|
|
||||||
.. code:: yaml
|
.. code:: yaml
|
||||||
|
|
||||||
|
BIN doc/img/base.png (binary image updated, 1.6 MiB → 1.8 MiB; file not shown)
BIN (second binary image updated, 789 KiB → 1.2 MiB; file not shown)
@ -78,10 +78,10 @@ them:
|
|||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
You can find showcases of the model's capabilities in the Supplementary Materials of the
|
You can find showcases of the model's capabilities in the Supplementary Materials of the
|
||||||
preprint `Benefits of a Hydrogen Network in Europe
|
Joule paper `The potential role of a hydrogen network in Europe
|
||||||
<https://arxiv.org/abs/2207.05816>`_, the Supplementary Materials of the `paper in Joule with a
|
<https://doi.org/10.1016/j.joule.2023.06.016>`_, the Supplementary Materials of another `paper in Joule with a
|
||||||
description of the industry sector
|
description of the industry sector
|
||||||
<https://arxiv.org/abs/2109.09563>`_, or in `a 2021 presentation
|
<https://doi.org/10.1016/j.joule.2022.04.016>`_, or in `a 2021 presentation
|
||||||
at EMP-E <https://nworbmot.org/energy/brown-empe.pdf>`_.
|
at EMP-E <https://nworbmot.org/energy/brown-empe.pdf>`_.
|
||||||
The sector-coupled extension of PyPSA-Eur was
|
The sector-coupled extension of PyPSA-Eur was
|
||||||
initially described in the paper `Synergies of sector coupling and transmission
|
initially described in the paper `Synergies of sector coupling and transmission
|
||||||
@ -179,10 +179,13 @@ For sector-coupling studies: ::
|
|||||||
|
|
||||||
@misc{PyPSAEurSec,
|
@misc{PyPSAEurSec,
|
||||||
author = "Fabian Neumann and Elisabeth Zeyen and Marta Victoria and Tom Brown",
|
author = "Fabian Neumann and Elisabeth Zeyen and Marta Victoria and Tom Brown",
|
||||||
title = "The Potential Role of a Hydrogen Network in Europe",
|
title = "The potential role of a hydrogen network in Europe",
|
||||||
year = "2022",
|
journal "Joule",
|
||||||
|
volume = "7",
|
||||||
|
pages = "1--25"
|
||||||
|
year = "2023",
|
||||||
eprint = "2207.05816",
|
eprint = "2207.05816",
|
||||||
url = "https://arxiv.org/abs/2207.05816",
|
doi = "10.1016/j.joule.2023.06.016",
|
||||||
}
|
}
|
||||||
|
|
||||||
For sector-coupling studies with pathway optimisation: ::
|
For sector-coupling studies with pathway optimisation: ::
|
||||||
@ -277,6 +280,7 @@ The PyPSA-Eur workflow is continuously tested for Linux, macOS and Windows (WSL
|
|||||||
|
|
||||||
release_notes
|
release_notes
|
||||||
licenses
|
licenses
|
||||||
|
validation
|
||||||
limitations
|
limitations
|
||||||
contributing
|
contributing
|
||||||
support
|
support
|
||||||
|
@ -10,43 +10,179 @@ Release Notes
|
|||||||
Upcoming Release
|
Upcoming Release
|
||||||
================
|
================
|
||||||
|
|
||||||
* ``param:`` section in rule definition are added to track changed settings in ``config.yaml``. The goal is to automatically re-execute rules whose parameters have changed. See `Non-file parameters for rules <https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules>`_ in the snakemake documentation.
|
* Pin ``snakemake`` version to below 8.0.0, as the new version is not yet
|
||||||
|
supported by ``pypsa-eur``.
|
||||||
|
|
||||||
* **Important:** The configuration files are now located in the ``config`` directory. This counts for ``config.default.yaml``, ``config.yaml`` as well as the test configuration files which are now located in ``config/test``. Config files that are still in the root directory will be ignored.
|
* Updated Global Energy Monitor LNG terminal data to March 2023 version.
|
||||||
|
|
||||||
* Bugfix: Correct typo in the CPLEX solver configuration in ``config.default.yaml``.
|
* For industry distribution, use EPRTR as fallback if ETS data is not available.
|
||||||
|
|
||||||
* Bugfix: Error in ``add_electricity`` where carriers were added multiple times to the network, resulting in a non-unique carriers error.
|
* The minimum capacity for renewable generators when using the myopic option has been fixed.
|
||||||
|
|
||||||
* Renamed script file from PyPSA-EUR ``build_load_data`` to ``build_electricity_demand`` and ``retrieve_load_data`` to ``retrieve_electricity_demand``.
|
* Files downloaded from zenodo are now write-protected to prevent accidental re-download.
|
||||||
|
|
||||||
* Fix docs readthedocs built
|
* Files extracted from sector-coupled data bundle have been moved from ``data/`` to ``data/sector-bundle``.
|
||||||
|
|
||||||
|
* New feature: multi-decade optimisation with perfect foresight.
|
||||||
|
|
||||||
|
* It is now possible to specify years for biomass potentials which do not exist
|
||||||
|
in the JRC-ENSPRESO database, e.g. 2037. These are linearly interpolated.
|
||||||
|
|
||||||
|
* In pathway mode, the biomass potential is linked to the investment year.
|
||||||
|
|
||||||
|
* Rule ``purge`` now initiates a dialog to confirm if purge is desired.
|
||||||
|
|
||||||
|
* Rule ``retrieve_irena`` gets updated values for renewable capacities.
|
||||||
|
|
||||||
|
* Rule ``retrieve_wdpa`` updated to check dataset availability not only for the current and previous month, but also potentially the next month.
|
||||||
|
|
||||||
|
* Split configuration to enable SMR and SMR CC.
|
||||||
|
|
||||||
|
* The configuration setting for country focus weights when clustering the
|
||||||
|
network has been moved from ``focus_weights:`` to ``clustering:
|
||||||
|
focus_weights:``. Backwards compatibility to old config files is maintained.
|
||||||
|
|
||||||
|
* The ``mock_snakemake`` function can now be used with a Snakefile from a different directory using the new ``root_dir`` argument.
|
||||||
|
|
||||||
|
* Merged option to extend geographical scope to Ukraine and Moldova. These
|
||||||
|
countries are excluded by default and are currently constrained to the
|
||||||
|
power-sector-only parts of the workflow. A special config file
|
||||||
|
`config/config.entsoe-all.yaml` was added as an example to run the workflow
|
||||||
|
with all ENTSO-E member countries (including observer members like Ukraine and
|
||||||
|
Moldova). Moldova can currently only be included in conjunction with Ukraine
|
||||||
|
due to the absence of demand data. The Crimean power system is manually
|
||||||
|
reconnected to the main Ukrainian grid with the configuration option
|
||||||
|
`reconnect_crimea`.
|
||||||
|
|
||||||
|
|
||||||
|
**Bugs and Compatibility**
|
||||||
|
|
||||||
|
* A bug preventing the use of custom powerplants specified in ``data/custom_powerplants.csv`` was fixed. (https://github.com/PyPSA/pypsa-eur/pull/732)
|
||||||
|
* Fix nodal fraction in ``add_existing_year`` when using distributed generators.
|
||||||
|
* Fix typo in buses definition for oil boilers in ``add_industry`` in ``prepare_sector_network``.
|
||||||
|
|
||||||
|
|
||||||
|
PyPSA-Eur 0.8.1 (27th July 2023)
|
||||||
|
================================
|
||||||
|
|
||||||
|
**New Features**
|
||||||
|
|
||||||
|
* Add option to consider dynamic line rating based on wind speeds and
|
||||||
|
temperature according to `Glaum and Hofmann (2022)
|
||||||
|
<https://arxiv.org/abs/2208.04716>`_. See configuration section ``lines:
|
||||||
|
dynamic_line_rating:`` for more details. (https://github.com/PyPSA/pypsa-eur/pull/675)
|
||||||
|
|
||||||
|
* Add option to include a piecewise linear approximation of transmission losses,
|
||||||
|
e.g. by setting ``solving: options: transmission_losses: 2`` for an
|
||||||
|
approximation with two tangents. (https://github.com/PyPSA/pypsa-eur/pull/664)
|
||||||
|
|
||||||
* Add plain hydrogen turbine as additional re-electrification option besides
|
* Add plain hydrogen turbine as additional re-electrification option besides
|
||||||
hydrogen fuel cell. Add switches for both re-electrification options under
|
hydrogen fuel cell. Add switches for both re-electrification options under
|
||||||
``sector: hydrogen_turbine:`` and ``sector: hydrogen_fuel_cell:``.
|
``sector: hydrogen_turbine:`` and ``sector: hydrogen_fuel_cell:``.
|
||||||
|
(https://github.com/PyPSA/pypsa-eur/pull/647)
|
||||||
* A new function named ``sanitize_carrier`` ensures that all unique carrier names are present in the network's carriers attribute, and adds nice names and colors for each carrier according to the provided configuration dictionary.
|
|
||||||
|
|
||||||
* Additional tech_color are added to include previously unlisted carriers.
|
|
||||||
|
|
||||||
* Remove ``vresutils`` dependency.
|
|
||||||
|
|
||||||
* Added configuration option ``lines: max_extension:`` and ``links:
|
* Added configuration option ``lines: max_extension:`` and ``links:
|
||||||
max_extension:`` to control the maximum capacity addition per line or link in
|
max_extension:`` to control the maximum capacity addition per line or link in
|
||||||
MW.
|
MW. (https://github.com/PyPSA/pypsa-eur/pull/665)
|
||||||
|
|
||||||
* Add option to include a piecewise linear approximation of transmission losses,
|
* A ``param:`` section in the snakemake rule definitions was added to track
|
||||||
e.g. by setting ``solving: options: transmission_losses: 2`` for an
|
changed settings in ``config.yaml``. The goal is to automatically re-execute
|
||||||
approximation with two tangents.
|
rules where parameters have changed. See `Non-file parameters for rules
|
||||||
|
<https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules>`_
|
||||||
|
in the snakemake documentation. (https://github.com/PyPSA/pypsa-eur/pull/663)
|
||||||
|
|
||||||
|
* A new function named ``sanitize_carrier`` ensures that all unique carrier
|
||||||
|
names are present in the network's carriers attribute, and adds nice names and
|
||||||
|
colors for each carrier according to the provided configuration dictionary.
|
||||||
|
(https://github.com/PyPSA/pypsa-eur/pull/653,
|
||||||
|
https://github.com/PyPSA/pypsa-eur/pull/690)
|
||||||
|
|
||||||
|
* The configuration settings have been documented in more detail.
|
||||||
|
(https://github.com/PyPSA/pypsa-eur/pull/685)
|
||||||
|
|
||||||
|
**Breaking Changes**
|
||||||
|
|
||||||
|
* The configuration files are now located in the ``config`` directory. This
|
||||||
|
includes the ``config.default.yaml``, ``config.yaml`` as well as the test
|
||||||
|
configuration files which are now located in the ``config/test`` directory.
|
||||||
|
Config files that are still in the root directory will be ignored.
|
||||||
|
(https://github.com/PyPSA/pypsa-eur/pull/640)
|
||||||
|
|
||||||
|
* Renamed script and rule name from ``build_load_data`` to
|
||||||
|
``build_electricity_demand`` and ``retrieve_load_data`` to
|
||||||
|
``retrieve_electricity_demand``. (https://github.com/PyPSA/pypsa-eur/pull/642,
|
||||||
|
https://github.com/PyPSA/pypsa-eur/pull/652)
|
||||||
|
|
||||||
|
* Updated to new spatial clustering module introduced in PyPSA v0.25.
|
||||||
|
(https://github.com/PyPSA/pypsa-eur/pull/696)
|
||||||
|
|
||||||
|
**Changes**
|
||||||
|
|
||||||
* Handling networks with links with multiple inputs/outputs no longer requires
|
* Handling networks with links with multiple inputs/outputs no longer requires
|
||||||
overriding component attributes.
|
overriding component attributes.
|
||||||
|
(https://github.com/PyPSA/pypsa-eur/pull/695)
|
||||||
|
|
||||||
* Added configuration option ``enable: retrieve:`` to control whether data
|
* Added configuration option ``enable: retrieve:`` to control whether data
|
||||||
retrieval rules from snakemake are enabled or not. The default setting ``auto``
|
retrieval rules from snakemake are enabled or not. The default setting ``auto``
|
||||||
will automatically detect and enable/disable the rules based on internet connectivity.
|
will automatically detect and enable/disable the rules based on internet
|
||||||
|
connectivity. (https://github.com/PyPSA/pypsa-eur/pull/694)
|
||||||
|
|
||||||
|
* Update to ``technology-data`` v0.6.0.
|
||||||
|
(https://github.com/PyPSA/pypsa-eur/pull/704)
|
||||||
|
|
||||||
|
* Handle data bundle extraction paths via ``snakemake.output``.
|
||||||
|
|
||||||
|
* Additional technologies are added to ``tech_color`` in the configuration files
|
||||||
|
to include previously unlisted carriers.
|
||||||
|
|
||||||
|
* Doc: Added note that Windows is only tested in CI with WSL.
|
||||||
|
(https://github.com/PyPSA/pypsa-eur/issues/697)
|
||||||
|
|
||||||
|
* Doc: Add support section. (https://github.com/PyPSA/pypsa-eur/pull/656)
|
||||||
|
|
||||||
|
* Open ``rasterio`` files with ``rioxarray``.
|
||||||
|
(https://github.com/PyPSA/pypsa-eur/pull/474)
|
||||||
|
|
||||||
|
* Migrate CI to ``micromamba``. (https://github.com/PyPSA/pypsa-eur/pull/700)
|
||||||
|
|
||||||
|
**Bugs and Compatibility**
|
||||||
|
|
||||||
|
* The new minimum PyPSA version is v0.25.1.
|
||||||
|
|
||||||
|
* Removed ``vresutils`` dependency.
|
||||||
|
(https://github.com/PyPSA/pypsa-eur/pull/662)
|
||||||
|
|
||||||
|
* Adapt to new ``powerplantmatching`` version.
|
||||||
|
(https://github.com/PyPSA/pypsa-eur/pull/687,
|
||||||
|
https://github.com/PyPSA/pypsa-eur/pull/701)
|
||||||
|
|
||||||
|
* Bugfix: Correct typo in the CPLEX solver configuration in
|
||||||
|
``config.default.yaml``. (https://github.com/PyPSA/pypsa-eur/pull/630)
|
||||||
|
|
||||||
|
* Bugfix: Error in ``add_electricity`` where carriers were added multiple times
|
||||||
|
to the network, resulting in a non-unique carriers error.
|
||||||
|
|
||||||
|
* Bugfix of optional reserve constraint.
|
||||||
|
(https://github.com/PyPSA/pypsa-eur/pull/645)
|
||||||
|
|
||||||
|
* Fix broken equity constraints logic.
|
||||||
|
(https://github.com/PyPSA/pypsa-eur/pull/679)
|
||||||
|
|
||||||
|
* Fix addition of load shedding generators.
|
||||||
|
(https://github.com/PyPSA/pypsa-eur/pull/649)
|
||||||
|
|
||||||
|
* Fix automatic building of documentation on readthedocs.org.
|
||||||
|
(https://github.com/PyPSA/pypsa-eur/pull/658)
|
||||||
|
|
||||||
|
* Bugfix: Update network clustering to avoid adding deleted links in clustered
|
||||||
|
network. (https://github.com/PyPSA/pypsa-eur/pull/678)
|
||||||
|
|
||||||
|
* Address ``geopandas`` deprecations.
|
||||||
|
(https://github.com/PyPSA/pypsa-eur/pull/678)
|
||||||
|
|
||||||
|
* Fix bug with underground hydrogen storage creation, where for some small model
|
||||||
|
regions no cavern storage is available.
|
||||||
|
(https://github.com/PyPSA/pypsa-eur/pull/672)
|
||||||
|
|
||||||
|
|
||||||
* Addressed deprecation warnings for ``pandas=2.0``. ``pandas=2.0`` is now the minimum requirement.
|
* Addressed deprecation warnings for ``pandas=2.0``. ``pandas=2.0`` is now the minimum requirement.
|
||||||
|
@ -22,11 +22,11 @@ Rule ``retrieve_databundle``
|
|||||||
Rule ``retrieve_cutout``
|
Rule ``retrieve_cutout``
|
||||||
============================
|
============================
|
||||||
|
|
||||||
.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.3517949.svg
|
.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.6382570.svg
|
||||||
:target: https://doi.org/10.5281/zenodo.3517949
|
:target: https://doi.org/10.5281/zenodo.6382570
|
||||||
|
|
||||||
Cutouts are spatio-temporal subsets of the European weather data from the `ECMWF ERA5 <https://software.ecmwf.int/wiki/display/CKB/ERA5+data+documentation>`_ reanalysis dataset and the `CMSAF SARAH-2 <https://wui.cmsaf.eu/safira/action/viewDoiDetails?acronym=SARAH_V002>`_ solar surface radiation dataset for the year 2013.
|
Cutouts are spatio-temporal subsets of the European weather data from the `ECMWF ERA5 <https://software.ecmwf.int/wiki/display/CKB/ERA5+data+documentation>`_ reanalysis dataset and the `CMSAF SARAH-2 <https://wui.cmsaf.eu/safira/action/viewDoiDetails?acronym=SARAH_V002>`_ solar surface radiation dataset for the year 2013.
|
||||||
They have been prepared by and are for use with the `atlite <https://github.com/PyPSA/atlite>`_ tool. You can either generate them yourself using the ``build_cutouts`` rule or retrieve them directly from `zenodo <https://doi.org/10.5281/zenodo.3517949>`__ through the rule ``retrieve_cutout``.
|
They have been prepared by and are for use with the `atlite <https://github.com/PyPSA/atlite>`_ tool. You can either generate them yourself using the ``build_cutouts`` rule or retrieve them directly from `zenodo <https://doi.org/10.5281/zenodo.6382570>`__ through the rule ``retrieve_cutout``.
|
||||||
The :ref:`tutorial` uses a smaller cutout than required for the full model (30 MB), which is also automatically downloaded.
|
The :ref:`tutorial` uses a smaller cutout than required for the full model (30 MB), which is also automatically downloaded.
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
@ -83,7 +83,7 @@ This rule, as a substitute for :mod:`build_natura_raster`, downloads an already
|
|||||||
Rule ``retrieve_electricity_demand``
|
Rule ``retrieve_electricity_demand``
|
||||||
====================================
|
====================================
|
||||||
|
|
||||||
This rule downloads hourly electric load data for each country from the `OPSD platform <data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv>`_.
|
This rule downloads hourly electric load data for each country from the `OPSD platform <https://data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv>`_.
|
||||||
|
|
||||||
**Relevant Settings**
|
**Relevant Settings**
|
||||||
|
|
||||||
@ -91,7 +91,7 @@ None.
|
|||||||
|
|
||||||
**Outputs**
|
**Outputs**
|
||||||
|
|
||||||
- ``data/load_raw.csv``
|
- ``resources/load_raw.csv``
|
||||||
|
|
||||||
|
|
||||||
Rule ``retrieve_cost_data``
|
Rule ``retrieve_cost_data``
|
||||||
@ -118,6 +118,11 @@ This rule downloads techno-economic assumptions from the `technology-data reposi
|
|||||||
|
|
||||||
- ``resources/costs.csv``
|
- ``resources/costs.csv``
|
||||||
|
|
||||||
|
Rule ``retrieve_irena``
|
||||||
|
================================
|
||||||
|
|
||||||
|
.. automodule:: retrieve_irena
|
||||||
|
|
||||||
Rule ``retrieve_ship_raster``
|
Rule ``retrieve_ship_raster``
|
||||||
================================
|
================================
|
||||||
|
|
||||||
|
163
doc/tutorial.rst
163
doc/tutorial.rst
@ -25,7 +25,7 @@ full model, which allows the user to explore most of its functionalities on a
|
|||||||
local machine. The tutorial will cover examples on how to configure and
|
local machine. The tutorial will cover examples on how to configure and
|
||||||
customise the PyPSA-Eur model and run the ``snakemake`` workflow step by step
|
customise the PyPSA-Eur model and run the ``snakemake`` workflow step by step
|
||||||
from network creation to the solved network. The configuration for the tutorial
|
from network creation to the solved network. The configuration for the tutorial
|
||||||
is located at ``test/config.electricity.yaml``. It includes parts deviating from
|
is located at ``config/test/config.electricity.yaml``. It includes parts deviating from
|
||||||
the default config file ``config/config.default.yaml``. To run the tutorial with this
|
the default config file ``config/config.default.yaml``. To run the tutorial with this
|
||||||
configuration, execute
|
configuration, execute
|
||||||
|
|
||||||
@ -96,7 +96,7 @@ open-source solver GLPK.
|
|||||||
:start-at: solver:
|
:start-at: solver:
|
||||||
:end-before: plotting:
|
:end-before: plotting:
|
||||||
|
|
||||||
Note, that ``test/config.electricity.yaml`` only includes changes relative to
|
Note that ``config/test/config.electricity.yaml`` only includes changes relative to
|
||||||
the default configuration. There are many more configuration options, which are
|
the default configuration. There are many more configuration options, which are
|
||||||
documented at :ref:`config`.
|
documented at :ref:`config`.
|
||||||
|
|
||||||
@ -133,89 +133,82 @@ This triggers a workflow of multiple preceding jobs that depend on each rule's i
|
|||||||
graph[bgcolor=white, margin=0];
|
graph[bgcolor=white, margin=0];
|
||||||
node[shape=box, style=rounded, fontname=sans, fontsize=10, penwidth=2];
|
node[shape=box, style=rounded, fontname=sans, fontsize=10, penwidth=2];
|
||||||
edge[penwidth=2, color=grey];
|
edge[penwidth=2, color=grey];
|
||||||
0[label = "solve_network", color = "0.21 0.6 0.85", style="rounded"];
|
0[label = "solve_network", color = "0.33 0.6 0.85", style="rounded"];
|
||||||
1[label = "prepare_network\nll: copt\nopts: Co2L-24H", color = "0.02 0.6 0.85", style="rounded"];
|
1[label = "prepare_network\nll: copt\nopts: Co2L-24H", color = "0.03 0.6 0.85", style="rounded"];
|
||||||
2[label = "add_extra_components", color = "0.37 0.6 0.85", style="rounded"];
|
2[label = "add_extra_components", color = "0.45 0.6 0.85", style="rounded"];
|
||||||
3[label = "cluster_network\nclusters: 6", color = "0.39 0.6 0.85", style="rounded"];
|
3[label = "cluster_network\nclusters: 6", color = "0.46 0.6 0.85", style="rounded"];
|
||||||
4[label = "simplify_network\nsimpl: ", color = "0.11 0.6 0.85", style="rounded"];
|
4[label = "simplify_network\nsimpl: ", color = "0.52 0.6 0.85", style="rounded"];
|
||||||
5[label = "add_electricity", color = "0.23 0.6 0.85", style="rounded"];
|
5[label = "add_electricity", color = "0.55 0.6 0.85", style="rounded"];
|
||||||
6[label = "build_renewable_profiles\ntechnology: onwind", color = "0.57 0.6 0.85", style="rounded"];
|
6[label = "build_renewable_profiles\ntechnology: solar", color = "0.15 0.6 0.85", style="rounded"];
|
||||||
7[label = "base_network", color = "0.09 0.6 0.85", style="rounded"];
|
7[label = "base_network", color = "0.37 0.6 0.85", style="rounded,dashed"];
|
||||||
8[label = "build_shapes", color = "0.41 0.6 0.85", style="rounded"];
|
8[label = "build_shapes", color = "0.07 0.6 0.85", style="rounded,dashed"];
|
||||||
9[label = "retrieve_databundle", color = "0.28 0.6 0.85", style="rounded"];
|
9[label = "retrieve_databundle", color = "0.60 0.6 0.85", style="rounded"];
|
||||||
10[label = "retrieve_natura_raster", color = "0.62 0.6 0.85", style="rounded"];
|
10[label = "retrieve_natura_raster", color = "0.42 0.6 0.85", style="rounded"];
|
||||||
11[label = "build_bus_regions", color = "0.53 0.6 0.85", style="rounded"];
|
11[label = "build_bus_regions", color = "0.09 0.6 0.85", style="rounded,dashed"];
|
||||||
12[label = "retrieve_cutout\ncutout: europe-2013-era5", color = "0.05 0.6 0.85", style="rounded,dashed"];
|
12[label = "build_renewable_profiles\ntechnology: onwind", color = "0.15 0.6 0.85", style="rounded"];
|
||||||
13[label = "build_renewable_profiles\ntechnology: offwind-ac", color = "0.57 0.6 0.85", style="rounded"];
|
13[label = "build_renewable_profiles\ntechnology: offwind-ac", color = "0.15 0.6 0.85", style="rounded"];
|
||||||
14[label = "build_ship_raster", color = "0.64 0.6 0.85", style="rounded"];
|
14[label = "build_ship_raster", color = "0.02 0.6 0.85", style="rounded"];
|
||||||
15[label = "retrieve_ship_raster", color = "0.07 0.6 0.85", style="rounded,dashed"];
|
15[label = "retrieve_ship_raster", color = "0.40 0.6 0.85", style="rounded"];
|
||||||
16[label = "retrieve_cutout\ncutout: europe-2013-sarah", color = "0.05 0.6 0.85", style="rounded,dashed"];
|
16[label = "build_renewable_profiles\ntechnology: offwind-dc", color = "0.15 0.6 0.85", style="rounded"];
|
||||||
17[label = "build_renewable_profiles\ntechnology: offwind-dc", color = "0.57 0.6 0.85", style="rounded"];
|
17[label = "build_line_rating", color = "0.32 0.6 0.85", style="rounded"];
|
||||||
18[label = "build_renewable_profiles\ntechnology: solar", color = "0.57 0.6 0.85", style="rounded"];
|
18[label = "retrieve_cost_data\nyear: 2030", color = "0.50 0.6 0.85", style="rounded"];
|
||||||
19[label = "build_hydro_profile", color = "0.44 0.6 0.85", style="rounded"];
|
19[label = "build_powerplants", color = "0.64 0.6 0.85", style="rounded,dashed"];
|
||||||
20[label = "retrieve_cost_data", color = "0.30 0.6 0.85", style="rounded"];
|
20[label = "build_electricity_demand", color = "0.13 0.6 0.85", style="rounded,dashed"];
|
||||||
21[label = "build_powerplants", color = "0.16 0.6 0.85", style="rounded"];
|
21[label = "retrieve_electricity_demand", color = "0.31 0.6 0.85", style="rounded"];
|
||||||
22[label = "build_electricity_demand", color = "0.00 0.6 0.85", style="rounded"];
|
22[label = "copy_config", color = "0.23 0.6 0.85", style="rounded"];
|
||||||
23[label = "retrieve_electricity_demand", color = "0.34 0.6 0.85", style="rounded,dashed"];
|
1 -> 0
|
||||||
1 -> 0
|
22 -> 0
|
||||||
2 -> 1
|
2 -> 1
|
||||||
20 -> 1
|
18 -> 1
|
||||||
3 -> 2
|
3 -> 2
|
||||||
20 -> 2
|
18 -> 2
|
||||||
4 -> 3
|
4 -> 3
|
||||||
20 -> 3
|
18 -> 3
|
||||||
5 -> 4
|
5 -> 4
|
||||||
20 -> 4
|
18 -> 4
|
||||||
11 -> 4
|
11 -> 4
|
||||||
6 -> 5
|
6 -> 5
|
||||||
13 -> 5
|
12 -> 5
|
||||||
17 -> 5
|
13 -> 5
|
||||||
18 -> 5
|
16 -> 5
|
||||||
19 -> 5
|
7 -> 5
|
||||||
7 -> 5
|
17 -> 5
|
||||||
20 -> 5
|
18 -> 5
|
||||||
11 -> 5
|
11 -> 5
|
||||||
21 -> 5
|
19 -> 5
|
||||||
9 -> 5
|
9 -> 5
|
||||||
22 -> 5
|
20 -> 5
|
||||||
8 -> 5
|
8 -> 5
|
||||||
7 -> 6
|
7 -> 6
|
||||||
9 -> 6
|
9 -> 6
|
||||||
10 -> 6
|
10 -> 6
|
||||||
8 -> 6
|
8 -> 6
|
||||||
11 -> 6
|
11 -> 6
|
||||||
12 -> 6
|
8 -> 7
|
||||||
8 -> 7
|
9 -> 8
|
||||||
9 -> 8
|
8 -> 11
|
||||||
8 -> 11
|
7 -> 11
|
||||||
7 -> 11
|
7 -> 12
|
||||||
7 -> 13
|
9 -> 12
|
||||||
9 -> 13
|
10 -> 12
|
||||||
10 -> 13
|
8 -> 12
|
||||||
14 -> 13
|
11 -> 12
|
||||||
8 -> 13
|
7 -> 13
|
||||||
11 -> 13
|
9 -> 13
|
||||||
12 -> 13
|
10 -> 13
|
||||||
15 -> 14
|
14 -> 13
|
||||||
12 -> 14
|
8 -> 13
|
||||||
16 -> 14
|
11 -> 13
|
||||||
7 -> 17
|
15 -> 14
|
||||||
9 -> 17
|
7 -> 16
|
||||||
10 -> 17
|
9 -> 16
|
||||||
14 -> 17
|
10 -> 16
|
||||||
8 -> 17
|
14 -> 16
|
||||||
11 -> 17
|
8 -> 16
|
||||||
12 -> 17
|
11 -> 16
|
||||||
7 -> 18
|
7 -> 17
|
||||||
9 -> 18
|
7 -> 19
|
||||||
10 -> 18
|
21 -> 20
|
||||||
8 -> 18
|
|
||||||
11 -> 18
|
|
||||||
16 -> 18
|
|
||||||
8 -> 19
|
|
||||||
12 -> 19
|
|
||||||
7 -> 21
|
|
||||||
23 -> 22
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
|
||||||
|
@ -59,7 +59,7 @@ To run an overnight / greenfield scenario with the specifications above, run
|
|||||||
|
|
||||||
.. code:: bash
|
.. code:: bash
|
||||||
|
|
||||||
snakemake -call --configfile config/test/config.overnight.yaml all
|
snakemake -call all --configfile config/test/config.overnight.yaml
|
||||||
|
|
||||||
which will result in the following *additional* jobs ``snakemake`` wants to run
|
which will result in the following *additional* jobs ``snakemake`` wants to run
|
||||||
on top of those already included in the electricity-only tutorial:
|
on top of those already included in the electricity-only tutorial:
|
||||||
@ -318,7 +318,7 @@ To run a myopic foresight scenario with the specifications above, run
|
|||||||
|
|
||||||
.. code:: bash
|
.. code:: bash
|
||||||
|
|
||||||
snakemake -call --configfile config/test/config.myopic.yaml all
|
snakemake -call all --configfile config/test/config.myopic.yaml
|
||||||
|
|
||||||
which will result in the following *additional* jobs ``snakemake`` wants to run:
|
which will result in the following *additional* jobs ``snakemake`` wants to run:
|
||||||
|
|
||||||
|
53
doc/validation.rst
Normal file
@ -0,0 +1,53 @@
|
|||||||
..
  SPDX-FileCopyrightText: 2019-2023 The PyPSA-Eur Authors

  SPDX-License-Identifier: CC-BY-4.0

##########################################
Validation
##########################################

The PyPSA-Eur model workflow provides a built-in mechanism for validation. This allows users to contrast the outcomes of the network optimization against the historical behaviour of the European power system. The snakemake rule ``validate_elec_networks`` enables this by generating comparative figures for key quantities such as dispatch per carrier, cross-border flows, and market prices per price zone.

These comparisons use data for the year 2019 from the ENTSO-E Transparency Platform. To enable them, an ENTSO-E API key must be inserted into the ``config.yaml`` file. Detailed steps for this process can be found in the user guide `here <https://transparency.entsoe.eu/content/static_content/Static%20content/web%20api/Guide.html>`_.
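For orientation, retrieving such historical data with the ``entsoe-py`` package (which is part of the project environment) could look roughly like the following sketch; the bidding zone, time range and placeholder token are illustrative only and not taken from the workflow:

.. code:: python

    import pandas as pd
    from entsoe import EntsoePandasClient

    # placeholder token -- use the key stored in config.yaml instead
    client = EntsoePandasClient(api_key="YOUR-ENTSOE-API-KEY")

    start = pd.Timestamp("2019-01-01", tz="Europe/Brussels")
    end = pd.Timestamp("2020-01-01", tz="Europe/Brussels")

    # historical generation per production type for one bidding zone
    generation = client.query_generation("DE_LU", start=start, end=end)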
Once the API key is set, the validation workflow can be triggered by running the following command:

    snakemake validate_elec_networks --configfile config/config.validation.yaml -c8

The configuration file ``config/config.validation.yaml`` contains the following parameters:

.. literalinclude:: ../config/config.validation.yaml
   :language: yaml

The setup uses monthly varying fuel prices for gas, lignite, coal and oil as well as CO2 prices, which are created by the script ``build_monthly_prices``. Upon completion of the validation process, the resulting network and generated figures will be stored in the ``results/validation`` directory for further analysis.
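A minimal sketch of the underlying idea, broadcasting monthly prices onto hourly snapshots; the file name matches the ``build_monthly_prices`` output, while the snapshot range and the forward-fill choice are illustrative assumptions:

.. code:: python

    import pandas as pd

    # monthly fuel prices written by build_monthly_prices
    fuel_price = pd.read_csv(
        "resources/monthly_fuel_price.csv", index_col=0, parse_dates=True
    )

    # broadcast the monthly values onto hourly model snapshots via forward-fill
    snapshots = pd.date_range("2019-01-01", "2019-12-31 23:00", freq="h")
    hourly_fuel_price = fuel_price.reindex(snapshots, method="ffill")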
Results
=======

At the time of writing, the comparison with historical data shows partially accurate, partially improvable results. The following figures show the comparison of the dispatch of the different carriers.

.. image:: ../graphics/validation_seasonal_operation_area_elec_s_37_ec_lv1.0_Ept.png
   :width: 100%
   :align: center

.. image:: ../graphics/validation_production_bar_elec_s_37_ec_lv1.0_Ept.png
   :width: 100%
   :align: center


Issues and possible improvements
--------------------------------

**Overestimated dispatch of wind and solar:** Renewable potentials of wind and solar are slightly overestimated in the model. This leads to a higher dispatch of these carriers than in the historical data. In particular, the solar dispatch during winter is overestimated.

**Coal - Lignite fuel switch:** The model shows a fuel switch from coal to lignite. This might result from subsidies for lignite and coal that are not captured in the model. In order to fix the fuel switch from coal to lignite, a manual cost correction was added to the script ``build_monthly_prices``.

**Planned outages of nuclear power plants:** Planned outages of nuclear power plants are not captured in the model. This leads to an underestimated dispatch of nuclear power plants in winter and an overestimated dispatch in summer. This point is hard to fix, since planned outages are not published on the ENTSO-E Transparency Platform.

**False classification of run-of-river power plants:** Some run-of-river power plants are classified as hydro power plants in the model. This leads to a general overestimation of the hydro power dispatch. In particular, Swedish hydro power plants are overestimated.

**Load shedding:** Due to constrained NTCs (cross-border transfer capacities), the model has to shed load in some regions. This leads to high market prices in those regions, which drive up the average market price. Further fine-tuning of the NTCs is needed to avoid load shedding.
@ -12,74 +12,93 @@ dependencies:
|
|||||||
- _libgcc_mutex=0.1
|
- _libgcc_mutex=0.1
|
||||||
- _openmp_mutex=4.5
|
- _openmp_mutex=4.5
|
||||||
- affine=2.4.0
|
- affine=2.4.0
|
||||||
- alsa-lib=1.2.8
|
- alsa-lib=1.2.9
|
||||||
- ampl-mp=3.1.0
|
- ampl-mp=3.1.0
|
||||||
- amply=0.1.5
|
- amply=0.1.6
|
||||||
|
- anyio=3.7.1
|
||||||
- appdirs=1.4.4
|
- appdirs=1.4.4
|
||||||
|
- argon2-cffi=21.3.0
|
||||||
|
- argon2-cffi-bindings=21.2.0
|
||||||
- asttokens=2.2.1
|
- asttokens=2.2.1
|
||||||
- atlite=0.2.10
|
- async-lru=2.0.3
|
||||||
|
- atk-1.0=2.38.0
|
||||||
|
- atlite=0.2.11
|
||||||
- attr=2.5.1
|
- attr=2.5.1
|
||||||
- attrs=22.2.0
|
- attrs=23.1.0
|
||||||
|
- aws-c-auth=0.7.0
|
||||||
|
- aws-c-cal=0.6.0
|
||||||
|
- aws-c-common=0.8.23
|
||||||
|
- aws-c-compression=0.2.17
|
||||||
|
- aws-c-event-stream=0.3.1
|
||||||
|
- aws-c-http=0.7.11
|
||||||
|
- aws-c-io=0.13.28
|
||||||
|
- aws-c-mqtt=0.8.14
|
||||||
|
- aws-c-s3=0.3.13
|
||||||
|
- aws-c-sdkutils=0.1.11
|
||||||
|
- aws-checksums=0.1.16
|
||||||
|
- aws-crt-cpp=0.20.3
|
||||||
|
- aws-sdk-cpp=1.10.57
|
||||||
|
- babel=2.12.1
|
||||||
- backcall=0.2.0
|
- backcall=0.2.0
|
||||||
- backports=1.0
|
- backports=1.0
|
||||||
- backports.functools_lru_cache=1.6.4
|
- backports.functools_lru_cache=1.6.5
|
||||||
- beautifulsoup4=4.11.2
|
- beautifulsoup4=4.12.2
|
||||||
- blosc=1.21.3
|
- bleach=6.0.0
|
||||||
- bokeh=2.4.3
|
- blosc=1.21.4
|
||||||
|
- bokeh=3.2.1
|
||||||
- boost-cpp=1.78.0
|
- boost-cpp=1.78.0
|
||||||
- bottleneck=1.3.6
|
- bottleneck=1.3.7
|
||||||
- branca=0.6.0
|
- branca=0.6.0
|
||||||
- brotli=1.0.9
|
- brotli=1.0.9
|
||||||
- brotli-bin=1.0.9
|
- brotli-bin=1.0.9
|
||||||
- brotlipy=0.7.0
|
- brotli-python=1.0.9
|
||||||
- bzip2=1.0.8
|
- bzip2=1.0.8
|
||||||
- c-ares=1.18.1
|
- c-ares=1.19.1
|
||||||
- ca-certificates=2022.12.7
|
- c-blosc2=2.10.0
|
||||||
|
- ca-certificates=2023.7.22
|
||||||
- cairo=1.16.0
|
- cairo=1.16.0
|
||||||
- cartopy=0.21.1
|
- cartopy=0.21.1
|
||||||
- cdsapi=0.5.1
|
- cdsapi=0.6.1
|
||||||
- certifi=2022.12.7
|
- certifi=2023.7.22
|
||||||
- cffi=1.15.1
|
- cffi=1.15.1
|
||||||
- cfitsio=4.2.0
|
- cfitsio=4.2.0
|
||||||
- cftime=1.6.2
|
- cftime=1.6.2
|
||||||
- charset-normalizer=2.1.1
|
- charset-normalizer=3.2.0
|
||||||
- click=8.1.3
|
- click=8.1.6
|
||||||
- click-plugins=1.1.1
|
- click-plugins=1.1.1
|
||||||
- cligj=0.7.2
|
- cligj=0.7.2
|
||||||
- cloudpickle=2.2.1
|
- cloudpickle=2.2.1
|
||||||
- coin-or-cbc=2.10.8
|
|
||||||
- coin-or-cgl=0.60.6
|
|
||||||
- coin-or-clp=1.17.7
|
|
||||||
- coin-or-osi=0.108.7
|
|
||||||
- coin-or-utils=2.11.6
|
|
||||||
- coincbc=2.10.8
|
|
||||||
- colorama=0.4.6
|
- colorama=0.4.6
|
||||||
- configargparse=1.5.3
|
- comm=0.1.3
|
||||||
|
- configargparse=1.7
|
||||||
- connection_pool=0.0.3
|
- connection_pool=0.0.3
|
||||||
- country_converter=0.8.0
|
- contourpy=1.1.0
|
||||||
- cryptography=39.0.1
|
- country_converter=1.0.0
|
||||||
- curl=7.88.0
|
- curl=8.2.0
|
||||||
- cycler=0.11.0
|
- cycler=0.11.0
|
||||||
- cytoolz=0.12.0
|
- cytoolz=0.12.2
|
||||||
- dask=2023.2.0
|
- dask=2023.7.1
|
||||||
- dask-core=2023.2.0
|
- dask-core=2023.7.1
|
||||||
- datrie=0.8.2
|
- datrie=0.8.2
|
||||||
- dbus=1.13.6
|
- dbus=1.13.6
|
||||||
|
- debugpy=1.6.7
|
||||||
- decorator=5.1.1
|
- decorator=5.1.1
|
||||||
|
- defusedxml=0.7.1
|
||||||
- deprecation=2.1.0
|
- deprecation=2.1.0
|
||||||
- descartes=1.1.0
|
- descartes=1.1.0
|
||||||
- distributed=2023.2.0
|
- distributed=2023.7.1
|
||||||
- distro=1.8.0
|
- distro=1.8.0
|
||||||
- docutils=0.19
|
- docutils=0.20.1
|
||||||
- dpath=2.1.4
|
- dpath=2.1.6
|
||||||
- entsoe-py=0.5.8
|
- entrypoints=0.4
|
||||||
|
- entsoe-py=0.5.10
|
||||||
- et_xmlfile=1.1.0
|
- et_xmlfile=1.1.0
|
||||||
- exceptiongroup=1.1.0
|
- exceptiongroup=1.1.2
|
||||||
- executing=1.2.0
|
- executing=1.2.0
|
||||||
- expat=2.5.0
|
- expat=2.5.0
|
||||||
- fftw=3.3.10
|
- filelock=3.12.2
|
||||||
- filelock=3.9.0
|
- fiona=1.9.4
|
||||||
- fiona=1.9.1
|
- flit-core=3.9.0
|
||||||
- folium=0.14.0
|
- folium=0.14.0
|
||||||
- font-ttf-dejavu-sans-mono=2.37
|
- font-ttf-dejavu-sans-mono=2.37
|
||||||
- font-ttf-inconsolata=3.000
|
- font-ttf-inconsolata=3.000
|
||||||
@ -88,293 +107,366 @@ dependencies:
|
|||||||
- fontconfig=2.14.2
|
- fontconfig=2.14.2
|
||||||
- fonts-conda-ecosystem=1
|
- fonts-conda-ecosystem=1
|
||||||
- fonts-conda-forge=1
|
- fonts-conda-forge=1
|
||||||
- fonttools=4.38.0
|
- fonttools=4.41.1
|
||||||
- freetype=2.12.1
|
- freetype=2.12.1
|
||||||
- freexl=1.0.6
|
- freexl=1.0.6
|
||||||
- fsspec=2023.1.0
|
- fribidi=1.0.10
|
||||||
- gdal=3.6.2
|
- fsspec=2023.6.0
|
||||||
|
- gdal=3.7.0
|
||||||
|
- gdk-pixbuf=2.42.10
|
||||||
- geographiclib=1.52
|
- geographiclib=1.52
|
||||||
- geojson-rewind=1.0.2
|
- geojson-rewind=1.0.2
|
||||||
- geopandas=0.12.2
|
- geopandas=0.13.2
|
||||||
- geopandas-base=0.12.2
|
- geopandas-base=0.13.2
|
||||||
- geopy=2.3.0
|
- geopy=2.3.0
|
||||||
- geos=3.11.1
|
- geos=3.11.2
|
||||||
- geotiff=1.7.1
|
- geotiff=1.7.1
|
||||||
- gettext=0.21.1
|
- gettext=0.21.1
|
||||||
|
- gflags=2.2.2
|
||||||
- giflib=5.2.1
|
- giflib=5.2.1
|
||||||
- gitdb=4.0.10
|
- gitdb=4.0.10
|
||||||
- gitpython=3.1.30
|
- gitpython=3.1.32
|
||||||
- glib=2.74.1
|
- glib=2.76.4
|
||||||
- glib-tools=2.74.1
|
- glib-tools=2.76.4
|
||||||
|
- glog=0.6.0
|
||||||
|
- gmp=6.2.1
|
||||||
- graphite2=1.3.13
|
- graphite2=1.3.13
|
||||||
- gst-plugins-base=1.22.0
|
- graphviz=8.1.0
|
||||||
- gstreamer=1.22.0
|
- gst-plugins-base=1.22.5
|
||||||
- gstreamer-orc=0.4.33
|
- gstreamer=1.22.5
|
||||||
- harfbuzz=6.0.0
|
- gtk2=2.24.33
|
||||||
|
- gts=0.7.6
|
||||||
|
- harfbuzz=7.3.0
|
||||||
- hdf4=4.2.15
|
- hdf4=4.2.15
|
||||||
- hdf5=1.12.2
|
- hdf5=1.14.1
|
||||||
- heapdict=1.0.1
|
|
||||||
- humanfriendly=10.0
|
- humanfriendly=10.0
|
||||||
- icu=70.1
|
- icu=72.1
|
||||||
- idna=3.4
|
- idna=3.4
|
||||||
- importlib-metadata=6.0.0
|
- importlib-metadata=6.8.0
|
||||||
- importlib_resources=5.10.2
|
- importlib_metadata=6.8.0
|
||||||
|
- importlib_resources=6.0.0
|
||||||
- iniconfig=2.0.0
|
- iniconfig=2.0.0
|
||||||
- ipopt=3.14.11
|
- ipopt=3.14.12
|
||||||
- ipython=8.10.0
|
- ipykernel=6.24.0
|
||||||
- jack=1.9.22
|
- ipython=8.14.0
|
||||||
|
- ipython_genutils=0.2.0
|
||||||
|
- ipywidgets=8.0.7
|
||||||
- jedi=0.18.2
|
- jedi=0.18.2
|
||||||
- jinja2=3.1.2
|
- jinja2=3.1.2
|
||||||
- joblib=1.2.0
|
- joblib=1.3.0
|
||||||
- jpeg=9e
|
|
||||||
- json-c=0.16
|
- json-c=0.16
|
||||||
- jsonschema=4.17.3
|
- json5=0.9.14
|
||||||
- jupyter_core=5.2.0
|
- jsonschema=4.18.4
|
||||||
- kealib=1.5.0
|
- jsonschema-specifications=2023.7.1
|
||||||
|
- jupyter=1.0.0
|
||||||
|
- jupyter-lsp=2.2.0
|
||||||
|
- jupyter_client=8.3.0
|
||||||
|
- jupyter_console=6.6.3
|
||||||
|
- jupyter_core=5.3.1
|
||||||
|
- jupyter_events=0.6.3
|
||||||
|
- jupyter_server=2.7.0
|
||||||
|
- jupyter_server_terminals=0.4.4
|
||||||
|
- jupyterlab=4.0.3
|
||||||
|
- jupyterlab_pygments=0.2.2
|
||||||
|
- jupyterlab_server=2.24.0
|
||||||
|
- jupyterlab_widgets=3.0.8
|
||||||
|
- kealib=1.5.1
|
||||||
- keyutils=1.6.1
|
- keyutils=1.6.1
|
||||||
- kiwisolver=1.4.4
|
- kiwisolver=1.4.4
|
||||||
- krb5=1.20.1
|
- krb5=1.21.1
|
||||||
- lame=3.100
|
- lame=3.100
|
||||||
- lcms2=2.14
|
- lcms2=2.15
|
||||||
- ld_impl_linux-64=2.40
|
- ld_impl_linux-64=2.40
|
||||||
- lerc=4.0.0
|
- lerc=4.0.0
|
||||||
|
- libabseil=20230125.3
|
||||||
- libaec=1.0.6
|
- libaec=1.0.6
|
||||||
|
- libarchive=3.6.2
|
||||||
|
- libarrow=12.0.1
|
||||||
- libblas=3.9.0
|
- libblas=3.9.0
|
||||||
- libbrotlicommon=1.0.9
|
- libbrotlicommon=1.0.9
|
||||||
- libbrotlidec=1.0.9
|
- libbrotlidec=1.0.9
|
||||||
- libbrotlienc=1.0.9
|
- libbrotlienc=1.0.9
|
||||||
- libcap=2.66
|
- libcap=2.67
|
||||||
- libcblas=3.9.0
|
- libcblas=3.9.0
|
||||||
- libclang=15.0.7
|
- libclang=15.0.7
|
||||||
- libclang13=15.0.7
|
- libclang13=15.0.7
|
||||||
|
- libcrc32c=1.1.2
|
||||||
- libcups=2.3.3
|
- libcups=2.3.3
|
||||||
- libcurl=7.88.0
|
- libcurl=8.2.0
|
||||||
- libdb=6.2.32
|
- libdeflate=1.18
|
||||||
- libdeflate=1.17
|
|
||||||
- libedit=3.1.20191231
|
- libedit=3.1.20191231
|
||||||
- libev=4.33
|
- libev=4.33
|
||||||
- libevent=2.1.10
|
- libevent=2.1.12
|
||||||
|
- libexpat=2.5.0
|
||||||
- libffi=3.4.2
|
- libffi=3.4.2
|
||||||
- libflac=1.4.2
|
- libflac=1.4.3
|
||||||
- libgcc-ng=12.2.0
|
- libgcc-ng=13.1.0
|
||||||
- libgcrypt=1.10.1
|
- libgcrypt=1.10.1
|
||||||
- libgdal=3.6.2
|
- libgd=2.3.3
|
||||||
- libgfortran-ng=12.2.0
|
- libgdal=3.7.0
|
||||||
- libgfortran5=12.2.0
|
- libgfortran-ng=13.1.0
|
||||||
- libglib=2.74.1
|
- libgfortran5=13.1.0
|
||||||
- libgomp=12.2.0
|
- libglib=2.76.4
|
||||||
- libgpg-error=1.46
|
- libgomp=13.1.0
|
||||||
|
- libgoogle-cloud=2.12.0
|
||||||
|
- libgpg-error=1.47
|
||||||
|
- libgrpc=1.56.2
|
||||||
- libiconv=1.17
|
- libiconv=1.17
|
||||||
|
- libjpeg-turbo=2.1.5.1
|
||||||
- libkml=1.3.0
|
- libkml=1.3.0
|
||||||
- liblapack=3.9.0
|
- liblapack=3.9.0
|
||||||
- liblapacke=3.9.0
|
- liblapacke=3.9.0
|
||||||
- libllvm15=15.0.7
|
- libllvm15=15.0.7
|
||||||
- libnetcdf=4.8.1
|
- libnetcdf=4.9.2
|
||||||
- libnghttp2=1.51.0
|
- libnghttp2=1.52.0
|
||||||
- libnsl=2.0.0
|
- libnsl=2.0.0
|
||||||
|
- libnuma=2.0.16
|
||||||
- libogg=1.3.4
|
- libogg=1.3.4
|
||||||
- libopenblas=0.3.21
|
- libopenblas=0.3.23
|
||||||
- libopus=1.3.1
|
- libopus=1.3.1
|
||||||
- libpng=1.6.39
|
- libpng=1.6.39
|
||||||
- libpq=15.2
|
- libpq=15.3
|
||||||
|
- libprotobuf=4.23.3
|
||||||
|
- librsvg=2.56.1
|
||||||
- librttopo=1.1.0
|
- librttopo=1.1.0
|
||||||
- libsndfile=1.2.0
|
- libsndfile=1.2.0
|
||||||
|
- libsodium=1.0.18
|
||||||
- libspatialindex=1.9.3
|
- libspatialindex=1.9.3
|
||||||
- libspatialite=5.0.1
|
- libspatialite=5.0.1
|
||||||
- libsqlite=3.40.0
|
- libsqlite=3.42.0
|
||||||
- libssh2=1.10.0
|
- libssh2=1.11.0
|
||||||
- libstdcxx-ng=12.2.0
|
- libstdcxx-ng=13.1.0
|
||||||
- libsystemd0=252
|
- libsystemd0=253
|
||||||
- libtiff=4.5.0
|
- libthrift=0.18.1
|
||||||
|
- libtiff=4.5.1
|
||||||
- libtool=2.4.7
|
- libtool=2.4.7
|
||||||
- libudev1=252
|
- libutf8proc=2.8.0
|
||||||
- libuuid=2.32.1
|
- libuuid=2.38.1
|
||||||
- libvorbis=1.3.7
|
- libvorbis=1.3.7
|
||||||
- libwebp-base=1.2.4
|
- libwebp=1.3.1
|
||||||
- libxcb=1.13
|
- libwebp-base=1.3.1
|
||||||
|
- libxcb=1.15
|
||||||
- libxkbcommon=1.5.0
|
- libxkbcommon=1.5.0
|
||||||
- libxml2=2.10.3
|
- libxml2=2.11.4
|
||||||
- libxslt=1.1.37
|
- libxslt=1.1.37
|
||||||
- libzip=1.9.2
|
- libzip=1.9.2
|
||||||
- libzlib=1.2.13
|
- libzlib=1.2.13
|
||||||
- linopy=0.1.3
|
|
||||||
- locket=1.0.0
|
- locket=1.0.0
|
||||||
- lxml=4.9.2
|
- lxml=4.9.3
|
||||||
- lz4=4.3.2
|
- lz4=4.3.2
|
||||||
- lz4-c=1.9.4
|
- lz4-c=1.9.4
|
||||||
- lzo=2.10
|
- lzo=2.10
|
||||||
- mapclassify=2.5.0
|
- mapclassify=2.5.0
|
||||||
- markupsafe=2.1.2
|
- markupsafe=2.1.3
|
||||||
- matplotlib=3.5.3
|
- matplotlib=3.5.3
|
||||||
- matplotlib-base=3.5.3
|
- matplotlib-base=3.5.3
|
||||||
- matplotlib-inline=0.1.6
|
- matplotlib-inline=0.1.6
|
||||||
- memory_profiler=0.61.0
|
- memory_profiler=0.61.0
|
||||||
- metis=5.1.0
|
- metis=5.1.1
|
||||||
- mpg123=1.31.2
|
- mistune=3.0.0
|
||||||
- msgpack-python=1.0.4
|
- mpg123=1.31.3
|
||||||
|
- msgpack-python=1.0.5
|
||||||
- mumps-include=5.2.1
|
- mumps-include=5.2.1
|
||||||
- mumps-seq=5.2.1
|
- mumps-seq=5.2.1
|
||||||
- munch=2.5.0
|
- munch=4.0.0
|
||||||
- munkres=1.1.4
|
- munkres=1.1.4
|
||||||
- mysql-common=8.0.32
|
- mysql-common=8.0.33
|
||||||
- mysql-libs=8.0.32
|
- mysql-libs=8.0.33
|
||||||
- nbformat=5.7.3
|
- nbclient=0.8.0
|
||||||
- ncurses=6.3
|
- nbconvert=7.7.2
|
||||||
- netcdf4=1.6.2
|
- nbconvert-core=7.7.2
|
||||||
- networkx=3.0
|
- nbconvert-pandoc=7.7.2
|
||||||
|
- nbformat=5.9.1
|
||||||
|
- ncurses=6.4
|
||||||
|
- nest-asyncio=1.5.6
|
||||||
|
- netcdf4=1.6.4
|
||||||
|
- networkx=3.1
|
||||||
- nomkl=1.0
|
- nomkl=1.0
|
||||||
|
- notebook=7.0.0
|
||||||
|
- notebook-shim=0.2.3
|
||||||
- nspr=4.35
|
- nspr=4.35
|
||||||
- nss=3.88
|
- nss=3.89
|
||||||
- numexpr=2.8.3
|
- numexpr=2.8.4
|
||||||
- numpy=1.24
|
- numpy=1.25.1
|
||||||
- openjdk=17.0.3
|
- openjdk=17.0.3
|
||||||
- openjpeg=2.5.0
|
- openjpeg=2.5.0
|
||||||
- openpyxl=3.1.0
|
- openpyxl=3.1.2
|
||||||
- openssl=3.0.8
|
- openssl=3.1.1
|
||||||
- packaging=23.0
|
- orc=1.9.0
|
||||||
- pandas=1.5.3
|
- overrides=7.3.1
|
||||||
|
- packaging=23.1
|
||||||
|
- pandas=2.0.3
|
||||||
|
- pandoc=3.1.3
|
||||||
|
- pandocfilters=1.5.0
|
||||||
|
- pango=1.50.14
|
||||||
- parso=0.8.3
|
- parso=0.8.3
|
||||||
- partd=1.3.0
|
- partd=1.4.0
|
||||||
- patsy=0.5.3
|
- patsy=0.5.3
|
||||||
- pcre2=10.40
|
- pcre2=10.40
|
||||||
- pexpect=4.8.0
|
- pexpect=4.8.0
|
||||||
- pickleshare=0.7.5
|
- pickleshare=0.7.5
|
||||||
- pillow=9.4.0
|
- pillow=10.0.0
|
||||||
- pip=23.0
|
- pip=23.2.1
|
||||||
- pixman=0.40.0
|
- pixman=0.40.0
|
||||||
- pkgutil-resolve-name=1.3.10
|
- pkgutil-resolve-name=1.3.10
|
||||||
- plac=1.3.5
|
- plac=1.3.5
|
||||||
- platformdirs=3.0.0
|
- platformdirs=3.9.1
|
||||||
- pluggy=1.0.0
|
- pluggy=1.2.0
|
||||||
- ply=3.11
|
- ply=3.11
|
||||||
- pooch=1.6.0
|
- pooch=1.7.0
|
||||||
- poppler=22.12.0
|
- poppler=23.05.0
|
||||||
- poppler-data=0.4.12
|
- poppler-data=0.4.12
|
||||||
- postgresql=15.2
|
- postgresql=15.3
|
||||||
- powerplantmatching=0.5.6
|
- powerplantmatching=0.5.7
|
||||||
- progressbar2=4.2.0
|
- progressbar2=4.2.0
|
||||||
- proj=9.1.0
|
- proj=9.2.1
|
||||||
- prompt-toolkit=3.0.36
|
- prometheus_client=0.17.1
|
||||||
- psutil=5.9.4
|
- prompt-toolkit=3.0.39
|
||||||
|
- prompt_toolkit=3.0.39
|
||||||
|
- psutil=5.9.5
|
||||||
- pthread-stubs=0.4
|
- pthread-stubs=0.4
|
||||||
- ptyprocess=0.7.0
|
- ptyprocess=0.7.0
|
||||||
- pulp=2.7.0
|
- pulp=2.7.0
|
||||||
- pulseaudio=16.1
|
- pulseaudio-client=16.1
|
||||||
- pure_eval=0.2.2
|
- pure_eval=0.2.2
|
||||||
|
- py-cpuinfo=9.0.0
|
||||||
|
- pyarrow=12.0.1
|
||||||
- pycountry=22.3.5
|
- pycountry=22.3.5
|
||||||
- pycparser=2.21
|
- pycparser=2.21
|
||||||
- pygments=2.14.0
|
- pygments=2.15.1
|
||||||
- pyomo=6.4.4
|
- pyomo=6.6.1
|
||||||
- pyopenssl=23.0.0
|
- pyparsing=3.1.0
|
||||||
- pyparsing=3.0.9
|
- pyproj=3.6.0
|
||||||
- pyproj=3.4.1
|
|
||||||
- pypsa=0.22.1
|
|
||||||
- pyqt=5.15.7
|
- pyqt=5.15.7
|
||||||
- pyqt5-sip=12.11.0
|
- pyqt5-sip=12.11.0
|
||||||
- pyrsistent=0.19.3
|
|
||||||
- pyshp=2.3.1
|
- pyshp=2.3.1
|
||||||
- pysocks=1.7.1
|
- pysocks=1.7.1
|
||||||
- pytables=3.7.0
|
- pytables=3.8.0
|
||||||
- pytest=7.2.1
|
- pytest=7.4.0
|
||||||
- python=3.10.9
|
- python=3.10.12
|
||||||
- python-dateutil=2.8.2
|
- python-dateutil=2.8.2
|
||||||
- python-fastjsonschema=2.16.2
|
- python-fastjsonschema=2.18.0
|
||||||
- python-utils=3.5.2
|
- python-json-logger=2.0.7
|
||||||
|
- python-tzdata=2023.3
|
||||||
|
- python-utils=3.7.0
|
||||||
- python_abi=3.10
|
- python_abi=3.10
|
||||||
- pytz=2022.7.1
|
- pytz=2023.3
|
||||||
- pyxlsb=1.0.10
|
- pyxlsb=1.0.10
|
||||||
- pyyaml=6.0
|
- pyyaml=6.0
|
||||||
|
- pyzmq=25.1.0
|
||||||
- qt-main=5.15.8
|
- qt-main=5.15.8
|
||||||
- rasterio=1.3.4
|
- qtconsole=5.4.3
|
||||||
- readline=8.1.2
|
- qtconsole-base=5.4.3
|
||||||
- requests=2.28.1
|
- qtpy=2.3.1
|
||||||
- retry=0.9.2
|
- rasterio=1.3.8
|
||||||
- rich=12.5.1
|
- rdma-core=28.9
|
||||||
- rioxarray=0.13.3
|
- re2=2023.03.02
|
||||||
- rtree=1.0.0
|
- readline=8.2
|
||||||
- s2n=1.0.10
|
- referencing=0.30.0
|
||||||
- scikit-learn=1.1.1
|
- requests=2.31.0
|
||||||
- scipy=1.8.1
|
- reretry=0.11.8
|
||||||
|
- rfc3339-validator=0.1.4
|
||||||
|
- rfc3986-validator=0.1.1
|
||||||
|
- rioxarray=0.14.1
|
||||||
|
- rpds-py=0.9.2
|
||||||
|
- rtree=1.0.1
|
||||||
|
- s2n=1.3.46
|
||||||
|
- scikit-learn=1.3.0
|
||||||
|
- scipy=1.11.1
|
||||||
- scotch=6.0.9
|
- scotch=6.0.9
|
||||||
- seaborn=0.12.2
|
- seaborn=0.12.2
|
||||||
- seaborn-base=0.12.2
|
- seaborn-base=0.12.2
|
||||||
- setuptools=67.3.2
|
- send2trash=1.8.2
|
||||||
|
- setuptools=68.0.0
|
||||||
- setuptools-scm=7.1.0
|
- setuptools-scm=7.1.0
|
||||||
- setuptools_scm=7.1.0
|
- setuptools_scm=7.1.0
|
||||||
- shapely=2.0.1
|
- shapely=2.0.1
|
||||||
- sip=6.7.7
|
- sip=6.7.10
|
||||||
- six=1.16.0
|
- six=1.16.0
|
||||||
- smart_open=6.3.0
|
- smart_open=6.3.0
|
||||||
- smmap=3.0.5
|
- smmap=3.0.5
|
||||||
- snakemake-minimal=7.22.0
|
- snakemake-minimal=7.30.2
|
||||||
- snappy=1.1.9
|
- snappy=1.1.10
|
||||||
|
- sniffio=1.3.0
|
||||||
- snuggs=1.4.7
|
- snuggs=1.4.7
|
||||||
- sortedcontainers=2.4.0
|
- sortedcontainers=2.4.0
|
||||||
- soupsieve=2.3.2.post1
|
- soupsieve=2.3.2.post1
|
||||||
- sqlite=3.40.0
|
- sqlite=3.42.0
|
||||||
- stack_data=0.6.2
|
- stack_data=0.6.2
|
||||||
- statsmodels=0.13.5
|
- statsmodels=0.14.0
|
||||||
- stopit=1.1.2
|
- stopit=1.1.2
|
||||||
- tabula-py=2.6.0
|
- tabula-py=2.6.0
|
||||||
- tabulate=0.9.0
|
- tabulate=0.9.0
|
||||||
- tblib=1.7.0
|
- tblib=1.7.0
|
||||||
- threadpoolctl=3.1.0
|
- terminado=0.17.1
|
||||||
|
- threadpoolctl=3.2.0
|
||||||
- throttler=1.2.1
|
- throttler=1.2.1
|
||||||
- tiledb=2.13.2
|
- tiledb=2.13.2
|
||||||
|
- tinycss2=1.2.1
|
||||||
- tk=8.6.12
|
- tk=8.6.12
|
||||||
- toml=0.10.2
|
- toml=0.10.2
|
||||||
- tomli=2.0.1
|
- tomli=2.0.1
|
||||||
- toolz=0.12.0
|
- toolz=0.12.0
|
||||||
- toposort=1.9
|
- toposort=1.10
|
||||||
- tornado=6.2
|
- tornado=6.3.2
|
||||||
- tqdm=4.64.1
|
- tqdm=4.65.0
|
||||||
- traitlets=5.9.0
|
- traitlets=5.9.0
|
||||||
- typing-extensions=4.4.0
|
- typing-extensions=4.7.1
|
||||||
- typing_extensions=4.4.0
|
- typing_extensions=4.7.1
|
||||||
- tzcode=2022g
|
- typing_utils=0.1.0
|
||||||
- tzdata=2022g
|
- tzcode=2023c
|
||||||
|
- tzdata=2023c
|
||||||
|
- ucx=1.14.1
|
||||||
- unicodedata2=15.0.0
|
- unicodedata2=15.0.0
|
||||||
- unidecode=1.3.6
|
- unidecode=1.3.6
|
||||||
- unixodbc=2.3.10
|
- unixodbc=2.3.10
|
||||||
- urllib3=1.26.14
|
- urllib3=2.0.4
|
||||||
- wcwidth=0.2.6
|
- wcwidth=0.2.6
|
||||||
- wheel=0.38.4
|
- webencodings=0.5.1
|
||||||
- wrapt=1.14.1
|
- websocket-client=1.6.1
|
||||||
- xarray=2023.2.0
|
- wheel=0.41.0
|
||||||
|
- widgetsnbextension=4.0.8
|
||||||
|
- wrapt=1.15.0
|
||||||
|
- xarray=2023.7.0
|
||||||
- xcb-util=0.4.0
|
- xcb-util=0.4.0
|
||||||
- xcb-util-image=0.4.0
|
- xcb-util-image=0.4.0
|
||||||
- xcb-util-keysyms=0.4.0
|
- xcb-util-keysyms=0.4.0
|
||||||
- xcb-util-renderutil=0.3.9
|
- xcb-util-renderutil=0.3.9
|
||||||
- xcb-util-wm=0.4.1
|
- xcb-util-wm=0.4.1
|
||||||
- xerces-c=3.2.4
|
- xerces-c=3.2.4
|
||||||
|
- xkeyboard-config=2.39
|
||||||
- xlrd=2.0.1
|
- xlrd=2.0.1
|
||||||
- xorg-fixesproto=5.0
|
- xorg-fixesproto=5.0
|
||||||
- xorg-inputproto=2.3.2
|
- xorg-inputproto=2.3.2
|
||||||
- xorg-kbproto=1.0.7
|
- xorg-kbproto=1.0.7
|
||||||
- xorg-libice=1.0.10
|
- xorg-libice=1.1.1
|
||||||
- xorg-libsm=1.2.3
|
- xorg-libsm=1.2.4
|
||||||
- xorg-libx11=1.7.2
|
- xorg-libx11=1.8.6
|
||||||
- xorg-libxau=1.0.9
|
- xorg-libxau=1.0.11
|
||||||
- xorg-libxdmcp=1.1.3
|
- xorg-libxdmcp=1.1.3
|
||||||
- xorg-libxext=1.3.4
|
- xorg-libxext=1.3.4
|
||||||
- xorg-libxfixes=5.0.3
|
- xorg-libxfixes=5.0.3
|
||||||
- xorg-libxi=1.7.10
|
- xorg-libxi=1.7.10
|
||||||
- xorg-libxrender=0.9.10
|
- xorg-libxrender=0.9.11
|
||||||
- xorg-libxtst=1.2.3
|
- xorg-libxtst=1.2.3
|
||||||
- xorg-recordproto=1.14.2
|
- xorg-recordproto=1.14.2
|
||||||
- xorg-renderproto=0.11.1
|
- xorg-renderproto=0.11.1
|
||||||
- xorg-xextproto=7.3.0
|
- xorg-xextproto=7.3.0
|
||||||
|
- xorg-xf86vidmodeproto=2.3.1
|
||||||
- xorg-xproto=7.0.31
|
- xorg-xproto=7.0.31
|
||||||
- xyzservices=2022.9.0
|
- xyzservices=2023.7.0
|
||||||
- xz=5.2.6
|
- xz=5.2.6
|
||||||
- yaml=0.2.5
|
- yaml=0.2.5
|
||||||
- yte=1.5.1
|
- yte=1.5.1
|
||||||
- zict=2.2.0
|
- zeromq=4.3.4
|
||||||
- zipp=3.13.0
|
- zict=3.0.0
|
||||||
|
- zipp=3.16.2
|
||||||
- zlib=1.2.13
|
- zlib=1.2.13
|
||||||
|
- zlib-ng=2.0.7
|
||||||
- zstd=1.5.2
|
- zstd=1.5.2
|
||||||
- pip:
|
- pip:
|
||||||
- countrycode==0.2
|
- gurobipy==10.0.2
|
||||||
- highspy==1.5.0.dev0
|
- linopy==0.2.2
|
||||||
- pybind11==2.10.3
|
- pypsa==0.25.1
|
||||||
- tsam==2.2.2
|
- tsam==2.3.0
|
||||||
|
- validators==0.20.0
|
||||||
|
@ -11,6 +11,8 @@ dependencies:
|
|||||||
- pip
|
- pip
|
||||||
|
|
||||||
- atlite>=0.2.9
|
- atlite>=0.2.9
|
||||||
|
- pypsa>=0.26.0
|
||||||
|
- linopy
|
||||||
- dask
|
- dask
|
||||||
|
|
||||||
# Dependencies of the workflow itself
|
# Dependencies of the workflow itself
|
||||||
@ -18,23 +20,24 @@ dependencies:
|
|||||||
- openpyxl!=3.1.1
|
- openpyxl!=3.1.1
|
||||||
- pycountry
|
- pycountry
|
||||||
- seaborn
|
- seaborn
|
||||||
- snakemake-minimal>=7.7.0
|
# snakemake 8 introduced a number of breaking changes which the workflow has yet to be made compatible with
|
||||||
|
- snakemake-minimal>=7.7.0,<8.0.0
|
||||||
- memory_profiler
|
- memory_profiler
|
||||||
- yaml
|
- yaml
|
||||||
- pytables
|
- pytables
|
||||||
- lxml
|
- lxml
|
||||||
- powerplantmatching>=0.5.5
|
- powerplantmatching>=0.5.5
|
||||||
- numpy
|
- numpy
|
||||||
- pandas>=2.0
|
- pandas>=2.1
|
||||||
- geopandas>=0.11.0
|
- geopandas>=0.11.0
|
||||||
- xarray
|
- xarray>=2023.11.0
|
||||||
- rioxarray
|
- rioxarray
|
||||||
- netcdf4
|
- netcdf4
|
||||||
- networkx
|
- networkx
|
||||||
- scipy
|
- scipy
|
||||||
- shapely>=2.0
|
- shapely>=2.0
|
||||||
- pyomo
|
- pyomo
|
||||||
- matplotlib<3.6
|
- matplotlib
|
||||||
- proj
|
- proj
|
||||||
- fiona
|
- fiona
|
||||||
- country_converter
|
- country_converter
|
||||||
@ -44,6 +47,7 @@ dependencies:
|
|||||||
- tabula-py
|
- tabula-py
|
||||||
- pyxlsb
|
- pyxlsb
|
||||||
- graphviz
|
- graphviz
|
||||||
|
- ipopt
|
||||||
|
|
||||||
# Keep in conda environment when calling ipython
|
# Keep in conda environment when calling ipython
|
||||||
- ipython
|
- ipython
|
||||||
@ -53,6 +57,6 @@ dependencies:
|
|||||||
- descartes
|
- descartes
|
||||||
- rasterio!=1.2.10
|
- rasterio!=1.2.10
|
||||||
|
|
||||||
|
|
||||||
- pip:
|
- pip:
|
||||||
- tsam>=1.1.0
|
- tsam>=2.3.1
|
||||||
- git+https://github.com/PyPSA/PyPSA.git@master
|
|
||||||
|
BIN
graphics/validation_production_bar_elec_s_37_ec_lv1.0_Ept.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 99 KiB |
Binary file not shown.
After Width: | Height: | Size: 801 KiB |
@ -4,3 +4,4 @@
|
|||||||
font.family: sans-serif
|
font.family: sans-serif
|
||||||
font.sans-serif: Ubuntu, DejaVu Sans
|
font.sans-serif: Ubuntu, DejaVu Sans
|
||||||
image.cmap: viridis
|
image.cmap: viridis
|
||||||
|
figure.autolayout : True
|
||||||
|
@ -24,7 +24,7 @@ rule build_electricity_demand:
|
|||||||
countries=config["countries"],
|
countries=config["countries"],
|
||||||
load=config["load"],
|
load=config["load"],
|
||||||
input:
|
input:
|
||||||
ancient("data/load_raw.csv"),
|
ancient(RESOURCES + "load_raw.csv"),
|
||||||
output:
|
output:
|
||||||
RESOURCES + "load.csv",
|
RESOURCES + "load.csv",
|
||||||
log:
|
log:
|
||||||
@ -62,6 +62,9 @@ rule base_network:
|
|||||||
params:
|
params:
|
||||||
countries=config["countries"],
|
countries=config["countries"],
|
||||||
snapshots=config["snapshots"],
|
snapshots=config["snapshots"],
|
||||||
|
lines=config["lines"],
|
||||||
|
links=config["links"],
|
||||||
|
transformers=config["transformers"],
|
||||||
input:
|
input:
|
||||||
eg_buses="data/entsoegridkit/buses.csv",
|
eg_buses="data/entsoegridkit/buses.csv",
|
||||||
eg_lines="data/entsoegridkit/lines.csv",
|
eg_lines="data/entsoegridkit/lines.csv",
|
||||||
@ -203,10 +206,62 @@ rule build_ship_raster:
|
|||||||
"../scripts/build_ship_raster.py"
|
"../scripts/build_ship_raster.py"
|
||||||
rule determine_availability_matrix_MD_UA:
    input:
        copernicus=RESOURCES
        + "Copernicus_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif",
        wdpa=RESOURCES + f"WDPA.gpkg",
        wdpa_marine=RESOURCES + f"WDPA_WDOECM_marine.gpkg",
        gebco=lambda w: (
            "data/bundle/GEBCO_2014_2D.nc"
            if "max_depth" in config["renewable"][w.technology].keys()
            else []
        ),
        ship_density=lambda w: (
            RESOURCES + "shipdensity_raster.tif"
            if "ship_threshold" in config["renewable"][w.technology].keys()
            else []
        ),
        country_shapes=RESOURCES + "country_shapes.geojson",
        offshore_shapes=RESOURCES + "offshore_shapes.geojson",
        regions=lambda w: (
            RESOURCES + "regions_onshore.geojson"
            if w.technology in ("onwind", "solar")
            else RESOURCES + "regions_offshore.geojson"
        ),
        cutout=lambda w: "cutouts/"
        + CDIR
        + config["renewable"][w.technology]["cutout"]
        + ".nc",
    output:
        availability_matrix=RESOURCES + "availability_matrix_MD-UA_{technology}.nc",
        availability_map=RESOURCES + "availability_matrix_MD-UA_{technology}.png",
    log:
        LOGS + "determine_availability_matrix_MD_UA_{technology}.log",
    threads: ATLITE_NPROCESSES
    resources:
        mem_mb=ATLITE_NPROCESSES * 5000,
    conda:
        "../envs/environment.yaml"
    script:
        "../scripts/determine_availability_matrix_MD_UA.py"


# Optional input when having Ukraine (UA) or Moldova (MD) in the countries list
if {"UA", "MD"}.intersection(set(config["countries"])):
    opt = {
        "availability_matrix_MD_UA": RESOURCES
        + "availability_matrix_MD-UA_{technology}.nc"
    }
else:
    opt = {}

rule build_renewable_profiles:
|
rule build_renewable_profiles:
|
||||||
params:
|
params:
|
||||||
renewable=config["renewable"],
|
renewable=config["renewable"],
|
||||||
input:
|
input:
|
||||||
|
**opt,
|
||||||
base_network=RESOURCES + "networks/base.nc",
|
base_network=RESOURCES + "networks/base.nc",
|
||||||
corine=ancient("data/bundle/corine/g250_clc06_V18_5.tif"),
|
corine=ancient("data/bundle/corine/g250_clc06_V18_5.tif"),
|
||||||
natura=lambda w: (
|
natura=lambda w: (
|
||||||
@ -223,7 +278,7 @@ rule build_renewable_profiles:
|
|||||||
),
|
),
|
||||||
ship_density=lambda w: (
|
ship_density=lambda w: (
|
||||||
RESOURCES + "shipdensity_raster.tif"
|
RESOURCES + "shipdensity_raster.tif"
|
||||||
if "ship_threshold" in config["renewable"][w.technology].keys()
|
if config["renewable"][w.technology].get("ship_threshold", False)
|
||||||
else []
|
else []
|
||||||
),
|
),
|
||||||
country_shapes=RESOURCES + "country_shapes.geojson",
|
country_shapes=RESOURCES + "country_shapes.geojson",
|
||||||
@ -254,6 +309,24 @@ rule build_renewable_profiles:
|
|||||||
"../scripts/build_renewable_profiles.py"
|
"../scripts/build_renewable_profiles.py"
|
||||||
|
|
||||||
rule build_monthly_prices:
    input:
        co2_price_raw="data/validation/emission-spot-primary-market-auction-report-2019-data.xls",
        fuel_price_raw="data/validation/energy-price-trends-xlsx-5619002.xlsx",
    output:
        co2_price=RESOURCES + "co2_price.csv",
        fuel_price=RESOURCES + "monthly_fuel_price.csv",
    log:
        LOGS + "build_monthly_prices.log",
    threads: 1
    resources:
        mem_mb=5000,
    conda:
        "../envs/environment.yaml"
    script:
        "../scripts/build_monthly_prices.py"

|
|
||||||
rule build_hydro_profile:
|
rule build_hydro_profile:
|
||||||
params:
|
params:
|
||||||
hydro=config["renewable"]["hydro"],
|
hydro=config["renewable"]["hydro"],
|
||||||
@ -274,6 +347,30 @@ rule build_hydro_profile:
|
|||||||
"../scripts/build_hydro_profile.py"
|
"../scripts/build_hydro_profile.py"
|
||||||
|
|
||||||
|
|
||||||
if config["lines"]["dynamic_line_rating"]["activate"]:

    rule build_line_rating:
        input:
            base_network=RESOURCES + "networks/base.nc",
            cutout="cutouts/"
            + CDIR
            + config["lines"]["dynamic_line_rating"]["cutout"]
            + ".nc",
        output:
            output=RESOURCES + "networks/line_rating.nc",
        log:
            LOGS + "build_line_rating.log",
        benchmark:
            BENCHMARKS + "build_line_rating"
        threads: ATLITE_NPROCESSES
        resources:
            mem_mb=ATLITE_NPROCESSES * 1000,
        conda:
            "../envs/environment.yaml"
        script:
            "../scripts/build_line_rating.py"

|
|
||||||
|
|
||||||
rule add_electricity:
|
rule add_electricity:
|
||||||
params:
|
params:
|
||||||
length_factor=config["lines"]["length_factor"],
|
length_factor=config["lines"]["length_factor"],
|
||||||
@ -281,7 +378,7 @@ rule add_electricity:
|
|||||||
countries=config["countries"],
|
countries=config["countries"],
|
||||||
renewable=config["renewable"],
|
renewable=config["renewable"],
|
||||||
electricity=config["electricity"],
|
electricity=config["electricity"],
|
||||||
conventional=config.get("conventional", {}),
|
conventional=config["conventional"],
|
||||||
costs=config["costs"],
|
costs=config["costs"],
|
||||||
input:
|
input:
|
||||||
**{
|
**{
|
||||||
@ -291,17 +388,26 @@ rule add_electricity:
|
|||||||
**{
|
**{
|
||||||
f"conventional_{carrier}_{attr}": fn
|
f"conventional_{carrier}_{attr}": fn
|
||||||
for carrier, d in config.get("conventional", {None: {}}).items()
|
for carrier, d in config.get("conventional", {None: {}}).items()
|
||||||
|
if carrier in config["electricity"]["conventional_carriers"]
|
||||||
for attr, fn in d.items()
|
for attr, fn in d.items()
|
||||||
if str(fn).startswith("data/")
|
if str(fn).startswith("data/")
|
||||||
},
|
},
|
||||||
base_network=RESOURCES + "networks/base.nc",
|
base_network=RESOURCES + "networks/base.nc",
|
||||||
|
line_rating=RESOURCES + "networks/line_rating.nc"
|
||||||
|
if config["lines"]["dynamic_line_rating"]["activate"]
|
||||||
|
else RESOURCES + "networks/base.nc",
|
||||||
tech_costs=COSTS,
|
tech_costs=COSTS,
|
||||||
regions=RESOURCES + "regions_onshore.geojson",
|
regions=RESOURCES + "regions_onshore.geojson",
|
||||||
powerplants=RESOURCES + "powerplants.csv",
|
powerplants=RESOURCES + "powerplants.csv",
|
||||||
hydro_capacities=ancient("data/bundle/hydro_capacities.csv"),
|
hydro_capacities=ancient("data/bundle/hydro_capacities.csv"),
|
||||||
geth_hydro_capacities="data/geth2015_hydro_capacities.csv",
|
geth_hydro_capacities="data/geth2015_hydro_capacities.csv",
|
||||||
|
unit_commitment="data/unit_commitment.csv",
|
||||||
|
fuel_price=RESOURCES + "monthly_fuel_price.csv"
|
||||||
|
if config["conventional"]["dynamic_fuel_price"]
|
||||||
|
else [],
|
||||||
load=RESOURCES + "load.csv",
|
load=RESOURCES + "load.csv",
|
||||||
nuts3_shapes=RESOURCES + "nuts3_shapes.geojson",
|
nuts3_shapes=RESOURCES + "nuts3_shapes.geojson",
|
||||||
|
ua_md_gdp="data/GDP_PPP_30arcsec_v3_mapped_default.csv",
|
||||||
output:
|
output:
|
||||||
RESOURCES + "networks/elec.nc",
|
RESOURCES + "networks/elec.nc",
|
||||||
log:
|
log:
|
||||||
@ -310,7 +416,7 @@ rule add_electricity:
|
|||||||
BENCHMARKS + "add_electricity"
|
BENCHMARKS + "add_electricity"
|
||||||
threads: 1
|
threads: 1
|
||||||
resources:
|
resources:
|
||||||
mem_mb=5000,
|
mem_mb=10000,
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -321,7 +427,9 @@ rule simplify_network:
|
|||||||
params:
|
params:
|
||||||
simplify_network=config["clustering"]["simplify_network"],
|
simplify_network=config["clustering"]["simplify_network"],
|
||||||
aggregation_strategies=config["clustering"].get("aggregation_strategies", {}),
|
aggregation_strategies=config["clustering"].get("aggregation_strategies", {}),
|
||||||
focus_weights=config.get("focus_weights", None),
|
focus_weights=config["clustering"].get(
|
||||||
|
"focus_weights", config.get("focus_weights")
|
||||||
|
),
|
||||||
renewable_carriers=config["electricity"]["renewable_carriers"],
|
renewable_carriers=config["electricity"]["renewable_carriers"],
|
||||||
max_hours=config["electricity"]["max_hours"],
|
max_hours=config["electricity"]["max_hours"],
|
||||||
length_factor=config["lines"]["length_factor"],
|
length_factor=config["lines"]["length_factor"],
|
||||||
@ -344,7 +452,7 @@ rule simplify_network:
|
|||||||
BENCHMARKS + "simplify_network/elec_s{simpl}"
|
BENCHMARKS + "simplify_network/elec_s{simpl}"
|
||||||
threads: 1
|
threads: 1
|
||||||
resources:
|
resources:
|
||||||
mem_mb=4000,
|
mem_mb=12000,
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -356,7 +464,9 @@ rule cluster_network:
|
|||||||
cluster_network=config["clustering"]["cluster_network"],
|
cluster_network=config["clustering"]["cluster_network"],
|
||||||
aggregation_strategies=config["clustering"].get("aggregation_strategies", {}),
|
aggregation_strategies=config["clustering"].get("aggregation_strategies", {}),
|
||||||
custom_busmap=config["enable"].get("custom_busmap", False),
|
custom_busmap=config["enable"].get("custom_busmap", False),
|
||||||
focus_weights=config.get("focus_weights", None),
|
focus_weights=config["clustering"].get(
|
||||||
|
"focus_weights", config.get("focus_weights")
|
||||||
|
),
|
||||||
renewable_carriers=config["electricity"]["renewable_carriers"],
|
renewable_carriers=config["electricity"]["renewable_carriers"],
|
||||||
conventional_carriers=config["electricity"].get("conventional_carriers", []),
|
conventional_carriers=config["electricity"].get("conventional_carriers", []),
|
||||||
max_hours=config["electricity"]["max_hours"],
|
max_hours=config["electricity"]["max_hours"],
|
||||||
@ -385,7 +495,7 @@ rule cluster_network:
|
|||||||
BENCHMARKS + "cluster_network/elec_s{simpl}_{clusters}"
|
BENCHMARKS + "cluster_network/elec_s{simpl}_{clusters}"
|
||||||
threads: 1
|
threads: 1
|
||||||
resources:
|
resources:
|
||||||
mem_mb=6000,
|
mem_mb=10000,
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -408,7 +518,7 @@ rule add_extra_components:
|
|||||||
BENCHMARKS + "add_extra_components/elec_s{simpl}_{clusters}_ec"
|
BENCHMARKS + "add_extra_components/elec_s{simpl}_{clusters}_ec"
|
||||||
threads: 1
|
threads: 1
|
||||||
resources:
|
resources:
|
||||||
mem_mb=3000,
|
mem_mb=4000,
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
@ -427,6 +537,7 @@ rule prepare_network:
|
|||||||
input:
|
input:
|
||||||
RESOURCES + "networks/elec_s{simpl}_{clusters}_ec.nc",
|
RESOURCES + "networks/elec_s{simpl}_{clusters}_ec.nc",
|
||||||
tech_costs=COSTS,
|
tech_costs=COSTS,
|
||||||
|
co2_price=lambda w: RESOURCES + "co2_price.csv" if "Ept" in w.opts else [],
|
||||||
output:
|
output:
|
||||||
RESOURCES + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
|
RESOURCES + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
|
||||||
log:
|
log:
|
||||||
|
@ -86,7 +86,7 @@ if config["sector"]["gas_network"] or config["sector"]["H2_retrofit"]:
|
|||||||
rule build_gas_input_locations:
|
rule build_gas_input_locations:
|
||||||
input:
|
input:
|
||||||
lng=HTTP.remote(
|
lng=HTTP.remote(
|
||||||
"https://globalenergymonitor.org/wp-content/uploads/2022/09/Europe-Gas-Tracker-August-2022.xlsx",
|
"https://globalenergymonitor.org/wp-content/uploads/2023/07/Europe-Gas-Tracker-2023-03-v3.xlsx",
|
||||||
keep_local=True,
|
keep_local=True,
|
||||||
),
|
),
|
||||||
entry="data/gas_network/scigrid-gas/data/IGGIELGN_BorderPoints.geojson",
|
entry="data/gas_network/scigrid-gas/data/IGGIELGN_BorderPoints.geojson",
|
||||||
@ -242,9 +242,9 @@ rule build_energy_totals:
|
|||||||
energy=config["energy"],
|
energy=config["energy"],
|
||||||
input:
|
input:
|
||||||
nuts3_shapes=RESOURCES + "nuts3_shapes.geojson",
|
nuts3_shapes=RESOURCES + "nuts3_shapes.geojson",
|
||||||
co2="data/eea/UNFCCC_v23.csv",
|
co2="data/bundle-sector/eea/UNFCCC_v23.csv",
|
||||||
swiss="data/switzerland-sfoe/switzerland-new_format.csv",
|
swiss="data/bundle-sector/switzerland-sfoe/switzerland-new_format.csv",
|
||||||
idees="data/jrc-idees-2015",
|
idees="data/bundle-sector/jrc-idees-2015",
|
||||||
district_heat_share="data/district_heat_share.csv",
|
district_heat_share="data/district_heat_share.csv",
|
||||||
eurostat=input_eurostat,
|
eurostat=input_eurostat,
|
||||||
output:
|
output:
|
||||||
@ -269,10 +269,10 @@ rule build_biomass_potentials:
|
|||||||
biomass=config["biomass"],
|
biomass=config["biomass"],
|
||||||
input:
|
input:
|
||||||
enspreso_biomass=HTTP.remote(
|
enspreso_biomass=HTTP.remote(
|
||||||
"https://cidportal.jrc.ec.europa.eu/ftp/jrc-opendata/ENSPRESO/ENSPRESO_BIOMASS.xlsx",
|
"https://zenodo.org/records/10356004/files/ENSPRESO_BIOMASS.xlsx",
|
||||||
keep_local=True,
|
keep_local=True,
|
||||||
),
|
),
|
||||||
nuts2="data/nuts/NUTS_RG_10M_2013_4326_LEVL_2.geojson", # https://gisco-services.ec.europa.eu/distribution/v2/nuts/download/#nuts21
|
nuts2="data/bundle-sector/nuts/NUTS_RG_10M_2013_4326_LEVL_2.geojson", # https://gisco-services.ec.europa.eu/distribution/v2/nuts/download/#nuts21
|
||||||
regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
|
regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
|
||||||
nuts3_population=ancient("data/bundle/nama_10r_3popgdp.tsv.gz"),
|
nuts3_population=ancient("data/bundle/nama_10r_3popgdp.tsv.gz"),
|
||||||
swiss_cantons=ancient("data/bundle/ch_cantons.csv"),
|
swiss_cantons=ancient("data/bundle/ch_cantons.csv"),
|
||||||
@ -280,22 +280,23 @@ rule build_biomass_potentials:
|
|||||||
country_shapes=RESOURCES + "country_shapes.geojson",
|
country_shapes=RESOURCES + "country_shapes.geojson",
|
||||||
output:
|
output:
|
||||||
biomass_potentials_all=RESOURCES
|
biomass_potentials_all=RESOURCES
|
||||||
+ "biomass_potentials_all_s{simpl}_{clusters}.csv",
|
+ "biomass_potentials_all_s{simpl}_{clusters}_{planning_horizons}.csv",
|
||||||
biomass_potentials=RESOURCES + "biomass_potentials_s{simpl}_{clusters}.csv",
|
biomass_potentials=RESOURCES
|
||||||
|
+ "biomass_potentials_s{simpl}_{clusters}_{planning_horizons}.csv",
|
||||||
threads: 1
|
threads: 1
|
||||||
resources:
|
resources:
|
||||||
mem_mb=1000,
|
mem_mb=1000,
|
||||||
log:
|
log:
|
||||||
LOGS + "build_biomass_potentials_s{simpl}_{clusters}.log",
|
LOGS + "build_biomass_potentials_s{simpl}_{clusters}_{planning_horizons}.log",
|
||||||
benchmark:
|
benchmark:
|
||||||
BENCHMARKS + "build_biomass_potentials_s{simpl}_{clusters}"
|
BENCHMARKS + "build_biomass_potentials_s{simpl}_{clusters}_{planning_horizons}"
|
||||||
conda:
|
conda:
|
||||||
"../envs/environment.yaml"
|
"../envs/environment.yaml"
|
||||||
script:
|
script:
|
||||||
"../scripts/build_biomass_potentials.py"
|
"../scripts/build_biomass_potentials.py"
|
||||||
|
|
||||||
|
|
||||||
if config["sector"]["biomass_transport"]:
|
if config["sector"]["biomass_transport"] or config["sector"]["biomass_spatial"]:
|
||||||
|
|
||||||
rule build_biomass_transport_costs:
|
rule build_biomass_transport_costs:
|
||||||
input:
|
input:
|
||||||
@ -320,9 +321,8 @@ if config["sector"]["biomass_transport"]:
|
|||||||
build_biomass_transport_costs_output = rules.build_biomass_transport_costs.output
|
build_biomass_transport_costs_output = rules.build_biomass_transport_costs.output
|
||||||
|
|
||||||
|
|
||||||
if not config["sector"]["biomass_transport"]:
|
if not (config["sector"]["biomass_transport"] or config["sector"]["biomass_spatial"]):
|
||||||
# this is effectively an `else` statement which is however not liked by snakefmt
|
# this is effectively an `else` statement which is however not liked by snakefmt
|
||||||
|
|
||||||
build_biomass_transport_costs_output = {}
|
build_biomass_transport_costs_output = {}
|
||||||
|
|
||||||
|
|
||||||
@ -367,7 +367,7 @@ if not config["sector"]["regional_co2_sequestration_potential"]["enable"]:
|
|||||||
|
|
||||||
rule build_salt_cavern_potentials:
|
rule build_salt_cavern_potentials:
|
||||||
input:
|
input:
|
||||||
salt_caverns="data/h2_salt_caverns_GWh_per_sqkm.geojson",
|
salt_caverns="data/bundle-sector/h2_salt_caverns_GWh_per_sqkm.geojson",
|
||||||
regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
|
regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
|
||||||
regions_offshore=RESOURCES + "regions_offshore_elec_s{simpl}_{clusters}.geojson",
|
regions_offshore=RESOURCES + "regions_offshore_elec_s{simpl}_{clusters}.geojson",
|
||||||
output:
|
output:
|
||||||
@ -389,7 +389,7 @@ rule build_ammonia_production:
|
|||||||
params:
|
params:
|
||||||
countries=config["countries"],
|
countries=config["countries"],
|
||||||
input:
|
input:
|
||||||
usgs="data/myb1-2017-nitro.xls",
|
usgs="data/bundle-sector/myb1-2017-nitro.xls",
|
||||||
output:
|
output:
|
||||||
ammonia_production=RESOURCES + "ammonia_production.csv",
|
ammonia_production=RESOURCES + "ammonia_production.csv",
|
||||||
threads: 1
|
threads: 1
|
||||||
@ -411,7 +411,7 @@ rule build_industry_sector_ratios:
|
|||||||
ammonia=config["sector"].get("ammonia", False),
|
ammonia=config["sector"].get("ammonia", False),
|
||||||
input:
|
input:
|
||||||
ammonia_production=RESOURCES + "ammonia_production.csv",
|
ammonia_production=RESOURCES + "ammonia_production.csv",
|
||||||
idees="data/jrc-idees-2015",
|
idees="data/bundle-sector/jrc-idees-2015",
|
||||||
output:
|
output:
|
||||||
industry_sector_ratios=RESOURCES + "industry_sector_ratios.csv",
|
industry_sector_ratios=RESOURCES + "industry_sector_ratios.csv",
|
||||||
threads: 1
|
threads: 1
|
||||||
@ -433,8 +433,8 @@ rule build_industrial_production_per_country:
|
|||||||
countries=config["countries"],
|
countries=config["countries"],
|
||||||
input:
|
     input:
         ammonia_production=RESOURCES + "ammonia_production.csv",
-        jrc="data/jrc-idees-2015",
+        jrc="data/bundle-sector/jrc-idees-2015",
-        eurostat="data/eurostat-energy_balances-may_2018_edition",
+        eurostat="data/bundle-sector/eurostat-energy_balances-may_2018_edition",
     output:
         industrial_production_per_country=RESOURCES
         + "industrial_production_per_country.csv",
@@ -484,7 +484,7 @@ rule build_industrial_distribution_key:
     input:
         regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
         clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv",
-        hotmaps_industrial_database="data/Industrial_Database.csv",
+        hotmaps_industrial_database="data/bundle-sector/Industrial_Database.csv",
     output:
         industrial_distribution_key=RESOURCES
         + "industrial_distribution_key_elec_s{simpl}_{clusters}.csv",
@@ -559,7 +559,7 @@ rule build_industrial_energy_demand_per_country_today:
         countries=config["countries"],
         industry=config["industry"],
     input:
-        jrc="data/jrc-idees-2015",
+        jrc="data/bundle-sector/jrc-idees-2015",
         ammonia_production=RESOURCES + "ammonia_production.csv",
         industrial_production_per_country=RESOURCES
         + "industrial_production_per_country.csv",
@@ -609,7 +609,7 @@ if config["sector"]["retrofitting"]["retro_endogen"]:
         countries=config["countries"],
     input:
         building_stock="data/retro/data_building_stock.csv",
-        data_tabula="data/retro/tabula-calculator-calcsetbuilding.csv",
+        data_tabula="data/bundle-sector/retro/tabula-calculator-calcsetbuilding.csv",
         air_temperature=RESOURCES + "temp_air_total_elec_s{simpl}_{clusters}.nc",
         u_values_PL="data/retro/u_values_poland.csv",
         tax_w="data/retro/electricity_taxes_eu.csv",
@@ -685,8 +685,8 @@ rule build_transport_demand:
         pop_weighted_energy_totals=RESOURCES
         + "pop_weighted_energy_totals_s{simpl}_{clusters}.csv",
         transport_data=RESOURCES + "transport_data.csv",
-        traffic_data_KFZ="data/emobility/KFZ__count",
-        traffic_data_Pkw="data/emobility/Pkw__count",
+        traffic_data_KFZ="data/bundle-sector/emobility/KFZ__count",
+        traffic_data_Pkw="data/bundle-sector/emobility/Pkw__count",
         temp_air_total=RESOURCES + "temp_air_total_elec_s{simpl}_{clusters}.nc",
     output:
         transport_demand=RESOURCES + "transport_demand_s{simpl}_{clusters}.csv",
@@ -735,8 +735,13 @@ rule prepare_sector_network:
         avail_profile=RESOURCES + "avail_profile_s{simpl}_{clusters}.csv",
         dsm_profile=RESOURCES + "dsm_profile_s{simpl}_{clusters}.csv",
         co2_totals_name=RESOURCES + "co2_totals.csv",
-        co2="data/eea/UNFCCC_v23.csv",
-        biomass_potentials=RESOURCES + "biomass_potentials_s{simpl}_{clusters}.csv",
+        co2="data/bundle-sector/eea/UNFCCC_v23.csv",
+        biomass_potentials=RESOURCES
+        + "biomass_potentials_s{simpl}_{clusters}_"
+        + "{}.csv".format(config["biomass"]["year"])
+        if config["foresight"] == "overnight"
+        else RESOURCES
+        + "biomass_potentials_s{simpl}_{clusters}_{planning_horizons}.csv",
         heat_profile="data/heat_load_profile_BDEW.csv",
         costs="data/costs_{}.csv".format(config["costs"]["year"])
         if config["foresight"] == "overnight"
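For reference, the inline conditional above switches the `biomass_potentials` input on `config["foresight"]`. The same effect can be expressed as a Snakemake input function, which is evaluated per job with the resolved wildcards. A minimal sketch under the same config keys and globals (the helper name is hypothetical, not part of the commit):

    def biomass_potentials_input(w):
        # hypothetical helper; equivalent to the inline conditional above
        if config["foresight"] == "overnight":
            year = config["biomass"]["year"]
            return RESOURCES + f"biomass_potentials_s{w.simpl}_{w.clusters}_{year}.csv"
        return RESOURCES + f"biomass_potentials_s{w.simpl}_{w.clusters}_{w.planning_horizons}.csv"

    # usage inside the rule: biomass_potentials=biomass_potentials_input,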
@@ -14,12 +14,6 @@ localrules:
     plot_networks,


-rule all:
-    input:
-        RESULTS + "graphs/costs.pdf",
-    default_target: True
-
-
 rule cluster_networks:
     input:
         expand(RESOURCES + "networks/elec_s{simpl}_{clusters}.nc", **config["scenario"]),
@@ -66,6 +60,15 @@ rule solve_sector_networks:
         ),


+rule solve_sector_networks_perfect:
+    input:
+        expand(
+            RESULTS
+            + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc",
+            **config["scenario"]
+        ),
+
+
 rule plot_networks:
     input:
         expand(
@@ -73,3 +76,18 @@ rule plot_networks:
             + "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf",
             **config["scenario"]
         ),
+
+
+rule validate_elec_networks:
+    input:
+        expand(
+            RESULTS
+            + "figures/.statistics_plots_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}",
+            **config["scenario"]
+        ),
+        expand(
+            RESULTS
+            + "figures/.validation_{kind}_plots_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}",
+            **config["scenario"],
+            kind=["production", "prices", "cross_border"]
+        ),
@@ -15,8 +15,8 @@ def memory(w):
         if m is not None:
             factor *= int(m.group(1)) / 8760
             break
-    if w.clusters.endswith("m"):
-        return int(factor * (18000 + 180 * int(w.clusters[:-1])))
+    if w.clusters.endswith("m") or w.clusters.endswith("c"):
+        return int(factor * (55000 + 600 * int(w.clusters[:-1])))
     elif w.clusters == "all":
        return int(factor * (18000 + 180 * 4000))
    else:
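A quick sanity check of the revised heuristic, following directly from the arithmetic above: for a wildcard such as clusters="37c" with no time-resolution factor, the estimate becomes roughly 77 GB instead of the previous ~25 GB.

    # illustrative only; mirrors the branch of memory() changed above
    factor = 1.0
    clusters = "37c"
    mem_mb = int(factor * (55000 + 600 * int(clusters[:-1])))  # 77200 MB
    old_mem_mb = int(factor * (18000 + 180 * int(clusters[:-1])))  # 24660 MB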
@@ -42,7 +42,7 @@ def has_internet_access(url="www.zenodo.org") -> bool:
 def input_eurostat(w):
     # 2016 includes BA, 2017 does not
     report_year = config["energy"]["eurostat_report_year"]
-    return f"data/eurostat-energy_balances-june_{report_year}_edition"
+    return f"data/bundle-sector/eurostat-energy_balances-june_{report_year}_edition"


 def solved_previous_horizon(wildcards):
@@ -5,41 +5,71 @@
 localrules:
     copy_config,
-    copy_conda_env,


-rule plot_network:
-    params:
-        foresight=config["foresight"],
-        plotting=config["plotting"],
-    input:
-        network=RESULTS
-        + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
-        regions=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
-    output:
-        map=RESULTS
-        + "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf",
-        today=RESULTS
-        + "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}-today.pdf",
-    threads: 2
-    resources:
-        mem_mb=10000,
-    benchmark:
-        (
-            BENCHMARKS
-            + "plot_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
-        )
-    conda:
-        "../envs/environment.yaml"
-    script:
-        "../scripts/plot_network.py"
+if config["foresight"] != "perfect":
+
+    rule plot_network:
+        params:
+            foresight=config["foresight"],
+            plotting=config["plotting"],
+        input:
+            network=RESULTS
+            + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
+            regions=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
+        output:
+            map=RESULTS
+            + "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf",
+            today=RESULTS
+            + "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}-today.pdf",
+        threads: 2
+        resources:
+            mem_mb=10000,
+        benchmark:
+            (
+                BENCHMARKS
+                + "plot_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
+            )
+        conda:
+            "../envs/environment.yaml"
+        script:
+            "../scripts/plot_network.py"
+
+
+if config["foresight"] == "perfect":
+
+    rule plot_network:
+        params:
+            foresight=config["foresight"],
+            plotting=config["plotting"],
+        input:
+            network=RESULTS
+            + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc",
+            regions=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
+        output:
+            **{
+                f"map_{year}": RESULTS
+                + "maps/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}-costs-all_"
+                + f"{year}.pdf"
+                for year in config["scenario"]["planning_horizons"]
+            },
+        threads: 2
+        resources:
+            mem_mb=10000,
+        benchmark:
+            BENCHMARKS
+            +"postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years_benchmark"
+        conda:
+            "../envs/environment.yaml"
+        script:
+            "../scripts/plot_network.py"


 rule copy_config:
     params:
         RDIR=RDIR,
     output:
-        RESULTS + "config/config.yaml",
+        RESULTS + "config.yaml",
     threads: 1
     resources:
         mem_mb=1000,
@@ -51,22 +81,6 @@ rule copy_config:
         "../scripts/copy_config.py"


-rule copy_conda_env:
-    output:
-        RESULTS + "config/environment.yaml",
-    threads: 1
-    resources:
-        mem_mb=500,
-    log:
-        LOGS + "copy_conda_env.log",
-    benchmark:
-        BENCHMARKS + "copy_conda_env"
-    conda:
-        "../envs/environment.yaml"
-    shell:
-        "conda env export -f {output} --no-builds"
-
-
 rule make_summary:
     params:
         foresight=config["foresight"],
@@ -122,6 +136,8 @@ rule plot_summary:
         countries=config["countries"],
         planning_horizons=config["scenario"]["planning_horizons"],
         sector_opts=config["scenario"]["sector_opts"],
+        emissions_scope=config["energy"]["emissions"],
+        eurostat_report_year=config["energy"]["eurostat_report_year"],
         plotting=config["plotting"],
         RDIR=RDIR,
     input:
@@ -129,6 +145,7 @@ rule plot_summary:
         energy=RESULTS + "csvs/energy.csv",
         balances=RESULTS + "csvs/supply_energy.csv",
         eurostat=input_eurostat,
+        co2="data/bundle-sector/eea/UNFCCC_v23.csv",
     output:
         costs=RESULTS + "graphs/costs.pdf",
         energy=RESULTS + "graphs/energy.pdf",
@@ -144,3 +161,34 @@ rule plot_summary:
         "../envs/environment.yaml"
     script:
         "../scripts/plot_summary.py"
+
+
+STATISTICS_BARPLOTS = [
+    "capacity_factor",
+    "installed_capacity",
+    "optimal_capacity",
+    "capital_expenditure",
+    "operational_expenditure",
+    "curtailment",
+    "supply",
+    "withdrawal",
+    "market_value",
+]
+
+
+rule plot_elec_statistics:
+    params:
+        plotting=config["plotting"],
+        barplots=STATISTICS_BARPLOTS,
+    input:
+        network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
+    output:
+        **{
+            f"{plot}_bar": RESULTS
+            + f"figures/statistics_{plot}_bar_elec_s{{simpl}}_{{clusters}}_ec_l{{ll}}_{{opts}}.pdf"
+            for plot in STATISTICS_BARPLOTS
+        },
+        barplots_touch=RESULTS
+        + "figures/.statistics_plots_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}",
+    script:
+        "../scripts/plot_statistics.py"
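For reference, both new plotting rules build their named outputs by unpacking a dict comprehension with `**{...}`; the doubled braces in the f-string keep the Snakemake wildcards literal. For plot_elec_statistics this expands roughly to one named PDF output per entry of STATISTICS_BARPLOTS (illustrative expansion, not additional code in the commit):

    # capacity_factor_bar=RESULTS + "figures/statistics_capacity_factor_bar_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.pdf",
    # installed_capacity_bar=RESULTS + "figures/statistics_installed_capacity_bar_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.pdf",
    # ... and so on for the remaining list entries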
@@ -2,6 +2,9 @@
 #
 # SPDX-License-Identifier: MIT

+import requests
+from datetime import datetime, timedelta
+
 if config["enable"].get("retrieve", "auto") == "auto":
     config["enable"]["retrieve"] = has_internet_access()

@@ -27,7 +30,7 @@ if config["enable"]["retrieve"] and config["enable"].get("retrieve_databundle", True):

     rule retrieve_databundle:
         output:
-            expand("data/bundle/{file}", file=datafiles),
+            protected(expand("data/bundle/{file}", file=datafiles)),
         log:
             LOGS + "retrieve_databundle.log",
         resources:
@@ -39,6 +42,24 @@ if config["enable"]["retrieve"] and config["enable"].get("retrieve_databundle", True):
             "../scripts/retrieve_databundle.py"


+if config["enable"].get("retrieve_irena"):
+
+    rule retrieve_irena:
+        output:
+            offwind="data/existing_infrastructure/offwind_capacity_IRENA.csv",
+            onwind="data/existing_infrastructure/onwind_capacity_IRENA.csv",
+            solar="data/existing_infrastructure/solar_capacity_IRENA.csv",
+        log:
+            LOGS + "retrieve_irena.log",
+        resources:
+            mem_mb=1000,
+        retries: 2
+        conda:
+            "../envs/environment.yaml"
+        script:
+            "../scripts/retrieve_irena.py"
+
+
 if config["enable"]["retrieve"] and config["enable"].get("retrieve_cutout", True):

     rule retrieve_cutout:
@@ -92,7 +113,7 @@ if config["enable"]["retrieve"] and config["enable"].get(
                 static=True,
             ),
         output:
-            RESOURCES + "natura.tiff",
+            protected(RESOURCES + "natura.tiff"),
         log:
             LOGS + "retrieve_natura_raster.log",
         resources:
@@ -106,22 +127,30 @@ if config["enable"]["retrieve"] and config["enable"].get(
     "retrieve_sector_databundle", True
 ):
     datafiles = [
-        "data/eea/UNFCCC_v23.csv",
-        "data/switzerland-sfoe/switzerland-new_format.csv",
-        "data/nuts/NUTS_RG_10M_2013_4326_LEVL_2.geojson",
-        "data/myb1-2017-nitro.xls",
-        "data/Industrial_Database.csv",
-        "data/emobility/KFZ__count",
-        "data/emobility/Pkw__count",
-        "data/h2_salt_caverns_GWh_per_sqkm.geojson",
-        directory("data/eurostat-energy_balances-june_2016_edition"),
-        directory("data/eurostat-energy_balances-may_2018_edition"),
-        directory("data/jrc-idees-2015"),
+        "eea/UNFCCC_v23.csv",
+        "switzerland-sfoe/switzerland-new_format.csv",
+        "nuts/NUTS_RG_10M_2013_4326_LEVL_2.geojson",
+        "myb1-2017-nitro.xls",
+        "Industrial_Database.csv",
+        "emobility/KFZ__count",
+        "emobility/Pkw__count",
+        "h2_salt_caverns_GWh_per_sqkm.geojson",
     ]

+    datafolders = [
+        protected(
+            directory("data/bundle-sector/eurostat-energy_balances-june_2016_edition")
+        ),
+        protected(
+            directory("data/bundle-sector/eurostat-energy_balances-may_2018_edition")
+        ),
+        protected(directory("data/bundle-sector/jrc-idees-2015")),
+    ]
+
     rule retrieve_sector_databundle:
         output:
-            *datafiles,
+            protected(expand("data/bundle-sector/{files}", files=datafiles)),
+            *datafolders,
         log:
             LOGS + "retrieve_sector_databundle.log",
         retries: 2
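For reference, `protected(...)` makes the retrieved outputs read-only once written, so a later run cannot silently overwrite or re-trigger the download, and `directory(...)` declares a whole folder as a rule output; the two compose, as in the datafolders list above. A minimal generic sketch of the pattern (rule name and paths are made up, not part of the commit):

    rule fetch_example:  # hypothetical rule, for illustration only
        output:
            protected("data/example/archive.zip"),
            protected(directory("data/example/extracted")),
        shell:
            "curl -sL https://example.org/archive.zip -o {output[0]} && unzip -d {output[1]} {output[0]}"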
@@ -143,7 +172,9 @@ if config["enable"]["retrieve"] and (

     rule retrieve_gas_infrastructure_data:
         output:
-            expand("data/gas_network/scigrid-gas/data/{files}", files=datafiles),
+            protected(
+                expand("data/gas_network/scigrid-gas/data/{files}", files=datafiles)
+            ),
         log:
             LOGS + "retrieve_gas_infrastructure_data.log",
         retries: 2
@@ -158,12 +189,16 @@ if config["enable"]["retrieve"]:
     rule retrieve_electricity_demand:
         input:
             HTTP.remote(
-                "data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv",
+                "data.open-power-system-data.org/time_series/{version}/time_series_60min_singleindex.csv".format(
+                    version="2019-06-05"
+                    if config["snapshots"]["end"] < "2019"
+                    else "2020-10-06"
+                ),
                 keep_local=True,
                 static=True,
             ),
         output:
-            "data/load_raw.csv",
+            RESOURCES + "load_raw.csv",
         log:
             LOGS + "retrieve_electricity_demand.log",
         resources:
@@ -183,7 +218,7 @@ if config["enable"]["retrieve"]:
                 static=True,
             ),
         output:
-            "data/shipdensity_global.zip",
+            protected("data/shipdensity_global.zip"),
         log:
             LOGS + "retrieve_ship_raster.log",
         resources:
@@ -191,3 +226,138 @@ if config["enable"]["retrieve"]:
         retries: 2
         run:
             move(input[0], output[0])
+
+
+if config["enable"]["retrieve"]:
+
+    # Downloading Copernicus Global Land Cover for land cover and land use:
+    # Website: https://land.copernicus.eu/global/products/lc
+    rule download_copernicus_land_cover:
+        input:
+            HTTP.remote(
+                "zenodo.org/record/3939050/files/PROBAV_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif",
+                static=True,
+            ),
+        output:
+            RESOURCES
+            + "Copernicus_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif",
+        run:
+            move(input[0], output[0])
+
+
+if config["enable"]["retrieve"]:
+    # Some logic to find the correct file URL
+    # Sometimes files are released delayed or ahead of schedule, check which file is currently available
+
+    def check_file_exists(url):
+        response = requests.head(url)
+        return response.status_code == 200
+
+    # Basic pattern where WDPA files can be found
+    url_pattern = (
+        "https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public_shp.zip"
+    )
+
+    # 3-letter month + 4 digit year for current/previous/next month to test
+    current_monthyear = datetime.now().strftime("%b%Y")
+    prev_monthyear = (datetime.now() - timedelta(30)).strftime("%b%Y")
+    next_monthyear = (datetime.now() + timedelta(30)).strftime("%b%Y")
+
+    # Test prioritised: current month -> previous -> next
+    for bYYYY in [current_monthyear, prev_monthyear, next_monthyear]:
+        if check_file_exists(url := url_pattern.format(bYYYY=bYYYY)):
+            break
+    else:
+        # If None of the three URLs are working
+        url = False
+
+    assert (
+        url
+    ), f"No WDPA files found at {url_pattern} for bY='{current_monthyear}, {prev_monthyear}, or {next_monthyear}'"
+
+    # Downloading protected area database from WDPA
+    # extract the main zip and then merge the contained 3 zipped shapefiles
+    # Website: https://www.protectedplanet.net/en/thematic-areas/wdpa
+    rule download_wdpa:
+        input:
+            HTTP.remote(
+                url,
+                static=True,
+                keep_local=True,
+            ),
+        params:
+            zip=RESOURCES + f"WDPA_shp.zip",
+            folder=directory(RESOURCES + f"WDPA"),
+        output:
+            gpkg=RESOURCES + f"WDPA.gpkg",
+        run:
+            shell("cp {input} {params.zip}")
+            shell("unzip -o {params.zip} -d {params.folder}")
+            for i in range(3):
+                # vsizip is special driver for directly working with zipped shapefiles in ogr2ogr
+                layer_path = (
+                    f"/vsizip/{params.folder}/WDPA_{bYYYY}_Public_shp_{i}.zip"
+                )
+                print(f"Adding layer {i+1} of 3 to combined output file.")
+                shell("ogr2ogr -f gpkg -update -append {output.gpkg} {layer_path}")
+
+    rule download_wdpa_marine:
+        # Downloading Marine protected area database from WDPA
+        # extract the main zip and then merge the contained 3 zipped shapefiles
+        # Website: https://www.protectedplanet.net/en/thematic-areas/marine-protected-areas
+        input:
+            HTTP.remote(
+                f"d1gam3xoknrgr2.cloudfront.net/current/WDPA_WDOECM_{bYYYY}_Public_marine_shp.zip",
+                static=True,
+                keep_local=True,
+            ),
+        params:
+            zip=RESOURCES + f"WDPA_WDOECM_marine.zip",
+            folder=directory(RESOURCES + f"WDPA_WDOECM_marine"),
+        output:
+            gpkg=RESOURCES + f"WDPA_WDOECM_marine.gpkg",
+        run:
+            shell("cp {input} {params.zip}")
+            shell("unzip -o {params.zip} -d {params.folder}")
+            for i in range(3):
+                # vsizip is special driver for directly working with zipped shapefiles in ogr2ogr
+                layer_path = f"/vsizip/{params.folder}/WDPA_WDOECM_{bYYYY}_Public_marine_shp_{i}.zip"
+                print(f"Adding layer {i+1} of 3 to combined output file.")
+                shell("ogr2ogr -f gpkg -update -append {output.gpkg} {layer_path}")
+
+
+if config["enable"]["retrieve"]:
+
+    rule retrieve_monthly_co2_prices:
+        input:
+            HTTP.remote(
+                "https://www.eex.com/fileadmin/EEX/Downloads/EUA_Emission_Spot_Primary_Market_Auction_Report/Archive_Reports/emission-spot-primary-market-auction-report-2019-data.xls",
+                keep_local=True,
+                static=True,
+            ),
+        output:
+            "data/validation/emission-spot-primary-market-auction-report-2019-data.xls",
+        log:
+            LOGS + "retrieve_monthly_co2_prices.log",
+        resources:
+            mem_mb=5000,
+        retries: 2
+        run:
+            move(input[0], output[0])
+
+
+if config["enable"]["retrieve"]:
+
+    rule retrieve_monthly_fuel_prices:
+        output:
+            "data/validation/energy-price-trends-xlsx-5619002.xlsx",
+        log:
+            LOGS + "retrieve_monthly_fuel_prices.log",
+        resources:
+            mem_mb=5000,
+        retries: 2
+        conda:
+            "../envs/environment.yaml"
+        script:
+            "../scripts/retrieve_monthly_fuel_prices.py"
@@ -13,6 +13,7 @@ rule solve_network:
         ),
     input:
         network=RESOURCES + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
+        config=RESULTS + "config.yaml",
     output:
         network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
     log:
@@ -26,6 +27,7 @@ rule solve_network:
     threads: 4
     resources:
         mem_mb=memory,
+        walltime=config["solving"].get("walltime", "12:00:00"),
     shadow:
         "minimal"
     conda:
@@ -55,7 +57,8 @@ rule solve_operations_network:
     )
     threads: 4
     resources:
-        mem_mb=(lambda w: 5000 + 372 * int(w.clusters)),
+        mem_mb=(lambda w: 10000 + 372 * int(w.clusters)),
+        walltime=config["solving"].get("walltime", "12:00:00"),
     shadow:
         "minimal"
     conda:
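With the bumped base value, the per-job memory request of solve_operations_network scales as before but from a higher floor; the added walltime entry only takes effect on schedulers that interpret a walltime resource, e.g. via a cluster profile. A quick check of the arithmetic for an illustrative wildcard value:

    mem_mb = 10000 + 372 * int("100")  # = 47200 MB for clusters="100" (was 42200 MB)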
@@ -92,7 +92,7 @@ rule solve_sector_network_myopic:
         network=RESULTS
         + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
         costs="data/costs_{planning_horizons}.csv",
-        config=RESULTS + "config/config.yaml",
+        config=RESULTS + "config.yaml",
     output:
         RESULTS
         + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
@@ -106,6 +106,7 @@ rule solve_sector_network_myopic:
     threads: 4
     resources:
         mem_mb=config["solving"]["mem"],
+        walltime=config["solving"].get("walltime", "12:00:00"),
     benchmark:
         (
             BENCHMARKS
@@ -14,9 +14,7 @@ rule solve_sector_network:
     input:
         network=RESULTS
         + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
-        costs="data/costs_{}.csv".format(config["costs"]["year"]),
-        config=RESULTS + "config/config.yaml",
-        #env=RDIR + 'config/environment.yaml',
+        config=RESULTS + "config.yaml",
     output:
         RESULTS
         + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
@@ -30,6 +28,7 @@ rule solve_sector_network:
     threads: config["solving"]["solver"].get("threads", 4)
     resources:
         mem_mb=config["solving"]["mem"],
+        walltime=config["solving"].get("walltime", "12:00:00"),
     benchmark:
         (
             RESULTS
rules/solve_perfect.smk  (new file, 194 lines)
@@ -0,0 +1,194 @@
+# SPDX-FileCopyrightText: : 2023 The PyPSA-Eur Authors
+#
+# SPDX-License-Identifier: MIT
+rule add_existing_baseyear:
+    params:
+        baseyear=config["scenario"]["planning_horizons"][0],
+        sector=config["sector"],
+        existing_capacities=config["existing_capacities"],
+        costs=config["costs"],
+    input:
+        network=RESULTS
+        + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
+        powerplants=RESOURCES + "powerplants.csv",
+        busmap_s=RESOURCES + "busmap_elec_s{simpl}.csv",
+        busmap=RESOURCES + "busmap_elec_s{simpl}_{clusters}.csv",
+        clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv",
+        costs="data/costs_{}.csv".format(config["scenario"]["planning_horizons"][0]),
+        cop_soil_total=RESOURCES + "cop_soil_total_elec_s{simpl}_{clusters}.nc",
+        cop_air_total=RESOURCES + "cop_air_total_elec_s{simpl}_{clusters}.nc",
+        existing_heating="data/existing_infrastructure/existing_heating_raw.csv",
+        existing_solar="data/existing_infrastructure/solar_capacity_IRENA.csv",
+        existing_onwind="data/existing_infrastructure/onwind_capacity_IRENA.csv",
+        existing_offwind="data/existing_infrastructure/offwind_capacity_IRENA.csv",
+    output:
+        RESULTS
+        + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
+    wildcard_constraints:
+        planning_horizons=config["scenario"]["planning_horizons"][0],  #only applies to baseyear
+    threads: 1
+    resources:
+        mem_mb=2000,
+    log:
+        LOGS
+        + "add_existing_baseyear_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log",
+    benchmark:
+        (
+            BENCHMARKS
+            + "add_existing_baseyear/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
+        )
+    conda:
+        "../envs/environment.yaml"
+    script:
+        "../scripts/add_existing_baseyear.py"
+
+
+rule add_brownfield:
+    params:
+        H2_retrofit=config["sector"]["H2_retrofit"],
+        H2_retrofit_capacity_per_CH4=config["sector"]["H2_retrofit_capacity_per_CH4"],
+        threshold_capacity=config["existing_capacities"]["threshold_capacity"],
+    input:
+        network=RESULTS
+        + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
+        network_p=solved_previous_horizon,  #solved network at previous time step
+        costs="data/costs_{planning_horizons}.csv",
+        cop_soil_total=RESOURCES + "cop_soil_total_elec_s{simpl}_{clusters}.nc",
+        cop_air_total=RESOURCES + "cop_air_total_elec_s{simpl}_{clusters}.nc",
+    output:
+        RESULTS
+        + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
+    threads: 4
+    resources:
+        mem_mb=10000,
+    log:
+        LOGS
+        + "add_brownfield_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log",
+    benchmark:
+        (
+            BENCHMARKS
+            + "add_brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
+        )
+    conda:
+        "../envs/environment.yaml"
+    script:
+        "../scripts/add_brownfield.py"
+
+
+rule prepare_perfect_foresight:
+    input:
+        **{
+            f"network_{year}": RESULTS
+            + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_"
+            + f"{year}.nc"
+            for year in config["scenario"]["planning_horizons"][1:]
+        },
+        brownfield_network=lambda w: (
+            RESULTS
+            + "prenetworks-brownfield/"
+            + "elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_"
+            + "{}.nc".format(str(config["scenario"]["planning_horizons"][0]))
+        ),
+    output:
+        RESULTS
+        + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc",
+    threads: 2
+    resources:
+        mem_mb=10000,
+    log:
+        LOGS
+        + "prepare_perfect_foresight{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}.log",
+    benchmark:
+        (
+            BENCHMARKS
+            + "prepare_perfect_foresight{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}"
+        )
+    conda:
+        "../envs/environment.yaml"
+    script:
+        "../scripts/prepare_perfect_foresight.py"
+
+
+rule solve_sector_network_perfect:
+    params:
+        solving=config["solving"],
+        foresight=config["foresight"],
+        sector=config["sector"],
+        planning_horizons=config["scenario"]["planning_horizons"],
+        co2_sequestration_potential=config["sector"].get(
+            "co2_sequestration_potential", 200
+        ),
+    input:
+        network=RESULTS
+        + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc",
+        costs="data/costs_2030.csv",
+        config=RESULTS + "config.yaml",
+    output:
+        RESULTS
+        + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc",
+    threads: 4
+    resources:
+        mem_mb=config["solving"]["mem"],
+    shadow:
+        "shallow"
+    log:
+        solver=RESULTS
+        + "logs/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years_solver.log",
+        python=RESULTS
+        + "logs/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years_python.log",
+        memory=RESULTS
+        + "logs/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years_memory.log",
+    benchmark:
+        (
+            BENCHMARKS
+            + "solve_sector_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years}"
+        )
+    conda:
+        "../envs/environment.yaml"
+    script:
+        "../scripts/solve_network.py"
+
+
+rule make_summary_perfect:
+    input:
+        **{
+            f"networks_{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}": RESULTS
+            + f"postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc"
+            for simpl in config["scenario"]["simpl"]
+            for clusters in config["scenario"]["clusters"]
+            for opts in config["scenario"]["opts"]
+            for sector_opts in config["scenario"]["sector_opts"]
+            for ll in config["scenario"]["ll"]
+        },
+        costs="data/costs_2020.csv",
+    output:
+        nodal_costs=RESULTS + "csvs/nodal_costs.csv",
+        nodal_capacities=RESULTS + "csvs/nodal_capacities.csv",
+        nodal_cfs=RESULTS + "csvs/nodal_cfs.csv",
+        cfs=RESULTS + "csvs/cfs.csv",
+        costs=RESULTS + "csvs/costs.csv",
+        capacities=RESULTS + "csvs/capacities.csv",
+        curtailment=RESULTS + "csvs/curtailment.csv",
+        energy=RESULTS + "csvs/energy.csv",
+        supply=RESULTS + "csvs/supply.csv",
+        supply_energy=RESULTS + "csvs/supply_energy.csv",
+        prices=RESULTS + "csvs/prices.csv",
+        weighted_prices=RESULTS + "csvs/weighted_prices.csv",
+        market_values=RESULTS + "csvs/market_values.csv",
+        price_statistics=RESULTS + "csvs/price_statistics.csv",
+        metrics=RESULTS + "csvs/metrics.csv",
+        co2_emissions=RESULTS + "csvs/co2_emissions.csv",
+    threads: 2
+    resources:
+        mem_mb=10000,
+    log:
+        LOGS + "make_summary_perfect.log",
+    benchmark:
+        (BENCHMARKS + "make_summary_perfect")
+    conda:
+        "../envs/environment.yaml"
+    script:
+        "../scripts/make_summary_perfect.py"
+
+
+ruleorder: add_existing_baseyear > add_brownfield
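For reference, the `**{f"network_{year}": ...}` comprehension in prepare_perfect_foresight skips the first planning horizon (the `[1:]` slice) because that year enters through brownfield_network instead; for horizons such as [2030, 2040, 2050] it expands roughly to (illustrative values, not additional code in the commit):

    # network_2040=RESULTS + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_2040.nc",
    # network_2050=RESULTS + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_2050.nc",

The closing ruleorder is needed because add_existing_baseyear and add_brownfield can both produce the same prenetworks-brownfield file; the wildcard constraint restricts the former to the base year, and the ruleorder breaks the remaining tie in its favour.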
rules/validate.smk  (new file, 117 lines)
@@ -0,0 +1,117 @@
+# SPDX-FileCopyrightText: : 2023 The PyPSA-Eur Authors
+#
+# SPDX-License-Identifier: MIT
+
+PRODUCTION_PLOTS = [
+    "production_bar",
+    "production_deviation_bar",
+    "seasonal_operation_area",
+]
+CROSS_BORDER_PLOTS = ["trade_time_series", "cross_border_bar"]
+PRICES_PLOTS = ["price_bar", "price_line"]
+
+
+rule build_electricity_production:
+    """
+    This rule builds the electricity production for each country and technology from ENTSO-E data.
+    The data is used for validation of the optimization results.
+    """
+    params:
+        snapshots=config["snapshots"],
+        countries=config["countries"],
+    output:
+        RESOURCES + "historical_electricity_production.csv",
+    log:
+        LOGS + "build_electricity_production.log",
+    resources:
+        mem_mb=5000,
+    script:
+        "../scripts/build_electricity_production.py"
+
+
+rule build_cross_border_flows:
+    """
+    This rule builds the cross-border flows from ENTSO-E data.
+    The data is used for validation of the optimization results.
+    """
+    params:
+        snapshots=config["snapshots"],
+        countries=config["countries"],
+    input:
+        network=RESOURCES + "networks/base.nc",
+    output:
+        RESOURCES + "historical_cross_border_flows.csv",
+    log:
+        LOGS + "build_cross_border_flows.log",
+    resources:
+        mem_mb=5000,
+    script:
+        "../scripts/build_cross_border_flows.py"
+
+
+rule build_electricity_prices:
+    """
+    This rule builds the electricity prices from ENTSO-E data.
+    The data is used for validation of the optimization results.
+    """
+    params:
+        snapshots=config["snapshots"],
+        countries=config["countries"],
+    output:
+        RESOURCES + "historical_electricity_prices.csv",
+    log:
+        LOGS + "build_electricity_prices.log",
+    resources:
+        mem_mb=5000,
+    script:
+        "../scripts/build_electricity_prices.py"
+
+
+rule plot_validation_electricity_production:
+    input:
+        network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
+        electricity_production=RESOURCES + "historical_electricity_production.csv",
+    output:
+        **{
+            plot: RESULTS
+            + f"figures/validation_{plot}_elec_s{{simpl}}_{{clusters}}_ec_l{{ll}}_{{opts}}.pdf"
+            for plot in PRODUCTION_PLOTS
+        },
+        plots_touch=RESULTS
+        + "figures/.validation_production_plots_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}",
+    script:
+        "../scripts/plot_validation_electricity_production.py"
+
+
+rule plot_validation_cross_border_flows:
+    params:
+        countries=config["countries"],
+    input:
+        network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
+        cross_border_flows=RESOURCES + "historical_cross_border_flows.csv",
+    output:
+        **{
+            plot: RESULTS
+            + f"figures/validation_{plot}_elec_s{{simpl}}_{{clusters}}_ec_l{{ll}}_{{opts}}.pdf"
+            for plot in CROSS_BORDER_PLOTS
+        },
+        plots_touch=RESULTS
+        + "figures/.validation_cross_border_plots_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}",
+    script:
+        "../scripts/plot_validation_cross_border_flows.py"
+
+
+rule plot_validation_electricity_prices:
+    input:
+        network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
+        electricity_prices=RESOURCES + "historical_electricity_prices.csv",
+    output:
+        **{
+            plot: RESULTS
+            + f"figures/validation_{plot}_elec_s{{simpl}}_{{clusters}}_ec_l{{ll}}_{{opts}}.pdf"
+            for plot in PRICES_PLOTS
+        },
+        plots_touch=RESULTS
+        + "figures/.validation_prices_plots_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}",
+    script:
+        "../scripts/plot_validation_electricity_prices.py"
scripts/_benchmark.py  (new file, 256 lines)
@@ -0,0 +1,256 @@
+# -*- coding: utf-8 -*-
+# SPDX-FileCopyrightText: : 2020-2023 The PyPSA-Eur Authors
+#
+# SPDX-License-Identifier: MIT
+"""
+
+"""
+
+from __future__ import absolute_import, print_function
+
+import logging
+import os
+import sys
+import time
+
+logger = logging.getLogger(__name__)
+
+# TODO: provide alternative when multiprocessing is not available
+try:
+    from multiprocessing import Pipe, Process
+except ImportError:
+    from multiprocessing.dummy import Process, Pipe
+
+from memory_profiler import _get_memory, choose_backend
+
+
+# The memory logging facilities have been adapted from memory_profiler
+class MemTimer(Process):
+    """
+    Write memory consumption over a time interval to file until signaled to
+    stop on the pipe.
+    """
+
+    def __init__(
+        self, monitor_pid, interval, pipe, filename, max_usage, backend, *args, **kw
+    ):
+        self.monitor_pid = monitor_pid
+        self.interval = interval
+        self.pipe = pipe
+        self.filename = filename
+        self.max_usage = max_usage
+        self.backend = backend
+
+        self.timestamps = kw.pop("timestamps", True)
+        self.include_children = kw.pop("include_children", True)
+
+        super(MemTimer, self).__init__(*args, **kw)
+
+    def run(self):
+        # get baseline memory usage
+        cur_mem = _get_memory(
+            self.monitor_pid,
+            self.backend,
+            timestamps=self.timestamps,
+            include_children=self.include_children,
+        )
+
+        n_measurements = 1
+        mem_usage = cur_mem if self.max_usage else [cur_mem]
+
+        if self.filename is not None:
+            stream = open(self.filename, "w")
+            stream.write("MEM {0:.6f} {1:.4f}\n".format(*cur_mem))
+            stream.flush()
+        else:
+            stream = None
+
+        self.pipe.send(0)  # we're ready
+        stop = False
+        while True:
+            cur_mem = _get_memory(
+                self.monitor_pid,
+                self.backend,
+                timestamps=self.timestamps,
+                include_children=self.include_children,
+            )
+
+            if stream is not None:
+                stream.write("MEM {0:.6f} {1:.4f}\n".format(*cur_mem))
+                stream.flush()
+
+            n_measurements += 1
+            if not self.max_usage:
+                mem_usage.append(cur_mem)
+            else:
+                mem_usage = max(cur_mem, mem_usage)
+
+            if stop:
+                break
+            stop = self.pipe.poll(self.interval)
+            # do one more iteration
+
+        if stream is not None:
+            stream.close()
+
+        self.pipe.send(mem_usage)
+        self.pipe.send(n_measurements)
+
+
+class memory_logger(object):
+    """
+    Context manager for taking and reporting memory measurements at fixed
+    intervals from a separate process, for the duration of a context.
+
+    Parameters
+    ----------
+    filename : None|str
+        Name of the text file to log memory measurements, if None no log is
+        created (defaults to None)
+    interval : float
+        Interval between measurements (defaults to 1.)
+    max_usage : bool
+        If True, only store and report the maximum value (defaults to True)
+    timestamps : bool
+        Whether to record tuples of memory usage and timestamps; if logging to
+        a file timestamps are always kept (defaults to True)
+    include_children : bool
+        Whether the memory of subprocesses is to be included (default: True)
+
+    Arguments
+    ---------
+    n_measurements : int
+        Number of measurements that have been taken
+    mem_usage : (float, float)|[(float, float)]
+        All memory measurements and timestamps (if timestamps was True) or only
+        the maximum memory usage and its timestamp
+
+    Note
+    ----
+    The arguments are only set after all the measurements, i.e. outside of the
+    with statement.
+
+    Example
+    -------
+    with memory_logger(filename="memory.log", max_usage=True) as mem:
+        # Do a lot of long running memory intensive stuff
+        hard_memory_bound_stuff()
+
+    max_mem, timestamp = mem.mem_usage
+    """
+
+    def __init__(
+        self,
+        filename=None,
+        interval=1.0,
+        max_usage=True,
+        timestamps=True,
+        include_children=True,
+    ):
+        if filename is not None:
+            timestamps = True
+
+        self.filename = filename
+        self.interval = interval
+        self.max_usage = max_usage
+        self.timestamps = timestamps
+        self.include_children = include_children
+
+    def __enter__(self):
+        backend = choose_backend()
+
+        self.child_conn, self.parent_conn = Pipe()  # this will store MemTimer's results
+        self.p = MemTimer(
+            os.getpid(),
+            self.interval,
+            self.child_conn,
+            self.filename,
+            backend=backend,
+            timestamps=self.timestamps,
+            max_usage=self.max_usage,
+            include_children=self.include_children,
+        )
+        self.p.start()
+        self.parent_conn.recv()  # wait until memory logging in subprocess is ready
+
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if exc_type is None:
+            self.parent_conn.send(0)  # finish timing
+
+            self.mem_usage = self.parent_conn.recv()
+            self.n_measurements = self.parent_conn.recv()
+        else:
+            self.p.terminate()
+
+        return False
+
+
+class timer(object):
+    level = 0
+    opened = False
+
+    def __init__(self, name="", verbose=True):
+        self.name = name
+        self.verbose = verbose
+
+    def __enter__(self):
+        if self.verbose:
+            if self.opened:
+                sys.stdout.write("\n")
+
+            if len(self.name) > 0:
+                sys.stdout.write((".. " * self.level) + self.name + ": ")
+            sys.stdout.flush()
+
+            self.__class__.opened = True
+
+        self.__class__.level += 1
+
+        self.start = time.time()
+        return self
+
+    def print_usec(self, usec):
+        if usec < 1000:
+            print("%.1f usec" % usec)
+        else:
+            msec = usec / 1000
+            if msec < 1000:
+                print("%.1f msec" % msec)
+            else:
+                sec = msec / 1000
+                print("%.1f sec" % sec)
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if not self.opened and self.verbose:
+            sys.stdout.write(".. " * self.level)
+
+        if exc_type is None:
+            stop = time.time()
+            self.usec = usec = (stop - self.start) * 1e6
+            if self.verbose:
+                self.print_usec(usec)
+        elif self.verbose:
+            print("failed")
+        sys.stdout.flush()
+
+        self.__class__.level -= 1
+        if self.verbose:
+            self.__class__.opened = False
+        return False
+
+
+class optional(object):
+    def __init__(self, variable, contextman):
+        self.variable = variable
+        self.contextman = contextman
+
+    def __enter__(self):
+        if self.variable:
+            return self.contextman.__enter__()
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if self.variable:
+            return self.contextman.__exit__(exc_type, exc_val, exc_tb)
+        return False
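A minimal way to exercise the two context managers above together, assuming the script is run from the scripts directory so `_benchmark` is importable (the workload is a stand-in):

    from _benchmark import memory_logger, timer

    with memory_logger(filename="memory.log", max_usage=True) as mem, timer("toy workload"):
        result = sum(i * i for i in range(10_000_000))  # stand-in for real work

    # mem.mem_usage is only populated after the with-block has exited
    max_mem, timestamp = mem.mem_usage
    print(f"peak memory: {max_mem:.1f} MiB")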
@@ -191,7 +191,7 @@ def progress_retrieve(url, file, disable=False):
         urllib.request.urlretrieve(url, file, reporthook=update_to)


-def mock_snakemake(rulename, configfiles=[], **wildcards):
+def mock_snakemake(rulename, root_dir=None, configfiles=[], **wildcards):
     """
     This function is expected to be executed from the 'scripts'-directory of '
     the snakemake project. It returns a snakemake.script.Snakemake object,
@@ -203,6 +203,8 @@ def mock_snakemake(rulename, configfiles=[], **wildcards):
     ----------
     rulename: str
         name of the rule for which the snakemake object should be generated
+    root_dir: str/path-like
+        path to the root directory of the snakemake project
     configfiles: list, str
         list of configfiles to be used to update the config
     **wildcards:
@@ -217,7 +219,10 @@ def mock_snakemake(rulename, configfiles=[], **wildcards):
     from snakemake.script import Snakemake

     script_dir = Path(__file__).parent.resolve()
-    root_dir = script_dir.parent
+    if root_dir is None:
+        root_dir = script_dir.parent
+    else:
+        root_dir = Path(root_dir).resolve()

     user_in_script_dir = Path.cwd().resolve() == script_dir
     if user_in_script_dir:
@@ -303,10 +308,7 @@ def generate_periodic_profiles(dt_index, nodes, weekly_profile, localize=None):


 def parse(l):
-    if len(l) == 1:
-        return yaml.safe_load(l[0])
-    else:
-        return {l.pop(0): parse(l)}
+    return yaml.safe_load(l[0]) if len(l) == 1 else {l.pop(0): parse(l)}


 def update_config_with_sector_opts(config, sector_opts):
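The new root_dir argument lets mock_snakemake be called from outside the project tree, for example from a notebook or a test. An illustrative call against the solve_network rule shown earlier (the path and wildcard values are placeholders):

    from _helpers import mock_snakemake

    snakemake = mock_snakemake(
        "solve_network",
        root_dir="/path/to/pypsa-eur",  # placeholder path
        simpl="",
        clusters="37",
        ll="v1.0",
        opts="Co2L-24H",
    )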
@@ -41,12 +41,9 @@ def add_brownfield(n, n_p, year):
         # remove assets if their optimized nominal capacity is lower than a threshold
         # since CHP heat Link is proportional to CHP electric Link, make sure threshold is compatible
         chp_heat = c.df.index[
-            (
-                c.df[attr + "_nom_extendable"]
-                & c.df.index.str.contains("urban central")
-                & c.df.index.str.contains("CHP")
-                & c.df.index.str.contains("heat")
-            )
+            (c.df[f"{attr}_nom_extendable"] & c.df.index.str.contains("urban central"))
+            & c.df.index.str.contains("CHP")
+            & c.df.index.str.contains("heat")
         ]

         threshold = snakemake.params.threshold_capacity
@@ -60,21 +57,20 @@ def add_brownfield(n, n_p, year):
         )
         n_p.mremove(
             c.name,
-            chp_heat[c.df.loc[chp_heat, attr + "_nom_opt"] < threshold_chp_heat],
+            chp_heat[c.df.loc[chp_heat, f"{attr}_nom_opt"] < threshold_chp_heat],
         )

         n_p.mremove(
             c.name,
             c.df.index[
-                c.df[attr + "_nom_extendable"]
-                & ~c.df.index.isin(chp_heat)
-                & (c.df[attr + "_nom_opt"] < threshold)
+                (c.df[f"{attr}_nom_extendable"] & ~c.df.index.isin(chp_heat))
+                & (c.df[f"{attr}_nom_opt"] < threshold)
             ],
         )

         # copy over assets but fix their capacity
-        c.df[attr + "_nom"] = c.df[attr + "_nom_opt"]
-        c.df[attr + "_nom_extendable"] = False
+        c.df[f"{attr}_nom"] = c.df[f"{attr}_nom_opt"]
+        c.df[f"{attr}_nom_extendable"] = False

         n.import_components_from_dataframe(c.df, c.name)

@@ -124,7 +120,6 @@ def add_brownfield(n, n_p, year):
     n.links.loc[new_pipes, "p_nom_min"] = 0.0


-# %%
 if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake
@ -2,8 +2,6 @@
|
|||||||
# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors
|
# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors
|
||||||
#
|
#
|
||||||
# SPDX-License-Identifier: MIT
|
# SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
# coding: utf-8
|
|
||||||
"""
|
"""
|
||||||
Adds electrical generators and existing hydro storage units to a base network.
|
Adds electrical generators and existing hydro storage units to a base network.
|
||||||
|
|
||||||
@ -86,6 +84,7 @@ It further adds extendable ``generators`` with **zero** capacity for
|
|||||||
|
|
||||||
import logging
|
import logging
|
||||||
from itertools import product
|
from itertools import product
|
||||||
|
from typing import Dict, List
|
||||||
|
|
||||||
import geopandas as gpd
|
import geopandas as gpd
|
||||||
import numpy as np
|
import numpy as np
|
||||||
@ -167,7 +166,7 @@ def sanitize_carriers(n, config):
|
|||||||
nice_names = (
|
nice_names = (
|
||||||
pd.Series(config["plotting"]["nice_names"])
|
pd.Series(config["plotting"]["nice_names"])
  .reindex(carrier_i)
- .fillna(carrier_i.to_series().str.title())
+ .fillna(carrier_i.to_series())
  )
  n.carriers["nice_name"] = n.carriers.nice_name.where(
  n.carriers.nice_name != "", nice_names
@@ -206,7 +205,6 @@ def load_costs(tech_costs, config, max_hours, Nyears=1.0):
  * costs["investment"]
  * Nyears
  )

  costs.at["OCGT", "fuel"] = costs.at["gas", "fuel"]
  costs.at["CCGT", "fuel"] = costs.at["gas", "fuel"]

@@ -258,6 +256,7 @@ def load_powerplants(ppl_fn):
  "bioenergy": "biomass",
  "ccgt, thermal": "CCGT",
  "hard coal": "coal",
+ "natural gas": "OCGT",
  }
  return (
  pd.read_csv(ppl_fn, index_col=0, dtype={"bus": "str"})
@@ -282,11 +281,13 @@ def shapes_to_shapes(orig, dest):
  return transfer


- def attach_load(n, regions, load, nuts3_shapes, countries, scaling=1.0):
+ def attach_load(n, regions, load, nuts3_shapes, ua_md_gdp, countries, scaling=1.0):
  substation_lv_i = n.buses.index[n.buses["substation_lv"]]
  regions = gpd.read_file(regions).set_index("name").reindex(substation_lv_i)
  opsd_load = pd.read_csv(load, index_col=0, parse_dates=True).filter(items=countries)

+ ua_md_gdp = pd.read_csv(ua_md_gdp, dtype={"name": "str"}).set_index("name")

  logger.info(f"Load data scaled with scalling factor {scaling}.")
  opsd_load *= scaling

@@ -294,26 +295,29 @@ def attach_load(n, regions, load, nuts3_shapes, countries, scaling=1.0):

  def upsample(cntry, group):
  l = opsd_load[cntry]

  if len(group) == 1:
  return pd.DataFrame({group.index[0]: l})
- else:
- nuts3_cntry = nuts3.loc[nuts3.country == cntry]
- transfer = shapes_to_shapes(group, nuts3_cntry.geometry).T.tocsr()
- gdp_n = pd.Series(
- transfer.dot(nuts3_cntry["gdp"].fillna(1.0).values), index=group.index
- )
- pop_n = pd.Series(
- transfer.dot(nuts3_cntry["pop"].fillna(1.0).values), index=group.index
- )
+ nuts3_cntry = nuts3.loc[nuts3.country == cntry]
+ transfer = shapes_to_shapes(group, nuts3_cntry.geometry).T.tocsr()
+ gdp_n = pd.Series(
+ transfer.dot(nuts3_cntry["gdp"].fillna(1.0).values), index=group.index
+ )
+ pop_n = pd.Series(
+ transfer.dot(nuts3_cntry["pop"].fillna(1.0).values), index=group.index
+ )

  # relative factors 0.6 and 0.4 have been determined from a linear
  # regression on the country to continent load data
  factors = normed(0.6 * normed(gdp_n) + 0.4 * normed(pop_n))
- return pd.DataFrame(
- factors.values * l.values[:, np.newaxis],
- index=l.index,
- columns=factors.index,
- )
+ if cntry in ["UA", "MD"]:
+ # overwrite factor because nuts3 provides no data for UA+MD
+ factors = normed(ua_md_gdp.loc[group.index, "GDP_PPP"].squeeze())
+ return pd.DataFrame(
+ factors.values * l.values[:, np.newaxis],
+ index=l.index,
+ columns=factors.index,
+ )

  load = pd.concat(
  [
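Illustrative sketch, not part of the diff: the disaggregation above blends a GDP and a population key (0.6/0.4) and, for UA/MD, falls back to a pure GDP key. The numbers, region names and the `normed` helper below are assumptions mirroring the diff, not the project's data.

    import pandas as pd

    def normed(s: pd.Series) -> pd.Series:
        # scale a series so that its entries sum to one
        return s / s.sum()

    # hypothetical regional attributes for one country
    regions = pd.DataFrame({"gdp": [120.0, 80.0], "pop": [2.0, 6.0]}, index=["R1", "R2"])

    # blend GDP and population keys as in the diff above
    factors = normed(0.6 * normed(regions["gdp"]) + 0.4 * normed(regions["pop"]))

    # national hourly load (two snapshots, in MW) spread onto the regions
    national_load = pd.Series([1000.0, 1200.0], name="load")
    regional_load = pd.DataFrame(
        factors.values * national_load.values[:, None],
        index=national_load.index,
        columns=factors.index,
    )
    print(regional_load.sum(axis=1))  # recovers the national series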
@@ -362,7 +366,6 @@ def attach_wind_and_solar(
  n, costs, input_profiles, carriers, extendable_carriers, line_length_factor=1
  ):
  add_missing_carriers(n, carriers)

  for car in carriers:
  if car == "hydro":
  continue
@@ -410,6 +413,7 @@ def attach_wind_and_solar(
  capital_cost=capital_cost,
  efficiency=costs.at[supcar, "efficiency"],
  p_max_pu=ds["profile"].transpose("time", "bus").to_pandas(),
+ lifetime=costs.at[supcar, "lifetime"],
  )


@@ -421,6 +425,8 @@ def attach_conventional_generators(
  extendable_carriers,
  conventional_params,
  conventional_inputs,
+ unit_commitment=None,
+ fuel_price=None,
  ):
  carriers = list(set(conventional_carriers) | set(extendable_carriers["Generator"]))
  add_missing_carriers(n, carriers)
@@ -436,18 +442,37 @@ def attach_conventional_generators(
  ppl = (
  ppl.query("carrier in @carriers")
  .join(costs, on="carrier", rsuffix="_r")
- .rename(index=lambda s: "C" + str(s))
+ .rename(index=lambda s: f"C{str(s)}")
  )
  ppl["efficiency"] = ppl.efficiency.fillna(ppl.efficiency_r)
- ppl["marginal_cost"] = (
- ppl.carrier.map(costs.VOM) + ppl.carrier.map(costs.fuel) / ppl.efficiency
- )

- logger.info(
- "Adding {} generators with capacities [GW] \n{}".format(
- len(ppl), ppl.groupby("carrier").p_nom.sum().div(1e3).round(2)
+ if unit_commitment is not None:
+ committable_attrs = ppl.carrier.isin(unit_commitment).to_frame("committable")
+ for attr in unit_commitment.index:
+ default = pypsa.components.component_attrs["Generator"].default[attr]
+ committable_attrs[attr] = ppl.carrier.map(unit_commitment.loc[attr]).fillna(
+ default
+ )
+ else:
+ committable_attrs = {}

+ if fuel_price is not None:
+ fuel_price = fuel_price.assign(
+ OCGT=fuel_price["gas"], CCGT=fuel_price["gas"]
+ ).drop("gas", axis=1)
+ missing_carriers = list(set(carriers) - set(fuel_price))
+ fuel_price = fuel_price.assign(**costs.fuel[missing_carriers])
+ fuel_price = fuel_price.reindex(ppl.carrier, axis=1)
+ fuel_price.columns = ppl.index
+ marginal_cost = fuel_price.div(ppl.efficiency).add(ppl.carrier.map(costs.VOM))
+ else:
+ marginal_cost = (
+ ppl.carrier.map(costs.VOM) + ppl.carrier.map(costs.fuel) / ppl.efficiency
  )
- )
+
+ # Define generators using modified ppl DataFrame
+ caps = ppl.groupby("carrier").p_nom.sum().div(1e3).round(2)
+ logger.info(f"Adding {len(ppl)} generators with capacities [GW] \n{caps}")

  n.madd(
  "Generator",
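Illustrative sketch, not part of the diff: how a time-varying marginal cost can be derived from an hourly fuel price, mirroring the `fuel_price` branch above. The carrier names, prices, efficiencies and the plain `vom` dictionary are made-up stand-ins for `costs.VOM`.

    import pandas as pd

    # hypothetical hourly gas price in EUR/MWh_th and two gas-fired plants
    snapshots = pd.date_range("2019-01-01", periods=3, freq="h")
    fuel_price = pd.DataFrame({"gas": [20.0, 25.0, 30.0]}, index=snapshots)

    ppl = pd.DataFrame(
        {"carrier": ["OCGT", "CCGT"], "efficiency": [0.41, 0.58]},
        index=["C1", "C2"],
    )
    vom = pd.Series({"OCGT": 4.5, "CCGT": 4.5})  # EUR/MWh_el, illustrative

    # map the gas price to the gas-fired carriers, one column per plant
    price = fuel_price.assign(OCGT=fuel_price["gas"], CCGT=fuel_price["gas"]).drop("gas", axis=1)
    price = price.reindex(ppl.carrier, axis=1)
    price.columns = ppl.index

    # EUR/MWh_el = fuel price / efficiency + VOM
    marginal_cost = price.div(ppl.efficiency).add(ppl.carrier.map(vom))
    print(marginal_cost)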
@@ -458,13 +483,14 @@ def attach_conventional_generators(
  p_nom=ppl.p_nom.where(ppl.carrier.isin(conventional_carriers), 0),
  p_nom_extendable=ppl.carrier.isin(extendable_carriers["Generator"]),
  efficiency=ppl.efficiency,
- marginal_cost=ppl.marginal_cost,
+ marginal_cost=marginal_cost,
  capital_cost=ppl.capital_cost,
  build_year=ppl.datein.fillna(0).astype(int),
  lifetime=(ppl.dateout - ppl.datein).fillna(np.inf),
+ **committable_attrs,
  )

- for carrier in conventional_params:
+ for carrier in set(conventional_params) & set(carriers):
  # Generators with technology affected
  idx = n.generators.query("carrier == @carrier").index

@@ -493,7 +519,7 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **par
  ppl = (
  ppl.query('carrier == "hydro"')
  .reset_index(drop=True)
- .rename(index=lambda s: str(s) + " hydro")
+ .rename(index=lambda s: f"{str(s)} hydro")
  )
  ror = ppl.query('technology == "Run-Of-River"')
  phs = ppl.query('technology == "Pumped Storage"')
@@ -590,14 +616,19 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **par
  )
  if not missing_countries.empty:
  logger.warning(
- "Assuming max_hours=6 for hydro reservoirs in the countries: {}".format(
- ", ".join(missing_countries)
- )
+ f'Assuming max_hours=6 for hydro reservoirs in the countries: {", ".join(missing_countries)}'
  )
  hydro_max_hours = hydro.max_hours.where(
  hydro.max_hours > 0, hydro.country.map(max_hours_country)
  ).fillna(6)

+ if flatten_dispatch := params.get("flatten_dispatch", False):
+ buffer = params.get("flatten_dispatch_buffer", 0.2)
+ average_capacity_factor = inflow_t[hydro.index].mean() / hydro["p_nom"]
+ p_max_pu = (average_capacity_factor + buffer).clip(upper=1)
+ else:
+ p_max_pu = 1
+
  n.madd(
  "StorageUnit",
  hydro.index,
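Illustrative sketch, not part of the diff: the `flatten_dispatch` option caps hydro dispatch near its average inflow-based capacity factor plus a buffer. The inflow and capacity numbers below are fabricated; the 0.2 buffer matches the default used above.

    import pandas as pd

    # hypothetical hourly inflow (MW) and installed capacity (MW) for two reservoirs
    inflow_t = pd.DataFrame({"hydro1": [30.0, 50.0, 40.0], "hydro2": [5.0, 10.0, 15.0]})
    p_nom = pd.Series({"hydro1": 100.0, "hydro2": 20.0})

    buffer = 0.2  # allow dispatch a bit above the average inflow level
    average_capacity_factor = inflow_t.mean() / p_nom
    p_max_pu = (average_capacity_factor + buffer).clip(upper=1)
    print(p_max_pu)  # hydro1: 0.6, hydro2: 0.7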
@@ -607,7 +638,7 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **par
  max_hours=hydro_max_hours,
  capital_cost=costs.at["hydro", "capital_cost"],
  marginal_cost=costs.at["hydro", "marginal_cost"],
- p_max_pu=1.0,  # dispatch
+ p_max_pu=p_max_pu,  # dispatch
  p_min_pu=0.0,  # store
  efficiency_dispatch=costs.at["hydro", "efficiency"],
  efficiency_store=0.0,
@@ -687,7 +718,17 @@ def attach_extendable_generators(n, costs, ppl, carriers):
  )


- def attach_OPSD_renewables(n, tech_map):
+ def attach_OPSD_renewables(n: pypsa.Network, tech_map: Dict[str, List[str]]) -> None:
+ """
+ Attach renewable capacities from the OPSD dataset to the network.
+
+ Args:
+ - n: The PyPSA network to attach the capacities to.
+ - tech_map: A dictionary mapping fuel types to carrier names.
+
+ Returns:
+ - None
+ """
  tech_string = ", ".join(sum(tech_map.values(), []))
  logger.info(f"Using OPSD renewable capacities for carriers {tech_string}.")

@@ -697,13 +738,14 @@ def attach_OPSD_renewables(n, tech_map):
  {"Solar": "PV"}
  )
  df = df.query("Fueltype in @tech_map").powerplant.convert_country_to_alpha2()
+ df = df.dropna(subset=["lat", "lon"])

  for fueltype, carriers in tech_map.items():
  gens = n.generators[lambda df: df.carrier.isin(carriers)]
  buses = n.buses.loc[gens.bus.unique()]
  gens_per_bus = gens.groupby("bus").p_nom.count()

- caps = map_country_bus(df.query("Fueltype == @fueltype and lat == lat"), buses)
+ caps = map_country_bus(df.query("Fueltype == @fueltype"), buses)
  caps = caps.groupby(["bus"]).Capacity.sum()
  caps = caps / gens_per_bus.reindex(caps.index, fill_value=1)

@@ -711,7 +753,26 @@ def attach_OPSD_renewables(n, tech_map):
  n.generators.p_nom_min.update(gens.bus.map(caps).dropna())


- def estimate_renewable_capacities(n, year, tech_map, expansion_limit, countries):
+ def estimate_renewable_capacities(
+ n: pypsa.Network, year: int, tech_map: dict, expansion_limit: bool, countries: list
+ ) -> None:
+ """
+ Estimate a different between renewable capacities in the network and
+ reported country totals from IRENASTAT dataset. Distribute the difference
+ with a heuristic.
+
+ Heuristic: n.generators_t.p_max_pu.mean() * n.generators.p_nom_max
+
+ Args:
+ - n: The PyPSA network.
+ - year: The year of optimisation.
+ - tech_map: A dictionary mapping fuel types to carrier names.
+ - expansion_limit: Boolean value from config file
+ - countries: A list of country codes to estimate capacities for.
+
+ Returns:
+ - None
+ """
  if not len(countries) or not len(tech_map):
  return

@@ -728,7 +789,10 @@ def estimate_renewable_capacities(n, year, tech_map, expansion_limit, countries)

  for ppm_technology, techs in tech_map.items():
  tech_i = n.generators.query("carrier in @techs").index
- stats = capacities.loc[ppm_technology].reindex(countries, fill_value=0.0)
+ if ppm_technology in capacities.index.get_level_values("Technology"):
+ stats = capacities.loc[ppm_technology].reindex(countries, fill_value=0.0)
+ else:
+ stats = pd.Series(0.0, index=countries)
  country = n.generators.bus[tech_i].map(n.buses.country)
  existent = n.generators.p_nom[tech_i].groupby(country).sum()
  missing = stats - existent
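Illustrative sketch, not part of the diff: the docstring above names the distribution heuristic for the capacity gap. The toy numbers below show how a missing national total could be spread across generators in proportion to `p_max_pu.mean() * p_nom_max`; all values are fabricated.

    import pandas as pd

    # two existing solar generators in one country, with expansion potential
    gens = pd.DataFrame(
        {"p_nom": [50.0, 100.0], "p_nom_max": [400.0, 600.0]},
        index=["solar A", "solar B"],
    )
    mean_cf = pd.Series({"solar A": 0.12, "solar B": 0.10})  # mean p_max_pu

    reported_total = 450.0  # national statistic (MW), e.g. from IRENASTAT
    missing = reported_total - gens.p_nom.sum()  # 300 MW still unaccounted for

    # distribute the gap proportional to expected yield, as in the heuristic
    key = mean_cf * gens.p_nom_max
    p_nom_new = gens.p_nom + missing * key / key.sum()
    print(p_nom_new)  # solar A: 50 + 133.3, solar B: 100 + 166.7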
@@ -752,6 +816,30 @@ def estimate_renewable_capacities(n, year, tech_map, expansion_limit, countries)
  )


+ def attach_line_rating(
+ n, rating, s_max_pu, correction_factor, max_voltage_difference, max_line_rating
+ ):
+ # TODO: Only considers overhead lines
+ n.lines_t.s_max_pu = (rating / n.lines.s_nom[rating.columns]) * correction_factor
+ if max_voltage_difference:
+ x_pu = (
+ n.lines.type.map(n.line_types["x_per_length"])
+ * n.lines.length
+ / (n.lines.v_nom**2)
+ )
+ # need to clip here as cap values might be below 1
+ # -> would mean the line cannot be operated at actual given pessimistic ampacity
+ s_max_pu_cap = (
+ np.deg2rad(max_voltage_difference) / (x_pu * n.lines.s_nom)
+ ).clip(lower=1)
+ n.lines_t.s_max_pu = n.lines_t.s_max_pu.clip(
+ lower=1, upper=s_max_pu_cap, axis=1
+ )
+ if max_line_rating:
+ n.lines_t.s_max_pu = n.lines_t.s_max_pu.clip(upper=max_line_rating)
+ n.lines_t.s_max_pu *= s_max_pu
+
+
  if __name__ == "__main__":
  if "snakemake" not in globals():
  from _helpers import mock_snakemake
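Illustrative sketch, not part of the diff: a numerical reading of the voltage-angle cap in `attach_line_rating`. With a maximum angle difference Δθ_max, the loading cap in per unit of `s_nom` is roughly `deg2rad(Δθ_max) / (x_pu * s_nom)`. The line parameters below are made up but of a typical order of magnitude.

    import numpy as np

    # illustrative 380 kV overhead line
    length_km = 100.0
    x_per_length = 0.3           # Ohm/km, typical order of magnitude
    v_nom = 380.0                # kV
    s_nom = 1700.0               # MVA
    max_voltage_difference = 30  # degrees

    # series reactance in per unit (on the v_nom**2 / MVA base used in the diff)
    x_pu = x_per_length * length_km / v_nom**2

    # loading cap in per unit of s_nom implied by the angle limit
    s_max_pu_cap = np.deg2rad(max_voltage_difference) / (x_pu * s_nom)
    print(round(x_pu, 6), round(s_max_pu_cap, 2))  # about 0.000208 and 1.48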
@@ -777,6 +865,7 @@ if __name__ == "__main__":
  snakemake.input.regions,
  snakemake.input.load,
  snakemake.input.nuts3_shapes,
+ snakemake.input.ua_md_gdp,
  params.countries,
  params.scaling_factor,
  )
@@ -789,6 +878,20 @@ if __name__ == "__main__":
  conventional_inputs = {
  k: v for k, v in snakemake.input.items() if k.startswith("conventional_")
  }

+ if params.conventional["unit_commitment"]:
+ unit_commitment = pd.read_csv(snakemake.input.unit_commitment, index_col=0)
+ else:
+ unit_commitment = None
+
+ if params.conventional["dynamic_fuel_price"]:
+ fuel_price = pd.read_csv(
+ snakemake.input.fuel_price, index_col=0, header=0, parse_dates=True
+ )
+ fuel_price = fuel_price.reindex(n.snapshots).fillna(method="ffill")
+ else:
+ fuel_price = None
+
  attach_conventional_generators(
  n,
  costs,
@@ -797,6 +900,8 @@ if __name__ == "__main__":
  extendable_carriers,
  params.conventional,
  conventional_inputs,
+ unit_commitment=unit_commitment,
+ fuel_price=fuel_price,
  )

  attach_wind_and_solar(
@@ -809,15 +914,16 @@ if __name__ == "__main__":
  )

  if "hydro" in renewable_carriers:
- para = params.renewable["hydro"]
+ p = params.renewable["hydro"]
+ carriers = p.pop("carriers", [])
  attach_hydro(
  n,
  costs,
  ppl,
  snakemake.input.profile_hydro,
  snakemake.input.hydro_capacities,
- para.pop("carriers", []),
+ carriers,
- **para,
+ **p,
  )

  estimate_renewable_caps = params.electricity["estimate_renewable_capacities"]
@@ -834,6 +940,23 @@ if __name__ == "__main__":

  update_p_nom_max(n)

+ line_rating_config = snakemake.config["lines"]["dynamic_line_rating"]
+ if line_rating_config["activate"]:
+ rating = xr.open_dataarray(snakemake.input.line_rating).to_pandas().transpose()
+ s_max_pu = snakemake.config["lines"]["s_max_pu"]
+ correction_factor = line_rating_config["correction_factor"]
+ max_voltage_difference = line_rating_config["max_voltage_difference"]
+ max_line_rating = line_rating_config["max_line_rating"]
+
+ attach_line_rating(
+ n,
+ rating,
+ s_max_pu,
+ correction_factor,
+ max_voltage_difference,
+ max_line_rating,
+ )
+
  sanitize_carriers(n, snakemake.config)

  n.meta = snakemake.config
@@ -45,7 +45,7 @@ def add_build_year_to_new_assets(n, baseyear):

  # add -baseyear to name
  rename = pd.Series(c.df.index, c.df.index)
- rename[assets] += "-" + str(baseyear)
+ rename[assets] += f"-{str(baseyear)}"
  c.df.rename(index=rename, inplace=True)

  # rename time-dependent
@@ -88,7 +88,9 @@ def add_existing_renewables(df_agg):
  ]
  cfs = n.generators_t.p_max_pu[gens].mean()
  cfs_key = cfs / cfs.sum()
- nodal_fraction.loc[n.generators.loc[gens, "bus"]] = cfs_key.values
+ nodal_fraction.loc[n.generators.loc[gens, "bus"]] = cfs_key.groupby(
+ n.generators.loc[gens, "bus"]
+ ).sum()

  nodal_df = df.loc[n.buses.loc[elec_buses, "country"]]
  nodal_df.index = elec_buses
@@ -252,7 +254,7 @@ def add_power_capacities_installed_before_baseyear(n, grouping_years, costs, bas
  if "m" in snakemake.wildcards.clusters:
  for ind in new_capacity.index:
  # existing capacities are split evenly among regions in every country
- inv_ind = [i for i in inv_busmap[ind]]
+ inv_ind = list(inv_busmap[ind])

  # for offshore the splitting only includes coastal regions
  inv_ind = [
@@ -305,6 +307,18 @@ def add_power_capacities_installed_before_baseyear(n, grouping_years, costs, bas
  if "EU" not in vars(spatial)[carrier[generator]].locations:
  bus0 = bus0.intersection(capacity.index + " gas")

+ # check for missing bus
+ missing_bus = pd.Index(bus0).difference(n.buses.index)
+ if not missing_bus.empty:
+ logger.info(f"add buses {bus0}")
+ n.madd(
+ "Bus",
+ bus0,
+ carrier=generator,
+ location=vars(spatial)[carrier[generator]].locations,
+ unit="MWh_el",
+ )
+
  already_build = n.links.index.intersection(asset_i)
  new_build = asset_i.difference(n.links.index)
  lifetime_assets = lifetime.loc[grouping_year, generator].dropna()
@@ -435,15 +449,23 @@ def add_heating_capacities_installed_before_baseyear(

  # split existing capacities between residential and services
  # proportional to energy demand
+ p_set_sum = n.loads_t.p_set.sum()
  ratio_residential = pd.Series(
  [
  (
- n.loads_t.p_set.sum()[f"{node} residential rural heat"]
+ p_set_sum[f"{node} residential rural heat"]
  / (
- n.loads_t.p_set.sum()[f"{node} residential rural heat"]
+ p_set_sum[f"{node} residential rural heat"]
- + n.loads_t.p_set.sum()[f"{node} services rural heat"]
+ + p_set_sum[f"{node} services rural heat"]
  )
  )
+ # if rural heating demand for one of the nodes doesn't exist,
+ # then columns were dropped before and heating demand share should be 0.0
+ if all(
+ f"{node} {service} rural heat" in p_set_sum.index
+ for service in ["residential", "services"]
+ )
+ else 0.0
  for node in nodal_df.index
  ],
  index=nodal_df.index,
@@ -525,13 +547,17 @@ def add_heating_capacities_installed_before_baseyear(
  bus0=nodes[name],
  bus1=nodes[name] + " " + name + " heat",
  carrier=name + " resistive heater",
- efficiency=costs.at[name_type + " resistive heater", "efficiency"],
- capital_cost=costs.at[name_type + " resistive heater", "efficiency"]
- * costs.at[name_type + " resistive heater", "fixed"],
- p_nom=0.5
- * nodal_df[f"{heat_type} resistive heater"][nodes[name]]
- * ratio
- / costs.at[name_type + " resistive heater", "efficiency"],
+ efficiency=costs.at[f"{name_type} resistive heater", "efficiency"],
+ capital_cost=(
+ costs.at[f"{name_type} resistive heater", "efficiency"]
+ * costs.at[f"{name_type} resistive heater", "fixed"]
+ ),
+ p_nom=(
+ 0.5
+ * nodal_df[f"{heat_type} resistive heater"][nodes[name]]
+ * ratio
+ / costs.at[f"{name_type} resistive heater", "efficiency"]
+ ),
  build_year=int(grouping_year),
  lifetime=costs.at[costs_name, "lifetime"],
  )
@@ -544,16 +570,20 @@ def add_heating_capacities_installed_before_baseyear(
  bus1=nodes[name] + " " + name + " heat",
  bus2="co2 atmosphere",
  carrier=name + " gas boiler",
- efficiency=costs.at[name_type + " gas boiler", "efficiency"],
+ efficiency=costs.at[f"{name_type} gas boiler", "efficiency"],
  efficiency2=costs.at["gas", "CO2 intensity"],
- capital_cost=costs.at[name_type + " gas boiler", "efficiency"]
- * costs.at[name_type + " gas boiler", "fixed"],
- p_nom=0.5
- * nodal_df[f"{heat_type} gas boiler"][nodes[name]]
- * ratio
- / costs.at[name_type + " gas boiler", "efficiency"],
+ capital_cost=(
+ costs.at[f"{name_type} gas boiler", "efficiency"]
+ * costs.at[f"{name_type} gas boiler", "fixed"]
+ ),
+ p_nom=(
+ 0.5
+ * nodal_df[f"{heat_type} gas boiler"][nodes[name]]
+ * ratio
+ / costs.at[f"{name_type} gas boiler", "efficiency"]
+ ),
  build_year=int(grouping_year),
- lifetime=costs.at[name_type + " gas boiler", "lifetime"],
+ lifetime=costs.at[f"{name_type} gas boiler", "lifetime"],
  )

  n.madd(
@@ -573,7 +603,7 @@ def add_heating_capacities_installed_before_baseyear(
  * ratio
  / costs.at["decentral oil boiler", "efficiency"],
  build_year=int(grouping_year),
- lifetime=costs.at[name_type + " gas boiler", "lifetime"],
+ lifetime=costs.at[f"{name_type} gas boiler", "lifetime"],
  )

  # delete links with p_nom=nan corresponding to extra nodes in country
@@ -597,21 +627,24 @@ def add_heating_capacities_installed_before_baseyear(
  ],
  )

+ # drop assets which are at the end of their lifetime
+ links_i = n.links[(n.links.build_year + n.links.lifetime <= baseyear)].index
+ n.mremove("Link", links_i)


- # %%
  if __name__ == "__main__":
  if "snakemake" not in globals():
  from _helpers import mock_snakemake

  snakemake = mock_snakemake(
  "add_existing_baseyear",
- configfiles="config/test/config.myopic.yaml",
+ # configfiles="config/test/config.myopic.yaml",
  simpl="",
- clusters="5",
+ clusters="37",
- ll="v1.5",
+ ll="v1.0",
  opts="",
- sector_opts="24H-T-H-B-I-A-solar+p3-dist1",
+ sector_opts="1p7-4380H-T-H-B-I-A-solar+p3-dist1",
- planning_horizons=2030,
+ planning_horizons=2020,
  )

  logging.basicConfig(level=snakemake.config["logging"]["level"])
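Illustrative sketch, not part of the diff: the residential/services split above allocates existing heating capacity in proportion to annual heat demand, falling back to 0.0 when a node lacks both rural heat loads. Node name and demand values below are fabricated.

    import pandas as pd

    # hypothetical annual heat demand per load (MWh)
    p_set_sum = pd.Series(
        {
            "DE0 residential rural heat": 8_000.0,
            "DE0 services rural heat": 2_000.0,
        }
    )

    node = "DE0"
    has_both = all(
        f"{node} {service} rural heat" in p_set_sum.index
        for service in ["residential", "services"]
    )

    ratio_residential = (
        p_set_sum[f"{node} residential rural heat"]
        / (
            p_set_sum[f"{node} residential rural heat"]
            + p_set_sum[f"{node} services rural heat"]
        )
        if has_both
        else 0.0
    )
    print(ratio_residential)  # 0.8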
@@ -151,9 +151,7 @@ def _load_buses_from_eg(eg_buses, europe_shape, config_elec):
  buses.v_nom.isin(config_elec["voltages"]) | buses.v_nom.isnull()
  )
  logger.info(
- "Removing buses with voltages {}".format(
- pd.Index(buses.v_nom.unique()).dropna().difference(config_elec["voltages"])
- )
+ f'Removing buses with voltages {pd.Index(buses.v_nom.unique()).dropna().difference(config_elec["voltages"])}'
  )

  return pd.DataFrame(buses.loc[buses_in_europe_b & buses_with_v_nom_to_keep_b])
@@ -337,7 +335,7 @@ def _load_lines_from_eg(buses, eg_lines):
  )

  lines["length"] /= 1e3
+ lines["carrier"] = "AC"
  lines = _remove_dangling_branches(lines, buses)

  return lines
@@ -368,6 +366,25 @@ def _apply_parameter_corrections(n, parameter_corrections):
  df.loc[inds, attr] = r[inds].astype(df[attr].dtype)


+ def _reconnect_crimea(lines):
+ logger.info("Reconnecting Crimea to the Ukrainian grid.")
+ lines_to_crimea = pd.DataFrame(
+ {
+ "bus0": ["3065", "3181", "3181"],
+ "bus1": ["3057", "3055", "3057"],
+ "v_nom": [300, 300, 300],
+ "num_parallel": [1, 1, 1],
+ "length": [140, 120, 140],
+ "carrier": ["AC", "AC", "AC"],
+ "underground": [False, False, False],
+ "under_construction": [False, False, False],
+ },
+ index=["Melitopol", "Liubymivka left", "Luibymivka right"],
+ )
+
+ return pd.concat([lines, lines_to_crimea])
+
+
  def _set_electrical_parameters_lines(lines, config):
  v_noms = config["electricity"]["voltages"]
  linetypes = config["lines"]["types"]
@@ -452,19 +469,15 @@ def _remove_dangling_branches(branches, buses):
  )


- def _remove_unconnected_components(network):
+ def _remove_unconnected_components(network, threshold=6):
  _, labels = csgraph.connected_components(network.adjacency_matrix(), directed=False)
  component = pd.Series(labels, index=network.buses.index)

  component_sizes = component.value_counts()
- components_to_remove = component_sizes.iloc[1:]
+ components_to_remove = component_sizes.loc[component_sizes < threshold]

  logger.info(
- "Removing {} unconnected network components with less than {} buses. In total {} buses.".format(
- len(components_to_remove),
- components_to_remove.max(),
- components_to_remove.sum(),
- )
+ f"Removing {len(components_to_remove)} unconnected network components with less than {components_to_remove.max()} buses. In total {components_to_remove.sum()} buses."
  )

  return network[component == component_sizes.index[0]]
@@ -547,7 +560,7 @@ def _set_countries_and_substations(n, config, country_shapes, offshore_shapes):
  ~buses["under_construction"]
  )

- c_nan_b = buses.country.isnull()
+ c_nan_b = buses.country == "na"
  if c_nan_b.sum() > 0:
  c_tag = _get_country(buses.loc[c_nan_b])
  c_tag.loc[~c_tag.isin(countries)] = np.nan
@@ -705,6 +718,9 @@ def base_network(
  lines = _load_lines_from_eg(buses, eg_lines)
  transformers = _load_transformers_from_eg(buses, eg_transformers)

+ if config["lines"].get("reconnect_crimea", True) and "UA" in config["countries"]:
+ lines = _reconnect_crimea(lines)
+
  lines = _set_electrical_parameters_lines(lines, config)
  transformers = _set_electrical_parameters_transformers(transformers, config)
  links = _set_electrical_parameters_links(links, config, links_p_nom)
@@ -7,9 +7,15 @@ Compute biogas and solid biomass potentials for each clustered model region
  using data from JRC ENSPRESO.
  """

+ import logging
+
+ logger = logging.getLogger(__name__)
  import geopandas as gpd
+ import numpy as np
  import pandas as pd

+ AVAILABLE_BIOMASS_YEARS = [2010, 2020, 2030, 2040, 2050]


  def build_nuts_population_data(year=2013):
  pop = pd.read_csv(
@@ -208,13 +214,41 @@ if __name__ == "__main__":
  if "snakemake" not in globals():
  from _helpers import mock_snakemake

- snakemake = mock_snakemake("build_biomass_potentials", simpl="", clusters="5")
+ snakemake = mock_snakemake(
+ "build_biomass_potentials",
+ simpl="",
+ clusters="5",
+ planning_horizons=2050,
+ )
+
+ overnight = snakemake.config["foresight"] == "overnight"
  params = snakemake.params.biomass
- year = params["year"]
+ investment_year = int(snakemake.wildcards.planning_horizons)
+ year = params["year"] if overnight else investment_year
  scenario = params["scenario"]

- enspreso = enspreso_biomass_potentials(year, scenario)
+ if year > 2050:
+ logger.info("No biomass potentials for years after 2050, using 2050.")
+ max_year = max(AVAILABLE_BIOMASS_YEARS)
+ enspreso = enspreso_biomass_potentials(max_year, scenario)
+
+ elif year not in AVAILABLE_BIOMASS_YEARS:
+ before = int(np.floor(year / 10) * 10)
+ after = int(np.ceil(year / 10) * 10)
+ logger.info(
+ f"No biomass potentials for {year}, interpolating linearly between {before} and {after}."
+ )
+
+ enspreso_before = enspreso_biomass_potentials(before, scenario)
+ enspreso_after = enspreso_biomass_potentials(after, scenario)
+
+ fraction = (year - before) / (after - before)
+
+ enspreso = enspreso_before + fraction * (enspreso_after - enspreso_before)
+
+ else:
+ logger.info(f"Using biomass potentials for {year}.")
+ enspreso = enspreso_biomass_potentials(year, scenario)

  enspreso = disaggregate_nuts0(enspreso)

@@ -229,7 +263,7 @@ if __name__ == "__main__":
  df.to_csv(snakemake.output.biomass_potentials_all)

  grouper = {v: k for k, vv in params["classes"].items() for v in vv}
- df = df.groupby(grouper, axis=1).sum()
+ df = df.T.groupby(grouper).sum().T

  df *= 1e6  # TWh/a to MWh/a
  df.index.name = "MWh/a"
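Illustrative sketch, not part of the diff: the linear interpolation of biomass potentials between the available decade years, with fabricated country totals in TWh/a.

    import numpy as np
    import pandas as pd

    AVAILABLE_BIOMASS_YEARS = [2010, 2020, 2030, 2040, 2050]

    # hypothetical ENSPRESO potentials for the two bracketing decades
    potentials = {
        2030: pd.Series({"DE": 100.0, "FR": 80.0}),
        2040: pd.Series({"DE": 120.0, "FR": 90.0}),
    }

    year = 2035
    before = int(np.floor(year / 10) * 10)  # 2030
    after = int(np.ceil(year / 10) * 10)    # 2040
    fraction = (year - before) / (after - before)

    interpolated = potentials[before] + fraction * (potentials[after] - potentials[before])
    print(interpolated)  # DE 110.0, FR 85.0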
scripts/build_cross_border_flows.py (new file, 65 lines)
@@ -0,0 +1,65 @@
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+ # SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors
+ #
+ # SPDX-License-Identifier: MIT
+
+ import logging
+
+ import pandas as pd
+ import pypsa
+ from _helpers import configure_logging
+ from entsoe import EntsoePandasClient
+ from entsoe.exceptions import InvalidBusinessParameterError, NoMatchingDataError
+ from requests import HTTPError
+
+ logger = logging.getLogger(__name__)
+
+ if __name__ == "__main__":
+ if "snakemake" not in globals():
+ from _helpers import mock_snakemake
+
+ snakemake = mock_snakemake("build_cross_border_flows")
+ configure_logging(snakemake)
+
+ api_key = snakemake.config["private"]["keys"]["entsoe_api"]
+ client = EntsoePandasClient(api_key=api_key)
+
+ n = pypsa.Network(snakemake.input.network)
+ start = pd.Timestamp(snakemake.params.snapshots["start"], tz="Europe/Brussels")
+ end = pd.Timestamp(snakemake.params.snapshots["end"], tz="Europe/Brussels")
+
+ branches = n.branches().query("carrier in ['AC', 'DC']")
+ c = n.buses.country
+ branch_countries = pd.concat([branches.bus0.map(c), branches.bus1.map(c)], axis=1)
+ branch_countries = branch_countries.query("bus0 != bus1")
+ branch_countries = branch_countries.apply(sorted, axis=1, result_type="broadcast")
+ country_pairs = branch_countries.drop_duplicates().reset_index(drop=True)
+
+ flows = []
+ unavailable_borders = []
+ for from_country, to_country in country_pairs.values:
+ try:
+ flow_directed = client.query_crossborder_flows(
+ from_country, to_country, start=start, end=end
+ )
+ flow_reverse = client.query_crossborder_flows(
+ to_country, from_country, start=start, end=end
+ )
+ flow = (flow_directed - flow_reverse).rename(
+ f"{from_country} - {to_country}"
+ )
+ flow = flow.tz_localize(None).resample("1h").mean()
+ flow = flow.loc[start.tz_localize(None) : end.tz_localize(None)]
+ flows.append(flow)
+ except (HTTPError, NoMatchingDataError, InvalidBusinessParameterError):
+ unavailable_borders.append(f"{from_country}-{to_country}")
+
+ if unavailable_borders:
+ logger.warning(
+ "Historical electricity cross-border flows for countries"
+ f" {', '.join(unavailable_borders)} not available."
+ )
+
+ flows = pd.concat(flows, axis=1)
+ flows.to_csv(snakemake.output[0])
@@ -31,7 +31,7 @@ Relevant Settings
  Inputs
  ------

- - ``data/load_raw.csv``:
+ - ``resources/load_raw.csv``:

  Outputs
  -------
@@ -82,6 +82,7 @@ def load_timeseries(fn, years, countries, powerstatistics=True):

  return (
  pd.read_csv(fn, index_col=0, parse_dates=[0], date_format="%Y-%m-%dT%H:%M:%SZ")
+ .tz_localize(None)
  .filter(like=pattern)
  .rename(columns=rename)
  .dropna(how="all", axis=0)
@@ -154,7 +155,7 @@ def copy_timeslice(load, cntry, start, stop, delta, fn_load=None):
  ].values


- def manual_adjustment(load, fn_load, powerstatistics):
+ def manual_adjustment(load, fn_load, powerstatistics, countries):
  """
  Adjust gaps manual for load data from OPSD time-series package.

@@ -165,6 +166,7 @@ def manual_adjustment(load, fn_load, powerstatistics):
  by the corresponding ratio of total energy consumptions reported by
  IEA Data browser [0] for the year 2013.


  2. For the ENTSOE transparency load data (if powerstatistics is False)

  Albania (AL) and Macedonia (MK) do not exist in the data set. Both get the
@@ -173,6 +175,9 @@ def manual_adjustment(load, fn_load, powerstatistics):

  [0] https://www.iea.org/data-and-statistics?country=WORLD&fuel=Electricity%20and%20heat&indicator=TotElecCons

+ Bosnia and Herzegovina (BA) does not exist in the data set for 2019. It gets the
+ electricity consumption data from Croatia (HR) for the year 2019, scaled by the
+ factors derived from https://energy.at-site.be/eurostat-2021/
+
  Parameters
  ----------
@@ -261,9 +266,25 @@ def manual_adjustment(load, fn_load, powerstatistics):
  load["AL"] = load.ME * (5.7 / 2.9)
  if "MK" not in load and "MK" in countries:
  load["MK"] = load.ME * (6.7 / 2.9)
+ if "BA" not in load and "BA" in countries:
+ load["BA"] = load.HR * (11.0 / 16.2)
  copy_timeslice(
  load, "BG", "2018-10-27 21:00", "2018-10-28 22:00", Delta(weeks=1)
  )
+ copy_timeslice(
+ load, "LU", "2019-01-02 11:00", "2019-01-05 05:00", Delta(weeks=-1)
+ )
+ copy_timeslice(
+ load, "LU", "2019-02-05 20:00", "2019-02-06 19:00", Delta(weeks=-1)
+ )
+
+ if "UA" in countries:
+ copy_timeslice(
+ load, "UA", "2013-01-25 14:00", "2013-01-28 21:00", Delta(weeks=1)
+ )
+ copy_timeslice(
+ load, "UA", "2013-10-28 03:00", "2013-10-28 20:00", Delta(weeks=1)
+ )
+
  return load

@@ -285,8 +306,25 @@ if __name__ == "__main__":

  load = load_timeseries(snakemake.input[0], years, countries, powerstatistics)

+ if "UA" in countries:
+ # attach load of UA (best data only for entsoe transparency)
+ load_ua = load_timeseries(snakemake.input[0], "2018", ["UA"], False)
+ snapshot_year = str(snapshots.year.unique().item())
+ time_diff = pd.Timestamp("2018") - pd.Timestamp(snapshot_year)
+ load_ua.index -= (
+ time_diff  # hack indices (currently, UA is manually set to 2018)
+ )
+ load["UA"] = load_ua
+ # attach load of MD (no time-series available, use 2020-totals and distribute according to UA):
+ # https://www.iea.org/data-and-statistics/data-browser/?country=MOLDOVA&fuel=Energy%20consumption&indicator=TotElecCons
+ if "MD" in countries:
+ load["MD"] = 6.2e6 * (load_ua / load_ua.sum())
+
  if snakemake.params.load["manual_adjustments"]:
- load = manual_adjustment(load, snakemake.input[0], powerstatistics)
+ load = manual_adjustment(load, snakemake.input[0], powerstatistics, countries)
+
+ if load.empty:
+ logger.warning("Build electricity demand time series is empty.")

  logger.info(f"Linearly interpolate gaps of size {interpolate_limit} and less.")
  load = load.interpolate(method="linear", limit=interpolate_limit)
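Illustrative sketch, not part of the diff: the UA handling above re-indexes a 2018 time series onto the modelled snapshot year by subtracting the timestamp difference. The series below is fabricated.

    import pandas as pd

    # hypothetical hourly UA load reported for 2018
    load_ua = pd.Series(
        [10.0, 11.0, 12.0],
        index=pd.date_range("2018-01-01", periods=3, freq="h"),
    )

    snapshot_year = "2013"  # year of the model snapshots
    time_diff = pd.Timestamp("2018") - pd.Timestamp(snapshot_year)

    # shift the timestamps back so they line up with the snapshot year
    load_ua.index -= time_diff
    print(load_ua.index[0])  # 2013-01-01 00:00:00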
scripts/build_electricity_prices.py (new file, 52 lines)
@@ -0,0 +1,52 @@
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+ # SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors
+ #
+ # SPDX-License-Identifier: MIT
+
+ import logging
+
+ import pandas as pd
+ from _helpers import configure_logging
+ from entsoe import EntsoePandasClient
+ from entsoe.exceptions import NoMatchingDataError
+
+ logger = logging.getLogger(__name__)
+
+ if __name__ == "__main__":
+ if "snakemake" not in globals():
+ from _helpers import mock_snakemake
+
+ snakemake = mock_snakemake("build_cross_border_flows")
+ configure_logging(snakemake)
+
+ api_key = snakemake.config["private"]["keys"]["entsoe_api"]
+ client = EntsoePandasClient(api_key=api_key)
+
+ start = pd.Timestamp(snakemake.params.snapshots["start"], tz="Europe/Brussels")
+ end = pd.Timestamp(snakemake.params.snapshots["end"], tz="Europe/Brussels")
+
+ countries = snakemake.params.countries
+
+ prices = []
+ unavailable_countries = []
+
+ for country in countries:
+ country_code = country
+
+ try:
+ gen = client.query_day_ahead_prices(country, start=start, end=end)
+ gen = gen.tz_localize(None).resample("1h").mean()
+ gen = gen.loc[start.tz_localize(None) : end.tz_localize(None)]
+ prices.append(gen)
+ except NoMatchingDataError:
+ unavailable_countries.append(country)
+
+ if unavailable_countries:
+ logger.warning(
+ f"Historical electricity prices for countries {', '.join(unavailable_countries)} not available."
+ )
+
+ keys = [c for c in countries if c not in unavailable_countries]
+ prices = pd.concat(prices, keys=keys, axis=1)
+ prices.to_csv(snakemake.output[0])
scripts/build_electricity_production.py (new file, 73 lines)
@@ -0,0 +1,73 @@
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+ # SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors
+ #
+ # SPDX-License-Identifier: MIT
+
+ import logging
+
+ import pandas as pd
+ from _helpers import configure_logging
+ from entsoe import EntsoePandasClient
+ from entsoe.exceptions import NoMatchingDataError
+
+ logger = logging.getLogger(__name__)
+
+
+ carrier_grouper = {
+ "Waste": "Biomass",
+ "Hydro Pumped Storage": "Hydro",
+ "Hydro Water Reservoir": "Hydro",
+ "Hydro Run-of-river and poundage": "Run of River",
+ "Fossil Coal-derived gas": "Gas",
+ "Fossil Gas": "Gas",
+ "Fossil Oil": "Oil",
+ "Fossil Oil shale": "Oil",
+ "Fossil Brown coal/Lignite": "Lignite",
+ "Fossil Peat": "Lignite",
+ "Fossil Hard coal": "Coal",
+ "Wind Onshore": "Onshore Wind",
+ "Wind Offshore": "Offshore Wind",
+ "Other renewable": "Other",
+ "Marine": "Other",
+ }
+
+
+ if __name__ == "__main__":
+ if "snakemake" not in globals():
+ from _helpers import mock_snakemake
+
+ snakemake = mock_snakemake("build_electricity_production")
+ configure_logging(snakemake)
+
+ api_key = snakemake.config["private"]["keys"]["entsoe_api"]
+ client = EntsoePandasClient(api_key=api_key)
+
+ start = pd.Timestamp(snakemake.params.snapshots["start"], tz="Europe/Brussels")
+ end = pd.Timestamp(snakemake.params.snapshots["end"], tz="Europe/Brussels")
+
+ countries = snakemake.params.countries
+
+ generation = []
+ unavailable_countries = []
+
+ for country in countries:
+ country_code = country
+
+ try:
+ gen = client.query_generation(country, start=start, end=end, nett=True)
+ gen = gen.tz_localize(None).resample("1h").mean()
+ gen = gen.loc[start.tz_localize(None) : end.tz_localize(None)]
+ gen = gen.rename(columns=carrier_grouper).groupby(level=0, axis=1).sum()
+ generation.append(gen)
+ except NoMatchingDataError:
+ unavailable_countries.append(country)
+
+ if unavailable_countries:
+ logger.warning(
+ f"Historical electricity production for countries {', '.join(unavailable_countries)} not available."
+ )
+
+ keys = [c for c in countries if c not in unavailable_countries]
+ generation = pd.concat(generation, keys=keys, axis=1)
+ generation.to_csv(snakemake.output[0])
@@ -172,8 +172,6 @@ def build_swiss(year):


  def idees_per_country(ct, year, base_dir):
- ct_totals = {}

  ct_idees = idees_rename.get(ct, ct)
  fn_residential = f"{base_dir}/JRC-IDEES-2015_Residential_{ct_idees}.xlsx"
  fn_tertiary = f"{base_dir}/JRC-IDEES-2015_Tertiary_{ct_idees}.xlsx"
@@ -183,11 +181,11 @@ def idees_per_country(ct, year, base_dir):

  df = pd.read_excel(fn_residential, "RES_hh_fec", index_col=0)[year]

- ct_totals["total residential space"] = df["Space heating"]

  rows = ["Advanced electric heating", "Conventional electric heating"]
- ct_totals["electricity residential space"] = df[rows].sum()
+ ct_totals = {
+ "total residential space": df["Space heating"],
+ "electricity residential space": df[rows].sum(),
+ }
  ct_totals["total residential water"] = df.at["Water heating"]

  assert df.index[23] == "Electricity"
@@ -29,25 +29,25 @@ def diameter_to_capacity(pipe_diameter_mm):
  Based on p.15 of
  https://gasforclimate2050.eu/wp-content/uploads/2020/07/2020_European-Hydrogen-Backbone_Report.pdf
  """
- # slopes definitions
- m0 = (1500 - 0) / (500 - 0)
  m1 = (5000 - 1500) / (600 - 500)
  m2 = (11250 - 5000) / (900 - 600)
- m3 = (21700 - 11250) / (1200 - 900)

- # intercept
- a0 = 0
  a1 = -16000
  a2 = -7500
- a3 = -20100

  if pipe_diameter_mm < 500:
+ # slopes definitions
+ m0 = (1500 - 0) / (500 - 0)
+ # intercept
+ a0 = 0
  return a0 + m0 * pipe_diameter_mm
  elif pipe_diameter_mm < 600:
  return a1 + m1 * pipe_diameter_mm
  elif pipe_diameter_mm < 900:
  return a2 + m2 * pipe_diameter_mm
  else:
+ m3 = (21700 - 11250) / (1200 - 900)
+
+ a3 = -20100
+
  return a3 + m3 * pipe_diameter_mm

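Illustrative sketch, not part of the diff: the piecewise-linear diameter-to-capacity relation above restated as one standalone function with a sample evaluation, so the segment boundaries can be sanity-checked. The printed capacity uses the same units as the underlying report's capacity axis.

    def diameter_to_capacity(pipe_diameter_mm: float) -> float:
        # piecewise-linear fit from the European Hydrogen Backbone report
        if pipe_diameter_mm < 500:
            return 0 + (1500 - 0) / (500 - 0) * pipe_diameter_mm
        elif pipe_diameter_mm < 600:
            return -16000 + (5000 - 1500) / (600 - 500) * pipe_diameter_mm
        elif pipe_diameter_mm < 900:
            return -7500 + (11250 - 5000) / (900 - 600) * pipe_diameter_mm
        else:
            return -20100 + (21700 - 11250) / (1200 - 900) * pipe_diameter_mm

    print(round(diameter_to_capacity(700)))  # about 7083 for a 700 mm pipe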
@@ -26,7 +26,7 @@ Relevant Settings
  Inputs
  ------

- - ``data/bundle/EIA_hydro_generation_2000_2014.csv``: Hydroelectricity net generation per country and year (`EIA <https://www.eia.gov/beta/international/data/browser/#/?pa=000000000000000000000000000000g&c=1028i008006gg6168g80a4k000e0ag00gg0004g800ho00g8&ct=0&ug=8&tl_id=2-A&vs=INTL.33-12-ALB-BKWH.A&cy=2014&vo=0&v=H&start=2000&end=2016>`_)
+ - ``data/bundle/eia_hydro_annual_generation.csv``: Hydroelectricity net generation per country and year (`EIA <https://www.eia.gov/beta/international/data/browser/#/?pa=000000000000000000000000000000g&c=1028i008006gg6168g80a4k000e0ag00gg0004g800ho00g8&ct=0&ug=8&tl_id=2-A&vs=INTL.33-12-ALB-BKWH.A&cy=2014&vo=0&v=H&start=2000&end=2016>`_)

  .. image:: img/hydrogeneration.png
  :scale: 33 %
@@ -72,12 +72,14 @@ cc = coco.CountryConverter()

  def get_eia_annual_hydro_generation(fn, countries):
  # in billion kWh/a = TWh/a
- df = pd.read_csv(fn, skiprows=2, index_col=1, na_values=[" ", "--"]).iloc[1:, 1:]
+ df = pd.read_csv(
+ fn, skiprows=2, index_col=1, na_values=[" ", "--"], decimal=","
+ ).iloc[1:, 1:]
  df.index = df.index.str.strip()

  former_countries = {
  "Former Czechoslovakia": dict(
- countries=["Czech Republic", "Slovakia"], start=1980, end=1992
+ countries=["Czechia", "Slovakia"], start=1980, end=1992
  ),
  "Former Serbia and Montenegro": dict(
  countries=["Serbia", "Montenegro"], start=1992, end=2005
|
@ -13,10 +13,13 @@ logger = logging.getLogger(__name__)
|
|||||||
import uuid
|
import uuid
|
||||||
from itertools import product
|
from itertools import product
|
||||||
|
|
||||||
|
import country_converter as coco
|
||||||
import geopandas as gpd
|
import geopandas as gpd
|
||||||
import pandas as pd
|
import pandas as pd
|
||||||
from packaging.version import Version, parse
|
from packaging.version import Version, parse
|
||||||
|
|
||||||
|
cc = coco.CountryConverter()
|
||||||
|
|
||||||
|
|
||||||
def locate_missing_industrial_sites(df):
|
def locate_missing_industrial_sites(df):
|
||||||
"""
|
"""
|
||||||
@ -93,6 +96,17 @@ def prepare_hotmaps_database(regions):
|
|||||||
gdf.rename(columns={"index_right": "bus"}, inplace=True)
|
gdf.rename(columns={"index_right": "bus"}, inplace=True)
|
||||||
gdf["country"] = gdf.bus.str[:2]
|
gdf["country"] = gdf.bus.str[:2]
|
||||||
|
|
||||||
|
# the .sjoin can lead to duplicates if a geom is in two overlapping regions
|
||||||
|
if gdf.index.duplicated().any():
|
||||||
|
# get all duplicated entries
|
||||||
|
duplicated_i = gdf.index[gdf.index.duplicated()]
|
||||||
|
# convert from raw data country name to iso-2-code
|
||||||
|
code = cc.convert(gdf.loc[duplicated_i, "Country"], to="iso2")
|
||||||
|
# screen out malformed country allocation
|
||||||
|
gdf_filtered = gdf.loc[duplicated_i].query("country == @code")
|
||||||
|
# concat not duplicated and filtered gdf
|
||||||
|
gdf = pd.concat([gdf.drop(duplicated_i), gdf_filtered])
|
||||||
|
|
||||||
return gdf
|
return gdf
|
||||||
|
|
||||||
|
|
||||||
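Illustrative sketch, not part of the diff: the duplicate screening above in miniature. The facility, region codes and the hard-coded `code` list are fabricated; in the script the code list comes from country_converter.

    import pandas as pd

    # a facility that landed in two overlapping regions after the spatial join:
    # one copy was assigned to a DE bus, the other to an FR bus, while the raw
    # dataset says the plant is located in Germany
    gdf = pd.DataFrame(
        {"country": ["DE", "FR"], "Country": ["Germany", "Germany"]},
        index=["site_1", "site_1"],
    )

    duplicated_i = gdf.index[gdf.index.duplicated()]  # the repeated label
    # in the script this comes from country_converter; hard-coded here
    code = ["DE", "DE"]
    gdf_filtered = gdf.loc[duplicated_i].query("country == @code")  # keeps the DE copy
    gdf = pd.concat([gdf.drop(duplicated_i), gdf_filtered])
    print(gdf)  # a single row for site_1, allocated to DE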
@@ -115,7 +129,9 @@ def build_nodal_distribution_key(hotmaps, regions, countries):
  facilities = hotmaps.query("country == @country and Subsector == @sector")

  if not facilities.empty:
- emissions = facilities["Emissions_ETS_2014"]
+ emissions = facilities["Emissions_ETS_2014"].fillna(
+ hotmaps["Emissions_EPRTR_2014"]
+ )
  if emissions.sum() == 0:
  key = pd.Series(1 / len(facilities), facilities.index)
  else:
@@ -138,7 +154,7 @@ if __name__ == "__main__":
  snakemake = mock_snakemake(
  "build_industrial_distribution_key",
  simpl="",
- clusters=48,
+ clusters=128,
  )

  logging.basicConfig(level=snakemake.config["logging"]["level"])
@@ -167,9 +167,7 @@ def industrial_energy_demand(countries, year):
  with mp.Pool(processes=nprocesses) as pool:
  demand_l = list(tqdm(pool.imap(func, countries), **tqdm_kwargs))

- demand = pd.concat(demand_l, keys=countries)
+ return pd.concat(demand_l, keys=countries)

- return demand


  if __name__ == "__main__":
scripts/build_line_rating.py (new executable file, 154 lines)
@@ -0,0 +1,154 @@
+ # -*- coding: utf-8 -*-
+ # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
+ #
+ # SPDX-License-Identifier: MIT
+
+ # coding: utf-8
+ """
+ Adds dynamic line rating timeseries to the base network.
+
+ Relevant Settings
+ -----------------
+
+ .. code:: yaml
+
+ lines:
+ cutout:
+ line_rating:
+
+
+ .. seealso::
+ Documentation of the configuration file ``config.yaml`
+ Inputs
+ ------
+
+ - ``data/cutouts``:
+ - ``networks/base.nc``: confer :ref:`base`
+
+ Outputs
+ -------
+
+ - ``resources/line_rating.nc``
+
+
+ Description
+ -----------
+
+ The rule :mod:`build_line_rating` calculates the line rating for transmission lines.
+ The line rating provides the maximal capacity of a transmission line considering the heat exchange with the environment.
+
+ The following heat gains and losses are considered:
+
+ - heat gain through resistive losses
+ - heat gain through solar radiation
+ - heat loss through radiation of the transmission line
+ - heat loss through forced convection with wind
+ - heat loss through natural convection
+
+
+ With a heat balance considering the maximum temperature threshold of the transmission line,
+ the maximal possible capacity factor "s_max_pu" for each transmission line at each time step is calculated.
+ """
+
+ import logging
+ import re
+
+ import atlite
+ import geopandas as gpd
+ import numpy as np
+ import pandas as pd
+ import pypsa
+ import xarray as xr
+ from _helpers import configure_logging
+ from shapely.geometry import LineString as Line
+ from shapely.geometry import Point
+
+
+ def calculate_resistance(T, R_ref, T_ref=293, alpha=0.00403):
+ """
+ Calculates the resistance at other temperatures than the reference
+ temperature.
+
+ Parameters
+ ----------
+ T : Temperature at which resistance is calculated in [°C] or [K]
+ R_ref : Resistance at reference temperature in [Ohm] or [Ohm/Per Length Unit]
+ T_ref : Reference temperature in [°C] or [K]
+ alpha: Temperature coefficient in [1/K]
+ Defaults are:
+ * T_ref : 20 °C
+ * alpha : 0.00403 1/K
+
+ Returns
+ -------
+ Resistance of at given temperature.
+ """
+ return R_ref * (1 + alpha * (T - T_ref))
+
+
|
def calculate_line_rating(n, cutout):
|
||||||
|
"""
|
||||||
|
Calculates the maximal allowed power flow in each line for each time step
|
||||||
|
considering the maximal temperature.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
n : pypsa.Network object containing information on grid
|
||||||
|
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
xarray DataArray object with maximal power.
|
||||||
|
"""
|
||||||
|
relevant_lines = n.lines[(n.lines["underground"] == False)]
|
||||||
|
buses = relevant_lines[["bus0", "bus1"]].values
|
||||||
|
x = n.buses.x
|
||||||
|
y = n.buses.y
|
||||||
|
shapes = [Line([Point(x[b0], y[b0]), Point(x[b1], y[b1])]) for (b0, b1) in buses]
|
||||||
|
shapes = gpd.GeoSeries(shapes, index=relevant_lines.index)
|
||||||
|
if relevant_lines.r_pu.eq(0).all():
|
||||||
|
# Overwrite standard line resistance with line resistance obtained from line type
|
||||||
|
r_per_length = n.line_types["r_per_length"]
|
||||||
|
R = (
|
||||||
|
relevant_lines.join(r_per_length, on=["type"])["r_per_length"] / 1000
|
||||||
|
) # in meters
|
||||||
|
# If line type with bundles is given retrieve number of conductors per bundle
|
||||||
|
relevant_lines["n_bundle"] = (
|
||||||
|
relevant_lines["type"]
|
||||||
|
.where(relevant_lines["type"].str.contains("bundle"))
|
||||||
|
.dropna()
|
||||||
|
.apply(lambda x: int(re.findall(r"(\d+)-bundle", x)[0]))
|
||||||
|
)
|
||||||
|
# Set default number of bundles per line
|
||||||
|
relevant_lines["n_bundle"].fillna(1, inplace=True)
|
||||||
|
R *= relevant_lines["n_bundle"]
|
||||||
|
R = calculate_resistance(T=353, R_ref=R)
|
||||||
|
Imax = cutout.line_rating(shapes, R, D=0.0218, Ts=353, epsilon=0.8, alpha=0.8)
|
||||||
|
line_factor = relevant_lines.eval("v_nom * n_bundle * num_parallel") / 1e3 # in mW
|
||||||
|
return xr.DataArray(
|
||||||
|
data=np.sqrt(3) * Imax * line_factor.values.reshape(-1, 1),
|
||||||
|
attrs=dict(
|
||||||
|
description="Maximal possible power in MW for given line considering line rating"
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
if "snakemake" not in globals():
|
||||||
|
from _helpers import mock_snakemake
|
||||||
|
|
||||||
|
snakemake = mock_snakemake(
|
||||||
|
"build_line_rating",
|
||||||
|
network="elec",
|
||||||
|
simpl="",
|
||||||
|
clusters="5",
|
||||||
|
ll="v1.0",
|
||||||
|
opts="Co2L-4H",
|
||||||
|
)
|
||||||
|
configure_logging(snakemake)
|
||||||
|
|
||||||
|
n = pypsa.Network(snakemake.input.base_network)
|
||||||
|
time = pd.date_range(freq="h", **snakemake.config["snapshots"])
|
||||||
|
cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time)
|
||||||
|
|
||||||
|
da = calculate_line_rating(n, cutout)
|
||||||
|
da.to_netcdf(snakemake.output[0])
|
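A minimal numeric sketch of the temperature correction R(T) = R_ref * (1 + alpha * (T - T_ref)) used above, with a hypothetical per-kilometre reference resistance:

def calculate_resistance(T, R_ref, T_ref=293, alpha=0.00403):
    # linear temperature correction of the ohmic resistance
    return R_ref * (1 + alpha * (T - T_ref))

# hypothetical AC resistance of 0.06 Ohm/km at the 293 K reference,
# evaluated at the 353 K maximum conductor temperature used in the rule above
R_hot = calculate_resistance(T=353, R_ref=0.06)
print(round(R_hot, 4))  # 0.0745 Ohm/km, roughly 24 % higher at the temperature limit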
scripts/build_monthly_prices.py (new file, 122 lines)
@@ -0,0 +1,122 @@
# -*- coding: utf-8 -*-
# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: MIT

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue May 16 10:37:35 2023.

This script extracts monthly fuel prices of oil, gas, coal and lignite,
as well as CO2 prices


Inputs
------

- ``data/energy-price-trends-xlsx-5619002.xlsx``: energy price index of fossil fuels
- ``emission-spot-primary-market-auction-report-2019-data.xls``: CO2 Prices spot primary auction


Outputs
-------

- ``data/validation/monthly_fuel_price.csv``
- ``data/validation/CO2_price_2019.csv``


Description
-----------

The rule :mod:`build_monthly_prices` collects monthly fuel prices and CO2 prices
and translates them from different input sources to pypsa syntax

Data sources:
    [1] Fuel price index. Destatis
    https://www.destatis.de/EN/Home/_node.html
    [2] average annual fuel price lignite, ENTSO-E
    https://2020.entsos-tyndp-scenarios.eu/fuel-commodities-and-carbon-prices/
    [3] CO2 Prices, Emission spot primary auction, EEX
    https://www.eex.com/en/market-data/environmental-markets/eua-primary-auction-spot-download


Data was accessed at 16.5.2023
"""

import logging

import pandas as pd
from _helpers import configure_logging

logger = logging.getLogger(__name__)


# keywords in datasheet
keywords = {
    "coal": " GP09-051 Hard coal",
    "lignite": " GP09-052 Lignite and lignite briquettes",
    "oil": " GP09-0610 10 Mineral oil, crude",
    "gas": "GP09-062 Natural gas",
}

# sheet names to pypsa syntax
sheet_name_map = {
    "coal": "5.1 Hard coal and lignite",
    "lignite": "5.1 Hard coal and lignite",
    "oil": "5.2 Mineral oil",
    "gas": "5.3.1 Natural gas - indices",
}


# import fuel price 2015 in Eur/MWh
# source lignite, price for 2020, scaled by price index, ENTSO-E [3]
price_2020 = (
    pd.Series({"coal": 3.0, "oil": 10.6, "gas": 5.6, "lignite": 1.1}) * 3.6
)  # Eur/MWh

# manual adjustment of coal price
price_2020["coal"] = 2.4 * 3.6
price_2020["lignite"] = 1.6 * 3.6


def get_fuel_price():
    price = {}
    for carrier, keyword in keywords.items():
        sheet_name = sheet_name_map[carrier]
        df = pd.read_excel(
            snakemake.input.fuel_price_raw,
            sheet_name=sheet_name,
            index_col=0,
            skiprows=6,
            nrows=18,
        )
        df = df.dropna(axis=0).iloc[:, :12]
        start, end = df.index[0], str(int(df.index[-1][:4]) + 1)
        df = df.stack()
        df.index = pd.date_range(start=start, end=end, freq="MS", inclusive="left")
        scale = price_2020[carrier] / df["2020"].mean()  # scale to 2020 price
        df = df.mul(scale)
        price[carrier] = df

    return pd.concat(price, axis=1)


def get_co2_price():
    # emission price
    co2_price = pd.read_excel(snakemake.input.co2_price_raw, index_col=1, header=5)
    return co2_price["Auction Price €/tCO2"]


if __name__ == "__main__":
    if "snakemake" not in globals():
        from _helpers import mock_snakemake

        snakemake = mock_snakemake("build_monthly_prices")

    configure_logging(snakemake)

    fuel_price = get_fuel_price()
    fuel_price.to_csv(snakemake.output.fuel_price)

    co2_price = get_co2_price()
    co2_price.to_csv(snakemake.output.co2_price)
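A minimal sketch of the scaling step in get_fuel_price(), with hypothetical index values: the monthly price index is anchored so that its 2020 mean equals the assumed absolute 2020 price in Eur/MWh:

import pandas as pd

# toy monthly price index (hypothetical values)
index_series = pd.Series(
    [100.0, 105.0, 110.0, 120.0],
    index=pd.to_datetime(["2020-01-01", "2020-07-01", "2021-01-01", "2021-07-01"]),
)

assumed_price_2020 = 2.4 * 3.6  # Eur/MWh, e.g. the hard coal assumption above

scale = assumed_price_2020 / index_series.loc["2020"].mean()
monthly_price = index_series * scale  # absolute monthly prices in Eur/MWh
print(monthly_price.round(2))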
@@ -54,6 +54,23 @@ logger = logging.getLogger(__name__)


 def determine_cutout_xXyY(cutout_name):
+    """
+    Determine the full extent of a cutout.
+
+    Since the coordinates of the cutout data are given as the
+    center of the grid cells, the extent of the cutout is
+    calculated by adding/subtracting half of the grid cell size.
+
+
+    Parameters
+    ----------
+    cutout_name : str
+        Path to the cutout.
+
+    Returns
+    -------
+    A list of extent coordinates in the order [x, X, y, Y].
+    """
     cutout = atlite.Cutout(cutout_name)
     assert cutout.crs.to_epsg() == 4326
     x, X, y, Y = cutout.extent
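A small numeric sketch of the half-grid-cell padding described in the docstring, using a hypothetical 0.25 degree cutout:

import numpy as np

# hypothetical cell-centre coordinates of a cutout with 0.25 degree resolution
x_centres = np.arange(-12.0, 35.0 + 0.25, 0.25)
y_centres = np.arange(33.0, 72.0 + 0.25, 0.25)
dx = dy = 0.25

# pad by half a grid cell so the extent covers the full cells, not just their centres
x, X = x_centres.min() - dx / 2, x_centres.max() + dx / 2
y, Y = y_centres.min() - dy / 2, y_centres.max() + dy / 2
print([x, X, y, Y])  # approx. [-12.125, 35.125, 32.875, 72.125]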
@@ -89,7 +89,7 @@ logger = logging.getLogger(__name__)
 def add_custom_powerplants(ppl, custom_powerplants, custom_ppl_query=False):
     if not custom_ppl_query:
         return ppl
-    add_ppls = pd.read_csv(custom_powerplants, index_col=0, dtype={"bus": "str"})
+    add_ppls = pd.read_csv(custom_powerplants, dtype={"bus": "str"})
     if isinstance(custom_ppl_query, str):
         add_ppls.query(custom_ppl_query, inplace=True)
     return pd.concat(
@@ -146,8 +146,7 @@ if __name__ == "__main__":
         ppl, snakemake.input.custom_powerplants, custom_ppl_query
     )

-    countries_wo_ppl = set(countries) - set(ppl.Country.unique())
-    if countries_wo_ppl:
+    if countries_wo_ppl := set(countries) - set(ppl.Country.unique()):
         logging.warning(f"No powerplants known in: {', '.join(countries_wo_ppl)}")

     substations = n.buses.query("substation_lv")
@@ -186,6 +186,7 @@ import time
 import atlite
 import geopandas as gpd
 import numpy as np
+import pandas as pd
 import xarray as xr
 from _helpers import configure_logging
 from dask.distributed import Client
@@ -222,7 +223,8 @@ if __name__ == "__main__":
     else:
         client = None

-    cutout = atlite.Cutout(snakemake.input.cutout)
+    sns = pd.date_range(freq="h", **snakemake.config["snapshots"])
+    cutout = atlite.Cutout(snakemake.input.cutout).sel(time=sns)
     regions = gpd.read_file(snakemake.input.regions)
     assert not regions.empty, (
         f"List of regions in {snakemake.input.regions} is empty, please "
@@ -249,7 +251,7 @@ if __name__ == "__main__":
             snakemake.input.corine, codes=codes, buffer=buffer, crs=3035
         )

-    if "ship_threshold" in params:
+    if params.get("ship_threshold"):
         shipping_threshold = (
             params["ship_threshold"] * 8760 * 6
         )  # approximation because 6 years of data which is hourly collected
@@ -285,6 +287,14 @@ if __name__ == "__main__":
     else:
         availability = cutout.availabilitymatrix(regions, excluder, **kwargs)

+    # For Moldova and Ukraine: Overwrite parts not covered by Corine with
+    # externally determined available areas
+    if "availability_matrix_MD_UA" in snakemake.input.keys():
+        availability_MDUA = xr.open_dataarray(
+            snakemake.input["availability_matrix_MD_UA"]
+        )
+        availability.loc[availability_MDUA.coords] = availability_MDUA
+
     area = cutout.grid.to_crs(3035).area / 1e6
     area = xr.DataArray(
         area.values.reshape(cutout.shape), [cutout.coords["y"], cutout.coords["x"]]
@@ -372,4 +382,6 @@ if __name__ == "__main__":
     ds["profile"] = ds["profile"].where(ds["profile"] >= min_p_max_pu, 0)

     ds.to_netcdf(snakemake.output.profile)
-    client.shutdown()
+
+    if client is not None:
+        client.shutdown()
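A minimal xarray sketch (hypothetical shapes and bus names) of the coordinate-based overwrite used for the MD/UA availability above: values of one DataArray replace the matching slice of another, leaving all other buses untouched:

import numpy as np
import xarray as xr

y, x = np.arange(3), np.arange(4)
availability = xr.DataArray(
    np.zeros((3, 3, 4)),
    coords={"bus": ["DE0", "UA0", "MD0"], "y": y, "x": x},
    dims=("bus", "y", "x"),
)
availability_MDUA = xr.DataArray(
    np.ones((2, 3, 4)),
    coords={"bus": ["UA0", "MD0"], "y": y, "x": x},
    dims=("bus", "y", "x"),
)

# overwrite only the coordinates present in the second array
availability.loc[availability_MDUA.coords] = availability_MDUA
print(availability.sel(bus="DE0").values.sum(), availability.sel(bus="UA0").values.sum())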
@@ -102,7 +102,7 @@ solar_energy_transmittance = (
 )
 # solar global radiation [kWh/(m^2a)]
 solar_global_radiation = pd.Series(
-    [246, 401, 246, 148],
+    [271, 392, 271, 160],
     index=["east", "south", "west", "north"],
     name="solar_global_radiation [kWh/(m^2a)]",
 )
@@ -164,6 +164,12 @@ def prepare_building_stock_data():
         },
         inplace=True,
     )
+    building_data["feature"].replace(
+        {
+            "Construction features (U-value)": "Construction features (U-values)",
+        },
+        inplace=True,
+    )

     building_data.country_code = building_data.country_code.str.upper()
     building_data["subsector"].replace(
@@ -198,12 +204,14 @@ def prepare_building_stock_data():
         }
     )

+    building_data["country_code"] = building_data["country"].map(country_iso_dic)
+
     # heated floor area ----------------------------------------------------------
     area = building_data[
         (building_data.type == "Heated area [Mm²]")
         & (building_data.subsector != "Total")
     ]
-    area_tot = area.groupby(["country", "sector"]).sum()
+    area_tot = area[["country", "sector", "value"]].groupby(["country", "sector"]).sum()
     area = pd.concat(
         [
             area,
@@ -223,7 +231,7 @@ def prepare_building_stock_data():
         usecols=[0, 1, 2, 3],
         encoding="ISO-8859-1",
     )
-    area_tot = area_tot.append(area_missing.unstack(level=-1).dropna().stack())
+    area_tot = pd.concat([area_tot, area_missing.unstack(level=-1).dropna().stack()])
     area_tot = area_tot.loc[~area_tot.index.duplicated(keep="last")]

     # for still missing countries calculate floor area by population size
@@ -246,7 +254,7 @@ def prepare_building_stock_data():
         averaged_data.index = index
         averaged_data["estimated"] = 1
         if ct not in area_tot.index.levels[0]:
-            area_tot = area_tot.append(averaged_data, sort=True)
+            area_tot = pd.concat([area_tot, averaged_data], sort=True)
         else:
             area_tot.loc[averaged_data.index] = averaged_data

@@ -272,7 +280,7 @@ def prepare_building_stock_data():
        ][x["bage"]].iloc[0],
        axis=1,
    )
-    data_PL_final = data_PL_final.append(data_PL)
+    data_PL_final = pd.concat([data_PL_final, data_PL])

    u_values = pd.concat([u_values, data_PL_final]).reset_index(drop=True)

@@ -609,12 +617,11 @@ def calculate_costs(u_values, l, cost_retro, window_assumptions):
            / x.A_C_Ref
            if x.name[3] != "Window"
            else (
-                window_cost(x["new_U_{}".format(l)], cost_retro, window_assumptions)
-                * x.A_element
+                (window_cost(x[f"new_U_{l}"], cost_retro, window_assumptions) * x.A_element)
                / x.A_C_Ref
-                if x.value > window_limit(float(l), window_assumptions)
-                else 0
-            ),
+            )
+            if x.value > window_limit(float(l), window_assumptions)
+            else 0,
            axis=1,
        )

@@ -739,12 +746,12 @@ def calculate_heat_losses(u_values, data_tabula, l_strength, temperature_factor)
     # (1) by transmission
     # calculate new U values of building elements due to additional insulation
     for l in l_strength:
-        u_values["new_U_{}".format(l)] = calculate_new_u(
+        u_values[f"new_U_{l}"] = calculate_new_u(
             u_values, l, l_weight, window_assumptions
         )
     # surface area of building components [m^2]
     area_element = (
-        data_tabula[["A_{}".format(e) for e in u_values.index.levels[3]]]
+        data_tabula[[f"A_{e}" for e in u_values.index.levels[3]]]
         .rename(columns=lambda x: x[2:])
         .stack()
         .unstack(-2)
@@ -756,7 +763,7 @@ def calculate_heat_losses(u_values, data_tabula, l_strength, temperature_factor)

     # heat transfer H_tr_e [W/m^2K] through building element
     # U_e * A_e / A_C_Ref
-    columns = ["value"] + ["new_U_{}".format(l) for l in l_strength]
+    columns = ["value"] + [f"new_U_{l}" for l in l_strength]
     heat_transfer = pd.concat(
         [u_values[columns].mul(u_values.A_element, axis=0), u_values.A_element], axis=1
     )
@@ -875,10 +882,7 @@ def calculate_gain_utilisation_factor(heat_transfer_perm2, Q_ht, Q_gain):
     alpha = alpha_H_0 + (tau / tau_H_0)
     # heat balance ratio
     gamma = (1 / Q_ht).mul(Q_gain.sum(axis=1), axis=0)
-    # gain utilisation factor
-    nu = (1 - gamma**alpha) / (1 - gamma ** (alpha + 1))
-
-    return nu
+    return (1 - gamma**alpha) / (1 - gamma ** (alpha + 1))


 def calculate_space_heat_savings(
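Worked numbers for the gain utilisation factor returned above, nu = (1 - gamma**alpha) / (1 - gamma**(alpha + 1)), with illustrative values not taken from the project data:

gamma = 0.5   # heat gains are half of the heat losses (hypothetical)
alpha = 1.3   # alpha_H_0 + tau / tau_H_0 (hypothetical)
nu = (1 - gamma**alpha) / (1 - gamma ** (alpha + 1))
print(round(nu, 3))  # 0.745: roughly three quarters of the gains offset losses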
@@ -947,7 +951,8 @@ def sample_dE_costs_area(
            .rename(index=rename_sectors, level=2)
            .reset_index()
        )
-        .rename(columns={"country": "country_code"})
+        # if uncommented, leads to the second `country_code` column
+        # .rename(columns={"country": "country_code"})
        .set_index(["country_code", "subsector", "bage"])
    )

@@ -960,13 +965,14 @@ def sample_dE_costs_area(
     )

     # map missing countries
-    for ct in countries.difference(cost_dE.index.levels[0]):
+    for ct in set(countries).difference(cost_dE.index.levels[0]):
         averaged_data = (
             cost_dE.reindex(index=map_for_missings[ct], level=0)
-            .mean(level=1)
+            .groupby(level=1)
+            .mean()
             .set_index(pd.MultiIndex.from_product([[ct], cost_dE.index.levels[1]]))
         )
-        cost_dE = cost_dE.append(averaged_data)
+        cost_dE = pd.concat([cost_dE, averaged_data])

     # weights costs after construction index
     if construction_index:
@@ -983,24 +989,23 @@ def sample_dE_costs_area(
     # drop not considered countries
     cost_dE = cost_dE.reindex(countries, level=0)
     # get share of residential and service floor area
-    sec_w = area_tot.value / area_tot.value.groupby(level=0).sum()
+    sec_w = area_tot.div(area_tot.groupby(level=0).transform("sum"))
     # get the total cost-energy-savings weight by sector area
     tot = (
-        cost_dE.mul(sec_w, axis=0)
-        .groupby(level="country_code")
+        # sec_w has columns "estimated" and "value"
+        cost_dE.mul(sec_w.value, axis=0)
+        # for some reasons names of the levels were lost somewhere
+        # .groupby(level="country_code")
+        .groupby(level=0)
         .sum()
-        .set_index(
-            pd.MultiIndex.from_product(
-                [cost_dE.index.unique(level="country_code"), ["tot"]]
-            )
-        )
+        .set_index(pd.MultiIndex.from_product([cost_dE.index.unique(level=0), ["tot"]]))
     )
-    cost_dE = cost_dE.append(tot).unstack().stack()
+    cost_dE = pd.concat([cost_dE, tot]).unstack().stack()

-    summed_area = pd.DataFrame(area_tot.groupby("country").sum()).set_index(
-        pd.MultiIndex.from_product([area_tot.index.unique(level="country"), ["tot"]])
+    summed_area = pd.DataFrame(area_tot.groupby(level=0).sum()).set_index(
+        pd.MultiIndex.from_product([area_tot.index.unique(level=0), ["tot"]])
     )
-    area_tot = area_tot.append(summed_area).unstack().stack()
+    area_tot = pd.concat([area_tot, summed_area]).unstack().stack()

     cost_per_saving = cost_dE["cost"] / (
         1 - cost_dE["dE"]
@@ -66,11 +66,7 @@ def salt_cavern_potential_by_region(caverns, regions):
         "capacity_per_area * share * area_caverns / 1000"
     )  # TWh

-    caverns_regions = (
-        overlay.groupby(["name", "storage_type"]).e_nom.sum().unstack("storage_type")
-    )
-
-    return caverns_regions
+    return overlay.groupby(["name", "storage_type"]).e_nom.sum().unstack("storage_type")


 if __name__ == "__main__":
@@ -28,9 +28,7 @@ def allocate_sequestration_potential(
     overlay["share"] = area(overlay) / overlay["area_sqkm"]
     adjust_cols = overlay.columns.difference({"name", "area_sqkm", "geometry", "share"})
     overlay[adjust_cols] = overlay[adjust_cols].multiply(overlay["share"], axis=0)
-    gdf_regions = overlay.groupby("name").sum()
-    gdf_regions.drop(["area_sqkm", "share"], axis=1, inplace=True)
-    return gdf_regions.squeeze()
+    return overlay.dissolve("name", aggfunc="sum")[attr]


 if __name__ == "__main__":
@@ -119,7 +119,7 @@ def countries(naturalearth, country_list):
     fieldnames = (
         df[x].where(lambda s: s != "-99") for x in ("ISO_A2", "WB_A2", "ADM0_A3")
     )
-    df["name"] = reduce(lambda x, y: x.fillna(y), fieldnames, next(fieldnames)).str[0:2]
+    df["name"] = reduce(lambda x, y: x.fillna(y), fieldnames, next(fieldnames)).str[:2]

     df = df.loc[
         df.name.isin(country_list) & ((df["scalerank"] == 0) | (df["scalerank"] == 5))
@@ -174,8 +174,8 @@ def nuts3(country_shapes, nuts3, nuts3pop, nuts3gdp, ch_cantons, ch_popgdp):
             pd.MultiIndex.from_tuples(pop.pop("unit,geo\\time").str.split(","))
         )
         .loc["THS"]
-        .applymap(lambda x: pd.to_numeric(x, errors="coerce"))
-        .fillna(method="bfill", axis=1)
+        .map(lambda x: pd.to_numeric(x, errors="coerce"))
+        .bfill(axis=1)
     )["2014"]

     gdp = pd.read_table(nuts3gdp, na_values=[":"], delimiter=" ?\t", engine="python")
@@ -184,8 +184,8 @@ def nuts3(country_shapes, nuts3, nuts3pop, nuts3gdp, ch_cantons, ch_popgdp):
             pd.MultiIndex.from_tuples(gdp.pop("unit,geo\\time").str.split(","))
         )
         .loc["EUR_HAB"]
-        .applymap(lambda x: pd.to_numeric(x, errors="coerce"))
-        .fillna(method="bfill", axis=1)
+        .map(lambda x: pd.to_numeric(x, errors="coerce"))
+        .bfill(axis=1)
     )["2014"]

     cantons = pd.read_csv(ch_cantons)
@@ -81,14 +81,12 @@ def build_transport_demand(traffic_fn, airtemp_fn, nodes, nodal_transport_data):
         - pop_weighted_energy_totals["electricity rail"]
     )

-    transport = (
+    return (
         (transport_shape.multiply(energy_totals_transport) * 1e6 * nyears)
         .divide(efficiency_gain * ice_correction)
         .multiply(1 + dd_EV)
     )

-    return transport
-

 def transport_degree_factor(
     temperature,
@@ -132,14 +130,12 @@ def bev_availability_profile(fn, snapshots, nodes, options):
         traffic.mean() - traffic.min()
     )

-    avail_profile = generate_periodic_profiles(
+    return generate_periodic_profiles(
         dt_index=snapshots,
         nodes=nodes,
         weekly_profile=avail.values,
     )

-    return avail_profile
-

 def bev_dsm_profile(snapshots, nodes, options):
     dsm_week = np.zeros((24 * 7,))
@@ -148,14 +144,12 @@ def bev_dsm_profile(snapshots, nodes, options):
         "bev_dsm_restriction_value"
     ]

-    dsm_profile = generate_periodic_profiles(
+    return generate_periodic_profiles(
         dt_index=snapshots,
         nodes=nodes,
         weekly_profile=dsm_week,
     )

-    return dsm_profile
-

 if __name__ == "__main__":
     if "snakemake" not in globals():
@@ -16,8 +16,7 @@ Relevant Settings
     clustering:
         cluster_network:
             aggregation_strategies:
+        focus_weights:
-
-    focus_weights:

     solving:
         solver:
@@ -237,7 +236,7 @@ def distribute_clusters(n, n_clusters, focus_weights=None, solver_name="cbc"):
         n_clusters >= len(N) and n_clusters <= N.sum()
     ), f"Number of clusters must be {len(N)} <= n_clusters <= {N.sum()} for this selection of countries."

-    if focus_weights is not None:
+    if isinstance(focus_weights, dict):
         total_focus = sum(list(focus_weights.values()))

         assert (
@@ -271,7 +270,7 @@ def distribute_clusters(n, n_clusters, focus_weights=None, solver_name="cbc"):
     )

     opt = po.SolverFactory(solver_name)
-    if not opt.has_capability("quadratic_objective"):
+    if solver_name == "appsi_highs" or not opt.has_capability("quadratic_objective"):
         logger.warning(
             f"The configured solver `{solver_name}` does not support quadratic objectives. Falling back to `ipopt`."
         )
@@ -322,9 +321,9 @@ def busmap_for_n_clusters(
            neighbor_bus = n.lines.query(
                "bus0 == @disconnected_bus or bus1 == @disconnected_bus"
            ).iloc[0][["bus0", "bus1"]]
-            new_country = list(
-                set(n.buses.loc[neighbor_bus].country) - set([country])
-            )[0]
+            new_country = list(set(n.buses.loc[neighbor_bus].country) - {country})[
+                0
+            ]

            logger.info(
                f"overwriting country `{country}` of bus `{disconnected_bus}` "
@@ -461,14 +460,18 @@ if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake

-        snakemake = mock_snakemake("cluster_network", simpl="", clusters="37c")
+        snakemake = mock_snakemake("cluster_network", simpl="", clusters="37")
     configure_logging(snakemake)

     params = snakemake.params
     solver_name = snakemake.config["solving"]["solver"]["name"]
+    solver_name = "appsi_highs" if solver_name == "highs" else solver_name

     n = pypsa.Network(snakemake.input.network)

+    # remove integer outputs for compatibility with PyPSA v0.26.0
+    n.generators.drop("n_mod", axis=1, inplace=True, errors="ignore")
+
     exclude_carriers = params.cluster_network["exclude_carriers"]
     aggregate_carriers = set(n.generators.carrier) - set(exclude_carriers)
     conventional_carriers = set(params.conventional_carriers)
@@ -483,6 +486,23 @@ if __name__ == "__main__":
     else:
         n_clusters = int(snakemake.wildcards.clusters)

+    if params.cluster_network.get("consider_efficiency_classes", False):
+        carriers = []
+        for c in aggregate_carriers:
+            gens = n.generators.query("carrier == @c")
+            low = gens.efficiency.quantile(0.10)
+            high = gens.efficiency.quantile(0.90)
+            if low >= high:
+                carriers += [c]
+            else:
+                labels = ["low", "medium", "high"]
+                suffix = pd.cut(
+                    gens.efficiency, bins=[0, low, high, 1], labels=labels
+                ).astype(str)
+                carriers += [f"{c} {label} efficiency" for label in labels]
+                n.generators.carrier.update(gens.carrier + " " + suffix + " efficiency")
+        aggregate_carriers = carriers
+
     if n_clusters == len(n.buses):
         # Fast-path if no clustering is necessary
         busmap = n.buses.index.to_series()
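A standalone sketch of the quantile binning added above: pd.cut splits generator efficiencies at the 10th and 90th percentiles into low/medium/high classes (hypothetical efficiency values):

import pandas as pd

eff = pd.Series([0.32, 0.38, 0.41, 0.45, 0.52, 0.58, 0.61], name="efficiency")

low, high = eff.quantile(0.10), eff.quantile(0.90)
labels = ["low", "medium", "high"]

# below the 10th percentile -> "low", above the 90th -> "high", the rest -> "medium"
suffix = pd.cut(eff, bins=[0, low, high, 1], labels=labels).astype(str)
print(suffix.value_counts())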
@@ -524,6 +544,11 @@ if __name__ == "__main__":

     update_p_nom_max(clustering.network)

+    if params.cluster_network.get("consider_efficiency_classes"):
+        labels = [f" {label} efficiency" for label in ["low", "medium", "high"]]
+        nc = clustering.network
+        nc.generators["carrier"] = nc.generators.carrier.replace(labels, "", regex=True)
+
     clustering.network.meta = dict(
         snakemake.config, **dict(wildcards=dict(snakemake.wildcards))
     )
@@ -11,25 +11,13 @@ from shutil import copy

 import yaml

-files = {
-    "config/config.yaml": "config.yaml",
-    "Snakefile": "Snakefile",
-    "scripts/solve_network.py": "solve_network.py",
-    "scripts/prepare_sector_network.py": "prepare_sector_network.py",
-}
-
 if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake

         snakemake = mock_snakemake("copy_config")

-    basepath = Path(f"results/{snakemake.params.RDIR}config/")
-
-    for f, name in files.items():
-        copy(f, basepath / name)
-
-    with open(basepath / "config.snakemake.yaml", "w") as yaml_file:
+    with open(snakemake.output[0], "w") as yaml_file:
         yaml.dump(
             snakemake.config,
             yaml_file,
scripts/determine_availability_matrix_MD_UA.py (new file, 156 lines)
@@ -0,0 +1,156 @@
# -*- coding: utf-8 -*-
# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: MIT

import functools
import logging
import time

import atlite
import fiona
import geopandas as gpd
import matplotlib.pyplot as plt
import numpy as np
from _helpers import configure_logging
from atlite.gis import shape_availability
from rasterio.plot import show

logger = logging.getLogger(__name__)


def get_wdpa_layer_name(wdpa_fn, layer_substring):
    """
    Get layername from file "wdpa_fn" whose name contains "layer_substring".
    """
    l = fiona.listlayers(wdpa_fn)
    return [_ for _ in l if layer_substring in _][0]


if __name__ == "__main__":
    if "snakemake" not in globals():
        from _helpers import mock_snakemake

        snakemake = mock_snakemake(
            "determine_availability_matrix_MD_UA", technology="solar"
        )
    configure_logging(snakemake)

    nprocesses = None  # snakemake.config["atlite"].get("nprocesses")
    noprogress = not snakemake.config["atlite"].get("show_progress", True)
    config = snakemake.config["renewable"][snakemake.wildcards.technology]

    cutout = atlite.Cutout(snakemake.input.cutout)
    regions = (
        gpd.read_file(snakemake.input.regions).set_index("name").rename_axis("bus")
    )
    buses = regions.index

    excluder = atlite.ExclusionContainer(crs=3035, res=100)

    corine = config.get("corine", {})
    if "grid_codes" in corine:
        # Land cover codes to emulate CORINE results
        if snakemake.wildcards.technology == "solar":
            codes = [20, 30, 40, 50, 60, 90, 100]
        elif snakemake.wildcards.technology == "onwind":
            codes = [20, 30, 40, 60, 100]
        elif snakemake.wildcards.technology == "offwind-ac":
            codes = [80, 200]
        elif snakemake.wildcards.technology == "offwind-dc":
            codes = [80, 200]
        else:
            assert False, "technology not supported"

        excluder.add_raster(
            snakemake.input.copernicus, codes=codes, invert=True, crs="EPSG:4326"
        )
    if "distance" in corine and corine.get("distance", 0.0) > 0.0:
        # Land cover codes to emulate CORINE results
        if snakemake.wildcards.technology == "onwind":
            codes = [50]
        else:
            assert False, "technology not supported"

        buffer = corine["distance"]
        excluder.add_raster(
            snakemake.input.copernicus, codes=codes, buffer=buffer, crs="EPSG:4326"
        )

    if config["natura"]:
        wdpa_fn = (
            snakemake.input.wdpa_marine
            if "offwind" in snakemake.wildcards.technology
            else snakemake.input.wdpa
        )
        layer = get_wdpa_layer_name(wdpa_fn, "polygons")
        wdpa = gpd.read_file(
            wdpa_fn,
            bbox=regions.geometry,
            layer=layer,
        ).to_crs(3035)
        if not wdpa.empty:
            excluder.add_geometry(wdpa.geometry)

        layer = get_wdpa_layer_name(wdpa_fn, "points")
        wdpa_pts = gpd.read_file(
            wdpa_fn,
            bbox=regions.geometry,
            layer=layer,
        ).to_crs(3035)
        wdpa_pts = wdpa_pts[wdpa_pts["REP_AREA"] > 1]
        wdpa_pts["buffer_radius"] = np.sqrt(wdpa_pts["REP_AREA"] / np.pi) * 1000
        wdpa_pts = wdpa_pts.set_geometry(
            wdpa_pts["geometry"].buffer(wdpa_pts["buffer_radius"])
        )
        if not wdpa_pts.empty:
            excluder.add_geometry(wdpa_pts.geometry)

    if "max_depth" in config:
        # lambda not supported for atlite + multiprocessing
        # use named function np.greater with partially frozen argument instead
        # and exclude areas where: -max_depth > grid cell depth
        func = functools.partial(np.greater, -config["max_depth"])
        excluder.add_raster(snakemake.input.gebco, codes=func, crs=4236, nodata=-1000)

    if "min_shore_distance" in config:
        buffer = config["min_shore_distance"]
        excluder.add_geometry(snakemake.input.country_shapes, buffer=buffer)

    if "max_shore_distance" in config:
        buffer = config["max_shore_distance"]
        excluder.add_geometry(
            snakemake.input.country_shapes, buffer=buffer, invert=True
        )

    if "ship_threshold" in config:
        shipping_threshold = config["ship_threshold"] * 8760 * 6
        func = functools.partial(np.less, shipping_threshold)
        excluder.add_raster(
            snakemake.input.ship_density, codes=func, crs=4326, allow_no_overlap=True
        )

    kwargs = dict(nprocesses=nprocesses, disable_progressbar=noprogress)
    if noprogress:
        logger.info("Calculate landuse availabilities...")
        start = time.time()
        availability = cutout.availabilitymatrix(regions, excluder, **kwargs)
        duration = time.time() - start
        logger.info(f"Completed availability calculation ({duration:2.2f}s)")
    else:
        availability = cutout.availabilitymatrix(regions, excluder, **kwargs)

    regions_geometry = regions.to_crs(3035).geometry
    band, transform = shape_availability(regions_geometry, excluder)
    fig, ax = plt.subplots(figsize=(4, 8))
    gpd.GeoSeries(regions_geometry.unary_union).plot(ax=ax, color="none")
    show(band, transform=transform, cmap="Greens", ax=ax)
    plt.axis("off")
    plt.savefig(snakemake.output.availability_map, bbox_inches="tight", dpi=500)

    # Limit results only to buses for UA and MD
    buses = regions.loc[regions["country"].isin(["UA", "MD"])].index.values
    availability = availability.sel(bus=buses)

    # Save and plot for verification
    availability.to_netcdf(snakemake.output.availability_matrix)
@@ -33,10 +33,7 @@ def assign_locations(n):
         ifind = pd.Series(c.df.index.str.find(" ", start=4), c.df.index)
         for i in ifind.unique():
             names = ifind.index[ifind == i]
-            if i == -1:
-                c.df.loc[names, "location"] = ""
-            else:
-                c.df.loc[names, "location"] = names.str[:i]
+            c.df.loc[names, "location"] = "" if i == -1 else names.str[:i]


 def calculate_nodal_cfs(n, label, nodal_cfs):
@@ -397,7 +394,7 @@ def calculate_supply_energy(n, label, supply_energy):

         for c in n.iterate_components(n.branch_components):
             for end in [col[3:] for col in c.df.columns if col[:3] == "bus"]:
-                items = c.df.index[c.df["bus" + str(end)].map(bus_map).fillna(False)]
+                items = c.df.index[c.df[f"bus{str(end)}"].map(bus_map).fillna(False)]

                 if len(items) == 0:
                     continue
@@ -449,6 +446,10 @@ def calculate_metrics(n, label, metrics):
     if "CO2Limit" in n.global_constraints.index:
         metrics.at["co2_shadow", label] = n.global_constraints.at["CO2Limit", "mu"]

+    if "co2_sequestration_limit" in n.global_constraints.index:
+        metrics.at["co2_storage_shadow", label] = n.global_constraints.at[
+            "co2_sequestration_limit", "mu"
+        ]
     return metrics

@@ -493,7 +494,7 @@ def calculate_weighted_prices(n, label, weighted_prices):
         "H2": ["Sabatier", "H2 Fuel Cell"],
     }

-    for carrier in link_loads:
+    for carrier, value in link_loads.items():
         if carrier == "electricity":
             suffix = ""
         elif carrier[:5] == "space":
@@ -515,15 +516,15 @@ def calculate_weighted_prices(n, label, weighted_prices):
         else:
             load = n.loads_t.p_set[buses]

-        for tech in link_loads[carrier]:
+        for tech in value:
             names = n.links.index[n.links.index.to_series().str[-len(tech) :] == tech]

-            if names.empty:
-                continue
-
-            load += (
-                n.links_t.p0[names].groupby(n.links.loc[names, "bus0"], axis=1).sum()
-            )
+            if not names.empty:
+                load += (
+                    n.links_t.p0[names]
+                    .groupby(n.links.loc[names, "bus0"], axis=1)
+                    .sum()
+                )

         # Add H2 Store when charging
         # if carrier == "H2":
@@ -650,11 +651,7 @@ def make_summaries(networks_dict):
         networks_dict.keys(), names=["cluster", "ll", "opt", "planning_horizon"]
     )

-    df = {}
-
-    for output in outputs:
-        df[output] = pd.DataFrame(columns=columns, dtype=float)
+    df = {output: pd.DataFrame(columns=columns, dtype=float) for output in outputs}

     for label, filename in networks_dict.items():
         logger.info(f"Make summary for scenario {label}, using {filename}")

@@ -711,5 +708,5 @@ if __name__ == "__main__":
     if snakemake.params.foresight == "myopic":
         cumulative_cost = calculate_cumulative_cost()
         cumulative_cost.to_csv(
-            "results/" + snakemake.params.RDIR + "/csvs/cumulative_cost.csv"
+            "results/" + snakemake.params.RDIR + "csvs/cumulative_cost.csv"
         )
755
scripts/make_summary_perfect.py
Normal file
755
scripts/make_summary_perfect.py
Normal file
@ -0,0 +1,755 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# SPDX-FileCopyrightText: : 2020-2023 The PyPSA-Eur Authors
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: MIT
|
||||||
|
"""
|
||||||
|
Create summary CSV files for all scenario runs with perfect foresight including
|
||||||
|
costs, capacities, capacity factors, curtailment, energy balances, prices and
|
||||||
|
other metrics.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
import pandas as pd
|
||||||
|
import pypsa
|
||||||
|
from make_summary import (
|
||||||
|
assign_carriers,
|
||||||
|
assign_locations,
|
||||||
|
calculate_cfs,
|
||||||
|
calculate_nodal_cfs,
|
||||||
|
calculate_nodal_costs,
|
||||||
|
)
|
||||||
|
from prepare_sector_network import prepare_costs
|
||||||
|
from pypsa.descriptors import get_active_assets, nominal_attrs
|
||||||
|
from six import iteritems
|
||||||
|
|
||||||
|
idx = pd.IndexSlice
|
||||||
|
|
||||||
|
opt_name = {"Store": "e", "Line": "s", "Transformer": "s"}
|
||||||
|
|
||||||
|
|
||||||
|
def reindex_columns(df, cols):
|
||||||
|
investments = cols.levels[3]
|
||||||
|
if len(cols.names) != len(df.columns.levels):
|
||||||
|
df = pd.concat([df] * len(investments), axis=1)
|
||||||
|
df.columns = cols
|
||||||
|
df = df.reindex(cols, axis=1)
|
||||||
|
|
||||||
|
return df
|
||||||
|
|
||||||
|
|
||||||
|
def calculate_costs(n, label, costs):
|
||||||
|
investments = n.investment_periods
|
||||||
|
cols = pd.MultiIndex.from_product(
|
||||||
|
[
|
||||||
|
costs.columns.levels[0],
|
||||||
|
costs.columns.levels[1],
|
||||||
|
costs.columns.levels[2],
|
||||||
|
investments,
|
||||||
|
],
|
||||||
|
names=costs.columns.names[:3] + ["year"],
|
||||||
|
)
|
||||||
|
|
||||||
|
costs = reindex_columns(costs, cols)
|
||||||
|
|
||||||
|
for c in n.iterate_components(
|
||||||
|
n.branch_components | n.controllable_one_port_components ^ {"Load"}
|
||||||
|
):
|
||||||
|
capital_costs = c.df.capital_cost * c.df[opt_name.get(c.name, "p") + "_nom_opt"]
|
||||||
|
active = pd.concat(
|
||||||
|
[
|
||||||
|
get_active_assets(n, c.name, inv_p).rename(inv_p)
|
||||||
|
for inv_p in investments
|
||||||
|
],
|
||||||
|
axis=1,
|
||||||
|
).astype(int)
|
||||||
|
capital_costs = active.mul(capital_costs, axis=0)
|
||||||
|
discount = (
|
||||||
|
n.investment_period_weightings["objective"]
|
||||||
|
/ n.investment_period_weightings["years"]
|
||||||
|
)
|
||||||
|
capital_costs_grouped = capital_costs.groupby(c.df.carrier).sum().mul(discount)
|
||||||
|
|
||||||
|
capital_costs_grouped = pd.concat([capital_costs_grouped], keys=["capital"])
|
||||||
|
capital_costs_grouped = pd.concat([capital_costs_grouped], keys=[c.list_name])
|
||||||
|
|
||||||
|
costs = costs.reindex(capital_costs_grouped.index.union(costs.index))
|
||||||
|
|
||||||
|
costs.loc[capital_costs_grouped.index, label] = capital_costs_grouped.values
|
||||||
|
|
||||||
|
if c.name == "Link":
|
||||||
|
p = (
|
||||||
|
c.pnl.p0.multiply(n.snapshot_weightings.generators, axis=0)
|
||||||
|
.groupby(level=0)
|
||||||
|
.sum()
|
||||||
|
)
|
||||||
|
elif c.name == "Line":
|
||||||
|
continue
|
||||||
|
elif c.name == "StorageUnit":
|
||||||
|
p_all = c.pnl.p.multiply(n.snapshot_weightings.stores, axis=0)
|
||||||
|
p_all[p_all < 0.0] = 0.0
|
||||||
|
p = p_all.groupby(level=0).sum()
|
||||||
|
else:
|
||||||
|
p = (
|
||||||
|
round(c.pnl.p, ndigits=2)
|
||||||
|
.multiply(n.snapshot_weightings.generators, axis=0)
|
||||||
|
.groupby(level=0)
|
||||||
|
.sum()
|
||||||
|
)
|
||||||
|
|
||||||
|
# correct sequestration cost
|
||||||
|
if c.name == "Store":
|
||||||
|
items = c.df.index[
|
||||||
|
(c.df.carrier == "co2 stored") & (c.df.marginal_cost <= -100.0)
|
||||||
|
]
|
||||||
|
c.df.loc[items, "marginal_cost"] = -20.0
|
||||||
|
|
||||||
|
marginal_costs = p.mul(c.df.marginal_cost).T
|
||||||
|
# marginal_costs = active.mul(marginal_costs, axis=0)
|
||||||
|
marginal_costs_grouped = (
|
||||||
|
marginal_costs.groupby(c.df.carrier).sum().mul(discount)
|
||||||
|
)
|
||||||
|
|
||||||
|
marginal_costs_grouped = pd.concat([marginal_costs_grouped], keys=["marginal"])
|
||||||
|
marginal_costs_grouped = pd.concat([marginal_costs_grouped], keys=[c.list_name])
|
||||||
|
|
||||||
|
costs = costs.reindex(marginal_costs_grouped.index.union(costs.index))
|
||||||
|
|
||||||
|
costs.loc[marginal_costs_grouped.index, label] = marginal_costs_grouped.values
|
||||||
|
|
||||||
|
# add back in all hydro
|
||||||
|
# costs.loc[("storage_units","capital","hydro"),label] = (0.01)*2e6*n.storage_units.loc[n.storage_units.group=="hydro","p_nom"].sum()
|
||||||
|
# costs.loc[("storage_units","capital","PHS"),label] = (0.01)*2e6*n.storage_units.loc[n.storage_units.group=="PHS","p_nom"].sum()
|
||||||
|
# costs.loc[("generators","capital","ror"),label] = (0.02)*3e6*n.generators.loc[n.generators.group=="ror","p_nom"].sum()
|
||||||
|
|
||||||
|
return costs
|
||||||
|
|
||||||
|
|
||||||
|
def calculate_cumulative_cost():
|
||||||
|
planning_horizons = snakemake.config["scenario"]["planning_horizons"]
|
||||||
|
|
||||||
|
cumulative_cost = pd.DataFrame(
|
||||||
|
index=df["costs"].sum().index,
|
||||||
|
columns=pd.Series(data=np.arange(0, 0.1, 0.01), name="social discount rate"),
|
||||||
|
)
|
||||||
|
|
||||||
|
# discount cost and express them in money value of planning_horizons[0]
|
||||||
|
for r in cumulative_cost.columns:
|
||||||
|
cumulative_cost[r] = [
|
||||||
|
df["costs"].sum()[index] / ((1 + r) ** (index[-1] - planning_horizons[0]))
|
||||||
|
for index in cumulative_cost.index
|
||||||
|
]
|
||||||
|
|
||||||
|
# integrate cost throughout the transition path
|
||||||
|
for r in cumulative_cost.columns:
|
||||||
|
for cluster in cumulative_cost.index.get_level_values(level=0).unique():
|
||||||
|
for lv in cumulative_cost.index.get_level_values(level=1).unique():
|
||||||
|
for sector_opts in cumulative_cost.index.get_level_values(
|
||||||
|
level=2
|
||||||
|
).unique():
|
||||||
|
cumulative_cost.loc[
|
||||||
|
(cluster, lv, sector_opts, "cumulative cost"), r
|
||||||
|
] = np.trapz(
|
||||||
|
cumulative_cost.loc[
|
||||||
|
idx[cluster, lv, sector_opts, planning_horizons], r
|
||||||
|
].values,
|
||||||
|
x=planning_horizons,
|
||||||
|
)
|
||||||
|
|
||||||
|
return cumulative_cost
|
||||||
|
|
||||||
|
|
||||||
|
def calculate_nodal_capacities(n, label, nodal_capacities):
|
||||||
|
# Beware this also has extraneous locations for country (e.g. biomass) or continent-wide (e.g. fossil gas/oil) stuff
|
||||||
|
for c in n.iterate_components(
|
||||||
|
n.branch_components | n.controllable_one_port_components ^ {"Load"}
|
||||||
|
):
|
||||||
|
nodal_capacities_c = c.df.groupby(["location", "carrier"])[
|
||||||
|
opt_name.get(c.name, "p") + "_nom_opt"
|
||||||
|
].sum()
|
||||||
|
index = pd.MultiIndex.from_tuples(
|
||||||
|
[(c.list_name,) + t for t in nodal_capacities_c.index.to_list()]
|
||||||
|
)
|
||||||
|
nodal_capacities = nodal_capacities.reindex(index.union(nodal_capacities.index))
|
||||||
|
nodal_capacities.loc[index, label] = nodal_capacities_c.values
|
||||||
|
|
||||||
|
return nodal_capacities
|
||||||
|
|
||||||
|
|
||||||
|
def calculate_capacities(n, label, capacities):
    investments = n.investment_periods
    cols = pd.MultiIndex.from_product(
        [
            capacities.columns.levels[0],
            capacities.columns.levels[1],
            capacities.columns.levels[2],
            investments,
        ],
        names=capacities.columns.names[:3] + ["year"],
    )
    capacities = reindex_columns(capacities, cols)

    for c in n.iterate_components(
        n.branch_components | n.controllable_one_port_components ^ {"Load"}
    ):
        active = pd.concat(
            [
                get_active_assets(n, c.name, inv_p).rename(inv_p)
                for inv_p in investments
            ],
            axis=1,
        ).astype(int)
        caps = c.df[opt_name.get(c.name, "p") + "_nom_opt"]
        caps = active.mul(caps, axis=0)
        capacities_grouped = (
            caps.groupby(c.df.carrier).sum().drop("load", errors="ignore")
        )
        capacities_grouped = pd.concat([capacities_grouped], keys=[c.list_name])

        capacities = capacities.reindex(
            capacities_grouped.index.union(capacities.index)
        )

        capacities.loc[capacities_grouped.index, label] = capacities_grouped.values

    return capacities

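# Illustrative sketch (toy data, not part of the workflow): masking optimal
# capacities by which assets are active in each investment period and then
# grouping by carrier mirrors the active.mul(caps, axis=0) / groupby(carrier)
# pattern above.
import pandas as pd

caps = pd.Series({"gen1": 10.0, "gen2": 5.0, "gen3": 2.0})         # p_nom_opt
carrier = pd.Series({"gen1": "solar", "gen2": "solar", "gen3": "wind"})
active = pd.DataFrame(                                             # 1 = active in period
    {2030: [1, 0, 1], 2040: [1, 1, 1]}, index=caps.index
)

per_period = active.mul(caps, axis=0).groupby(carrier).sum()
print(per_period)  # solar: 10 in 2030, 15 in 2040; wind: 2 in both periods

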
def calculate_curtailment(n, label, curtailment):
    avail = (
        n.generators_t.p_max_pu.multiply(n.generators.p_nom_opt)
        .sum()
        .groupby(n.generators.carrier)
        .sum()
    )
    used = n.generators_t.p.sum().groupby(n.generators.carrier).sum()

    curtailment[label] = (((avail - used) / avail) * 100).round(3)

    return curtailment

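# Illustrative sketch (toy numbers): curtailment is reported as the share of
# available energy that was not dispatched, i.e. ((avail - used) / avail) * 100.
avail_twh, used_twh = 120.0, 105.0
curtailment_pct = round((avail_twh - used_twh) / avail_twh * 100, 3)
print(curtailment_pct)  # 12.5

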
def calculate_energy(n, label, energy):
    investments = n.investment_periods
    cols = pd.MultiIndex.from_product(
        [
            energy.columns.levels[0],
            energy.columns.levels[1],
            energy.columns.levels[2],
            investments,
        ],
        names=energy.columns.names[:3] + ["year"],
    )
    energy = reindex_columns(energy, cols)

    for c in n.iterate_components(n.one_port_components | n.branch_components):
        if c.name in n.one_port_components:
            c_energies = (
                c.pnl.p.multiply(n.snapshot_weightings.generators, axis=0)
                .groupby(level=0)
                .sum()
                .multiply(c.df.sign)
                .groupby(c.df.carrier, axis=1)
                .sum()
            )
        else:
            c_energies = pd.DataFrame(
                0.0, columns=c.df.carrier.unique(), index=n.investment_periods
            )
            for port in [col[3:] for col in c.df.columns if col[:3] == "bus"]:
                totals = (
                    c.pnl["p" + port]
                    .multiply(n.snapshot_weightings.generators, axis=0)
                    .groupby(level=0)
                    .sum()
                )
                # remove values where bus is missing (bug in nomopyomo)
                no_bus = c.df.index[c.df["bus" + port] == ""]
                totals[no_bus] = float(
                    n.component_attrs[c.name].loc["p" + port, "default"]
                )
                c_energies -= totals.groupby(c.df.carrier, axis=1).sum()

        c_energies = pd.concat([c_energies.T], keys=[c.list_name])

        energy = energy.reindex(c_energies.index.union(energy.index))

        energy.loc[c_energies.index, label] = c_energies.values

    return energy

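# Illustrative sketch (toy data): multiplying the power time series by the
# snapshot weightings before summing converts MW dispatch into MWh of energy,
# which is what n.snapshot_weightings.generators is used for above.
import pandas as pd

p = pd.DataFrame({"gen1": [50.0, 60.0], "gen2": [10.0, 0.0]})  # MW per snapshot
weightings = pd.Series([3.0, 3.0])                             # hours represented by each snapshot

energy_mwh = p.multiply(weightings, axis=0).sum()
print(energy_mwh)  # gen1: 330.0 MWh, gen2: 30.0 MWh

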
def calculate_supply(n, label, supply):
    """
    Calculate the max dispatch of each component at the buses aggregated by
    carrier.
    """

    bus_carriers = n.buses.carrier.unique()

    for i in bus_carriers:
        bus_map = n.buses.carrier == i
        bus_map.at[""] = False

        for c in n.iterate_components(n.one_port_components):
            items = c.df.index[c.df.bus.map(bus_map).fillna(False)]

            if len(items) == 0:
                continue

            s = (
                c.pnl.p[items]
                .max()
                .multiply(c.df.loc[items, "sign"])
                .groupby(c.df.loc[items, "carrier"])
                .sum()
            )
            s = pd.concat([s], keys=[c.list_name])
            s = pd.concat([s], keys=[i])

            supply = supply.reindex(s.index.union(supply.index))
            supply.loc[s.index, label] = s

        for c in n.iterate_components(n.branch_components):
            for end in [col[3:] for col in c.df.columns if col[:3] == "bus"]:
                items = c.df.index[c.df["bus" + end].map(bus_map).fillna(False)]

                if len(items) == 0:
                    continue

                # lots of sign compensation for direction and to do maximums
                s = (-1) ** (1 - int(end)) * (
                    (-1) ** int(end) * c.pnl["p" + end][items]
                ).max().groupby(c.df.loc[items, "carrier"]).sum()
                s.index = s.index + end
                s = pd.concat([s], keys=[c.list_name])
                s = pd.concat([s], keys=[i])

                supply = supply.reindex(s.index.union(supply.index))
                supply.loc[s.index, label] = s

    return supply

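# Illustrative sketch (toy numbers, single values instead of time series): the
# sign compensation above makes branch flows count as positive supply into the
# bus at the receiving end. For a link drawing 100 MW at bus0 (p0 = +100) and
# delivering 95 MW at bus1 (p1 = -95):
p = {"0": 100.0, "1": -95.0}
for end in ["0", "1"]:
    s = (-1) ** (1 - int(end)) * ((-1) ** int(end) * p[end])
    print(end, s)  # bus0 side: -100.0 (withdrawal), bus1 side: 95.0 (supply)

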
def calculate_supply_energy(n, label, supply_energy):
    """
    Calculate the total energy supply/consumption of each component at the buses
    aggregated by carrier.
    """

    investments = n.investment_periods
    cols = pd.MultiIndex.from_product(
        [
            supply_energy.columns.levels[0],
            supply_energy.columns.levels[1],
            supply_energy.columns.levels[2],
            investments,
        ],
        names=supply_energy.columns.names[:3] + ["year"],
    )
    supply_energy = reindex_columns(supply_energy, cols)

    bus_carriers = n.buses.carrier.unique()

    for i in bus_carriers:
        bus_map = n.buses.carrier == i
        bus_map.at[""] = False

        for c in n.iterate_components(n.one_port_components):
            items = c.df.index[c.df.bus.map(bus_map).fillna(False)]

            if len(items) == 0:
                continue

            if c.name == "Generator":
                weightings = n.snapshot_weightings.generators
            else:
                weightings = n.snapshot_weightings.stores

            if i in ["oil", "co2", "H2"]:
                if c.name == "Load":
                    c.df.loc[items, "carrier"] = [
                        load.split("-202")[0] for load in items
                    ]
                if i == "oil" and c.name == "Generator":
                    c.df.loc[items, "carrier"] = "imported oil"
            s = (
                c.pnl.p[items]
                .multiply(weightings, axis=0)
                .groupby(level=0)
                .sum()
                .multiply(c.df.loc[items, "sign"])
                .groupby(c.df.loc[items, "carrier"], axis=1)
                .sum()
                .T
            )
            s = pd.concat([s], keys=[c.list_name])
            s = pd.concat([s], keys=[i])

            supply_energy = supply_energy.reindex(
                s.index.union(supply_energy.index, sort=False)
            )
            supply_energy.loc[s.index, label] = s.values

        for c in n.iterate_components(n.branch_components):
            for end in [col[3:] for col in c.df.columns if col[:3] == "bus"]:
                items = c.df.index[c.df[f"bus{str(end)}"].map(bus_map).fillna(False)]

                if len(items) == 0:
                    continue

                s = (
                    (-1)
                    * c.pnl["p" + end]
                    .reindex(items, axis=1)
                    .multiply(n.snapshot_weightings.objective, axis=0)
                    .groupby(level=0)
                    .sum()
                    .groupby(c.df.loc[items, "carrier"], axis=1)
                    .sum()
                ).T
                s.index = s.index + end
                s = pd.concat([s], keys=[c.list_name])
                s = pd.concat([s], keys=[i])

                supply_energy = supply_energy.reindex(
                    s.index.union(supply_energy.index, sort=False)
                )

                supply_energy.loc[s.index, label] = s.values

    return supply_energy

def calculate_metrics(n, label, metrics):
    metrics = metrics.reindex(
        pd.Index(
            [
                "line_volume",
                "line_volume_limit",
                "line_volume_AC",
                "line_volume_DC",
                "line_volume_shadow",
                "co2_shadow",
            ]
        ).union(metrics.index)
    )

    metrics.at["line_volume_DC", label] = (n.links.length * n.links.p_nom_opt)[
        n.links.carrier == "DC"
    ].sum()
    metrics.at["line_volume_AC", label] = (n.lines.length * n.lines.s_nom_opt).sum()
    metrics.at["line_volume", label] = metrics.loc[
        ["line_volume_AC", "line_volume_DC"], label
    ].sum()

    if hasattr(n, "line_volume_limit"):
        metrics.at["line_volume_limit", label] = n.line_volume_limit
        metrics.at["line_volume_shadow", label] = n.line_volume_limit_dual

    if "CO2Limit" in n.global_constraints.index:
        metrics.at["co2_shadow", label] = n.global_constraints.at["CO2Limit", "mu"]

    return metrics


def calculate_prices(n, label, prices):
    prices = prices.reindex(prices.index.union(n.buses.carrier.unique()))

    # WARNING: this is time-averaged, see weighted_prices for load-weighted average
    prices[label] = n.buses_t.marginal_price.mean().groupby(n.buses.carrier).mean()

    return prices


def calculate_weighted_prices(n, label, weighted_prices):
    # Warning: doesn't include storage units as loads

    weighted_prices = weighted_prices.reindex(
        pd.Index(
            [
                "electricity",
                "heat",
                "space heat",
                "urban heat",
                "space urban heat",
                "gas",
                "H2",
            ]
        )
    )

    link_loads = {
        "electricity": [
            "heat pump",
            "resistive heater",
            "battery charger",
            "H2 Electrolysis",
        ],
        "heat": ["water tanks charger"],
        "urban heat": ["water tanks charger"],
        "space heat": [],
        "space urban heat": [],
        "gas": ["OCGT", "gas boiler", "CHP electric", "CHP heat"],
        "H2": ["Sabatier", "H2 Fuel Cell"],
    }

    for carrier, value in link_loads.items():
        if carrier == "electricity":
            suffix = ""
        elif carrier[:5] == "space":
            suffix = carrier[5:]
        else:
            suffix = " " + carrier

        buses = n.buses.index[n.buses.index.str[2:] == suffix]

        if buses.empty:
            continue

        load = (
            pd.DataFrame(index=n.snapshots, columns=buses, data=0.0)
            if carrier in ["H2", "gas"]
            else n.loads_t.p_set.reindex(buses, axis=1)
        )
        for tech in value:
            names = n.links.index[n.links.index.to_series().str[-len(tech) :] == tech]

            if names.empty:
                continue

            load += (
                n.links_t.p0[names].groupby(n.links.loc[names, "bus0"], axis=1).sum()
            )

        # Add H2 Store when charging
        # if carrier == "H2":
        #     stores = n.stores_t.p[buses+ " Store"].groupby(n.stores.loc[buses+ " Store","bus"],axis=1).sum(axis=1)
        #     stores[stores > 0.] = 0.
        #     load += -stores

        weighted_prices.loc[carrier, label] = (
            load * n.buses_t.marginal_price[buses]
        ).sum().sum() / load.sum().sum()

        if carrier[:5] == "space":
            print(load * n.buses_t.marginal_price[buses])

    return weighted_prices

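# Illustrative sketch (toy data): a load-weighted average price weights each
# snapshot's price by the load served in it, mirroring the
# (load * marginal_price).sum().sum() / load.sum().sum() expression above.
import pandas as pd

load = pd.Series([100.0, 50.0, 150.0])   # MW
price = pd.Series([20.0, 80.0, 40.0])    # EUR/MWh

weighted = (load * price).sum() / load.sum()
print(round(weighted, 2))  # 40.0 EUR/MWh, versus a plain time average of 46.67

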
def calculate_market_values(n, label, market_values):
    # Warning: doesn't include storage units

    carrier = "AC"

    buses = n.buses.index[n.buses.carrier == carrier]

    ## First do market value of generators ##

    generators = n.generators.index[n.buses.loc[n.generators.bus, "carrier"] == carrier]

    techs = n.generators.loc[generators, "carrier"].value_counts().index

    market_values = market_values.reindex(market_values.index.union(techs))

    for tech in techs:
        gens = generators[n.generators.loc[generators, "carrier"] == tech]

        dispatch = (
            n.generators_t.p[gens]
            .groupby(n.generators.loc[gens, "bus"], axis=1)
            .sum()
            .reindex(columns=buses, fill_value=0.0)
        )

        revenue = dispatch * n.buses_t.marginal_price[buses]

        market_values.at[tech, label] = revenue.sum().sum() / dispatch.sum().sum()

    ## Now do market value of links ##

    for i in ["0", "1"]:
        all_links = n.links.index[n.buses.loc[n.links["bus" + i], "carrier"] == carrier]

        techs = n.links.loc[all_links, "carrier"].value_counts().index

        market_values = market_values.reindex(market_values.index.union(techs))

        for tech in techs:
            links = all_links[n.links.loc[all_links, "carrier"] == tech]

            dispatch = (
                n.links_t["p" + i][links]
                .groupby(n.links.loc[links, "bus" + i], axis=1)
                .sum()
                .reindex(columns=buses, fill_value=0.0)
            )

            revenue = dispatch * n.buses_t.marginal_price[buses]

            market_values.at[tech, label] = revenue.sum().sum() / dispatch.sum().sum()

    return market_values


def calculate_price_statistics(n, label, price_statistics):
    price_statistics = price_statistics.reindex(
        price_statistics.index.union(
            pd.Index(["zero_hours", "mean", "standard_deviation"])
        )
    )

    buses = n.buses.index[n.buses.carrier == "AC"]

    threshold = 0.1  # higher than phoney marginal_cost of wind/solar

    df = pd.DataFrame(data=0.0, columns=buses, index=n.snapshots)

    df[n.buses_t.marginal_price[buses] < threshold] = 1.0

    price_statistics.at["zero_hours", label] = df.sum().sum() / (
        df.shape[0] * df.shape[1]
    )

    price_statistics.at["mean", label] = n.buses_t.marginal_price[buses].mean().mean()

    price_statistics.at["standard_deviation", label] = (
        n.buses_t.marginal_price[buses].std().std()
    )

    return price_statistics

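# Illustrative sketch (toy data): the "zero_hours" metric above is the share of
# bus-snapshot combinations whose price falls below a small threshold.
import pandas as pd

prices = pd.DataFrame({"bus1": [0.0, 35.0, 0.05], "bus2": [20.0, 0.0, 45.0]})
threshold = 0.1

indicator = (prices < threshold).astype(float)
share = indicator.sum().sum() / (prices.shape[0] * prices.shape[1])
print(round(share, 2))  # 0.5

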
def calculate_co2_emissions(n, label, df):
    carattr = "co2_emissions"
    emissions = n.carriers.query(f"{carattr} != 0")[carattr]

    if emissions.empty:
        return

    weightings = n.snapshot_weightings.generators.mul(
        n.investment_period_weightings["years"]
        .reindex(n.snapshots)
        .fillna(method="bfill")
        .fillna(1.0),
        axis=0,
    )

    # generators
    gens = n.generators.query("carrier in @emissions.index")
    if not gens.empty:
        em_pu = gens.carrier.map(emissions) / gens.efficiency
        em_pu = (
            weightings["generators"].to_frame("weightings")
            @ em_pu.to_frame("weightings").T
        )
        emitted = n.generators_t.p[gens.index].mul(em_pu)

        emitted_grouped = (
            emitted.groupby(level=0).sum().groupby(n.generators.carrier, axis=1).sum().T
        )

        df = df.reindex(emitted_grouped.index.union(df.index))

        df.loc[emitted_grouped.index, label] = emitted_grouped.values

    if any(n.stores.carrier == "co2"):
        co2_i = n.stores[n.stores.carrier == "co2"].index
        df[label] = n.stores_t.e.groupby(level=0).last()[co2_i].iloc[:, 0]

    return df

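# Illustrative sketch (made-up numbers): emissions per unit of electricity
# follow the em_pu = emission_factor / efficiency logic above and then scale
# with dispatch.
emission_factor = 0.2   # tCO2 per MWh of fuel burned (made-up value)
efficiency = 0.4        # MWh electricity per MWh of fuel
dispatch_mwh = 1000.0   # electricity produced

em_pu = emission_factor / efficiency   # tCO2 per MWh of electricity
print(em_pu * dispatch_mwh)            # 500.0 tCO2

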
outputs = [
    "nodal_costs",
    "nodal_capacities",
    "nodal_cfs",
    "cfs",
    "costs",
    "capacities",
    "curtailment",
    "energy",
    "supply",
    "supply_energy",
    "prices",
    "weighted_prices",
    "price_statistics",
    "market_values",
    "metrics",
    "co2_emissions",
]


def make_summaries(networks_dict):
    columns = pd.MultiIndex.from_tuples(
        networks_dict.keys(), names=["cluster", "lv", "opt"]
    )
    df = {}

    for output in outputs:
        df[output] = pd.DataFrame(columns=columns, dtype=float)

    for label, filename in networks_dict.items():
        print(label, filename)
        try:
            n = pypsa.Network(filename)
        except OSError:
            print(label, " not solved yet.")
            continue
            # del networks_dict[label]

        if not hasattr(n, "objective"):
            print(label, " not solved correctly. Check log if infeasible or unbounded.")
            continue
        assign_carriers(n)
        assign_locations(n)

        for output in outputs:
            df[output] = globals()["calculate_" + output](n, label, df[output])

    return df


def to_csv(df):
    for key in df:
        df[key] = df[key].apply(lambda x: pd.to_numeric(x))
        df[key].to_csv(snakemake.output[key])


if __name__ == "__main__":
    # Detect running outside of snakemake and mock snakemake for testing
    if "snakemake" not in globals():
        from _helpers import mock_snakemake

        snakemake = mock_snakemake("make_summary_perfect")

    run = snakemake.config["run"]["name"]
    if run != "":
        run += "/"

    networks_dict = {
        (clusters, lv, opts + sector_opts): "results/"
        + run
        + f"postnetworks/elec_s{simpl}_{clusters}_l{lv}_{opts}_{sector_opts}_brownfield_all_years.nc"
        for simpl in snakemake.config["scenario"]["simpl"]
        for clusters in snakemake.config["scenario"]["clusters"]
        for opts in snakemake.config["scenario"]["opts"]
        for sector_opts in snakemake.config["scenario"]["sector_opts"]
        for lv in snakemake.config["scenario"]["ll"]
    }

    print(networks_dict)

    nyears = 1
    costs_db = prepare_costs(
        snakemake.input.costs,
        snakemake.config["costs"],
        nyears,
    )

    df = make_summaries(networks_dict)

    df["metrics"].loc["total costs"] = df["costs"].sum().groupby(level=[0, 1, 2]).sum()

    to_csv(df)

@ -24,7 +24,7 @@ from make_summary import assign_carriers
|
|||||||
from plot_summary import preferred_order, rename_techs
|
from plot_summary import preferred_order, rename_techs
|
||||||
from pypsa.plot import add_legend_circles, add_legend_lines, add_legend_patches
|
from pypsa.plot import add_legend_circles, add_legend_lines, add_legend_patches
|
||||||
|
|
||||||
plt.style.use(["ggplot", "matplotlibrc"])
|
plt.style.use(["ggplot"])
|
||||||
|
|
||||||
|
|
||||||
def rename_techs_tyndp(tech):
|
def rename_techs_tyndp(tech):
|
||||||
@ -145,12 +145,12 @@ def plot_map(
|
|||||||
ac_color = "rosybrown"
|
ac_color = "rosybrown"
|
||||||
dc_color = "darkseagreen"
|
dc_color = "darkseagreen"
|
||||||
|
|
||||||
|
title = "added grid"
|
||||||
|
|
||||||
if snakemake.wildcards["ll"] == "v1.0":
|
if snakemake.wildcards["ll"] == "v1.0":
|
||||||
# should be zero
|
# should be zero
|
||||||
line_widths = n.lines.s_nom_opt - n.lines.s_nom
|
line_widths = n.lines.s_nom_opt - n.lines.s_nom
|
||||||
link_widths = n.links.p_nom_opt - n.links.p_nom
|
link_widths = n.links.p_nom_opt - n.links.p_nom
|
||||||
title = "added grid"
|
|
||||||
|
|
||||||
if transmission:
|
if transmission:
|
||||||
line_widths = n.lines.s_nom_opt
|
line_widths = n.lines.s_nom_opt
|
||||||
link_widths = n.links.p_nom_opt
|
link_widths = n.links.p_nom_opt
|
||||||
@ -160,8 +160,6 @@ def plot_map(
|
|||||||
else:
|
else:
|
||||||
line_widths = n.lines.s_nom_opt - n.lines.s_nom_min
|
line_widths = n.lines.s_nom_opt - n.lines.s_nom_min
|
||||||
link_widths = n.links.p_nom_opt - n.links.p_nom_min
|
link_widths = n.links.p_nom_opt - n.links.p_nom_min
|
||||||
title = "added grid"
|
|
||||||
|
|
||||||
if transmission:
|
if transmission:
|
||||||
line_widths = n.lines.s_nom_opt
|
line_widths = n.lines.s_nom_opt
|
||||||
link_widths = n.links.p_nom_opt
|
link_widths = n.links.p_nom_opt
|
||||||
@ -262,12 +260,7 @@ def group_pipes(df, drop_direction=False):
|
|||||||
lambda x: f"H2 pipeline {x.bus0.replace(' H2', '')} -> {x.bus1.replace(' H2', '')}",
|
lambda x: f"H2 pipeline {x.bus0.replace(' H2', '')} -> {x.bus1.replace(' H2', '')}",
|
||||||
axis=1,
|
axis=1,
|
||||||
)
|
)
|
||||||
# group pipe lines connecting the same buses and rename them for plotting
|
return df.groupby(level=0).agg({"p_nom_opt": sum, "bus0": "first", "bus1": "first"})
|
||||||
pipe_capacity = df.groupby(level=0).agg(
|
|
||||||
{"p_nom_opt": sum, "bus0": "first", "bus1": "first"}
|
|
||||||
)
|
|
||||||
|
|
||||||
return pipe_capacity
|
|
||||||
|
|
||||||
|
|
||||||
def plot_h2_map(network, regions):
|
def plot_h2_map(network, regions):
|
||||||
@ -766,11 +759,13 @@ def plot_series(network, carrier="AC", name="test"):
|
|||||||
supply = pd.concat(
|
supply = pd.concat(
|
||||||
(
|
(
|
||||||
supply,
|
supply,
|
||||||
(-1)
|
(
|
||||||
* c.pnl["p" + str(i)]
|
-1
|
||||||
.loc[:, c.df.index[c.df["bus" + str(i)].isin(buses)]]
|
* c.pnl[f"p{str(i)}"]
|
||||||
.groupby(c.df.carrier, axis=1)
|
.loc[:, c.df.index[c.df[f"bus{str(i)}"].isin(buses)]]
|
||||||
.sum(),
|
.groupby(c.df.carrier, axis=1)
|
||||||
|
.sum()
|
||||||
|
),
|
||||||
),
|
),
|
||||||
axis=1,
|
axis=1,
|
||||||
)
|
)
|
||||||
@ -913,6 +908,158 @@ def plot_series(network, carrier="AC", name="test"):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def plot_map_perfect(
|
||||||
|
network,
|
||||||
|
components=["Link", "Store", "StorageUnit", "Generator"],
|
||||||
|
bus_size_factor=1.7e10,
|
||||||
|
):
|
||||||
|
n = network.copy()
|
||||||
|
assign_location(n)
|
||||||
|
# Drop non-electric buses so they don't clutter the plot
|
||||||
|
n.buses.drop(n.buses.index[n.buses.carrier != "AC"], inplace=True)
|
||||||
|
# investment periods
|
||||||
|
investments = n.snapshots.levels[0]
|
||||||
|
|
||||||
|
costs = {}
|
||||||
|
for comp in components:
|
||||||
|
df_c = n.df(comp)
|
||||||
|
if df_c.empty:
|
||||||
|
continue
|
||||||
|
df_c["nice_group"] = df_c.carrier.map(rename_techs_tyndp)
|
||||||
|
|
||||||
|
attr = "e_nom_opt" if comp == "Store" else "p_nom_opt"
|
||||||
|
|
||||||
|
active = pd.concat(
|
||||||
|
[n.get_active_assets(comp, inv_p).rename(inv_p) for inv_p in investments],
|
||||||
|
axis=1,
|
||||||
|
).astype(int)
|
||||||
|
capital_cost = n.df(comp)[attr] * n.df(comp).capital_cost
|
||||||
|
capital_cost_t = (
|
||||||
|
(active.mul(capital_cost, axis=0))
|
||||||
|
.groupby([n.df(comp).location, n.df(comp).nice_group])
|
||||||
|
.sum()
|
||||||
|
)
|
||||||
|
|
||||||
|
capital_cost_t.drop("load", level=1, inplace=True, errors="ignore")
|
||||||
|
|
||||||
|
costs[comp] = capital_cost_t
|
||||||
|
|
||||||
|
costs = pd.concat(costs).groupby(level=[1, 2]).sum()
|
||||||
|
costs.drop(costs[costs.sum(axis=1) == 0].index, inplace=True)
|
||||||
|
|
||||||
|
new_columns = preferred_order.intersection(costs.index.levels[1]).append(
|
||||||
|
costs.index.levels[1].difference(preferred_order)
|
||||||
|
)
|
||||||
|
costs = costs.reindex(new_columns, level=1)
|
||||||
|
|
||||||
|
for item in new_columns:
|
||||||
|
if item not in snakemake.config["plotting"]["tech_colors"]:
|
||||||
|
print(
|
||||||
|
"Warning!",
|
||||||
|
item,
|
||||||
|
"not in config/plotting/tech_colors, assign random color",
|
||||||
|
)
|
||||||
|
snakemake.config["plotting"]["tech_colors"] = "pink"
|
||||||
|
|
||||||
|
n.links.drop(
|
||||||
|
n.links.index[(n.links.carrier != "DC") & (n.links.carrier != "B2B")],
|
||||||
|
inplace=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# drop non-bus
|
||||||
|
to_drop = costs.index.levels[0].symmetric_difference(n.buses.index)
|
||||||
|
if len(to_drop) != 0:
|
||||||
|
print("dropping non-buses", to_drop)
|
||||||
|
costs.drop(to_drop, level=0, inplace=True, axis=0, errors="ignore")
|
||||||
|
|
||||||
|
# make sure they are removed from index
|
||||||
|
costs.index = pd.MultiIndex.from_tuples(costs.index.values)
|
||||||
|
|
||||||
|
# PDF has minimum width, so set these to zero
|
||||||
|
line_lower_threshold = 500.0
|
||||||
|
line_upper_threshold = 1e4
|
||||||
|
linewidth_factor = 2e3
|
||||||
|
ac_color = "gray"
|
||||||
|
dc_color = "m"
|
||||||
|
|
||||||
|
line_widths = n.lines.s_nom_opt
|
||||||
|
link_widths = n.links.p_nom_opt
|
||||||
|
linewidth_factor = 2e3
|
||||||
|
line_lower_threshold = 0.0
|
||||||
|
title = "Today's transmission"
|
||||||
|
|
||||||
|
line_widths[line_widths < line_lower_threshold] = 0.0
|
||||||
|
link_widths[link_widths < line_lower_threshold] = 0.0
|
||||||
|
|
||||||
|
line_widths[line_widths > line_upper_threshold] = line_upper_threshold
|
||||||
|
link_widths[link_widths > line_upper_threshold] = line_upper_threshold
|
||||||
|
|
||||||
|
for year in costs.columns:
|
||||||
|
fig, ax = plt.subplots(subplot_kw={"projection": ccrs.PlateCarree()})
|
||||||
|
fig.set_size_inches(7, 6)
|
||||||
|
fig.suptitle(year)
|
||||||
|
|
||||||
|
n.plot(
|
||||||
|
bus_sizes=costs[year] / bus_size_factor,
|
||||||
|
bus_colors=snakemake.config["plotting"]["tech_colors"],
|
||||||
|
line_colors=ac_color,
|
||||||
|
link_colors=dc_color,
|
||||||
|
line_widths=line_widths / linewidth_factor,
|
||||||
|
link_widths=link_widths / linewidth_factor,
|
||||||
|
ax=ax,
|
||||||
|
**map_opts,
|
||||||
|
)
|
||||||
|
|
||||||
|
sizes = [20, 10, 5]
|
||||||
|
labels = [f"{s} bEUR/a" for s in sizes]
|
||||||
|
sizes = [s / bus_size_factor * 1e9 for s in sizes]
|
||||||
|
|
||||||
|
legend_kw = dict(
|
||||||
|
loc="upper left",
|
||||||
|
bbox_to_anchor=(0.01, 1.06),
|
||||||
|
labelspacing=0.8,
|
||||||
|
frameon=False,
|
||||||
|
handletextpad=0,
|
||||||
|
title="system cost",
|
||||||
|
)
|
||||||
|
|
||||||
|
add_legend_circles(
|
||||||
|
ax,
|
||||||
|
sizes,
|
||||||
|
labels,
|
||||||
|
srid=n.srid,
|
||||||
|
patch_kw=dict(facecolor="lightgrey"),
|
||||||
|
legend_kw=legend_kw,
|
||||||
|
)
|
||||||
|
|
||||||
|
sizes = [10, 5]
|
||||||
|
labels = [f"{s} GW" for s in sizes]
|
||||||
|
scale = 1e3 / linewidth_factor
|
||||||
|
sizes = [s * scale for s in sizes]
|
||||||
|
|
||||||
|
legend_kw = dict(
|
||||||
|
loc="upper left",
|
||||||
|
bbox_to_anchor=(0.27, 1.06),
|
||||||
|
frameon=False,
|
||||||
|
labelspacing=0.8,
|
||||||
|
handletextpad=1,
|
||||||
|
title=title,
|
||||||
|
)
|
||||||
|
|
||||||
|
add_legend_lines(
|
||||||
|
ax, sizes, labels, patch_kw=dict(color="lightgrey"), legend_kw=legend_kw
|
||||||
|
)
|
||||||
|
|
||||||
|
legend_kw = dict(
|
||||||
|
bbox_to_anchor=(1.52, 1.04),
|
||||||
|
frameon=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
fig.savefig(
|
||||||
|
snakemake.output[f"map_{year}"], transparent=True, bbox_inches="tight"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
if "snakemake" not in globals():
|
if "snakemake" not in globals():
|
||||||
from _helpers import mock_snakemake
|
from _helpers import mock_snakemake
|
||||||
@ -921,10 +1068,9 @@ if __name__ == "__main__":
|
|||||||
"plot_network",
|
"plot_network",
|
||||||
simpl="",
|
simpl="",
|
||||||
opts="",
|
opts="",
|
||||||
clusters="5",
|
clusters="37",
|
||||||
ll="v1.5",
|
ll="v1.0",
|
||||||
sector_opts="CO2L0-1H-T-H-B-I-A-solar+p3-dist1",
|
sector_opts="4380H-T-H-B-I-A-solar+p3-dist1",
|
||||||
planning_horizons="2030",
|
|
||||||
)
|
)
|
||||||
|
|
||||||
logging.basicConfig(level=snakemake.config["logging"]["level"])
|
logging.basicConfig(level=snakemake.config["logging"]["level"])
|
||||||
@ -938,16 +1084,23 @@ if __name__ == "__main__":
|
|||||||
if map_opts["boundaries"] is None:
|
if map_opts["boundaries"] is None:
|
||||||
map_opts["boundaries"] = regions.total_bounds[[0, 2, 1, 3]] + [-1, 1, -1, 1]
|
map_opts["boundaries"] = regions.total_bounds[[0, 2, 1, 3]] + [-1, 1, -1, 1]
|
||||||
|
|
||||||
plot_map(
|
if snakemake.params["foresight"] == "perfect":
|
||||||
n,
|
plot_map_perfect(
|
||||||
components=["generators", "links", "stores", "storage_units"],
|
n,
|
||||||
bus_size_factor=2e10,
|
components=["Link", "Store", "StorageUnit", "Generator"],
|
||||||
transmission=False,
|
bus_size_factor=2e10,
|
||||||
)
|
)
|
||||||
|
else:
|
||||||
|
plot_map(
|
||||||
|
n,
|
||||||
|
components=["generators", "links", "stores", "storage_units"],
|
||||||
|
bus_size_factor=2e10,
|
||||||
|
transmission=False,
|
||||||
|
)
|
||||||
|
|
||||||
plot_h2_map(n, regions)
|
plot_h2_map(n, regions)
|
||||||
plot_ch4_map(n)
|
plot_ch4_map(n)
|
||||||
plot_map_without(n)
|
plot_map_without(n)
|
||||||
|
|
||||||
# plot_series(n, carrier="AC", name=suffix)
|
# plot_series(n, carrier="AC", name=suffix)
|
||||||
# plot_series(n, carrier="heat", name=suffix)
|
# plot_series(n, carrier="heat", name=suffix)
|
||||||
|
scripts/plot_statistics.py (new file, 114 lines)
@@ -0,0 +1,114 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: MIT

import matplotlib.pyplot as plt
import pypsa
import seaborn as sns
from _helpers import configure_logging

sns.set_theme("paper", style="whitegrid")


if __name__ == "__main__":
    if "snakemake" not in globals():
        from _helpers import mock_snakemake

        snakemake = mock_snakemake(
            "plot_elec_statistics",
            simpl="",
            opts="Ept-12h",
            clusters="37",
            ll="v1.0",
        )
    configure_logging(snakemake)

    n = pypsa.Network(snakemake.input.network)

    n.loads.carrier = "load"
    n.carriers.loc["load", ["nice_name", "color"]] = "Load", "darkred"
    colors = n.carriers.set_index("nice_name").color.where(
        lambda s: s != "", "lightgrey"
    )

    def rename_index(ds):
        specific = ds.index.map(lambda x: f"{x[1]}\n({x[0]})")
        generic = ds.index.get_level_values("carrier")
        duplicated = generic.duplicated(keep=False)
        index = specific.where(duplicated, generic)
        return ds.set_axis(index)

    def plot_static_per_carrier(ds, ax, drop_zero=True):
        if drop_zero:
            ds = ds[ds != 0]
        ds = ds.dropna()
        c = colors[ds.index.get_level_values("carrier")]
        ds = ds.pipe(rename_index)
        label = f"{ds.attrs['name']} [{ds.attrs['unit']}]"
        ds.plot.barh(color=c.values, xlabel=label, ax=ax)
        ax.grid(axis="y")

    fig, ax = plt.subplots()
    ds = n.statistics.capacity_factor().dropna()
    plot_static_per_carrier(ds, ax)
    fig.savefig(snakemake.output.capacity_factor_bar)

    fig, ax = plt.subplots()
    ds = n.statistics.installed_capacity().dropna()
    ds = ds.drop("Line")
    ds = ds.drop(("Generator", "Load"))
    ds = ds / 1e3
    ds.attrs["unit"] = "GW"
    plot_static_per_carrier(ds, ax)
    fig.savefig(snakemake.output.installed_capacity_bar)

    fig, ax = plt.subplots()
    ds = n.statistics.optimal_capacity()
    ds = ds.drop("Line")
    ds = ds.drop(("Generator", "Load"))
    ds = ds / 1e3
    ds.attrs["unit"] = "GW"
    plot_static_per_carrier(ds, ax)
    fig.savefig(snakemake.output.optimal_capacity_bar)

    fig, ax = plt.subplots()
    ds = n.statistics.capex()
    plot_static_per_carrier(ds, ax)
    fig.savefig(snakemake.output.capital_expenditure_bar)

    fig, ax = plt.subplots()
    ds = n.statistics.opex()
    plot_static_per_carrier(ds, ax)
    fig.savefig(snakemake.output.operational_expenditure_bar)

    fig, ax = plt.subplots()
    ds = n.statistics.curtailment()
    plot_static_per_carrier(ds, ax)
    fig.savefig(snakemake.output.curtailment_bar)

    fig, ax = plt.subplots()
    ds = n.statistics.supply()
    ds = ds.drop("Line")
    ds = ds / 1e6
    ds.attrs["unit"] = "TWh"
    plot_static_per_carrier(ds, ax)
    fig.savefig(snakemake.output.supply_bar)

    fig, ax = plt.subplots()
    ds = n.statistics.withdrawal()
    ds = ds.drop("Line")
    ds = ds / -1e6
    ds.attrs["unit"] = "TWh"
    plot_static_per_carrier(ds, ax)
    fig.savefig(snakemake.output.withdrawal_bar)

    fig, ax = plt.subplots()
    ds = n.statistics.market_value()
    plot_static_per_carrier(ds, ax)
    fig.savefig(snakemake.output.market_value_bar)

    # touch file
    with open(snakemake.output.barplots_touch, "a"):
        pass
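# Illustrative sketch (toy data): rename_index above keeps the plain carrier
# name unless the same carrier appears for several components, in which case
# the component is appended to disambiguate the bar labels.
import pandas as pd

idx = pd.MultiIndex.from_tuples(
    [("Generator", "Solar"), ("StorageUnit", "Hydro"), ("Generator", "Hydro")],
    names=["component", "carrier"],
)
ds = pd.Series([1.0, 2.0, 3.0], index=idx)

specific = ds.index.map(lambda x: f"{x[1]}\n({x[0]})")
generic = ds.index.get_level_values("carrier")
duplicated = generic.duplicated(keep=False)
print(ds.set_axis(specific.where(duplicated, generic)))
# "Solar" stays as-is; the two "Hydro" entries get their component appended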
@ -49,6 +49,10 @@ def rename_techs(label):
|
|||||||
# "H2 Fuel Cell": "hydrogen storage",
|
# "H2 Fuel Cell": "hydrogen storage",
|
||||||
# "H2 pipeline": "hydrogen storage",
|
# "H2 pipeline": "hydrogen storage",
|
||||||
"battery": "battery storage",
|
"battery": "battery storage",
|
||||||
|
"H2 for industry": "H2 for industry",
|
||||||
|
"land transport fuel cell": "land transport fuel cell",
|
||||||
|
"land transport oil": "land transport oil",
|
||||||
|
"oil shipping": "shipping oil",
|
||||||
# "CC": "CC"
|
# "CC": "CC"
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -157,11 +161,11 @@ def plot_costs():
|
|||||||
df.index.difference(preferred_order)
|
df.index.difference(preferred_order)
|
||||||
)
|
)
|
||||||
|
|
||||||
new_columns = df.sum().sort_values().index
|
# new_columns = df.sum().sort_values().index
|
||||||
|
|
||||||
fig, ax = plt.subplots(figsize=(12, 8))
|
fig, ax = plt.subplots(figsize=(12, 8))
|
||||||
|
|
||||||
df.loc[new_index, new_columns].T.plot(
|
df.loc[new_index].T.plot(
|
||||||
kind="bar",
|
kind="bar",
|
||||||
ax=ax,
|
ax=ax,
|
||||||
stacked=True,
|
stacked=True,
|
||||||
@ -213,17 +217,22 @@ def plot_energy():
|
|||||||
|
|
||||||
logger.info(f"Total energy of {round(df.sum()[0])} TWh/a")
|
logger.info(f"Total energy of {round(df.sum()[0])} TWh/a")
|
||||||
|
|
||||||
|
if df.empty:
|
||||||
|
fig, ax = plt.subplots(figsize=(12, 8))
|
||||||
|
fig.savefig(snakemake.output.energy, bbox_inches="tight")
|
||||||
|
return
|
||||||
|
|
||||||
new_index = preferred_order.intersection(df.index).append(
|
new_index = preferred_order.intersection(df.index).append(
|
||||||
df.index.difference(preferred_order)
|
df.index.difference(preferred_order)
|
||||||
)
|
)
|
||||||
|
|
||||||
new_columns = df.columns.sort_values()
|
# new_columns = df.columns.sort_values()
|
||||||
|
|
||||||
fig, ax = plt.subplots(figsize=(12, 8))
|
fig, ax = plt.subplots(figsize=(12, 8))
|
||||||
|
|
||||||
logger.debug(df.loc[new_index, new_columns])
|
logger.debug(df.loc[new_index])
|
||||||
|
|
||||||
df.loc[new_index, new_columns].T.plot(
|
df.loc[new_index].T.plot(
|
||||||
kind="bar",
|
kind="bar",
|
||||||
ax=ax,
|
ax=ax,
|
||||||
stacked=True,
|
stacked=True,
|
||||||
@ -267,8 +276,6 @@ def plot_balances():
|
|||||||
i for i in balances_df.index.levels[0] if i not in co2_carriers
|
i for i in balances_df.index.levels[0] if i not in co2_carriers
|
||||||
]
|
]
|
||||||
|
|
||||||
fig, ax = plt.subplots(figsize=(12, 8))
|
|
||||||
|
|
||||||
for k, v in balances.items():
|
for k, v in balances.items():
|
||||||
df = balances_df.loc[v]
|
df = balances_df.loc[v]
|
||||||
df = df.groupby(df.index.get_level_values(2)).sum()
|
df = df.groupby(df.index.get_level_values(2)).sum()
|
||||||
@ -279,7 +286,7 @@ def plot_balances():
|
|||||||
# remove trailing link ports
|
# remove trailing link ports
|
||||||
df.index = [
|
df.index = [
|
||||||
i[:-1]
|
i[:-1]
|
||||||
if ((i not in ["co2", "NH3"]) and (i[-1:] in ["0", "1", "2", "3"]))
|
if ((i not in ["co2", "NH3", "H2"]) and (i[-1:] in ["0", "1", "2", "3"]))
|
||||||
else i
|
else i
|
||||||
for i in df.index
|
for i in df.index
|
||||||
]
|
]
|
||||||
@ -290,11 +297,7 @@ def plot_balances():
|
|||||||
df.abs().max(axis=1) < snakemake.params.plotting["energy_threshold"] / 10
|
df.abs().max(axis=1) < snakemake.params.plotting["energy_threshold"] / 10
|
||||||
]
|
]
|
||||||
|
|
||||||
if v[0] in co2_carriers:
|
units = "MtCO2/a" if v[0] in co2_carriers else "TWh/a"
|
||||||
units = "MtCO2/a"
|
|
||||||
else:
|
|
||||||
units = "TWh/a"
|
|
||||||
|
|
||||||
logger.debug(
|
logger.debug(
|
||||||
f"Dropping technology energy balance smaller than {snakemake.params['plotting']['energy_threshold']/10} {units}"
|
f"Dropping technology energy balance smaller than {snakemake.params['plotting']['energy_threshold']/10} {units}"
|
||||||
)
|
)
|
||||||
@ -313,6 +316,8 @@ def plot_balances():
|
|||||||
|
|
||||||
new_columns = df.columns.sort_values()
|
new_columns = df.columns.sort_values()
|
||||||
|
|
||||||
|
fig, ax = plt.subplots(figsize=(12, 8))
|
||||||
|
|
||||||
df.loc[new_index, new_columns].T.plot(
|
df.loc[new_index, new_columns].T.plot(
|
||||||
kind="bar",
|
kind="bar",
|
||||||
ax=ax,
|
ax=ax,
|
||||||
@ -345,8 +350,6 @@ def plot_balances():
|
|||||||
|
|
||||||
fig.savefig(snakemake.output.balances[:-10] + k + ".pdf", bbox_inches="tight")
|
fig.savefig(snakemake.output.balances[:-10] + k + ".pdf", bbox_inches="tight")
|
||||||
|
|
||||||
plt.cla()
|
|
||||||
|
|
||||||
|
|
||||||
def historical_emissions(countries):
|
def historical_emissions(countries):
|
||||||
"""
|
"""
|
||||||
@ -354,8 +357,7 @@ def historical_emissions(countries):
|
|||||||
"""
|
"""
|
||||||
# https://www.eea.europa.eu/data-and-maps/data/national-emissions-reported-to-the-unfccc-and-to-the-eu-greenhouse-gas-monitoring-mechanism-16
|
# https://www.eea.europa.eu/data-and-maps/data/national-emissions-reported-to-the-unfccc-and-to-the-eu-greenhouse-gas-monitoring-mechanism-16
|
||||||
# downloaded 201228 (modified by EEA last on 201221)
|
# downloaded 201228 (modified by EEA last on 201221)
|
||||||
fn = "data/eea/UNFCCC_v23.csv"
|
df = pd.read_csv(snakemake.input.co2, encoding="latin-1", low_memory=False)
|
||||||
df = pd.read_csv(fn, encoding="latin-1")
|
|
||||||
df.loc[df["Year"] == "1985-1987", "Year"] = 1986
|
df.loc[df["Year"] == "1985-1987", "Year"] = 1986
|
||||||
df["Year"] = df["Year"].astype(int)
|
df["Year"] = df["Year"].astype(int)
|
||||||
df = df.set_index(
|
df = df.set_index(
|
||||||
@ -379,15 +381,21 @@ def historical_emissions(countries):
|
|||||||
e["waste management"] = "5 - Waste management"
|
e["waste management"] = "5 - Waste management"
|
||||||
e["other"] = "6 - Other Sector"
|
e["other"] = "6 - Other Sector"
|
||||||
e["indirect"] = "ind_CO2 - Indirect CO2"
|
e["indirect"] = "ind_CO2 - Indirect CO2"
|
||||||
e["total wL"] = "Total (with LULUCF)"
|
e["other LULUCF"] = "4.H - Other LULUCF"
|
||||||
e["total woL"] = "Total (without LULUCF)"
|
|
||||||
|
|
||||||
pol = ["CO2"] # ["All greenhouse gases - (CO2 equivalent)"]
|
pol = ["CO2"] # ["All greenhouse gases - (CO2 equivalent)"]
|
||||||
if "GB" in countries:
|
if "GB" in countries:
|
||||||
countries.remove("GB")
|
countries.remove("GB")
|
||||||
countries.append("UK")
|
countries.append("UK")
|
||||||
|
|
||||||
year = np.arange(1990, 2018).tolist()
|
year = df.index.levels[0][df.index.levels[0] >= 1990]
|
||||||
|
|
||||||
|
missing = pd.Index(countries).difference(df.index.levels[2])
|
||||||
|
if not missing.empty:
|
||||||
|
logger.warning(
|
||||||
|
f"The following countries are missing and not considered when plotting historic CO2 emissions: {missing}"
|
||||||
|
)
|
||||||
|
countries = pd.Index(df.index.levels[2]).intersection(countries)
|
||||||
|
|
||||||
idx = pd.IndexSlice
|
idx = pd.IndexSlice
|
||||||
co2_totals = (
|
co2_totals = (
|
||||||
@ -444,31 +452,57 @@ def plot_carbon_budget_distribution(input_eurostat):
|
|||||||
|
|
||||||
sns.set()
|
sns.set()
|
||||||
sns.set_style("ticks")
|
sns.set_style("ticks")
|
||||||
plt.style.use("seaborn-ticks")
|
|
||||||
plt.rcParams["xtick.direction"] = "in"
|
plt.rcParams["xtick.direction"] = "in"
|
||||||
plt.rcParams["ytick.direction"] = "in"
|
plt.rcParams["ytick.direction"] = "in"
|
||||||
plt.rcParams["xtick.labelsize"] = 20
|
plt.rcParams["xtick.labelsize"] = 20
|
||||||
plt.rcParams["ytick.labelsize"] = 20
|
plt.rcParams["ytick.labelsize"] = 20
|
||||||
|
|
||||||
|
emissions_scope = snakemake.params.emissions_scope
|
||||||
|
report_year = snakemake.params.eurostat_report_year
|
||||||
|
input_co2 = snakemake.input.co2
|
||||||
|
|
||||||
|
# historic emissions
|
||||||
|
countries = snakemake.params.countries
|
||||||
|
e_1990 = co2_emissions_year(
|
||||||
|
countries,
|
||||||
|
input_eurostat,
|
||||||
|
opts,
|
||||||
|
emissions_scope,
|
||||||
|
report_year,
|
||||||
|
input_co2,
|
||||||
|
year=1990,
|
||||||
|
)
|
||||||
|
emissions = historical_emissions(countries)
|
||||||
|
# add other years https://sdi.eea.europa.eu/data/0569441f-2853-4664-a7cd-db969ef54de0
|
||||||
|
emissions.loc[2019] = 2.971372
|
||||||
|
emissions.loc[2020] = 2.691958
|
||||||
|
emissions.loc[2021] = 2.869355
|
||||||
|
|
||||||
|
if snakemake.config["foresight"] == "myopic":
|
||||||
|
path_cb = "results/" + snakemake.params.RDIR + "/csvs/"
|
||||||
|
co2_cap = pd.read_csv(path_cb + "carbon_budget_distribution.csv", index_col=0)[
|
||||||
|
["cb"]
|
||||||
|
]
|
||||||
|
co2_cap *= e_1990
|
||||||
|
else:
|
||||||
|
supply_energy = pd.read_csv(
|
||||||
|
snakemake.input.balances, index_col=[0, 1, 2], header=[0, 1, 2, 3]
|
||||||
|
)
|
||||||
|
co2_cap = (
|
||||||
|
supply_energy.loc["co2"].droplevel(0).drop("co2").sum().unstack().T / 1e9
|
||||||
|
)
|
||||||
|
co2_cap.rename(index=lambda x: int(x), inplace=True)
|
||||||
|
|
||||||
plt.figure(figsize=(10, 7))
|
plt.figure(figsize=(10, 7))
|
||||||
gs1 = gridspec.GridSpec(1, 1)
|
gs1 = gridspec.GridSpec(1, 1)
|
||||||
ax1 = plt.subplot(gs1[0, 0])
|
ax1 = plt.subplot(gs1[0, 0])
|
||||||
ax1.set_ylabel("CO$_2$ emissions (Gt per year)", fontsize=22)
|
ax1.set_ylabel("CO$_2$ emissions \n [Gt per year]", fontsize=22)
|
||||||
ax1.set_ylim([0, 5])
|
# ax1.set_ylim([0, 5])
|
||||||
ax1.set_xlim([1990, snakemake.params.planning_horizons[-1] + 1])
|
ax1.set_xlim([1990, snakemake.params.planning_horizons[-1] + 1])
|
||||||
|
|
||||||
path_cb = "results/" + snakemake.params.RDIR + "/csvs/"
|
|
||||||
countries = snakemake.params.countries
|
|
||||||
e_1990 = co2_emissions_year(countries, input_eurostat, opts, year=1990)
|
|
||||||
CO2_CAP = pd.read_csv(path_cb + "carbon_budget_distribution.csv", index_col=0)
|
|
||||||
|
|
||||||
ax1.plot(e_1990 * CO2_CAP[o], linewidth=3, color="dodgerblue", label=None)
|
|
||||||
|
|
||||||
emissions = historical_emissions(countries)
|
|
||||||
|
|
||||||
ax1.plot(emissions, color="black", linewidth=3, label=None)
|
ax1.plot(emissions, color="black", linewidth=3, label=None)
|
||||||
|
|
||||||
# plot committed and uder-discussion targets
|
# plot committed and under-discussion targets
|
||||||
# (notice that historical emissions include all countries in the
|
# (notice that historical emissions include all countries in the
|
||||||
# network, but targets refer to EU)
|
# network, but targets refer to EU)
|
||||||
ax1.plot(
|
ax1.plot(
|
||||||
@ -485,7 +519,7 @@ def plot_carbon_budget_distribution(input_eurostat):
|
|||||||
[0.45 * emissions[1990]],
|
[0.45 * emissions[1990]],
|
||||||
marker="*",
|
marker="*",
|
||||||
markersize=12,
|
markersize=12,
|
||||||
markerfacecolor="white",
|
markerfacecolor="black",
|
||||||
markeredgecolor="black",
|
markeredgecolor="black",
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -509,21 +543,7 @@ def plot_carbon_budget_distribution(input_eurostat):
|
|||||||
|
|
||||||
ax1.plot(
|
ax1.plot(
|
||||||
[2050],
|
[2050],
|
||||||
[0.01 * emissions[1990]],
|
[0.0 * emissions[1990]],
|
||||||
marker="*",
|
|
||||||
markersize=12,
|
|
||||||
markerfacecolor="white",
|
|
||||||
linewidth=0,
|
|
||||||
markeredgecolor="black",
|
|
||||||
label="EU under-discussion target",
|
|
||||||
zorder=10,
|
|
||||||
clip_on=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
ax1.plot(
|
|
||||||
[2050],
|
|
||||||
[0.125 * emissions[1990]],
|
|
||||||
"ro",
|
|
||||||
marker="*",
|
marker="*",
|
||||||
markersize=12,
|
markersize=12,
|
||||||
markerfacecolor="black",
|
markerfacecolor="black",
|
||||||
@ -531,12 +551,16 @@ def plot_carbon_budget_distribution(input_eurostat):
|
|||||||
label="EU committed target",
|
label="EU committed target",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
for col in co2_cap.columns:
|
||||||
|
ax1.plot(co2_cap[col], linewidth=3, label=col)
|
||||||
|
|
||||||
ax1.legend(
|
ax1.legend(
|
||||||
fancybox=True, fontsize=18, loc=(0.01, 0.01), facecolor="white", frameon=True
|
fancybox=True, fontsize=18, loc=(0.01, 0.01), facecolor="white", frameon=True
|
||||||
)
|
)
|
||||||
|
|
||||||
path_cb_plot = "results/" + snakemake.params.RDIR + "/graphs/"
|
plt.grid(axis="y")
|
||||||
plt.savefig(path_cb_plot + "carbon_budget_plot.pdf", dpi=300)
|
path = snakemake.output.balances.split("balances")[0] + "carbon_budget.pdf"
|
||||||
|
plt.savefig(path, bbox_inches="tight")
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
@ -557,6 +581,5 @@ if __name__ == "__main__":
|
|||||||
|
|
||||||
for sector_opts in snakemake.params.sector_opts:
|
for sector_opts in snakemake.params.sector_opts:
|
||||||
opts = sector_opts.split("-")
|
opts = sector_opts.split("-")
|
||||||
for o in opts:
|
if any("cb" in o for o in opts) or snakemake.config["foresight"] == "perfect":
|
||||||
if "cb" in o:
|
plot_carbon_budget_distribution(snakemake.input.eurostat)
|
||||||
plot_carbon_budget_distribution(snakemake.input.eurostat)
|
|
||||||
|
234
scripts/plot_validation_cross_border_flows.py
Normal file
234
scripts/plot_validation_cross_border_flows.py
Normal file
@ -0,0 +1,234 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
import country_converter as coco
|
||||||
|
import matplotlib.pyplot as plt
|
||||||
|
import pandas as pd
|
||||||
|
import pypsa
|
||||||
|
import seaborn as sns
|
||||||
|
from _helpers import configure_logging
|
||||||
|
|
||||||
|
sns.set_theme("paper", style="whitegrid")
|
||||||
|
|
||||||
|
cc = coco.CountryConverter()
|
||||||
|
|
||||||
|
color_country = {
|
||||||
|
"AL": "#440154",
|
||||||
|
"AT": "#482677",
|
||||||
|
"BA": "#43398e",
|
||||||
|
"BE": "#3953a4",
|
||||||
|
"BG": "#2c728e",
|
||||||
|
"CH": "#228b8d",
|
||||||
|
"CZ": "#1f9d8a",
|
||||||
|
"DE": "#29af7f",
|
||||||
|
"DK": "#3fbc73",
|
||||||
|
"EE": "#5ec962",
|
||||||
|
"ES": "#84d44b",
|
||||||
|
"FI": "#addc30",
|
||||||
|
"FR": "#d8e219",
|
||||||
|
"GB": "#fde725",
|
||||||
|
"GR": "#f0f921",
|
||||||
|
"HR": "#f1c25e",
|
||||||
|
"HU": "#f4a784",
|
||||||
|
"IE": "#f78f98",
|
||||||
|
"IT": "#f87ea0",
|
||||||
|
"LT": "#f87a9a",
|
||||||
|
"LU": "#f57694",
|
||||||
|
"LV": "#f3758d",
|
||||||
|
"ME": "#f37685",
|
||||||
|
"MK": "#f37b7c",
|
||||||
|
"NL": "#FF6666",
|
||||||
|
"NO": "#FF3333",
|
||||||
|
"PL": "#eb0000",
|
||||||
|
"PT": "#d70000",
|
||||||
|
"RO": "#c00000",
|
||||||
|
"RS": "#a50000",
|
||||||
|
"SE": "#8a0000",
|
||||||
|
"SI": "#6f0000",
|
||||||
|
"SK": "#550000",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def sort_one_country(country, df):
|
||||||
|
indices = [link for link in df.columns if country in link]
|
||||||
|
df_country = df[indices].copy()
|
||||||
|
for link in df_country.columns:
|
||||||
|
if country in link[5:]:
|
||||||
|
df_country[link] = -df_country[link]
|
||||||
|
link_reverse = str(link[5:] + " - " + link[:2])
|
||||||
|
df_country = df_country.rename(columns={link: link_reverse})
|
||||||
|
|
||||||
|
return df_country.reindex(sorted(df_country.columns), axis=1)
|
||||||
|
|
||||||
|
|
||||||
|
def cross_border_time_series(countries, data):
|
||||||
|
fig, ax = plt.subplots(2 * len(countries), 1, figsize=(15, 10 * len(countries)))
|
||||||
|
axis = 0
|
||||||
|
|
||||||
|
for country in countries:
|
||||||
|
ymin = 0
|
||||||
|
ymax = 0
|
||||||
|
for df in data:
|
||||||
|
df_country = sort_one_country(country, df)
|
||||||
|
df_neg, df_pos = df_country.clip(upper=0), df_country.clip(lower=0)
|
||||||
|
|
||||||
|
color = [color_country[link[5:]] for link in df_country.columns]
|
||||||
|
|
||||||
|
df_pos.plot.area(
|
||||||
|
ax=ax[axis], stacked=True, linewidth=0.0, color=color, ylim=[-1, 1]
|
||||||
|
)
|
||||||
|
|
||||||
|
df_neg.plot.area(
|
||||||
|
ax=ax[axis], stacked=True, linewidth=0.0, color=color, ylim=[-1, 1]
|
||||||
|
)
|
||||||
|
title = "Historic" if (axis % 2) == 0 else "Optimized"
|
||||||
|
ax[axis].set_title(
|
||||||
|
f"{title} Import / Export for " + cc.convert(country, to="name_short")
|
||||||
|
)
|
||||||
|
|
||||||
|
# Custom legend elements
|
||||||
|
legend_elements = []
|
||||||
|
|
||||||
|
for link in df_country.columns:
|
||||||
|
legend_elements = legend_elements + [
|
||||||
|
plt.fill_between(
|
||||||
|
[],
|
||||||
|
[],
|
||||||
|
color=color_country[link[5:]],
|
||||||
|
label=cc.convert(link[5:], to="name_short"),
|
||||||
|
)
|
||||||
|
]
|
||||||
|
|
||||||
|
# Create the legend
|
||||||
|
ax[axis].legend(handles=legend_elements, loc="upper right")
|
||||||
|
|
||||||
|
# rescale the y axis
|
||||||
|
neg_min = df_neg.sum(axis=1).min() * 1.2
|
||||||
|
if neg_min < ymin:
|
||||||
|
ymin = neg_min
|
||||||
|
|
||||||
|
pos_max = df_pos.sum(axis=1).max() * 1.2
|
||||||
|
if pos_max < ymax:
|
||||||
|
ymax = pos_max
|
||||||
|
|
||||||
|
axis = axis + 1
|
||||||
|
|
||||||
|
for x in range(axis - 2, axis):
|
||||||
|
ax[x].set_ylim([neg_min, pos_max])
|
||||||
|
|
||||||
|
fig.savefig(snakemake.output.trade_time_series, bbox_inches="tight")
|
||||||
|
|
||||||
|
|
||||||
|
def cross_border_bar(countries, data):
|
||||||
|
df_positive = pd.DataFrame()
|
||||||
|
df_negative = pd.DataFrame()
|
||||||
|
color = []
|
||||||
|
|
||||||
|
for country in countries:
|
||||||
|
order = 0
|
||||||
|
for df in data:
|
||||||
|
df_country = sort_one_country(country, df)
|
||||||
|
df_neg, df_pos = df_country.clip(upper=0), df_country.clip(lower=0)
|
||||||
|
|
||||||
|
title = "Historic" if (order % 2) == 0 else "Optimized"
|
||||||
|
df_positive_new = pd.DataFrame(data=df_pos.sum()).T.rename(
|
||||||
|
{0: f"{title} " + cc.convert(country, to="name_short")}
|
||||||
|
)
|
||||||
|
df_negative_new = pd.DataFrame(data=df_neg.sum()).T.rename(
|
||||||
|
{0: f"{title} " + cc.convert(country, to="name_short")}
|
||||||
|
)
|
||||||
|
|
||||||
|
df_positive = pd.concat([df_positive_new, df_positive])
|
||||||
|
df_negative = pd.concat([df_negative_new, df_negative])
|
||||||
|
|
||||||
|
order = order + 1
|
||||||
|
|
||||||
|
color = [color_country[link[5:]] for link in df_positive.columns]
|
||||||
|
|
||||||
|
fig, ax = plt.subplots(figsize=(15, 60))
|
||||||
|
|
||||||
|
df_positive.plot.barh(ax=ax, stacked=True, color=color, zorder=2)
|
||||||
|
df_negative.plot.barh(ax=ax, stacked=True, color=color, zorder=2)
|
||||||
|
|
||||||
|
plt.grid(axis="x", zorder=0)
|
||||||
|
plt.grid(axis="y", zorder=0)
|
||||||
|
|
||||||
|
# Custom legend elements
|
||||||
|
legend_elements = []
|
||||||
|
|
||||||
|
for country in list(color_country.keys()):
|
||||||
|
legend_elements = legend_elements + [
|
||||||
|
plt.fill_between(
|
||||||
|
[],
|
||||||
|
[],
|
||||||
|
color=color_country[country],
|
||||||
|
label=cc.convert(country, to="name_short"),
|
||||||
|
)
|
||||||
|
]
|
||||||
|
|
||||||
|
# Create the legend
|
||||||
|
plt.legend(handles=legend_elements, loc="upper right")
|
||||||
|
|
||||||
|
fig.savefig(snakemake.output.cross_border_bar, bbox_inches="tight")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
if "snakemake" not in globals():
|
||||||
|
from _helpers import mock_snakemake
|
||||||
|
|
||||||
|
snakemake = mock_snakemake(
|
||||||
|
"plot_electricity_prices",
|
||||||
|
simpl="",
|
||||||
|
opts="Ept-12h",
|
||||||
|
clusters="37",
|
||||||
|
ll="v1.0",
|
||||||
|
)
|
||||||
|
configure_logging(snakemake)
|
||||||
|
|
||||||
|
countries = snakemake.params.countries
|
||||||
|
|
||||||
|
n = pypsa.Network(snakemake.input.network)
|
||||||
|
n.loads.carrier = "load"
|
||||||
|
|
||||||
|
historic = pd.read_csv(
|
||||||
|
snakemake.input.cross_border_flows,
|
||||||
|
index_col=0,
|
||||||
|
header=0,
|
||||||
|
parse_dates=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
if len(historic.index) > len(n.snapshots):
|
||||||
|
historic = historic.resample(n.snapshots.inferred_freq).mean().loc[n.snapshots]
|
||||||
|
|
||||||
|
# Preparing network data to be shaped similar to ENTSOE datastructure
|
||||||
|
optimized_links = n.links_t.p0.rename(
|
||||||
|
columns=dict(n.links.bus0.str[:2] + " - " + n.links.bus1.str[:2])
|
||||||
|
)
|
||||||
|
optimized_lines = n.lines_t.p0.rename(
|
||||||
|
columns=dict(n.lines.bus0.str[:2] + " - " + n.lines.bus1.str[:2])
|
||||||
|
)
|
||||||
|
optimized = pd.concat([optimized_links, optimized_lines], axis=1)
|
||||||
|
|
||||||
|
# Drop internal country connection
|
||||||
|
optimized.drop(
|
||||||
|
[c for c in optimized.columns if c[:2] == c[5:]], axis=1, inplace=True
|
||||||
|
)
|
||||||
|
|
||||||
|
# align columns name
|
||||||
|
for c1 in optimized.columns:
|
||||||
|
for c2 in optimized.columns:
|
||||||
|
if c1[:2] == c2[5:] and c2[:2] == c1[5:]:
|
||||||
|
optimized = optimized.rename(columns={c1: c2})
|
||||||
|
|
||||||
|
optimized = optimized.groupby(lambda x: x, axis=1).sum()
|
||||||
|
|
||||||
|
cross_border_bar(countries, [historic, optimized])
|
||||||
|
|
||||||
|
cross_border_time_series(countries, [historic, optimized])
|
||||||
|
|
||||||
|
# touch file
|
||||||
|
with open(snakemake.output.plots_touch, "a"):
|
||||||
|
pass
|
scripts/plot_validation_electricity_prices.py (new file, 63 lines)
@@ -0,0 +1,63 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: MIT

import matplotlib.pyplot as plt
import pandas as pd
import pypsa
import seaborn as sns
from _helpers import configure_logging
from pypsa.statistics import get_bus_and_carrier

sns.set_theme("paper", style="whitegrid")

if __name__ == "__main__":
    if "snakemake" not in globals():
        from _helpers import mock_snakemake

        snakemake = mock_snakemake(
            "plot_electricity_prices",
            simpl="",
            opts="Ept-12h",
            clusters="37",
            ll="v1.0",
        )
    configure_logging(snakemake)

    n = pypsa.Network(snakemake.input.network)
    n.loads.carrier = "load"

    historic = pd.read_csv(
        snakemake.input.electricity_prices,
        index_col=0,
        header=0,
        parse_dates=True,
    )

    if len(historic.index) > len(n.snapshots):
        historic = historic.resample(n.snapshots.inferred_freq).mean().loc[n.snapshots]

    optimized = n.buses_t.marginal_price.groupby(n.buses.country, axis=1).mean()

    data = pd.concat([historic, optimized], keys=["Historic", "Optimized"], axis=1)
    data.columns.names = ["Kind", "Country"]

    fig, ax = plt.subplots(figsize=(6, 6))

    df = data.mean().unstack().T
    df.plot.barh(ax=ax, xlabel="Electricity Price [€/MWh]", ylabel="")
    ax.grid(axis="y")
    fig.savefig(snakemake.output.price_bar, bbox_inches="tight")

    fig, ax = plt.subplots()

    df = data.groupby(level="Kind", axis=1).mean()
    df.plot(ax=ax, xlabel="", ylabel="Electricity Price [€/MWh]", alpha=0.8)
    ax.grid(axis="x")
    fig.savefig(snakemake.output.price_line, bbox_inches="tight")

    # touch file
    with open(snakemake.output.plots_touch, "a"):
        pass
|
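The script downsamples the hourly historic prices to the network's snapshot frequency before comparing them with the optimised country-average marginal prices. A minimal standalone sketch of that alignment step; the dates and values are made up for illustration:

import pandas as pd

snapshots = pd.date_range("2019-01-01", periods=4, freq="3h")
historic = pd.Series(
    range(12), index=pd.date_range("2019-01-01", periods=12, freq="h"), dtype=float
)

# Average the finer series onto the snapshot frequency, then pick the model snapshots.
aligned = historic.resample(snapshots.inferred_freq).mean().loc[snapshots]
print(aligned)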
144 scripts/plot_validation_electricity_production.py Normal file
@@ -0,0 +1,144 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-FileCopyrightText: : 2017-2023 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: MIT

import matplotlib.pyplot as plt
import pandas as pd
import pypsa
import seaborn as sns
from _helpers import configure_logging
from pypsa.statistics import get_bus_and_carrier

sns.set_theme("paper", style="whitegrid")

carrier_groups = {
    "Offshore Wind (AC)": "Offshore Wind",
    "Offshore Wind (DC)": "Offshore Wind",
    "Open-Cycle Gas": "Gas",
    "Combined-Cycle Gas": "Gas",
    "Reservoir & Dam": "Hydro",
    "Pumped Hydro Storage": "Hydro",
}


if __name__ == "__main__":
    if "snakemake" not in globals():
        from _helpers import mock_snakemake

        snakemake = mock_snakemake(
            "plot_validation_electricity_production",
            simpl="",
            opts="Ept",
            clusters="37c",
            ll="v1.0",
        )
    configure_logging(snakemake)

    n = pypsa.Network(snakemake.input.network)
    n.loads.carrier = "load"

    historic = pd.read_csv(
        snakemake.input.electricity_production,
        index_col=0,
        header=[0, 1],
        parse_dates=True,
    )

    colors = n.carriers.set_index("nice_name").color.where(
        lambda s: s != "", "lightgrey"
    )
    colors["Offshore Wind"] = colors["Offshore Wind (AC)"]
    colors["Gas"] = colors["Combined-Cycle Gas"]
    colors["Hydro"] = colors["Reservoir & Dam"]
    colors["Other"] = "lightgray"

    if len(historic.index) > len(n.snapshots):
        historic = historic.resample(n.snapshots.inferred_freq).mean().loc[n.snapshots]

    optimized = n.statistics.dispatch(
        groupby=get_bus_and_carrier, aggregate_time=False
    ).T
    optimized = optimized[["Generator", "StorageUnit"]].droplevel(0, axis=1)
    optimized = optimized.rename(columns=n.buses.country, level=0)
    optimized = optimized.rename(columns=carrier_groups, level=1)
    optimized = optimized.groupby(axis=1, level=[0, 1]).sum()

    data = pd.concat([historic, optimized], keys=["Historic", "Optimized"], axis=1)
    data.columns.names = ["Kind", "Country", "Carrier"]
    data = data.mul(n.snapshot_weightings.generators, axis=0)

    # total production per carrier
    fig, ax = plt.subplots(figsize=(6, 6))

    df = data.groupby(level=["Kind", "Carrier"], axis=1).sum().sum().unstack().T
    df = df / 1e6  # TWh
    df.plot.barh(ax=ax, xlabel="Electricity Production [TWh]", ylabel="")
    ax.grid(axis="y")
    fig.savefig(snakemake.output.production_bar, bbox_inches="tight")

    # strongest deviations per country and carrier
    fig, ax = plt.subplots(figsize=(6, 10))

    df = data.sum() / 1e6  # TWh
    df = df["Optimized"] - df["Historic"]
    df = df.dropna().sort_values()
    df = pd.concat([df.iloc[:5], df.iloc[-5:]])
    c = colors[df.index.get_level_values(1)]
    df.plot.barh(
        xlabel="Optimized Production - Historic Production [TWh]", ax=ax, color=c.values
    )
    ax.set_title("Strongest Deviations")
    ax.grid(axis="y")
    fig.savefig(snakemake.output.production_deviation_bar, bbox_inches="tight")

    # seasonal operation
    fig, axes = plt.subplots(3, 1, figsize=(9, 9))

    df = (
        data.groupby(level=["Kind", "Carrier"], axis=1)
        .sum()
        .resample("1W")
        .mean()
        .clip(lower=0)
    )
    df = df / 1e3

    order = (
        (df["Historic"].diff().abs().sum() / df["Historic"].sum()).sort_values().index
    )
    c = colors[order]
    optimized = df["Optimized"].reindex(order, axis=1, level=1)
    historical = df["Historic"].reindex(order, axis=1, level=1)

    kwargs = dict(color=c, legend=False, ylabel="Production [GW]", xlabel="")

    optimized.plot.area(ax=axes[0], **kwargs, title="Optimized")
    historical.plot.area(ax=axes[1], **kwargs, title="Historic")

    diff = optimized - historical
    diff.clip(lower=0).plot.area(
        ax=axes[2], **kwargs, title=r"$\Delta$ (Optimized - Historic)"
    )
    lim = axes[2].get_ylim()[1]
    diff.clip(upper=0).plot.area(ax=axes[2], **kwargs)
    axes[2].set_ylim(bottom=-lim, top=lim)

    h, l = axes[0].get_legend_handles_labels()
    fig.legend(
        h[::-1],
        l[::-1],
        loc="center left",
        bbox_to_anchor=(1, 0.5),
        ncol=1,
        frameon=False,
        labelspacing=1,
    )
    fig.savefig(snakemake.output.seasonal_operation_area, bbox_inches="tight")

    # touch file
    with open(snakemake.output.plots_touch, "a"):
        pass
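The Δ panel above draws a signed difference as two stacked area plots on the same axes: the positive part first, then the negative part, with the y-limits made symmetric around zero. A minimal standalone sketch of that plotting trick; the carriers and data are made up for illustration:

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd

idx = pd.date_range("2019-01-01", periods=52, freq="W")
diff = pd.DataFrame(
    {"Wind": np.sin(np.linspace(0, 6, 52)), "Solar": np.cos(np.linspace(0, 6, 52))},
    index=idx,
)

fig, ax = plt.subplots()
colors = ["tab:blue", "tab:orange"]
# Stacked area plots need columns of a single sign, hence the two clipped calls.
diff.clip(lower=0).plot.area(ax=ax, color=colors, legend=False)
lim = ax.get_ylim()[1]
diff.clip(upper=0).plot.area(ax=ax, color=colors, legend=False)
ax.set_ylim(bottom=-lim, top=lim)
fig.savefig("delta_sketch.png", bbox_inches="tight")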