some refactoring and code cleaning (#190)
* some refactoring and code cleaning * execute pre-commit * pre-commit: limit large files * add license to .pre-commit * add pre-commit to env * solve: tidy memory logger * travis: add conda list for easier debugging * undo config test/tutorial without plotting, rm matplotlibrc, .licenses * remove {networks} wildcard * unadd pre-commit config * add release notes * restore REUSE compliance * fix docs environment python version * env: remove gurobi from dependencies * fix unclean merge block * fix elif to if * lighter rtd style * lighter rtd style II
This commit is contained in:
parent
84edde8f63
commit
2fc1ea0255
4
.github/ISSUE_TEMPLATE/bug_report.md
vendored
4
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@ -11,7 +11,7 @@ assignees: ''
|
||||
## Checklist
|
||||
|
||||
- [ ] I am using the current [`master`](https://github.com/PyPSA/pypsa-eur/tree/master) branch or the latest [release](https://github.com/PyPSA/pypsa-eur/releases). Please indicate.
|
||||
- [ ] I am running on an up-to-date [`pypsa-eur` environment](https://github.com/PyPSA/pypsa-eur/blob/master/environment.yaml). Update via `conda env update -f environment.yaml`.
|
||||
- [ ] I am running on an up-to-date [`pypsa-eur` environment](https://github.com/PyPSA/pypsa-eur/blob/master/envs/environment.yaml). Update via `conda env update -f envs/environment.yaml`.
|
||||
|
||||
## Describe the Bug
|
||||
|
||||
@ -24,4 +24,4 @@ assignees: ''
|
||||
|
||||
```
|
||||
<paste here>
|
||||
```
|
||||
```
|
||||
|
2
.github/ISSUE_TEMPLATE/config.yml
vendored
2
.github/ISSUE_TEMPLATE/config.yml
vendored
@ -2,4 +2,4 @@ blank_issues_enabled: false
|
||||
contact_links:
|
||||
- name: PyPSA Mailing List
|
||||
url: https://groups.google.com/forum/#!forum/pypsa
|
||||
about: Please ask and answer general usage questions here.
|
||||
about: Please ask and answer general usage questions here.
|
||||
|
4
.github/pull_request_template.md
vendored
4
.github/pull_request_template.md
vendored
@ -7,7 +7,7 @@ Closes # (if applicable).
|
||||
|
||||
- [ ] I tested my contribution locally and it seems to work fine.
|
||||
- [ ] Code and workflow changes are sufficiently documented.
|
||||
- [ ] Newly introduced dependencies are added to `environment.yaml` and `environment.docs.yaml`.
|
||||
- [ ] Newly introduced dependencies are added to `envs/environment.yaml` and `envs/environment.docs.yaml`.
|
||||
- [ ] Changes in configuration options are added in all of `config.default.yaml`, `config.tutorial.yaml`, and `test/config.test1.yaml`.
|
||||
- [ ] Changes in configuration options are also documented in `doc/configtables/*.csv` and line references are adjusted in `doc/configuration.rst` and `doc/tutorial.rst`.
|
||||
- [ ] A note for the release notes `doc/release_notes.rst` is amended in the format of previous release notes.
|
||||
- [ ] A note for the release notes `doc/release_notes.rst` is amended in the format of previous release notes.
|
||||
|
1
.gitignore
vendored
1
.gitignore
vendored
@ -7,6 +7,7 @@
|
||||
__pycache__
|
||||
*dconf
|
||||
gurobi.log
|
||||
.vscode
|
||||
|
||||
/bak
|
||||
/resources
|
||||
|
@ -5,4 +5,4 @@
|
||||
version: 2
|
||||
|
||||
conda:
|
||||
environment: environment.docs.yaml
|
||||
environment: envs/environment.docs.yaml
|
||||
|
@ -25,4 +25,4 @@ License: CC0-1.0
|
||||
|
||||
Files: borg-it
|
||||
Copyright: : 2017-2020 The PyPSA-Eur Authors
|
||||
License: CC0-1.0
|
||||
License: CC0-1.0
|
||||
|
@ -20,7 +20,7 @@ before_install:
|
||||
|
||||
# install conda environment
|
||||
- conda install -c conda-forge mamba
|
||||
- mamba env create -f ./environment.yaml
|
||||
- mamba env create -f ./envs/environment.yaml
|
||||
- conda activate pypsa-eur
|
||||
|
||||
# install open-source solver
|
||||
|
@ -36,7 +36,7 @@ and local grid
|
||||
bottlenecks may cause unrealistic load-shedding or generator
|
||||
curtailment. We recommend to cluster the network to a couple of
|
||||
hundred nodes to remove these local inconsistencies. See the
|
||||
discussion in Section 3.4 "Model validation" of the paper.
|
||||
discussion in Section 3.4 "Model validation" of the paper.
|
||||
|
||||
![PyPSA-Eur Grid Model](doc/img/base.png)
|
||||
|
||||
|
236
Snakefile
236
Snakefile
@ -11,33 +11,31 @@ if not exists("config.yaml"):
|
||||
configfile: "config.yaml"
|
||||
|
||||
COSTS="data/costs.csv"
|
||||
ATLITE_NPROCESSES = config['atlite'].get('nprocesses', 4)
|
||||
|
||||
|
||||
wildcard_constraints:
|
||||
ll="(v|c)([0-9\.]+|opt|all)|all", # line limit, can be volume or cost
|
||||
simpl="[a-zA-Z0-9]*|all",
|
||||
clusters="[0-9]+m?|all",
|
||||
sectors="[+a-zA-Z0-9]+",
|
||||
ll="(v|c)([0-9\.]+|opt|all)|all",
|
||||
opts="[-+a-zA-Z0-9\.]*"
|
||||
|
||||
|
||||
rule cluster_all_elec_networks:
|
||||
input:
|
||||
expand("networks/elec_s{simpl}_{clusters}.nc",
|
||||
**config['scenario'])
|
||||
input: expand("networks/elec_s{simpl}_{clusters}.nc", **config['scenario'])
|
||||
|
||||
|
||||
rule extra_components_all_elec_networks:
|
||||
input:
|
||||
expand("networks/elec_s{simpl}_{clusters}_ec.nc",
|
||||
**config['scenario'])
|
||||
input: expand("networks/elec_s{simpl}_{clusters}_ec.nc", **config['scenario'])
|
||||
|
||||
|
||||
rule prepare_all_elec_networks:
|
||||
input:
|
||||
expand("networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
|
||||
**config['scenario'])
|
||||
input: expand("networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", **config['scenario'])
|
||||
|
||||
|
||||
rule solve_all_elec_networks:
|
||||
input:
|
||||
expand("results/networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
|
||||
**config['scenario'])
|
||||
input: expand("results/networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", **config['scenario'])
|
||||
|
||||
|
||||
if config['enable'].get('prepare_links_p_nom', False):
|
||||
rule prepare_links_p_nom:
|
||||
@ -45,7 +43,6 @@ if config['enable'].get('prepare_links_p_nom', False):
|
||||
log: 'logs/prepare_links_p_nom.log'
|
||||
threads: 1
|
||||
resources: mem=500
|
||||
# group: 'nonfeedin_preparation'
|
||||
script: 'scripts/prepare_links_p_nom.py'
|
||||
|
||||
|
||||
@ -55,20 +52,24 @@ datafiles = ['ch_cantons.csv', 'je-e-21.03.02.xls',
|
||||
'NUTS_2013_60M_SH/data/NUTS_RG_60M_2013.shp', 'nama_10r_3popgdp.tsv.gz',
|
||||
'nama_10r_3gdp.tsv.gz', 'corine/g250_clc06_V18_5.tif']
|
||||
|
||||
|
||||
if not config.get('tutorial', False):
|
||||
datafiles.extend(["natura/Natura2000_end2015.shp", "GEBCO_2014_2D.nc"])
|
||||
|
||||
|
||||
if config['enable'].get('retrieve_databundle', True):
|
||||
rule retrieve_databundle:
|
||||
output: expand('data/bundle/{file}', file=datafiles)
|
||||
output: expand('data/bundle/{file}', file=datafiles)
|
||||
log: "logs/retrieve_databundle.log"
|
||||
script: 'scripts/retrieve_databundle.py'
|
||||
|
||||
|
||||
rule build_load_data:
|
||||
output: "resources/load.csv"
|
||||
log: "logs/build_load_data.log"
|
||||
script: 'scripts/build_load_data.py'
|
||||
|
||||
|
||||
rule build_powerplants:
|
||||
input:
|
||||
base_network="networks/base.nc",
|
||||
@ -77,9 +78,9 @@ rule build_powerplants:
|
||||
log: "logs/build_powerplants.log"
|
||||
threads: 1
|
||||
resources: mem=500
|
||||
# group: 'nonfeedin_preparation'
|
||||
script: "scripts/build_powerplants.py"
|
||||
|
||||
|
||||
rule base_network:
|
||||
input:
|
||||
eg_buses='data/entsoegridkit/buses.csv',
|
||||
@ -98,9 +99,9 @@ rule base_network:
|
||||
benchmark: "benchmarks/base_network"
|
||||
threads: 1
|
||||
resources: mem=500
|
||||
# group: 'nonfeedin_preparation'
|
||||
script: "scripts/base_network.py"
|
||||
|
||||
|
||||
rule build_shapes:
|
||||
input:
|
||||
naturalearth='data/bundle/naturalearth/ne_10m_admin_0_countries.shp',
|
||||
@ -118,9 +119,9 @@ rule build_shapes:
|
||||
log: "logs/build_shapes.log"
|
||||
threads: 1
|
||||
resources: mem=500
|
||||
# group: 'nonfeedin_preparation'
|
||||
script: "scripts/build_shapes.py"
|
||||
|
||||
|
||||
rule build_bus_regions:
|
||||
input:
|
||||
country_shapes='resources/country_shapes.geojson',
|
||||
@ -130,20 +131,21 @@ rule build_bus_regions:
|
||||
regions_onshore="resources/regions_onshore.geojson",
|
||||
regions_offshore="resources/regions_offshore.geojson"
|
||||
log: "logs/build_bus_regions.log"
|
||||
threads: 1
|
||||
resources: mem=1000
|
||||
# group: 'nonfeedin_preparation'
|
||||
script: "scripts/build_bus_regions.py"
|
||||
|
||||
if config['enable'].get('build_cutout', False):
|
||||
|
||||
if config['enable'].get('build_cutout', False):
|
||||
rule build_cutout:
|
||||
output: directory("cutouts/{cutout}")
|
||||
log: "logs/build_cutout/{cutout}.log"
|
||||
resources: mem=config['atlite'].get('nprocesses', 4) * 1000
|
||||
threads: config['atlite'].get('nprocesses', 4)
|
||||
benchmark: "benchmarks/build_cutout_{cutout}"
|
||||
# group: 'feedin_preparation'
|
||||
threads: ATLITE_NPROCESSES
|
||||
resources: mem=ATLITE_NPROCESSES * 1000
|
||||
script: "scripts/build_cutout.py"
|
||||
|
||||
|
||||
if config['enable'].get('retrieve_cutout', True):
|
||||
rule retrieve_cutout:
|
||||
output: directory(expand("cutouts/{cutouts}", **config['atlite'])),
|
||||
@ -151,43 +153,46 @@ if config['enable'].get('retrieve_cutout', True):
|
||||
script: 'scripts/retrieve_cutout.py'
|
||||
|
||||
|
||||
if config['enable'].get('build_natura_raster', False):
|
||||
if config['enable'].get('build_natura_raster', False):
|
||||
rule build_natura_raster:
|
||||
input:
|
||||
input:
|
||||
natura="data/bundle/natura/Natura2000_end2015.shp",
|
||||
cutouts=expand("cutouts/{cutouts}", **config['atlite'])
|
||||
output: "resources/natura.tiff"
|
||||
log: "logs/build_natura_raster.log"
|
||||
script: "scripts/build_natura_raster.py"
|
||||
|
||||
|
||||
if config['enable'].get('retrieve_natura_raster', True):
|
||||
rule retrieve_natura_raster:
|
||||
output: "resources/natura.tiff"
|
||||
log: "logs/retrieve_natura_raster.log"
|
||||
script: 'scripts/retrieve_natura_raster.py'
|
||||
|
||||
|
||||
rule build_renewable_profiles:
|
||||
input:
|
||||
base_network="networks/base.nc",
|
||||
corine="data/bundle/corine/g250_clc06_V18_5.tif",
|
||||
natura="resources/natura.tiff",
|
||||
gebco=lambda wildcards: ("data/bundle/GEBCO_2014_2D.nc"
|
||||
if "max_depth" in config["renewable"][wildcards.technology].keys()
|
||||
else []),
|
||||
gebco=lambda w: ("data/bundle/GEBCO_2014_2D.nc"
|
||||
if "max_depth" in config["renewable"][w.technology].keys()
|
||||
else []),
|
||||
country_shapes='resources/country_shapes.geojson',
|
||||
offshore_shapes='resources/offshore_shapes.geojson',
|
||||
regions=lambda wildcards: ("resources/regions_onshore.geojson"
|
||||
if wildcards.technology in ('onwind', 'solar')
|
||||
else "resources/regions_offshore.geojson"),
|
||||
cutout=lambda wildcards: "cutouts/" + config["renewable"][wildcards.technology]['cutout']
|
||||
output: profile="resources/profile_{technology}.nc",
|
||||
regions=lambda w: ("resources/regions_onshore.geojson"
|
||||
if w.technology in ('onwind', 'solar')
|
||||
else "resources/regions_offshore.geojson"),
|
||||
cutout=lambda w: "cutouts/" + config["renewable"][w.technology]['cutout']
|
||||
output:
|
||||
profile="resources/profile_{technology}.nc",
|
||||
log: "logs/build_renewable_profile_{technology}.log"
|
||||
resources: mem=config['atlite'].get('nprocesses', 2) * 5000
|
||||
threads: config['atlite'].get('nprocesses', 2)
|
||||
benchmark: "benchmarks/build_renewable_profiles_{technology}"
|
||||
# group: 'feedin_preparation'
|
||||
threads: ATLITE_NPROCESSES
|
||||
resources: mem=ATLITE_NPROCESSES * 5000
|
||||
script: "scripts/build_renewable_profiles.py"
|
||||
|
||||
|
||||
if 'hydro' in config['renewable'].keys():
|
||||
rule build_hydro_profile:
|
||||
input:
|
||||
@ -197,9 +202,9 @@ if 'hydro' in config['renewable'].keys():
|
||||
output: 'resources/profile_hydro.nc'
|
||||
log: "logs/build_hydro_profile.log"
|
||||
resources: mem=5000
|
||||
# group: 'feedin_preparation'
|
||||
script: 'scripts/build_hydro_profile.py'
|
||||
|
||||
|
||||
rule add_electricity:
|
||||
input:
|
||||
base_network='networks/base.nc',
|
||||
@ -210,79 +215,78 @@ rule add_electricity:
|
||||
geth_hydro_capacities='data/geth2015_hydro_capacities.csv',
|
||||
load='resources/load.csv',
|
||||
nuts3_shapes='resources/nuts3_shapes.geojson',
|
||||
**{'profile_' + t: "resources/profile_" + t + ".nc"
|
||||
for t in config['renewable']}
|
||||
**{f"profile_{tech}": f"resources/profile_{tech}.nc"
|
||||
for tech in config['renewable']}
|
||||
output: "networks/elec.nc"
|
||||
log: "logs/add_electricity.log"
|
||||
benchmark: "benchmarks/add_electricity"
|
||||
threads: 1
|
||||
resources: mem=3000
|
||||
# group: 'build_pypsa_networks'
|
||||
script: "scripts/add_electricity.py"
|
||||
|
||||
|
||||
rule simplify_network:
|
||||
input:
|
||||
network='networks/{network}.nc',
|
||||
network='networks/elec.nc',
|
||||
tech_costs=COSTS,
|
||||
regions_onshore="resources/regions_onshore.geojson",
|
||||
regions_offshore="resources/regions_offshore.geojson"
|
||||
output:
|
||||
network='networks/{network}_s{simpl}.nc',
|
||||
regions_onshore="resources/regions_onshore_{network}_s{simpl}.geojson",
|
||||
regions_offshore="resources/regions_offshore_{network}_s{simpl}.geojson",
|
||||
busmap='resources/busmap_{network}_s{simpl}.csv'
|
||||
log: "logs/simplify_network/{network}_s{simpl}.log"
|
||||
benchmark: "benchmarks/simplify_network/{network}_s{simpl}"
|
||||
network='networks/elec_s{simpl}.nc',
|
||||
regions_onshore="resources/regions_onshore_elec_s{simpl}.geojson",
|
||||
regions_offshore="resources/regions_offshore_elec_s{simpl}.geojson",
|
||||
busmap='resources/busmap_elec_s{simpl}.csv'
|
||||
log: "logs/simplify_network/elec_s{simpl}.log"
|
||||
benchmark: "benchmarks/simplify_network/elec_s{simpl}"
|
||||
threads: 1
|
||||
resources: mem=4000
|
||||
# group: 'build_pypsa_networks'
|
||||
script: "scripts/simplify_network.py"
|
||||
|
||||
|
||||
rule cluster_network:
|
||||
input:
|
||||
network='networks/{network}_s{simpl}.nc',
|
||||
regions_onshore="resources/regions_onshore_{network}_s{simpl}.geojson",
|
||||
regions_offshore="resources/regions_offshore_{network}_s{simpl}.geojson",
|
||||
busmap=ancient('resources/busmap_{network}_s{simpl}.csv'),
|
||||
custom_busmap=("data/custom_busmap_{network}_s{simpl}_{clusters}.csv"
|
||||
network='networks/elec_s{simpl}.nc',
|
||||
regions_onshore="resources/regions_onshore_elec_s{simpl}.geojson",
|
||||
regions_offshore="resources/regions_offshore_elec_s{simpl}.geojson",
|
||||
busmap=ancient('resources/busmap_elec_s{simpl}.csv'),
|
||||
custom_busmap=("data/custom_busmap_elec_s{simpl}_{clusters}.csv"
|
||||
if config["enable"].get("custom_busmap", False) else []),
|
||||
tech_costs=COSTS
|
||||
output:
|
||||
network='networks/{network}_s{simpl}_{clusters}.nc',
|
||||
regions_onshore="resources/regions_onshore_{network}_s{simpl}_{clusters}.geojson",
|
||||
regions_offshore="resources/regions_offshore_{network}_s{simpl}_{clusters}.geojson",
|
||||
busmap="resources/busmap_{network}_s{simpl}_{clusters}.csv",
|
||||
linemap="resources/linemap_{network}_s{simpl}_{clusters}.csv"
|
||||
log: "logs/cluster_network/{network}_s{simpl}_{clusters}.log"
|
||||
benchmark: "benchmarks/cluster_network/{network}_s{simpl}_{clusters}"
|
||||
network='networks/elec_s{simpl}_{clusters}.nc',
|
||||
regions_onshore="resources/regions_onshore_elec_s{simpl}_{clusters}.geojson",
|
||||
regions_offshore="resources/regions_offshore_elec_s{simpl}_{clusters}.geojson",
|
||||
busmap="resources/busmap_elec_s{simpl}_{clusters}.csv",
|
||||
linemap="resources/linemap_elec_s{simpl}_{clusters}.csv"
|
||||
log: "logs/cluster_network/elec_s{simpl}_{clusters}.log"
|
||||
benchmark: "benchmarks/cluster_network/elec_s{simpl}_{clusters}"
|
||||
threads: 1
|
||||
resources: mem=3000
|
||||
# group: 'build_pypsa_networks'
|
||||
script: "scripts/cluster_network.py"
|
||||
|
||||
|
||||
rule add_extra_components:
|
||||
input:
|
||||
network='networks/{network}_s{simpl}_{clusters}.nc',
|
||||
network='networks/elec_s{simpl}_{clusters}.nc',
|
||||
tech_costs=COSTS,
|
||||
output: 'networks/{network}_s{simpl}_{clusters}_ec.nc'
|
||||
log: "logs/add_extra_components/{network}_s{simpl}_{clusters}.log"
|
||||
benchmark: "benchmarks/add_extra_components/{network}_s{simpl}_{clusters}_ec"
|
||||
output: 'networks/elec_s{simpl}_{clusters}_ec.nc'
|
||||
log: "logs/add_extra_components/elec_s{simpl}_{clusters}.log"
|
||||
benchmark: "benchmarks/add_extra_components/elec_s{simpl}_{clusters}_ec"
|
||||
threads: 1
|
||||
resources: mem=3000
|
||||
# group: 'build_pypsa_networks'
|
||||
script: "scripts/add_extra_components.py"
|
||||
|
||||
|
||||
rule prepare_network:
|
||||
input: 'networks/{network}_s{simpl}_{clusters}_ec.nc', tech_costs=COSTS
|
||||
output: 'networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc'
|
||||
log: "logs/prepare_network/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.log"
|
||||
input: 'networks/elec_s{simpl}_{clusters}_ec.nc', tech_costs=COSTS
|
||||
output: 'networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc'
|
||||
log: "logs/prepare_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.log"
|
||||
benchmark: "benchmarks/prepare_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}"
|
||||
threads: 1
|
||||
resources: mem=4000
|
||||
# benchmark: "benchmarks/prepare_network/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}"
|
||||
script: "scripts/prepare_network.py"
|
||||
|
||||
|
||||
def memory(w):
|
||||
factor = 3.
|
||||
for o in w.opts.split('-'):
|
||||
@ -299,48 +303,49 @@ def memory(w):
|
||||
return int(factor * (18000 + 180 * int(w.clusters[:-1])))
|
||||
else:
|
||||
return int(factor * (10000 + 195 * int(w.clusters)))
|
||||
# return 4890+310 * int(w.clusters)
|
||||
|
||||
|
||||
rule solve_network:
|
||||
input: "networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
|
||||
output: "results/networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
|
||||
shadow: "shallow"
|
||||
input: "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
|
||||
output: "results/networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
|
||||
log:
|
||||
solver=normpath("logs/solve_network/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_solver.log"),
|
||||
python="logs/solve_network/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_python.log",
|
||||
memory="logs/solve_network/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_memory.log"
|
||||
benchmark: "benchmarks/solve_network/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}"
|
||||
solver=normpath("logs/solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_solver.log"),
|
||||
python="logs/solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_python.log",
|
||||
memory="logs/solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_memory.log"
|
||||
benchmark: "benchmarks/solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}"
|
||||
threads: 4
|
||||
resources: mem=memory
|
||||
# group: "solve" # with group, threads is ignored https://bitbucket.org/snakemake/snakemake/issues/971/group-job-description-does-not-contain
|
||||
shadow: "shallow"
|
||||
script: "scripts/solve_network.py"
|
||||
|
||||
|
||||
rule solve_operations_network:
|
||||
input:
|
||||
unprepared="networks/{network}_s{simpl}_{clusters}_ec.nc",
|
||||
optimized="results/networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
|
||||
output: "results/networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_op.nc"
|
||||
shadow: "shallow"
|
||||
unprepared="networks/elec_s{simpl}_{clusters}_ec.nc",
|
||||
optimized="results/networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
|
||||
output: "results/networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op.nc"
|
||||
log:
|
||||
solver=normpath("logs/solve_operations_network/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_solver.log"),
|
||||
python="logs/solve_operations_network/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_python.log",
|
||||
memory="logs/solve_operations_network/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_memory.log"
|
||||
benchmark: "benchmarks/solve_operations_network/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}"
|
||||
solver=normpath("logs/solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_solver.log"),
|
||||
python="logs/solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_python.log",
|
||||
memory="logs/solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_memory.log"
|
||||
benchmark: "benchmarks/solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}"
|
||||
threads: 4
|
||||
resources: mem=(lambda w: 5000 + 372 * int(w.clusters))
|
||||
# group: "solve_operations"
|
||||
shadow: "shallow"
|
||||
script: "scripts/solve_operations_network.py"
|
||||
|
||||
|
||||
rule plot_network:
|
||||
input:
|
||||
network="results/networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
|
||||
network="results/networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
|
||||
tech_costs=COSTS
|
||||
output:
|
||||
only_map="results/plots/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{attr}.{ext}",
|
||||
ext="results/plots/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{attr}_ext.{ext}"
|
||||
log: "logs/plot_network/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{attr}_{ext}.log"
|
||||
only_map="results/plots/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{attr}.{ext}",
|
||||
ext="results/plots/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{attr}_ext.{ext}"
|
||||
log: "logs/plot_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{attr}_{ext}.log"
|
||||
script: "scripts/plot_network.py"
|
||||
|
||||
|
||||
def input_make_summary(w):
|
||||
# It's mildly hacky to include the separate costs input as first entry
|
||||
if w.ll.endswith("all"):
|
||||
@ -350,48 +355,54 @@ def input_make_summary(w):
|
||||
else:
|
||||
ll = w.ll
|
||||
return ([COSTS] +
|
||||
expand("results/networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
|
||||
expand("results/networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
|
||||
network=w.network,
|
||||
ll=ll,
|
||||
**{k: config["scenario"][k] if getattr(w, k) == "all" else getattr(w, k)
|
||||
for k in ["simpl", "clusters", "opts"]}))
|
||||
|
||||
|
||||
rule make_summary:
|
||||
input: input_make_summary
|
||||
output: directory("results/summaries/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}")
|
||||
log: "logs/make_summary/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}.log",
|
||||
output: directory("results/summaries/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}")
|
||||
log: "logs/make_summary/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}.log",
|
||||
script: "scripts/make_summary.py"
|
||||
|
||||
|
||||
rule plot_summary:
|
||||
input: "results/summaries/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}"
|
||||
output: "results/plots/summary_{summary}_{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}.{ext}"
|
||||
log: "logs/plot_summary/{summary}_{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}_{ext}.log"
|
||||
input: "results/summaries/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}"
|
||||
output: "results/plots/summary_{summary}_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}.{ext}"
|
||||
log: "logs/plot_summary/{summary}_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}_{ext}.log"
|
||||
script: "scripts/plot_summary.py"
|
||||
|
||||
def input_plot_p_nom_max(wildcards):
|
||||
return [('networks/{network}_s{simpl}{maybe_cluster}.nc'
|
||||
.format(maybe_cluster=('' if c == 'full' else ('_' + c)), **wildcards))
|
||||
for c in wildcards.clusts.split(",")]
|
||||
|
||||
def input_plot_p_nom_max(w):
|
||||
return [("networks/elec_s{simpl}{maybe_cluster}.nc"
|
||||
.format(maybe_cluster=('' if c == 'full' else ('_' + c)), **w))
|
||||
for c in w.clusts.split(",")]
|
||||
|
||||
|
||||
rule plot_p_nom_max:
|
||||
input: input_plot_p_nom_max
|
||||
output: "results/plots/{network}_s{simpl}_cum_p_nom_max_{clusts}_{techs}_{country}.{ext}"
|
||||
log: "logs/plot_p_nom_max/{network}_s{simpl}_{clusts}_{techs}_{country}_{ext}.log"
|
||||
output: "results/plots/elec_s{simpl}_cum_p_nom_max_{clusts}_{techs}_{country}.{ext}"
|
||||
log: "logs/plot_p_nom_max/elec_s{simpl}_{clusts}_{techs}_{country}_{ext}.log"
|
||||
script: "scripts/plot_p_nom_max.py"
|
||||
|
||||
|
||||
rule build_country_flh:
|
||||
input:
|
||||
base_network="networks/base.nc",
|
||||
corine="data/bundle/corine/g250_clc06_V18_5.tif",
|
||||
natura="resources/natura.tiff",
|
||||
gebco=lambda wildcards: ("data/bundle/GEBCO_2014_2D.nc"
|
||||
if "max_depth" in config["renewable"][wildcards.technology].keys()
|
||||
else []),
|
||||
gebco=lambda w: ("data/bundle/GEBCO_2014_2D.nc"
|
||||
if "max_depth" in config["renewable"][w.technology].keys()
|
||||
else []),
|
||||
country_shapes='resources/country_shapes.geojson',
|
||||
offshore_shapes='resources/offshore_shapes.geojson',
|
||||
pietzker="data/pietzker2014.xlsx",
|
||||
regions=lambda w: ("resources/country_shapes.geojson"
|
||||
if w.technology in ('onwind', 'solar')
|
||||
else "resources/offshore_shapes.geojson"),
|
||||
if w.technology in ('onwind', 'solar')
|
||||
else "resources/offshore_shapes.geojson"),
|
||||
cutout=lambda w: "cutouts/" + config["renewable"][w.technology]['cutout']
|
||||
output:
|
||||
area="resources/country_flh_area_{technology}.csv",
|
||||
@ -402,9 +413,4 @@ rule build_country_flh:
|
||||
log: "logs/build_country_flh_{technology}.log"
|
||||
resources: mem=10000
|
||||
benchmark: "benchmarks/build_country_flh_{technology}"
|
||||
# group: 'feedin_preparation'
|
||||
script: "scripts/build_country_flh.py"
|
||||
|
||||
# Local Variables:
|
||||
# mode: python
|
||||
# End:
|
||||
|
22
cluster.yaml
22
cluster.yaml
@ -1,22 +0,0 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
__default__:
|
||||
log: "logs/cluster/{{name}}.log"
|
||||
|
||||
feedin_preparation:
|
||||
walltime: "12:00:00"
|
||||
|
||||
solve_network:
|
||||
walltime: "05:00:00:00"
|
||||
|
||||
trace_solve_network:
|
||||
walltime: "05:00:00:00"
|
||||
|
||||
solve:
|
||||
walltime: "05:00:00:00"
|
||||
threads: 4 # Group threads are not aggregated
|
||||
|
||||
solve_operations:
|
||||
walltime: "01:00:00:00"
|
@ -12,7 +12,6 @@ logging:
|
||||
summary_dir: results
|
||||
|
||||
scenario:
|
||||
sectors: [E]
|
||||
simpl: ['']
|
||||
ll: ['copt']
|
||||
clusters: [37, 128, 256, 512, 1024]
|
||||
@ -265,67 +264,18 @@ plotting:
|
||||
'waste' : '#68896b'
|
||||
'geothermal' : '#ba91b1'
|
||||
"OCGT" : "#d35050"
|
||||
"OCGT marginal" : "#d35050"
|
||||
"OCGT-heat" : "#d35050"
|
||||
"gas boiler" : "#d35050"
|
||||
"gas boilers" : "#d35050"
|
||||
"gas boiler marginal" : "#d35050"
|
||||
"gas-to-power/heat" : "#d35050"
|
||||
"gas" : "#d35050"
|
||||
"natural gas" : "#d35050"
|
||||
"CCGT" : "#b20101"
|
||||
"CCGT marginal" : "#b20101"
|
||||
"Nuclear" : "#ff9000"
|
||||
"Nuclear marginal" : "#ff9000"
|
||||
"nuclear" : "#ff9000"
|
||||
"coal" : "#707070"
|
||||
"Coal" : "#707070"
|
||||
"Coal marginal" : "#707070"
|
||||
"lignite" : "#9e5a01"
|
||||
"Lignite" : "#9e5a01"
|
||||
"Lignite marginal" : "#9e5a01"
|
||||
"Oil" : "#262626"
|
||||
"oil" : "#262626"
|
||||
"H2" : "#ea048a"
|
||||
"hydrogen storage" : "#ea048a"
|
||||
"Sabatier" : "#a31597"
|
||||
"methanation" : "#a31597"
|
||||
"helmeth" : "#a31597"
|
||||
"DAC" : "#d284ff"
|
||||
"co2 stored" : "#e5e5e5"
|
||||
"CO2 sequestration" : "#e5e5e5"
|
||||
"battery" : "#b8ea04"
|
||||
"battery storage" : "#b8ea04"
|
||||
"Li ion" : "#b8ea04"
|
||||
"BEV charger" : "#e2ff7c"
|
||||
"V2G" : "#7a9618"
|
||||
"transport fuel cell" : "#e884be"
|
||||
"retrofitting" : "#e0d6a8"
|
||||
"building retrofitting" : "#e0d6a8"
|
||||
"heat pumps" : "#ff9768"
|
||||
"heat pump" : "#ff9768"
|
||||
"air heat pump" : "#ffbea0"
|
||||
"ground heat pump" : "#ff7a3d"
|
||||
"power-to-heat" : "#a59e7c"
|
||||
"power-to-gas" : "#db8585"
|
||||
"power-to-liquid" : "#a9acd1"
|
||||
"Fischer-Tropsch" : "#a9acd1"
|
||||
"resistive heater" : "#aa4925"
|
||||
"water tanks" : "#401f75"
|
||||
"hot water storage" : "#401f75"
|
||||
"hot water charging" : "#351c5e"
|
||||
"hot water discharging" : "#683ab2"
|
||||
"CHP" : "#d80a56"
|
||||
"CHP heat" : "#d80a56"
|
||||
"CHP electric" : "#d80a56"
|
||||
"district heating" : "#93864b"
|
||||
"Ambient" : "#262626"
|
||||
"Electric load" : "#f9d002"
|
||||
"electricity" : "#f9d002"
|
||||
"Heat load" : "#d35050"
|
||||
"heat" : "#d35050"
|
||||
"Transport load" : "#235ebc"
|
||||
"transport" : "#235ebc"
|
||||
"lines" : "#70af1d"
|
||||
"transmission lines" : "#70af1d"
|
||||
"AC-AC" : "#70af1d"
|
||||
@ -345,18 +295,5 @@ plotting:
|
||||
hydro: "Reservoir & Dam"
|
||||
battery: "Battery Storage"
|
||||
H2: "Hydrogen Storage"
|
||||
lines: "Transmission lines"
|
||||
ror: "Run of river"
|
||||
nice_names_n:
|
||||
OCGT: "Open-Cycle\nGas"
|
||||
CCGT: "Combined-Cycle\nGas"
|
||||
offwind-ac: "Offshore\nWind (AC)"
|
||||
offwind-dc: "Offshore\nWind (DC)"
|
||||
onwind: "Onshore\nWind"
|
||||
battery: "Battery\nStorage"
|
||||
H2: "Hydrogen\nStorage"
|
||||
lines: "Transmission\nlines"
|
||||
ror: "Run of\nriver"
|
||||
PHS: "Pumped Hydro\nStorage"
|
||||
hydro: "Reservoir\n& Dam"
|
||||
|
||||
lines: "Transmission Lines"
|
||||
ror: "Run of River"
|
||||
|
@ -11,7 +11,6 @@ logging:
|
||||
summary_dir: results
|
||||
|
||||
scenario:
|
||||
sectors: [E]
|
||||
simpl: ['']
|
||||
ll: ['copt']
|
||||
clusters: [5]
|
||||
@ -177,26 +176,8 @@ solving:
|
||||
clip_p_max_pu: 0.01
|
||||
skip_iterations: false
|
||||
track_iterations: false
|
||||
#nhours: 10
|
||||
solver:
|
||||
name: cbc
|
||||
# solver:
|
||||
# name: gurobi
|
||||
# threads: 4
|
||||
# method: 2 # barrier
|
||||
# crossover: 0
|
||||
# BarConvTol: 1.e-5
|
||||
# FeasibilityTol: 1.e-6
|
||||
# AggFill: 0
|
||||
# PreDual: 0
|
||||
# GURO_PAR_BARDENSETHRESH: 200
|
||||
# solver:
|
||||
# name: cplex
|
||||
# threads: 4
|
||||
# lpmethod: 4 # barrier
|
||||
# solutiontype: 2 # non basic solution, ie no crossover
|
||||
# barrier_convergetol: 1.e-5
|
||||
# feasopt_tolerance: 1.e-6
|
||||
|
||||
plotting:
|
||||
map:
|
||||
@ -244,67 +225,18 @@ plotting:
|
||||
'waste' : '#68896b'
|
||||
'geothermal' : '#ba91b1'
|
||||
"OCGT" : "#d35050"
|
||||
"OCGT marginal" : "#d35050"
|
||||
"OCGT-heat" : "#d35050"
|
||||
"gas boiler" : "#d35050"
|
||||
"gas boilers" : "#d35050"
|
||||
"gas boiler marginal" : "#d35050"
|
||||
"gas-to-power/heat" : "#d35050"
|
||||
"gas" : "#d35050"
|
||||
"natural gas" : "#d35050"
|
||||
"CCGT" : "#b20101"
|
||||
"CCGT marginal" : "#b20101"
|
||||
"Nuclear" : "#ff9000"
|
||||
"Nuclear marginal" : "#ff9000"
|
||||
"nuclear" : "#ff9000"
|
||||
"coal" : "#707070"
|
||||
"Coal" : "#707070"
|
||||
"Coal marginal" : "#707070"
|
||||
"lignite" : "#9e5a01"
|
||||
"Lignite" : "#9e5a01"
|
||||
"Lignite marginal" : "#9e5a01"
|
||||
"Oil" : "#262626"
|
||||
"oil" : "#262626"
|
||||
"H2" : "#ea048a"
|
||||
"hydrogen storage" : "#ea048a"
|
||||
"Sabatier" : "#a31597"
|
||||
"methanation" : "#a31597"
|
||||
"helmeth" : "#a31597"
|
||||
"DAC" : "#d284ff"
|
||||
"co2 stored" : "#e5e5e5"
|
||||
"CO2 sequestration" : "#e5e5e5"
|
||||
"battery" : "#b8ea04"
|
||||
"battery storage" : "#b8ea04"
|
||||
"Li ion" : "#b8ea04"
|
||||
"BEV charger" : "#e2ff7c"
|
||||
"V2G" : "#7a9618"
|
||||
"transport fuel cell" : "#e884be"
|
||||
"retrofitting" : "#e0d6a8"
|
||||
"building retrofitting" : "#e0d6a8"
|
||||
"heat pumps" : "#ff9768"
|
||||
"heat pump" : "#ff9768"
|
||||
"air heat pump" : "#ffbea0"
|
||||
"ground heat pump" : "#ff7a3d"
|
||||
"power-to-heat" : "#a59e7c"
|
||||
"power-to-gas" : "#db8585"
|
||||
"power-to-liquid" : "#a9acd1"
|
||||
"Fischer-Tropsch" : "#a9acd1"
|
||||
"resistive heater" : "#aa4925"
|
||||
"water tanks" : "#401f75"
|
||||
"hot water storage" : "#401f75"
|
||||
"hot water charging" : "#351c5e"
|
||||
"hot water discharging" : "#683ab2"
|
||||
"CHP" : "#d80a56"
|
||||
"CHP heat" : "#d80a56"
|
||||
"CHP electric" : "#d80a56"
|
||||
"district heating" : "#93864b"
|
||||
"Ambient" : "#262626"
|
||||
"Electric load" : "#f9d002"
|
||||
"electricity" : "#f9d002"
|
||||
"Heat load" : "#d35050"
|
||||
"heat" : "#d35050"
|
||||
"Transport load" : "#235ebc"
|
||||
"transport" : "#235ebc"
|
||||
"lines" : "#70af1d"
|
||||
"transmission lines" : "#70af1d"
|
||||
"AC-AC" : "#70af1d"
|
||||
@ -324,17 +256,5 @@ plotting:
|
||||
hydro: "Reservoir & Dam"
|
||||
battery: "Battery Storage"
|
||||
H2: "Hydrogen Storage"
|
||||
lines: "Transmission lines"
|
||||
ror: "Run of river"
|
||||
nice_names_n:
|
||||
OCGT: "Open-Cycle\nGas"
|
||||
CCGT: "Combined-Cycle\nGas"
|
||||
offwind-ac: "Offshore\nWind (AC)"
|
||||
offwind-dc: "Offshore\nWind (DC)"
|
||||
onwind: "Onshore\nWind"
|
||||
battery: "Battery\nStorage"
|
||||
H2: "Hydrogen\nStorage"
|
||||
lines: "Transmission\nlines"
|
||||
ror: "Run of\nriver"
|
||||
PHS: "Pumped Hydro\nStorage"
|
||||
hydro: "Reservoir\n& Dam"
|
||||
lines: "Transmission Lines"
|
||||
ror: "Run of River"
|
||||
|
@ -23,4 +23,4 @@ HVDC Ultranet,Osterath (DE),Philippsburg (DE),,314,600,in permitting,,https://ty
|
||||
Gridlink,Kingsnorth (UK),Warande (FR),160,,1400,in permitting,,https://tyndp.entsoe.eu/tyndp2018/projects/projects/285,0.596111111111111,51.41972,2.376776,51.034368
|
||||
NeuConnect,Grain (UK),Fedderwarden (DE),680,,1400,in permitting,,https://tyndp.entsoe.eu/tyndp2018/projects/projects/309,0.716666666666667,51.44,8.046524,53.562763
|
||||
NordBalt,Klaipeda (LT),Nybro (SE),450,,700,built,,https://en.wikipedia.org/wiki/NordBalt,21.256667,55.681667,15.854167,56.767778
|
||||
Estlink 1,Harku (EE),Espoo (FI),105,,350,built,,https://en.wikipedia.org/wiki/Estlink,24.560278,59.384722,24.551667,60.203889
|
||||
Estlink 1,Harku (EE),Espoo (FI),105,,350,built,,https://en.wikipedia.org/wiki/Estlink,24.560278,59.384722,24.551667,60.203889
|
||||
|
|
@ -36,10 +36,10 @@ Link:
|
||||
"8068": "5819" # fix GB location of Anglo-Scottish interconnector
|
||||
length:
|
||||
index:
|
||||
"12998": 409.0
|
||||
"12998": 409.0
|
||||
"5627": 26.39
|
||||
bus0:
|
||||
index:
|
||||
"14552": "5819" # fix GB location of GB-IE interconnector
|
||||
"5628": "7276" # bus0 == bus1 to remove link in remove_unconnected_components
|
||||
"12997": "7276" # bus0 == bus1 to remove link in remove_unconnected_components
|
||||
"12997": "7276" # bus0 == bus1 to remove link in remove_unconnected_components
|
||||
|
74
doc/_static/theme_overrides.css
vendored
74
doc/_static/theme_overrides.css
vendored
@ -2,22 +2,78 @@
|
||||
SPDX-License-Identifier: GPL-3.0-or-later
|
||||
*/
|
||||
|
||||
.wy-side-nav-search {
|
||||
background-color: #eeeeee;
|
||||
}
|
||||
|
||||
.wy-side-nav-search .wy-dropdown>a,
|
||||
.wy-side-nav-search>a {
|
||||
color: rgb(34, 97, 156)
|
||||
}
|
||||
|
||||
.wy-side-nav-search>div.version {
|
||||
color: rgb(34, 97, 156)
|
||||
}
|
||||
|
||||
.wy-menu-vertical header,
|
||||
.wy-menu-vertical p.caption,
|
||||
.rst-versions a {
|
||||
color: #999999;
|
||||
}
|
||||
|
||||
.wy-menu-vertical a.reference:hover,
|
||||
.wy-menu-vertical a.reference.internal:hover {
|
||||
background: #dddddd;
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
.wy-nav-side {
|
||||
background: #efefef;
|
||||
}
|
||||
|
||||
.wy-menu-vertical a.reference {
|
||||
color: #000;
|
||||
}
|
||||
|
||||
.rst-versions .rst-current-version,
|
||||
.wy-nav-top,
|
||||
.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a:hover {
|
||||
background: #002221;
|
||||
}
|
||||
|
||||
.wy-nav-content .highlight {
|
||||
background: #ffffff;
|
||||
}
|
||||
|
||||
.wy-nav-content .highlight .nn,
|
||||
.wy-nav-content .admonition.warning a {
|
||||
color: #dddddd;
|
||||
}
|
||||
|
||||
.rst-content code.literal,
|
||||
.rst-content tt.literal {
|
||||
color: rgb(34, 97, 156)
|
||||
}
|
||||
|
||||
.wy-nav-content a.reference {
|
||||
color: rgb(34, 97, 156);
|
||||
}
|
||||
|
||||
|
||||
/* override table width restrictions */
|
||||
|
||||
@media screen and (min-width: 767px) {
|
||||
|
||||
.wy-table-responsive table td {
|
||||
/* !important prevents the common CSS stylesheets from overriding
|
||||
/* !important prevents the common CSS stylesheets from overriding
|
||||
this as on RTD they are loaded after this stylesheet */
|
||||
white-space: normal !important;
|
||||
/* background: #eeeeee !important; */
|
||||
white-space: normal !important;
|
||||
background: rgb(250, 250, 250) !important;
|
||||
}
|
||||
|
||||
.wy-table-responsive {
|
||||
max-width: 100%;
|
||||
overflow: visible !important;
|
||||
max-width: 100%;
|
||||
overflow: visible !important;
|
||||
}
|
||||
|
||||
.wy-nav-content {
|
||||
max-width: 910px !important;
|
||||
}
|
||||
}
|
||||
}
|
@ -1,6 +1,6 @@
|
||||
..
|
||||
SPDX-FileCopyrightText: 2020 Maximilian Parzen and Emmanuel Paez
|
||||
|
||||
|
||||
SPDX-License-Identifier: CC-BY-4.0
|
||||
|
||||
|
||||
@ -18,7 +18,7 @@ Google Cloud Platform (GCP)
|
||||
We are happy to take pull requests explaining where the procedures deviate from the descriptions below for other operating systems.
|
||||
Likewise, tutorials for other cloud computing solutions are also highly welcome.
|
||||
|
||||
The Google Cloud Platform (GCP) is a cloud computing service you can use to run PyPSA-Eur calculations, especially if
|
||||
The Google Cloud Platform (GCP) is a cloud computing service you can use to run PyPSA-Eur calculations, especially if
|
||||
|
||||
- you do not have immediate access to high-performance computing facilities,
|
||||
- you have problems with the Windows operating system and want a quick run on a linux-based system,
|
||||
@ -26,7 +26,7 @@ The Google Cloud Platform (GCP) is a cloud computing service you can use to run
|
||||
- you need quick results (trial version includes 32 vCPU cores and up to 800 GB of memory).
|
||||
|
||||
With the Google Cloud Platform you set up a virtual machine in the cloud which can store and operate data.
|
||||
Like on your local computer, you have to install all software and solvers, and create paths on the virtual machine to set up the required environment.
|
||||
Like on your local computer, you have to install all software and solvers, and create paths on the virtual machine to set up the required environment.
|
||||
A free trial budget of $300 is offered, which equals roughly 10-20 simulations with 180 nodes at hourly resolution.
|
||||
The following steps are required:
|
||||
|
||||
@ -38,7 +38,7 @@ The following steps are required:
|
||||
Step 1 - Google Cloud Platform registration
|
||||
-------------------------------------------
|
||||
|
||||
First, register at the `Google Cloud Platform <https://console.cloud.google.com>`_ (GCP).
|
||||
First, register at the `Google Cloud Platform <https://console.cloud.google.com>`_ (GCP).
|
||||
An active bank account is required, which will not be charged unless you exceed the trial budget.
|
||||
|
||||
Step 2 - Create your Virtual Machine instance
|
||||
@ -73,26 +73,26 @@ Step 3 - Installation of Cloud SDK
|
||||
- Download Google Cloud SDK `SDK <https://cloud.google.com/sdk>`_. Check that you are logged in to your Google account. The link should lead you to the Windows installation of Google Cloud SDK.
|
||||
- Follow the "Quickstart for Windows - Before you begin" steps.
|
||||
- After the successful installation and initialization, close the Google Cloud SDK and reopen it again. Type the following command into the "Google Cloud SDK Shell":
|
||||
|
||||
|
||||
.. code:: bash
|
||||
|
||||
|
||||
gcloud compute ssh <your VM instance name> -- -L 8888:localhost:8888
|
||||
|
||||
|
||||
- This command above will open a PuTTy command window that is connected to your Virtual Machine. Time to celebrate if it works!
|
||||
- Now install all necessary tools. As little help, the first steps:
|
||||
- Now install all necessary tools. As little help, the first steps:
|
||||
.. code:: bash
|
||||
|
||||
|
||||
sudo apt-get update
|
||||
sudo apt-get install bzip2 libxml2-dev
|
||||
sudo apt-get install wget
|
||||
wget https://repo.anaconda.com/archive/Anaconda3-2020.07-Linux-x86_64.sh (Check the link. To be up to date with anaconda, check the Anaconda website https://www.anaconda.com/products/individual )
|
||||
ls (to see what anaconda file to bash)
|
||||
bash Anaconda3-2020.07-Linux-x86_64.sh
|
||||
source ~/.bashrc
|
||||
|
||||
bash Anaconda3-2020.07-Linux-x86_64.sh
|
||||
source ~/.bashrc
|
||||
|
||||
- Close and reopen the PuTTy file (-> open Google Cloud SDK -> initialize again with the command above to open the PuTTY command window). Now ``conda`` can be listed with ``conda list``.
|
||||
Now you can follow the standard installation instructions to finalize your machine (don't forget the solvers - for bigger simulations use commercial solvers such as Gurobi).
|
||||
|
||||
|
||||
Step 4 - Installation of WinSCP
|
||||
-------------------------------
|
||||
|
||||
@ -102,22 +102,22 @@ Make sure that your instance is operating for the next steps.
|
||||
- Download `WinSCP <https://winscp.net/eng/download.php>`_ and follow the default installation steps.
|
||||
- Open WinSCP after the installation. A login window will open.
|
||||
- Keep SFTP as file protocol.
|
||||
- As host name insert the External IP of your VM (click in your internet browser on your GCP VM instance to see the external IP)
|
||||
- As host name insert the External IP of your VM (click in your internet browser on your GCP VM instance to see the external IP)
|
||||
- Set the User name in WinSCP to the name you see in your PuTTy window (check step 3 - for instance [username]@[VM-name]:~$)
|
||||
- Click on the advanced setting. SSH -> Authentication.
|
||||
- Option 1. Click on the Tools button and "Install Public Key into Server..". Somewhere in your folder structure must be a public key. I found it with the following folder syntax on my local windows computer -> :\Users\...\.ssh (there should be a PKK file).
|
||||
- Option 2. Click on the Tools button and "Generate new key pair...". Save the private key at a folder you remember and add it to the "private key file" field in WinSCP. Upload the public key to the metadeta of your instance.
|
||||
- Click on the advanced setting. SSH -> Authentication.
|
||||
- Option 1. Click on the Tools button and "Install Public Key into Server..". Somewhere in your folder structure must be a public key. I found it with the following folder syntax on my local windows computer -> :\Users\...\.ssh (there should be a PKK file).
|
||||
- Option 2. Click on the Tools button and "Generate new key pair...". Save the private key at a folder you remember and add it to the "private key file" field in WinSCP. Upload the public key to the metadeta of your instance.
|
||||
- Click OK and save. Then click Login. If successful, WinSCP will open your local computer's folder structure on the left side and the folder structure of your VM on the right side. (If you followed Option 2 and it is not initially working: stop your instance, refresh the website, and reopen the WinSCP field. Afterwards your login should be successful.)
|
||||
If you had struggle with the above steps, you could also try `this video <https://www.youtube.com/watch?v=lYx1oQkEF0E>`_.
|
||||
|
||||
.. note::
|
||||
Double check the External IP of your VM before you try to login with WinSCP. It's often a cause for an error.
|
||||
|
||||
|
||||
Step 5 - Extra. Copying your instance with all its data and paths included
|
||||
--------------------------------------------------------------------------
|
||||
|
||||
Especially if you think about operating several instances for quicker simulations, you can create a so-called `"image" <https://console.cloud.google.com/compute/images?authuser=1&project=exalted-country-284917>`_ of the virtual machine.
|
||||
The "image" includes all the data and software set-ups from your VM. Afterwards you can create a VM from an image and avoid all the installation steps above.
|
||||
The "image" includes all the data and software set-ups from your VM. Afterwards you can create a VM from an image and avoid all the installation steps above.
|
||||
|
||||
Important points when to solve networks in PyPSA
|
||||
------------------------------------------------
|
||||
@ -126,4 +126,4 @@ If you use the GCP with the default PyPSA-Eur settings, your budget will be used
|
||||
|
||||
- Always test using low resolution networks; i.e. a single country at 5 nodes and 24h resolution for 2 month of weather data.
|
||||
- Adjust your solver in the ``config.yaml`` file. Set ``solving: skip_iterations: true``.
|
||||
This will lead to a single solver iteration which is often precise enough.
|
||||
This will lead to a single solver iteration which is often precise enough.
|
||||
|
@ -5,4 +5,4 @@ cutouts,,,
|
||||
-- -- module,--,"One of {'era5','sarah'}","Source of the reanalysis weather dataset (e.g. `ERA5 <https://www.ecmwf.int/en/forecasts/datasets/reanalysis-datasets/era5>`_ or `SARAH-2 <https://wui.cmsaf.eu/safira/action/viewDoiDetails?acronym=SARAH_V002>`_)"
|
||||
-- -- xs,°,"Float interval within [-180, 180]","Range of longitudes to download weather data for."
|
||||
-- -- ys,°,"Float interval within [-90, 90]","Range of latitudes to download weather data for."
|
||||
-- -- years,--,"Integer interval within [1979,2018]","Range of years to download weather data for."
|
||||
-- -- years,--,"Integer interval within [1979,2018]","Range of years to download weather data for."
|
||||
|
|
@ -5,4 +5,4 @@ USD2013_to_EUR2013,--,float,"Exchange rate from USD :math:`_{2013}` to EUR :math
|
||||
capital_cost,EUR/MW,"Keys should be in the 'technology' column of ``data/costs.csv``. Values can be any float.","For the given technologies, assumptions about their capital investment costs are set to the corresponding value. Optional; overwrites cost assumptions from ``data/costs.csv``."
|
||||
marginal_cost,EUR/MWh,"Keys should be in the 'technology' column of ``data/costs.csv``. Values can be any float.","For the given technologies, assumptions about their marginal operating costs are set to the corresponding value. Optional; overwrites cost assumptions from ``data/costs.csv``."
|
||||
emission_prices,,,"Specify exogenous prices for emission types listed in ``network.carriers`` to marginal costs."
|
||||
-- co2,EUR/t,float,"Exogenous price of carbon-dioxide added to the marginal costs of fossil-fuelled generators according to their carbon intensity. Added through the keyword ``Ep`` in the ``{opts}`` wildcard only in the rule :mod:`prepare_network``."
|
||||
-- co2,EUR/t,float,"Exogenous price of carbon-dioxide added to the marginal costs of fossil-fuelled generators according to their carbon intensity. Added through the keyword ``Ep`` in the ``{opts}`` wildcard only in the rule :mod:`prepare_network``."
|
||||
|
|
@ -3,4 +3,4 @@ cutout,--,"Must be 'europe-2013-era5'","Specifies the directory where the releva
|
||||
carriers,--,"Any subset of {'ror', 'PHS', 'hydro'}","Specifies the types of hydro power plants to build per-unit availability time series for. 'ror' stands for run-of-river plants, 'PHS' represents pumped-hydro storage, and 'hydro' stands for hydroelectric dams."
|
||||
PHS_max_hours,h,float,"Maximum state of charge capacity of the pumped-hydro storage (PHS) in terms of hours at full output capacity ``p_nom``. Cf. `PyPSA documentation <https://pypsa.readthedocs.io/en/latest/components.html#storage-unit>`_."
|
||||
hydro_max_hours,h,"Any of {float, 'energy_capacity_totals_by_country', 'estimate_by_large_installations'}","Maximum state of charge capacity of the pumped-hydro storage (PHS) in terms of hours at full output capacity ``p_nom`` or heuristically determined. Cf. `PyPSA documentation <https://pypsa.readthedocs.io/en/latest/components.html#storage-unit>`_."
|
||||
clip_min_inflow,MW,float,"To avoid too small values in the inflow time series, values below this threshold are set to zero."
|
||||
clip_min_inflow,MW,float,"To avoid too small values in the inflow time series, values below this threshold are set to zero."
|
||||
|
|
@ -3,4 +3,4 @@ types,--,"Values should specify a `line type in PyPSA <https://pypsa.readthedocs
|
||||
s_max_pu,--,"Value in [0.,1.]","Correction factor for line capacities (``s_nom``) to approximate :math:`N-1` security and reserve capacity for reactive power flows"
|
||||
s_nom_max,MW,"float","Global upper limit for the maximum capacity of each extendable line."
|
||||
length_factor,--,float,"Correction factor to account for the fact that buses are *not* connected by lines through air-line distance."
|
||||
under_construction,--,"One of {'zero': set capacity to zero, 'remove': remove completely, 'keep': keep with full capacity}","Specifies how to handle lines which are currently under construction."
|
||||
under_construction,--,"One of {'zero': set capacity to zero, 'remove': remove completely, 'keep': keep with full capacity}","Specifies how to handle lines which are currently under construction."
|
||||
|
|
@ -2,4 +2,4 @@
|
||||
p_max_pu,--,"Value in [0.,1.]","Correction factor for link capacities ``p_nom``."
|
||||
p_nom_max,MW,"float","Global upper limit for the maximum capacity of each extendable DC link."
|
||||
include_tyndp,bool,"{'true', 'false'}","Specifies whether to add HVDC link projects from the `TYNDP 2018 <https://tyndp.entsoe.eu/tyndp2018/projects/>`_ which are at least in permitting."
|
||||
under_construction,--,"One of {'zero': set capacity to zero, 'remove': remove completely, 'keep': keep with full capacity}","Specifies how to handle lines which are currently under construction."
|
||||
under_construction,--,"One of {'zero': set capacity to zero, 'remove': remove completely, 'keep': keep with full capacity}","Specifies how to handle lines which are currently under construction."
|
||||
|
|
@ -10,4 +10,4 @@ max_depth,m,float,"Maximum sea water depth at which wind turbines can be build.
|
||||
min_shore_distance,m,float,"Minimum distance to the shore below which wind turbines cannot be build. Such areas close to the shore are excluded in the process of calculating the AC-connected offshore wind potential."
|
||||
potential,--,"One of {'simple', 'conservative'}","Method to compute the maximal installable potential for a node; confer :ref:`renewableprofiles`"
|
||||
clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero."
|
||||
keep_all_available_areas,bool,"{'true', 'false'}","Use all available weather cells for renewable profile and potential generation. The default ignores weather cells where less than 1 MW can be installed."
|
||||
keep_all_available_areas,bool,"{'true', 'false'}","Use all available weather cells for renewable profile and potential generation. The default ignores weather cells where less than 1 MW can be installed."
|
||||
|
|
@ -10,4 +10,4 @@ max_depth,m,float,"Maximum sea water depth at which wind turbines can be build.
|
||||
min_shore_distance,m,float,"Minimum distance to the shore below which wind turbines cannot be build. Such areas close to the shore are excluded in the process of calculating the AC-connected offshore wind potential."
|
||||
potential,--,"One of {'simple', 'conservative'}","Method to compute the maximal installable potential for a node; confer :ref:`renewableprofiles`"
|
||||
clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero."
|
||||
keep_all_available_areas,bool,"{'true', 'false'}","Use all available weather cells for renewable profile and potential generation. The default ignores weather cells where less than 1 MW can be installed."
|
||||
keep_all_available_areas,bool,"{'true', 'false'}","Use all available weather cells for renewable profile and potential generation. The default ignores weather cells where less than 1 MW can be installed."
|
||||
|
|
@ -11,4 +11,4 @@ corine,,,
|
||||
natura,bool,"{true, false}","Switch to exclude `Natura 2000 <https://en.wikipedia.org/wiki/Natura_2000>`_ natural protection areas. Area is excluded if ``true``."
|
||||
potential,--,"One of {'simple', 'conservative'}","Method to compute the maximal installable potential for a node; confer :ref:`renewableprofiles`"
|
||||
clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero."
|
||||
keep_all_available_areas,bool,"{'true', 'false'}","Use all availabe weather cells for renewable profile and potential generation. The default ignores weather cells where only less than 1 MW can be installed."
|
||||
keep_all_available_areas,bool,"{'true', 'false'}","Use all availabe weather cells for renewable profile and potential generation. The default ignores weather cells where only less than 1 MW can be installed."
|
||||
|
|
@ -8,4 +8,4 @@ Trigger, Description, Definition, Status
|
||||
``ATK``, "Require each node to be autarkic. Example: ``ATK`` removes all lines and links. ``ATKc`` removes all cross-border lines and links.", ``prepare_network``, In active use
|
||||
``BAU``, Add a per-``carrier`` minimal overall capacity; i.e. at least ``40GW`` of ``OCGT`` in Europe; configured in ``electricity: BAU_mincapacities``, ``solve_network``: `add_opts_constraints() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/solve_network.py#L66>`_, Untested
|
||||
``SAFE``, Add a capacity reserve margin of a certain fraction above the peak demand to which renewable generators and storage do *not* contribute. Ignores network., ``solve_network`` `add_opts_constraints() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/solve_network.py#L73>`_, Untested
|
||||
``carrier+{c|p}factor``, "Alter the capital cost (``c``) or installable potential (``p``) of a carrier by a factor. Example: ``solar+c0.5`` reduces the capital cost of solar to 50\% of original values.", ``prepare_network``, In active use
|
||||
``carrier+{c|p}factor``, "Alter the capital cost (``c``) or installable potential (``p``) of a carrier by a factor. Example: ``solar+c0.5`` reduces the capital cost of solar to 50\% of original values.", ``prepare_network``, In active use
|
||||
|
|
@ -12,4 +12,3 @@ energy_min,TWh,float,"Lower y-axis limit in energy bar plots."
|
||||
energy_threshold,TWh,float,"Threshold below which technologies will not be shown in energy bar plots."
|
||||
tech_colors,--,"carrier -> HEX colour code","Mapping from network ``carrier`` to a colour (`HEX colour code <https://en.wikipedia.org/wiki/Web_colors#Hex_triplet>`_)."
|
||||
nice_names,--,"str -> str","Mapping from network ``carrier`` to a more readable name."
|
||||
nice_names_n,--,"str -> str","Same as nice_names, but with linebreaks."
|
|
@ -1,6 +1,5 @@
|
||||
,Unit,Values,Description
|
||||
sectors,--,"Must be 'elec'","Placeholder for integration of other energy sectors."
|
||||
simpl,--,cf. :ref:`simpl`,"List of ``{simpl}`` wildcards to run."
|
||||
ll,--,cf. :ref:`ll`,"List of ``{ll}`` wildcards to run."
|
||||
clusters,--,cf. :ref:`clusters`,"List of ``{clusters}`` wildcards to run."
|
||||
opts,--,cf. :ref:`opts`,"List of ``{opts}`` wildcards to run."
|
||||
ll,--,cf. :ref:`ll`,"List of ``{ll}`` wildcards to run."
|
||||
opts,--,cf. :ref:`opts`,"List of ``{opts}`` wildcards to run."
|
||||
|
|
@ -1,4 +1,4 @@
|
||||
,Unit,Values,Description
|
||||
start,--,"str or datetime-like; e.g. YYYY-MM-DD","Left bound of date range"
|
||||
end,--,"str or datetime-like; e.g. YYYY-MM-DD","Right bound of date range"
|
||||
closed,--,"One of {None, ‘left’, ‘right’}","Make the time interval closed to the ``left``, ``right``, or both sides ``None``."
|
||||
closed,--,"One of {None, ‘left’, ‘right’}","Make the time interval closed to the ``left``, ``right``, or both sides ``None``."
|
||||
|
|
@ -12,4 +12,4 @@ corine,--,"Any subset of the `CORINE Land Cover code list <http://www.eea.europa
|
||||
natura,bool,"{true, false}","Switch to exclude `Natura 2000 <https://en.wikipedia.org/wiki/Natura_2000>`_ natural protection areas. Area is excluded if ``true``."
|
||||
potential,--,"One of {'simple', 'conservative'}","Method to compute the maximal installable potential for a node; confer :ref:`renewableprofiles`"
|
||||
clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero."
|
||||
keep_all_available_areas,bool,"{'true', 'false'}","Use all available weather cells for renewable profile and potential generation. The default ignores weather cells where less than 1 MW can be installed."
|
||||
keep_all_available_areas,bool,"{'true', 'false'}","Use all available weather cells for renewable profile and potential generation. The default ignores weather cells where less than 1 MW can be installed."
|
||||
|
|
@ -7,4 +7,4 @@ max_iterations,--,int,"Maximum number of solving iterations in between which res
|
||||
nhours,--,int,"Specifies the :math:`n` first snapshots to take into account. Must be less than the total number of snapshots. Rather recommended only for debugging."
|
||||
clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero."
|
||||
skip_iterations,bool,"{'true','false'}","Skip iterating, do not update impedances of branches."
|
||||
track_iterations,bool,"{'true','false'}","Flag whether to store the intermediate branch capacities and objective function values are recorded for each iteration in ``network.lines['s_nom_opt_X']`` (where ``X`` labels the iteration)"
|
||||
track_iterations,bool,"{'true','false'}","Flag whether to store the intermediate branch capacities and objective function values are recorded for each iteration in ``network.lines['s_nom_opt_X']`` (where ``X`` labels the iteration)"
|
||||
|
|
@ -1,3 +1,3 @@
|
||||
,Unit,Values,Description
|
||||
name,--,"One of {'gurobi', 'cplex', 'cbc', 'glpk', 'ipopt'}; potentially more possible","Solver to use for optimisation problems in the workflow; e.g. clustering and linear optimal power flow."
|
||||
opts,--,"Parameter list for `Gurobi <https://www.gurobi.com/documentation/8.1/refman/parameters.html>`_ and `CPLEX <https://www.ibm.com/support/knowledgecenter/SSSA5P_12.5.1/ilog.odms.cplex.help/CPLEX/Parameters/topics/introListAlpha.html>`_","Solver specific parameter settings."
|
||||
opts,--,"Parameter list for `Gurobi <https://www.gurobi.com/documentation/8.1/refman/parameters.html>`_ and `CPLEX <https://www.ibm.com/support/knowledgecenter/SSSA5P_12.5.1/ilog.odms.cplex.help/CPLEX/Parameters/topics/introListAlpha.html>`_","Solver specific parameter settings."
|
||||
|
|
@ -3,7 +3,7 @@ version,--,0.x.x,"Version of PyPSA-Eur"
|
||||
tutorial,bool,"{true, false}","Switch to retrieve the tutorial data set instead of the full data set."
|
||||
logging,,,
|
||||
-- level,--,"Any of {'INFO', 'WARNING', 'ERROR'}","Restrict console outputs to all infos, warning or errors only"
|
||||
-- format,--,"e.g. ``%(levelname)s:%(name)s:%(message)s``","Custom format for log messages. See `LogRecord <https://docs.python.org/3/library/logging.html#logging.LogRecord>`_ attributes."
|
||||
-- format,--,"","Custom format for log messages. See `LogRecord <https://docs.python.org/3/library/logging.html#logging.LogRecord>`_ attributes."
|
||||
summary_dir,--,"e.g. 'results'","Directory into which results are written."
|
||||
countries,--,"Subset of {'AL', 'AT', 'BA', 'BE', 'BG', 'CH', 'CZ', 'DE', 'DK', 'EE', 'ES', 'FI', 'FR', 'GB', 'GR', 'HR', 'HU', 'IE', 'IT', 'LT', 'LU', 'LV', 'ME', 'MK', 'NL', 'NO', 'PL', 'PT', 'RO', 'RS', 'SE', 'SI', 'SK'}","European countries defined by their `Two-letter country codes (ISO 3166-1) <https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2>`_ which should be included in the energy system model."
|
||||
focus_weights,--,"Keys should be two-digit country codes (e.g. DE) and values should range between 0 and 1","Ratio of total clusters for particular countries. the remaining weight is distributed according to mean load. An example: ``focus_weights: DE: 0.6 FR: 0.2``."
|
||||
@ -14,4 +14,4 @@ enable,,,
|
||||
-- retrieve_cutout,bool,"{true, false}","Switch to enable the retrieval of cutouts from zenodo with :mod:`retrieve_cutout`."
|
||||
-- build_natura_raster,bool,"{true, false}","Switch to enable the creation of the raster ``natura.tiff`` via the rule :mod:`build_natura_raster`."
|
||||
-- retrieve_natura_raster,bool,"{true, false}","Switch to enable the retrieval of ``natura.tiff`` from zenodo with :mod:`retrieve_natura_raster`."
|
||||
-- custom_busmap,bool,"{true, false}","Switch to enable the use of custom busmaps in rule :mod:`cluster_network`. If activated the rule looks for provided busmaps at ``data/custom_busmap_elec_s{simpl}_{clusters}.csv`` which should have the same format as ``resources/busmap_elec_s{simpl}_{clusters}.csv``, i.e. the index should contain the buses of ``networks/elec_s{simpl}.nc``."
|
||||
-- custom_busmap,bool,"{true, false}","Switch to enable the use of custom busmaps in rule :mod:`cluster_network`. If activated the rule looks for provided busmaps at ``data/custom_busmap_elec_s{simpl}_{clusters}.csv`` which should have the same format as ``resources/busmap_elec_s{simpl}_{clusters}.csv``, i.e. the index should contain the buses of ``networks/elec_s{simpl}.nc``."
|
||||
|
|
@ -1,4 +1,4 @@
|
||||
,Unit,Values,Description
|
||||
x,p.u.,float,"Series reactance (per unit, using ``s_nom`` as base power of the transformer. Overwritten if ``type`` is specified."
|
||||
s_nom,MVA,float,"Limit of apparent power which can pass through branch. Overwritten if ``type`` is specified."
|
||||
type,--,"A `transformer type in PyPSA <https://pypsa.readthedocs.io/en/latest/components.html#transformer-types>`_.","Specifies transformer types to assume for the transformers of the ENTSO-E grid extraction."
|
||||
type,--,"A `transformer type in PyPSA <https://pypsa.readthedocs.io/en/latest/components.html#transformer-types>`_.","Specifies transformer types to assume for the transformers of the ENTSO-E grid extraction."
|
||||
|
|
@ -36,10 +36,10 @@ investment changes as more ambitious greenhouse-gas emission reduction targets a
|
||||
|
||||
The ``scenario`` section is an extraordinary section of the config file
|
||||
that is strongly connected to the :ref:`wildcards` and is designed to
|
||||
facilitate running multiple scenarios through a single command
|
||||
facilitate running multiple scenarios through a single command
|
||||
|
||||
.. code:: bash
|
||||
|
||||
|
||||
snakemake -j 1 solve_all_elec_networks
|
||||
|
||||
For each wildcard, a **list of values** is provided. The rule ``solve_all_elec_networks`` will trigger the rules for creating ``results/networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc`` for **all combinations** of the provided wildcard values as defined by Python's `itertools.product(...) <https://docs.python.org/2/library/itertools.html#itertools.product>`_ function that snakemake's `expand(...) function <https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#targets>`_ uses.
|
||||
|
@ -50,4 +50,4 @@ Default Cost Assumptions
|
||||
.. csv-table::
|
||||
:header-rows: 1
|
||||
:widths: 10,3,5,4,6,8
|
||||
:file: ../data/costs.csv
|
||||
:file: ../data/costs.csv
|
||||
|
@ -8,7 +8,7 @@ PyPSA-Eur: An Open Optimisation Model of the European Transmission System
|
||||
|
||||
.. image:: https://img.shields.io/github/v/release/pypsa/pypsa-eur?include_prereleases
|
||||
:alt: GitHub release (latest by date including pre-releases)
|
||||
|
||||
|
||||
.. image:: https://travis-ci.org/PyPSA/pypsa-eur.svg?branch=master
|
||||
:target: https://travis-ci.org/PyPSA/pypsa-eur
|
||||
|
||||
@ -101,9 +101,9 @@ Documentation
|
||||
simplification
|
||||
solving
|
||||
plotting
|
||||
|
||||
|
||||
**References**
|
||||
|
||||
|
||||
* :doc:`release_notes`
|
||||
* :doc:`limitations`
|
||||
* :doc:`contributing`
|
||||
@ -183,7 +183,7 @@ There are pre-built networks available as a dataset on Zenodo as well for every
|
||||
The included ``.nc`` files are PyPSA network files which can be imported with PyPSA via:
|
||||
|
||||
.. code:: python
|
||||
|
||||
|
||||
import pypsa
|
||||
|
||||
filename = "elec_s_1024_ec.nc" # example
|
||||
@ -202,7 +202,7 @@ PyPSA-Eur work is released under multiple licenses:
|
||||
See the individual files and the `dep5 <.reuse/dep5>`_ file for license details.
|
||||
|
||||
Additionally, different licenses and terms of use also apply to the various input data, which are summarised below.
|
||||
More details are included in
|
||||
More details are included in
|
||||
`the description of the data bundles on zenodo <https://zenodo.org/record/3517935#.XbGeXvzRZGo>`_.
|
||||
|
||||
.. csv-table::
|
||||
|
@ -1,6 +1,6 @@
|
||||
..
|
||||
SPDX-FileCopyrightText: 2019-2020 The PyPSA-Eur Authors
|
||||
|
||||
|
||||
SPDX-License-Identifier: CC-BY-4.0
|
||||
|
||||
.. _installation:
|
||||
@ -37,12 +37,12 @@ We recommend using the package manager and environment management system ``conda
|
||||
Install `miniconda <https://docs.conda.io/en/latest/miniconda.html>`_, which is a mini version of `Anaconda <https://www.anaconda.com/>`_ that includes only ``conda`` and its dependencies or make sure ``conda`` is already installed on your system.
|
||||
For instructions for your operating system follow the ``conda`` `installation guide <https://docs.conda.io/projects/conda/en/latest/user-guide/install/>`_.
|
||||
|
||||
The python package requirements are curated in the `environment.yaml <https://github.com/PyPSA/pypsa-eur/blob/master/environment.yaml>`_ file.
|
||||
The python package requirements are curated in the `envs/environment.yaml <https://github.com/PyPSA/pypsa-eur/blob/master/envs/environment.yaml>`_ file.
|
||||
The environment can be installed and activated using
|
||||
|
||||
.. code:: bash
|
||||
|
||||
.../pypsa-eur % conda env create -f environment.yaml
|
||||
.../pypsa-eur % conda env create -f envs/environment.yaml
|
||||
|
||||
.../pypsa-eur % conda activate pypsa-eur
|
||||
|
||||
@ -55,14 +55,14 @@ The environment can be installed and activated using
|
||||
`mamba <https://github.com/QuantStack/mamba>`_ as a fast drop-in replacement via
|
||||
|
||||
.. code:: bash
|
||||
|
||||
|
||||
conda install -c conda-forge mamba
|
||||
|
||||
and then install the environment with
|
||||
|
||||
.. code:: bash
|
||||
|
||||
mamba env create -f environment.yaml
|
||||
mamba env create -f envs/environment.yaml
|
||||
|
||||
Install a Solver
|
||||
================
|
||||
@ -102,10 +102,10 @@ For installation instructions of these solvers for your operating system, follow
|
||||
and on Windows
|
||||
|
||||
.. code:: bash
|
||||
|
||||
|
||||
conda activate pypsa-eur
|
||||
conda install -c conda-forge ipopt glpk
|
||||
|
||||
|
||||
|
||||
.. _defaultconfig:
|
||||
|
||||
@ -113,7 +113,7 @@ Set Up the Default Configuration
|
||||
================================
|
||||
|
||||
PyPSA-Eur has several configuration options that must be specified in a ``config.yaml`` file located in the root directory.
|
||||
An example configuration ``config.default.yaml`` is maintained in the repository.
|
||||
An example configuration ``config.default.yaml`` is maintained in the repository.
|
||||
More details on the configuration options are in :ref:`config`.
|
||||
|
||||
Before first use, create a ``config.yaml`` by copying the example.
|
||||
|
@ -1,6 +1,6 @@
|
||||
..
|
||||
SPDX-FileCopyrightText: 2019-2020 The PyPSA-Eur Authors
|
||||
|
||||
|
||||
SPDX-License-Identifier: CC-BY-4.0
|
||||
|
||||
.. _intro:
|
||||
@ -64,4 +64,4 @@ Folder Structure
|
||||
System Requirements
|
||||
===================
|
||||
|
||||
Building the model with the scripts in this repository uses up to 20 GB of memory. Computing optimal investment and operation scenarios requires a strong interior-point solver compatible with the modelling library `Pyomo <https://www.pyomo.org>`_ like `Gurobi <http://www.gurobi.com/>`_ or `CPLEX <https://www.ibm.com/analytics/cplex-optimizer>`_ with up to 100 GB of memory.
|
||||
Building the model with the scripts in this repository uses up to 20 GB of memory. Computing optimal investment and operation scenarios requires a strong interior-point solver compatible with the modelling library `Pyomo <https://www.pyomo.org>`_ like `Gurobi <http://www.gurobi.com/>`_ or `CPLEX <https://www.ibm.com/analytics/cplex-optimizer>`_ with up to 100 GB of memory.
|
||||
|
@ -37,7 +37,7 @@ improving the approximations.
|
||||
not spatially disaggregated; assuming, as we have done, that the load time series
|
||||
shape is the same at each node within each country ignores local differences.
|
||||
|
||||
- **Currently installed renewable capacities:**
|
||||
- **Currently installed renewable capacities:**
|
||||
Information on existing wind, solar and small hydro, geothermal, marine and
|
||||
biomass power plants are excluded from the dataset because of a lack of data
|
||||
availability in many countries. Approximate distributions of wind and solar
|
||||
@ -56,4 +56,3 @@ improving the approximations.
|
||||
Belarus, Ukraine, Turkey and Morocco have not been taken into account;
|
||||
islands which are not connected to the main European system, such as Malta,
|
||||
Crete and Cyprus, are also excluded from the model.
|
||||
|
@ -173,4 +173,4 @@ Rule ``plot_network``
|
||||
.. automodule:: plot_network
|
||||
|
||||
.. image:: img/tech-colors.png
|
||||
:align: center
|
||||
:align: center
|
||||
|
@ -2,7 +2,7 @@
|
||||
SPDX-FileCopyrightText: 2019-2020 The PyPSA-Eur Authors
|
||||
|
||||
SPDX-License-Identifier: CC-BY-4.0
|
||||
|
||||
|
||||
.. _base:
|
||||
|
||||
Rule ``base_network``
|
||||
@ -51,4 +51,4 @@ Rule ``base_network``
|
||||
|
||||
|
|
||||
|
||||
.. automodule:: base_network
|
||||
.. automodule:: base_network
|
||||
|
@ -48,4 +48,4 @@ Rule ``build_bus_regions``
|
||||
|
||||
|
|
||||
|
||||
.. automodule:: build_bus_regions
|
||||
.. automodule:: build_bus_regions
|
||||
|
@ -39,4 +39,4 @@ Rule ``build_cutout``
|
||||
|
||||
|
|
||||
|
||||
.. automodule:: build_cutout
|
||||
.. automodule:: build_cutout
|
||||
|
@ -32,7 +32,7 @@ Upcoming Release
|
||||
|
||||
* Corrected setting of exogenous emission price (in config -> cost -> emission price). This was not weighted by the efficiency and effective emission of the generators. Fixed in `#171 <https://github.com/PyPSA/pypsa-eur/pull/171>`_.
|
||||
|
||||
* Don't remove capital costs from lines and links, when imposing a line volume limit (wildcard ``lv``) or a line cost limit (``lc``). Previously, these were removed to move the expansion in direction of the limit.
|
||||
* Don't remove capital costs from lines and links, when imposing a line volume limit (wildcard ``lv``) or a line cost limit (``lc``). Previously, these were removed to move the expansion in direction of the limit.
|
||||
|
||||
* Fix bug of clustering offwind-{ac,dc} sites in the option of high-resolution sites for renewables. Now, there are more sites for offwind-{ac,dc} available than network nodes. Before, they were clustered to the resolution of the network. (e.g. elec_s1024_37m.nc: 37 network nodes, 1024 sites)
|
||||
|
||||
@ -42,6 +42,9 @@ Upcoming Release
|
||||
|
||||
* The mappings for clustered lines and buses produced by the ``simplify_network`` and ``cluster_network`` rules changed from Hierarchical Data Format (.h5) to Comma-Separated Values format (.csv) (`#198 <https://github.com/PyPSA/pypsa-eur/pull/198>`_)
|
||||
|
||||
* Multiple smaller changes: Removed unused ``{network}`` wildcard, moved environment files to dedicated ``envs`` folder,
|
||||
removed sector-coupling components from configuration files, minor refactoring and code cleaning (`#190 <https://github.com/PyPSA/pypsa-eur/pull 190>`_).
|
||||
|
||||
* Added an option to use custom busmaps in rule :mod:`cluster_network`. To use this feature set ``enable: custom_busmap: true``.
|
||||
Then, the rule looks for custom busmaps at ``data/custom_busmap_elec_s{simpl}_{clusters}.csv``,
|
||||
which should have the same format as ``resources/busmap_elec_s{simpl}_{clusters}.csv``.
|
||||
@ -122,7 +125,7 @@ This is the first release of PyPSA-Eur, a model of the European power system at
|
||||
|
||||
* Hydrogen pipelines (including cost assumptions) can now be added alongside clustered network connections in the rule :mod:`add_extra_components` . Set ``electricity: extendable_carriers: Link: [H2 pipeline]`` and ensure hydrogen storage is modelled as a ``Store``. This is a first simplified stage (`#108 <https://github.com/PyPSA/pypsa-eur/pull/108>`_).
|
||||
|
||||
* Logfiles for all rules of the ``snakemake`` workflow are now written in the folder ``log/`` (`#102 <https://github.com/PyPSA/pypsa-eur/pull/102>`_).
|
||||
* Logfiles for all rules of the ``snakemake`` workflow are now written in the folder ``log/`` (`#102 <https://github.com/PyPSA/pypsa-eur/pull/102>`_).
|
||||
|
||||
* The new function ``_helpers.mock_snakemake`` creates a ``snakemake`` object which mimics the actual ``snakemake`` object produced by workflow by parsing the ``Snakefile`` and setting all paths for inputs, outputs, and logs. This allows running all scripts within a (I)python terminal (or just by calling ``python <script-name>``) and thereby facilitates developing and debugging scripts significantly (`#107 <https://github.com/PyPSA/pypsa-eur/pull/107>`_).
|
||||
|
||||
@ -133,8 +136,8 @@ Release Process
|
||||
|
||||
* Finalise release notes at ``doc/release_notes.rst``.
|
||||
|
||||
* Update ``environment.fixedversions.yaml`` via
|
||||
``conda env export -n pypsa-eur -f environment.fixedversions.yaml --no-builds``
|
||||
* Update ``envs/environment.fixed.yaml`` via
|
||||
``conda env export -n pypsa-eur -f envs/environment.fixed.yaml --no-builds``
|
||||
from an up-to-date `pypsa-eur` environment.
|
||||
|
||||
* Update version number in ``doc/conf.py`` and ``*config.*.yaml``.
|
||||
|
@ -22,7 +22,7 @@ After simplification and clustering of the network, additional components may be
|
||||
|
||||
.. toctree::
|
||||
:caption: Overview
|
||||
|
||||
|
||||
simplification/simplify_network
|
||||
simplification/cluster_network
|
||||
simplification/add_extra_components
|
||||
|
@ -1,6 +1,6 @@
|
||||
..
|
||||
SPDX-FileCopyrightText: 2019-2020 The PyPSA-Eur Authors
|
||||
|
||||
|
||||
SPDX-License-Identifier: CC-BY-4.0
|
||||
|
||||
.. _tutorial:
|
||||
@ -48,7 +48,7 @@ The model can be adapted to only include selected countries (e.g. Germany) inste
|
||||
.. literalinclude:: ../config.tutorial.yaml
|
||||
:language: yaml
|
||||
:lines: 20
|
||||
|
||||
|
||||
Likewise, the example's temporal scope can be restricted (e.g. to a single month).
|
||||
|
||||
.. literalinclude:: ../config.tutorial.yaml
|
||||
@ -119,8 +119,8 @@ orders ``snakemake`` to run the script ``solve_network`` that produces the solve
|
||||
.. code::
|
||||
|
||||
rule solve_network:
|
||||
input: "networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
|
||||
output: "results/networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
|
||||
input: "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
|
||||
output: "results/networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
|
||||
[...]
|
||||
script: "scripts/solve_network.py"
|
||||
|
||||
@ -132,7 +132,7 @@ orders ``snakemake`` to run the script ``solve_network`` that produces the solve
|
||||
Windows users should add the option ``--keep-target-files`` to the command or instead run ``snakemake -j 1 solve_all_elec_networks``.
|
||||
|
||||
This triggers a workflow of multiple preceding jobs that depend on each rule's inputs and outputs:
|
||||
|
||||
|
||||
.. graphviz::
|
||||
:align: center
|
||||
|
||||
@ -184,7 +184,7 @@ This triggers a workflow of multiple preceding jobs that depend on each rule's i
|
||||
7 -> 11
|
||||
5 -> 11
|
||||
12 -> 11
|
||||
}
|
||||
}
|
||||
|
||||
|
|
||||
|
||||
@ -229,8 +229,8 @@ A job (here ``simplify_network``) will display its attributes and normally some
|
||||
INFO:__main__:Mapping all network lines onto a single 380kV layer
|
||||
INFO:__main__:Simplifying connected link components
|
||||
INFO:__main__:Removing stubs
|
||||
INFO:__main__:Displacing offwind-ac generator(s) and adding connection costs to capital_costs: 20128 Eur/MW/a for `5718 offwind-ac`
|
||||
INFO:__main__:Displacing offwind-dc generator(s) and adding connection costs to capital_costs: 14994 Eur/MW/a for `5718 offwind-dc`, 26939 Eur/MW/a for `5724 offwind-dc`, 29621 Eur/MW/a for `5725 offwind-dc`
|
||||
INFO:__main__:Displacing offwind-ac generator(s) and adding connection costs to capital_costs: 20128 Eur/MW/a for `5718 offwind-ac`
|
||||
INFO:__main__:Displacing offwind-dc generator(s) and adding connection costs to capital_costs: 14994 Eur/MW/a for `5718 offwind-dc`, 26939 Eur/MW/a for `5724 offwind-dc`, 29621 Eur/MW/a for `5725 offwind-dc`
|
||||
INFO:pypsa.io:Exported network elec_s.nc has lines, carriers, links, storage_units, loads, buses, generators
|
||||
[<DATETIME>]
|
||||
Finished job 3.
|
||||
@ -293,5 +293,5 @@ For inspiration, read the `examples section in the PyPSA documentation <https://
|
||||
.. note::
|
||||
|
||||
There are rules for summaries and plotting available in the repository of PyPSA-Eur.
|
||||
|
||||
They are currently under revision and therefore not yet documented.
|
||||
|
||||
They are currently under revision and therefore not yet documented.
|
||||
|
@ -18,16 +18,6 @@ what data to retrieve and what files to produce.
|
||||
Detailed explanations of how wildcards work in ``snakemake`` can be found in the
|
||||
`relevant section of the documentation <https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#wildcards>`_.
|
||||
|
||||
.. _network:
|
||||
|
||||
The ``{network}`` wildcard
|
||||
==========================
|
||||
|
||||
The ``{network}`` wildcard specifies the considered energy sector(s)
|
||||
and, as currently only ``elec`` (for electricity) is included,
|
||||
it currently represents rather a placeholder wildcard to facilitate
|
||||
future extensions including multiple energy sectors at once.
|
||||
|
||||
.. _simpl:
|
||||
|
||||
The ``{simpl}`` wildcard
|
||||
@ -75,7 +65,7 @@ The wildcard, in general, consists of two parts:
|
||||
|
||||
2. The second part can be
|
||||
``opt`` or a float bigger than one (e.g. 1.25).
|
||||
|
||||
|
||||
(a) If ``opt`` is chosen line expansion is optimised
|
||||
according to its capital cost
|
||||
(where the choice ``v`` only considers overhead costs for HVDC transmission lines, while
|
||||
@ -84,7 +74,7 @@ The wildcard, in general, consists of two parts:
|
||||
|
||||
(b) ``v1.25`` will limit the total volume of line expansion
|
||||
to 25 % of currently installed capacities weighted by
|
||||
individual line lengths; investment costs are neglected.
|
||||
individual line lengths; investment costs are neglected.
|
||||
|
||||
(c) ``c1.25`` will allow to build a transmission network that
|
||||
costs no more than 25 % more than the current system.
|
||||
|
@ -5,19 +5,17 @@
|
||||
name: pypsa-eur-docs
|
||||
channels:
|
||||
- conda-forge
|
||||
#- bioconda
|
||||
dependencies:
|
||||
#- python
|
||||
- python<=3.7
|
||||
- pip
|
||||
- pypsa>=0.17.1
|
||||
- atlite=0.0.3
|
||||
- pre-commit
|
||||
|
||||
# Dependencies of the workflow itself
|
||||
#- xlrd
|
||||
- scikit-learn
|
||||
- pycountry
|
||||
- seaborn
|
||||
#- snakemake-minimal
|
||||
- memory_profiler
|
||||
- yaml
|
||||
- pytables
|
||||
@ -25,31 +23,21 @@ dependencies:
|
||||
|
||||
# Second order dependencies which should really be deps of atlite
|
||||
- xarray
|
||||
#- netcdf4
|
||||
#- bottleneck
|
||||
#- toolz
|
||||
#- dask
|
||||
- progressbar2
|
||||
- pyyaml>=5.1.0
|
||||
|
||||
# Include ipython so that one does not inadvertently drop out of the conda
|
||||
# environment by calling ipython
|
||||
# - ipython
|
||||
|
||||
# GIS dependencies have to come all from conda-forge
|
||||
- conda-forge::cartopy
|
||||
- conda-forge::fiona
|
||||
- conda-forge::proj
|
||||
- conda-forge::pyshp
|
||||
- conda-forge::geopandas
|
||||
- conda-forge::rasterio
|
||||
- conda-forge::shapely
|
||||
- conda-forge::libgdal
|
||||
- cartopy
|
||||
- fiona
|
||||
- proj
|
||||
- pyshp
|
||||
- geopandas
|
||||
- rasterio
|
||||
- shapely
|
||||
- libgdal
|
||||
|
||||
# The FRESNA/KIT stuff is not packaged for conda yet
|
||||
- pip:
|
||||
- vresutils==0.3.1
|
||||
- tsam>=1.1.0
|
||||
- git+https://github.com/PyPSA/glaes.git#egg=glaes
|
||||
- git+https://github.com/PyPSA/geokit.git#egg=geokit
|
||||
- cdsapi
|
@ -37,8 +37,7 @@ dependencies:
|
||||
- progressbar2
|
||||
- pyyaml>=5.1.0
|
||||
|
||||
# Include ipython so that one does not inadvertently drop out of the conda
|
||||
# environment by calling ipython
|
||||
# Keep in conda environment when calling ipython
|
||||
- ipython
|
||||
|
||||
# GIS dependencies:
|
||||
@ -52,9 +51,6 @@ dependencies:
|
||||
- libgdal<=3.0.4
|
||||
- descartes
|
||||
|
||||
# Solvers
|
||||
- gurobi:gurobi # until https://github.com/conda-forge/pypsa-feedstock/issues/4 closed
|
||||
|
||||
- pip:
|
||||
- vresutils==0.3.1
|
||||
- tsam>=1.1.0
|
@ -44,6 +44,7 @@ def configure_logging(snakemake, skip_handlers=False):
|
||||
})
|
||||
logging.basicConfig(**kwargs)
|
||||
|
||||
|
||||
def load_network(import_name=None, custom_components=None):
|
||||
"""
|
||||
Helper for importing a pypsa.Network with additional custom components.
|
||||
@ -70,7 +71,6 @@ def load_network(import_name=None, custom_components=None):
|
||||
-------
|
||||
pypsa.Network
|
||||
"""
|
||||
|
||||
import pypsa
|
||||
from pypsa.descriptors import Dict
|
||||
|
||||
@ -90,10 +90,12 @@ def load_network(import_name=None, custom_components=None):
|
||||
override_components=override_components,
|
||||
override_component_attrs=override_component_attrs)
|
||||
|
||||
|
||||
def pdbcast(v, h):
|
||||
return pd.DataFrame(v.values.reshape((-1, 1)) * h.values,
|
||||
index=v.index, columns=h.index)
|
||||
|
||||
|
||||
def load_network_for_plots(fn, tech_costs, config, combine_hydro_ps=True):
|
||||
import pypsa
|
||||
from add_electricity import update_transmission_costs, load_costs
|
||||
@ -113,11 +115,11 @@ def load_network_for_plots(fn, tech_costs, config, combine_hydro_ps=True):
|
||||
if combine_hydro_ps:
|
||||
n.storage_units.loc[n.storage_units.carrier.isin({'PHS', 'hydro'}), 'carrier'] = 'hydro+PHS'
|
||||
|
||||
# #if the carrier was not set on the heat storage units
|
||||
# if the carrier was not set on the heat storage units
|
||||
# bus_carrier = n.storage_units.bus.map(n.buses.carrier)
|
||||
# n.storage_units.loc[bus_carrier == "heat","carrier"] = "water tanks"
|
||||
|
||||
Nyears = n.snapshot_weightings.sum()/8760.
|
||||
Nyears = n.snapshot_weightings.sum() / 8760.
|
||||
costs = load_costs(Nyears, tech_costs, config['costs'], config['electricity'])
|
||||
update_transmission_costs(n, costs)
|
||||
|
||||
|
@ -85,25 +85,28 @@ It further adds extendable ``generators`` with **zero** capacity for
|
||||
- additional open- and combined-cycle gas turbines (if ``OCGT`` and/or ``CCGT`` is listed in the config setting ``electricity: extendable_carriers``)
|
||||
"""
|
||||
|
||||
from vresutils.costdata import annuity
|
||||
from vresutils import transfer as vtransfer
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
from _helpers import configure_logging
|
||||
|
||||
import pypsa
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
import xarray as xr
|
||||
import geopandas as gpd
|
||||
import pypsa
|
||||
import powerplantmatching as ppm
|
||||
|
||||
from vresutils.costdata import annuity
|
||||
from vresutils.load import timeseries_opsd
|
||||
from vresutils import transfer as vtransfer
|
||||
|
||||
idx = pd.IndexSlice
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def normed(s): return s/s.sum()
|
||||
|
||||
|
||||
def _add_missing_carriers_from_costs(n, costs, carriers):
|
||||
missing_carriers = pd.Index(carriers).difference(n.carriers.index)
|
||||
if missing_carriers.empty: return
|
||||
@ -115,6 +118,7 @@ def _add_missing_carriers_from_costs(n, costs, carriers):
|
||||
emissions.index = missing_carriers
|
||||
n.import_components_from_dataframe(emissions, 'Carrier')
|
||||
|
||||
|
||||
def load_costs(Nyears=1., tech_costs=None, config=None, elec_config=None):
|
||||
if tech_costs is None:
|
||||
tech_costs = snakemake.input.tech_costs
|
||||
@ -184,21 +188,17 @@ def load_costs(Nyears=1., tech_costs=None, config=None, elec_config=None):
|
||||
|
||||
return costs
|
||||
|
||||
|
||||
def load_powerplants(ppl_fn=None):
|
||||
if ppl_fn is None:
|
||||
ppl_fn = snakemake.input.powerplants
|
||||
carrier_dict = {'ocgt': 'OCGT', 'ccgt': 'CCGT', 'bioenergy':'biomass',
|
||||
carrier_dict = {'ocgt': 'OCGT', 'ccgt': 'CCGT', 'bioenergy': 'biomass',
|
||||
'ccgt, thermal': 'CCGT', 'hard coal': 'coal'}
|
||||
return (pd.read_csv(ppl_fn, index_col=0, dtype={'bus': 'str'})
|
||||
.powerplant.to_pypsa_names()
|
||||
.rename(columns=str.lower).drop(columns=['efficiency'])
|
||||
.replace({'carrier': carrier_dict}))
|
||||
|
||||
# =============================================================================
|
||||
# Attach components
|
||||
# =============================================================================
|
||||
|
||||
# ### Load
|
||||
|
||||
def attach_load(n):
|
||||
substation_lv_i = n.buses.index[n.buses['substation_lv']]
|
||||
@ -238,7 +238,6 @@ def attach_load(n):
|
||||
|
||||
n.madd("Load", substation_lv_i, bus=substation_lv_i, p_set=load)
|
||||
|
||||
### Set line costs
|
||||
|
||||
def update_transmission_costs(n, costs, length_factor=1.0, simple_hvdc_costs=False):
|
||||
n.lines['capital_cost'] = (n.lines['length'] * length_factor *
|
||||
@ -259,7 +258,6 @@ def update_transmission_costs(n, costs, length_factor=1.0, simple_hvdc_costs=Fal
|
||||
costs.at['HVDC inverter pair', 'capital_cost'])
|
||||
n.links.loc[dc_b, 'capital_cost'] = costs
|
||||
|
||||
### Generators
|
||||
|
||||
def attach_wind_and_solar(n, costs):
|
||||
for tech in snakemake.config['renewable']:
|
||||
@ -298,15 +296,17 @@ def attach_wind_and_solar(n, costs):
|
||||
p_max_pu=ds['profile'].transpose('time', 'bus').to_pandas())
|
||||
|
||||
|
||||
|
||||
def attach_conventional_generators(n, costs, ppl):
|
||||
carriers = snakemake.config['electricity']['conventional_carriers']
|
||||
|
||||
_add_missing_carriers_from_costs(n, costs, carriers)
|
||||
|
||||
ppl = (ppl.query('carrier in @carriers').join(costs, on='carrier')
|
||||
.rename(index=lambda s: 'C' + str(s)))
|
||||
|
||||
logger.info('Adding {} generators with capacities\n{}'
|
||||
.format(len(ppl), ppl.groupby('carrier').p_nom.sum()))
|
||||
|
||||
n.madd("Generator", ppl.index,
|
||||
carrier=ppl.carrier,
|
||||
bus=ppl.bus,
|
||||
@ -314,6 +314,7 @@ def attach_conventional_generators(n, costs, ppl):
|
||||
efficiency=ppl.efficiency,
|
||||
marginal_cost=ppl.marginal_cost,
|
||||
capital_cost=0)
|
||||
|
||||
logger.warning(f'Capital costs for conventional generators put to 0 EUR/MW.')
|
||||
|
||||
|
||||
@ -363,8 +364,8 @@ def attach_hydro(n, costs, ppl):
|
||||
.where(lambda df: df<=1., other=1.)))
|
||||
|
||||
if 'PHS' in carriers and not phs.empty:
|
||||
# fill missing max hours to config value and assume no natural inflow
|
||||
# due to lack of data
|
||||
# fill missing max hours to config value and
|
||||
# assume no natural inflow due to lack of data
|
||||
phs = phs.replace({'max_hours': {0: c['PHS_max_hours']}})
|
||||
n.madd('StorageUnit', phs.index,
|
||||
carrier='PHS',
|
||||
@ -402,7 +403,6 @@ def attach_hydro(n, costs, ppl):
|
||||
hydro_max_hours = hydro.max_hours.where(hydro.max_hours > 0,
|
||||
hydro.country.map(max_hours_country)).fillna(6)
|
||||
|
||||
|
||||
n.madd('StorageUnit', hydro.index, carrier='hydro',
|
||||
bus=hydro['bus'],
|
||||
p_nom=hydro['p_nom'],
|
||||
@ -421,6 +421,7 @@ def attach_hydro(n, costs, ppl):
|
||||
def attach_extendable_generators(n, costs, ppl):
|
||||
elec_opts = snakemake.config['electricity']
|
||||
carriers = pd.Index(elec_opts['extendable_carriers']['Generator'])
|
||||
|
||||
_add_missing_carriers_from_costs(n, costs, carriers)
|
||||
|
||||
for tech in carriers:
|
||||
@ -486,10 +487,11 @@ def estimate_renewable_capacities(n, tech_map=None):
|
||||
n.generators.loc[tech_i, 'p_nom'] = (
|
||||
(n.generators_t.p_max_pu[tech_i].mean() *
|
||||
n.generators.loc[tech_i, 'p_nom_max']) # maximal yearly generation
|
||||
.groupby(n.generators.bus.map(n.buses.country)) # for each country
|
||||
.groupby(n.generators.bus.map(n.buses.country))
|
||||
.transform(lambda s: normed(s) * tech_capacities.at[s.name])
|
||||
.where(lambda s: s>0.1, 0.)) # only capacities above 100kW
|
||||
|
||||
|
||||
def add_nice_carrier_names(n, config=None):
|
||||
if config is None: config = snakemake.config
|
||||
carrier_i = n.carriers.index
|
||||
@ -511,7 +513,7 @@ if __name__ == "__main__":
|
||||
configure_logging(snakemake)
|
||||
|
||||
n = pypsa.Network(snakemake.input.base_network)
|
||||
Nyears = n.snapshot_weightings.sum()/8760.
|
||||
Nyears = n.snapshot_weightings.sum() / 8760.
|
||||
|
||||
costs = load_costs(Nyears)
|
||||
ppl = load_powerplants()
|
||||
|
@ -37,30 +37,33 @@ Inputs
|
||||
Outputs
|
||||
-------
|
||||
|
||||
- ``networks/{network}_s{simpl}_{clusters}_ec.nc``:
|
||||
- ``networks/elec_s{simpl}_{clusters}_ec.nc``:
|
||||
|
||||
|
||||
Description
|
||||
-----------
|
||||
|
||||
The rule :mod:`add_extra_components` attaches additional extendable components to the clustered and simplified network. These can be configured in the ``config.yaml`` at ``electricity: extendable_carriers: ``. It processes ``networks/{network}_s{simpl}_{clusters}.nc`` to build ``networks/{network}_s{simpl}_{clusters}_ec.nc``, which in contrast to the former (depending on the configuration) contain with **zero** initial capacity
|
||||
The rule :mod:`add_extra_components` attaches additional extendable components to the clustered and simplified network. These can be configured in the ``config.yaml`` at ``electricity: extendable_carriers: ``. It processes ``networks/elec_s{simpl}_{clusters}.nc`` to build ``networks/elec_s{simpl}_{clusters}_ec.nc``, which in contrast to the former (depending on the configuration) contain with **zero** initial capacity
|
||||
|
||||
- ``StorageUnits`` of carrier 'H2' and/or 'battery'. If this option is chosen, every bus is given an extendable ``StorageUnit`` of the corresponding carrier. The energy and power capacities are linked through a parameter that specifies the energy capacity as maximum hours at full dispatch power and is configured in ``electricity: max_hours:``. This linkage leads to one investment variable per storage unit. The default ``max_hours`` lead to long-term hydrogen and short-term battery storage units.
|
||||
|
||||
- ``Stores`` of carrier 'H2' and/or 'battery' in combination with ``Links``. If this option is chosen, the script adds extra buses with corresponding carrier where energy ``Stores`` are attached and which are connected to the corresponding power buses via two links, one each for charging and discharging. This leads to three investment variables for the energy capacity, charging and discharging capacity of the storage unit.
|
||||
"""
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
from _helpers import configure_logging
|
||||
|
||||
import pypsa
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
import pypsa
|
||||
|
||||
from add_electricity import (load_costs, add_nice_carrier_names,
|
||||
_add_missing_carriers_from_costs)
|
||||
|
||||
idx = pd.IndexSlice
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def attach_storageunits(n, costs):
|
||||
elec_opts = snakemake.config['electricity']
|
||||
carriers = elec_opts['extendable_carriers']['StorageUnit']
|
||||
@ -85,6 +88,7 @@ def attach_storageunits(n, costs):
|
||||
max_hours=max_hours[carrier],
|
||||
cyclic_state_of_charge=True)
|
||||
|
||||
|
||||
def attach_stores(n, costs):
|
||||
elec_opts = snakemake.config['electricity']
|
||||
carriers = elec_opts['extendable_carriers']['Store']
|
||||
@ -147,6 +151,7 @@ def attach_stores(n, costs):
|
||||
capital_cost=costs.at['battery inverter', 'capital_cost'],
|
||||
p_nom_extendable=True)
|
||||
|
||||
|
||||
def attach_hydrogen_pipelines(n, costs):
|
||||
elec_opts = snakemake.config['electricity']
|
||||
ext_carriers = elec_opts['extendable_carriers']
|
||||
@ -179,6 +184,7 @@ def attach_hydrogen_pipelines(n, costs):
|
||||
efficiency=costs.at['H2 pipeline','efficiency'],
|
||||
carrier="H2 pipeline")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if 'snakemake' not in globals():
|
||||
from _helpers import mock_snakemake
|
||||
@ -187,7 +193,7 @@ if __name__ == "__main__":
|
||||
configure_logging(snakemake)
|
||||
|
||||
n = pypsa.Network(snakemake.input.network)
|
||||
Nyears = n.snapshot_weightings.sum()/8760.
|
||||
Nyears = n.snapshot_weightings.sum() / 8760.
|
||||
costs = load_costs(Nyears, tech_costs=snakemake.input.tech_costs,
|
||||
config=snakemake.config['costs'],
|
||||
elec_config=snakemake.config['electricity'])
|
||||
|
@ -63,14 +63,16 @@ Description
|
||||
"""
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
from _helpers import configure_logging
|
||||
|
||||
import pypsa
|
||||
import yaml
|
||||
import pandas as pd
|
||||
import geopandas as gpd
|
||||
import numpy as np
|
||||
import scipy as sp
|
||||
import networkx as nx
|
||||
|
||||
from scipy.sparse import csgraph
|
||||
from six import iteritems
|
||||
from itertools import product
|
||||
@ -78,9 +80,8 @@ from itertools import product
|
||||
from shapely.geometry import Point, LineString
|
||||
import shapely, shapely.prepared, shapely.wkt
|
||||
|
||||
import networkx as nx
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
import pypsa
|
||||
|
||||
def _get_oid(df):
|
||||
if "tags" in df.columns:
|
||||
@ -88,12 +89,14 @@ def _get_oid(df):
|
||||
else:
|
||||
return pd.Series(np.nan, df.index)
|
||||
|
||||
|
||||
def _get_country(df):
|
||||
if "tags" in df.columns:
|
||||
return df.tags.str.extract('"country"=>"([A-Z]{2})"', expand=False)
|
||||
else:
|
||||
return pd.Series(np.nan, df.index)
|
||||
|
||||
|
||||
def _find_closest_links(links, new_links, distance_upper_bound=1.5):
|
||||
treecoords = np.asarray([np.asarray(shapely.wkt.loads(s))[[0, -1]].flatten()
|
||||
for s in links.geometry])
|
||||
@ -109,6 +112,7 @@ def _find_closest_links(links, new_links, distance_upper_bound=1.5):
|
||||
[lambda ds: ~ds.index.duplicated(keep='first')]\
|
||||
.sort_index()['i']
|
||||
|
||||
|
||||
def _load_buses_from_eg():
|
||||
buses = (pd.read_csv(snakemake.input.eg_buses, quotechar="'",
|
||||
true_values='t', false_values='f',
|
||||
@ -130,6 +134,7 @@ def _load_buses_from_eg():
|
||||
|
||||
return pd.DataFrame(buses.loc[buses_in_europe_b & buses_with_v_nom_to_keep_b])
|
||||
|
||||
|
||||
def _load_transformers_from_eg(buses):
|
||||
transformers = (pd.read_csv(snakemake.input.eg_transformers, quotechar="'",
|
||||
true_values='t', false_values='f',
|
||||
@ -140,6 +145,7 @@ def _load_transformers_from_eg(buses):
|
||||
|
||||
return transformers
|
||||
|
||||
|
||||
def _load_converters_from_eg(buses):
|
||||
converters = (pd.read_csv(snakemake.input.eg_converters, quotechar="'",
|
||||
true_values='t', false_values='f',
|
||||
@ -241,6 +247,7 @@ def _add_links_from_tyndp(buses, links):
|
||||
|
||||
return buses, links.append(links_tyndp, sort=True)
|
||||
|
||||
|
||||
def _load_lines_from_eg(buses):
|
||||
lines = (pd.read_csv(snakemake.input.eg_lines, quotechar="'", true_values='t', false_values='f',
|
||||
dtype=dict(line_id='str', bus0='str', bus1='str',
|
||||
@ -254,11 +261,13 @@ def _load_lines_from_eg(buses):
|
||||
|
||||
return lines
|
||||
|
||||
|
||||
def _apply_parameter_corrections(n):
|
||||
with open(snakemake.input.parameter_corrections) as f:
|
||||
corrections = yaml.safe_load(f)
|
||||
|
||||
if corrections is None: return
|
||||
|
||||
for component, attrs in iteritems(corrections):
|
||||
df = n.df(component)
|
||||
oid = _get_oid(df)
|
||||
@ -275,6 +284,7 @@ def _apply_parameter_corrections(n):
|
||||
inds = r.index.intersection(df.index)
|
||||
df.loc[inds, attr] = r[inds].astype(df[attr].dtype)
|
||||
|
||||
|
||||
def _set_electrical_parameters_lines(lines):
|
||||
v_noms = snakemake.config['electricity']['voltages']
|
||||
linetypes = snakemake.config['lines']['types']
|
||||
@ -286,12 +296,14 @@ def _set_electrical_parameters_lines(lines):
|
||||
|
||||
return lines
|
||||
|
||||
|
||||
def _set_lines_s_nom_from_linetypes(n):
|
||||
n.lines['s_nom'] = (
|
||||
np.sqrt(3) * n.lines['type'].map(n.line_types.i_nom) *
|
||||
n.lines['v_nom'] * n.lines.num_parallel
|
||||
)
|
||||
|
||||
|
||||
def _set_electrical_parameters_links(links):
|
||||
if links.empty: return links
|
||||
|
||||
@ -300,24 +312,25 @@ def _set_electrical_parameters_links(links):
|
||||
links['p_min_pu'] = -p_max_pu
|
||||
|
||||
links_p_nom = pd.read_csv(snakemake.input.links_p_nom)
|
||||
|
||||
#Filter links that are not in operation anymore
|
||||
|
||||
# filter links that are not in operation anymore
|
||||
removed_b = links_p_nom.Remarks.str.contains('Shut down|Replaced', na=False)
|
||||
links_p_nom = links_p_nom[~removed_b]
|
||||
|
||||
#find closest link for all links in links_p_nom
|
||||
|
||||
# find closest link for all links in links_p_nom
|
||||
links_p_nom['j'] = _find_closest_links(links, links_p_nom)
|
||||
|
||||
links_p_nom = links_p_nom.groupby(['j'],as_index=False).agg({'Power (MW)': 'sum'})
|
||||
|
||||
|
||||
links_p_nom = links_p_nom.groupby(['j'],as_index=False).agg({'Power (MW)': 'sum'})
|
||||
|
||||
p_nom = links_p_nom.dropna(subset=["j"]).set_index("j")["Power (MW)"]
|
||||
|
||||
|
||||
# Don't update p_nom if it's already set
|
||||
p_nom_unset = p_nom.drop(links.index[links.p_nom.notnull()], errors='ignore') if "p_nom" in links else p_nom
|
||||
links.loc[p_nom_unset.index, "p_nom"] = p_nom_unset
|
||||
|
||||
return links
|
||||
|
||||
|
||||
def _set_electrical_parameters_converters(converters):
|
||||
p_max_pu = snakemake.config['links'].get('p_max_pu', 1.)
|
||||
converters['p_max_pu'] = p_max_pu
|
||||
@ -331,6 +344,7 @@ def _set_electrical_parameters_converters(converters):
|
||||
|
||||
return converters
|
||||
|
||||
|
||||
def _set_electrical_parameters_transformers(transformers):
|
||||
config = snakemake.config['transformers']
|
||||
|
||||
@ -341,9 +355,11 @@ def _set_electrical_parameters_transformers(transformers):
|
||||
|
||||
return transformers
|
||||
|
||||
|
||||
def _remove_dangling_branches(branches, buses):
|
||||
return pd.DataFrame(branches.loc[branches.bus0.isin(buses.index) & branches.bus1.isin(buses.index)])
|
||||
|
||||
|
||||
def _remove_unconnected_components(network):
|
||||
_, labels = csgraph.connected_components(network.adjacency_matrix(), directed=False)
|
||||
component = pd.Series(labels, index=network.buses.index)
|
||||
@ -356,6 +372,7 @@ def _remove_unconnected_components(network):
|
||||
|
||||
return network[component == component_sizes.index[0]]
|
||||
|
||||
|
||||
def _set_countries_and_substations(n):
|
||||
|
||||
buses = n.buses
|
||||
@ -442,6 +459,7 @@ def _set_countries_and_substations(n):
|
||||
|
||||
return buses
|
||||
|
||||
|
||||
def _replace_b2b_converter_at_country_border_by_link(n):
|
||||
# Affects only the B2B converter in Lithuania at the Polish border at the moment
|
||||
buscntry = n.buses.country
|
||||
@ -479,6 +497,7 @@ def _replace_b2b_converter_at_country_border_by_link(n):
|
||||
logger.info("Replacing B2B converter `{}` together with bus `{}` and line `{}` by an HVDC tie-line {}-{}"
|
||||
.format(i, b0, line, linkcntry.at[i], buscntry.at[b1]))
|
||||
|
||||
|
||||
def _set_links_underwater_fraction(n):
|
||||
if n.links.empty: return
|
||||
|
||||
@ -489,6 +508,7 @@ def _set_links_underwater_fraction(n):
|
||||
links = gpd.GeoSeries(n.links.geometry.dropna().map(shapely.wkt.loads))
|
||||
n.links['underwater_fraction'] = links.intersection(offshore_shape).length / links.length
|
||||
|
||||
|
||||
def _adjust_capacities_of_under_construction_branches(n):
|
||||
lines_mode = snakemake.config['lines'].get('under_construction', 'undef')
|
||||
if lines_mode == 'zero':
|
||||
@ -513,6 +533,7 @@ def _adjust_capacities_of_under_construction_branches(n):
|
||||
|
||||
return n
|
||||
|
||||
|
||||
def base_network():
|
||||
buses = _load_buses_from_eg()
|
||||
|
||||
@ -534,7 +555,7 @@ def base_network():
|
||||
n.name = 'PyPSA-Eur'
|
||||
|
||||
n.set_snapshots(pd.date_range(freq='h', **snakemake.config['snapshots']))
|
||||
n.snapshot_weightings[:] *= 8760./n.snapshot_weightings.sum()
|
||||
n.snapshot_weightings[:] *= 8760. / n.snapshot_weightings.sum()
|
||||
|
||||
n.import_components_from_dataframe(buses, "Bus")
|
||||
n.import_components_from_dataframe(lines, "Line")
|
||||
@ -565,4 +586,5 @@ if __name__ == "__main__":
|
||||
configure_logging(snakemake)
|
||||
|
||||
n = base_network()
|
||||
|
||||
n.export_to_netcdf(snakemake.output[0])
|
||||
|
@ -42,17 +42,24 @@ Description
|
||||
"""
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
from _helpers import configure_logging
|
||||
|
||||
from vresutils.graph import voronoi_partition_pts
|
||||
|
||||
import pypsa
|
||||
import os
|
||||
|
||||
import pandas as pd
|
||||
import geopandas as gpd
|
||||
|
||||
import pypsa
|
||||
from vresutils.graph import voronoi_partition_pts
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def save_to_geojson(s, fn):
|
||||
if os.path.exists(fn):
|
||||
os.unlink(fn)
|
||||
schema = {**gpd.io.file.infer_schema(s), 'geometry': 'Unknown'}
|
||||
s.to_file(fn, driver='GeoJSON', schema=schema)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if 'snakemake' not in globals():
|
||||
@ -96,12 +103,6 @@ if __name__ == "__main__":
|
||||
offshore_regions_c = offshore_regions_c.loc[offshore_regions_c.area > 1e-2]
|
||||
offshore_regions.append(offshore_regions_c)
|
||||
|
||||
def save_to_geojson(s, fn):
|
||||
if os.path.exists(fn):
|
||||
os.unlink(fn)
|
||||
schema = {**gpd.io.file.infer_schema(s), 'geometry': 'Unknown'}
|
||||
s.to_file(fn, driver='GeoJSON', schema=schema)
|
||||
|
||||
save_to_geojson(pd.concat(onshore_regions, ignore_index=True), snakemake.output.regions_onshore)
|
||||
|
||||
save_to_geojson(pd.concat(offshore_regions, ignore_index=True), snakemake.output.regions_offshore)
|
||||
|
@ -63,7 +63,6 @@ Description
|
||||
"""
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
from _helpers import configure_logging
|
||||
|
||||
import os
|
||||
@ -84,6 +83,9 @@ import progressbar as pgb
|
||||
|
||||
from build_renewable_profiles import init_globals, calculate_potential
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def build_area(flh, countries, areamatrix, breaks, fn):
|
||||
area_unbinned = xr.DataArray(areamatrix.todense(), [countries, capacity_factor.coords['spatial']])
|
||||
bins = xr.DataArray(pd.cut(flh.to_series(), bins=breaks), flh.coords, name="bins")
|
||||
@ -92,6 +94,7 @@ def build_area(flh, countries, areamatrix, breaks, fn):
|
||||
area.columns = area.columns.map(lambda s: s.left)
|
||||
return area
|
||||
|
||||
|
||||
def plot_area_not_solar(area, countries):
|
||||
# onshore wind/offshore wind
|
||||
a = area.T
|
||||
|
@ -92,12 +92,13 @@ Description
|
||||
"""
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
from _helpers import configure_logging
|
||||
|
||||
import os
|
||||
import atlite
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
if __name__ == "__main__":
|
||||
if 'snakemake' not in globals():
|
||||
from _helpers import mock_snakemake
|
||||
@ -113,4 +114,6 @@ if __name__ == "__main__":
|
||||
cutout_dir=os.path.dirname(snakemake.output[0]),
|
||||
**cutout_params)
|
||||
|
||||
cutout.prepare(nprocesses=snakemake.config['atlite'].get('nprocesses', 4))
|
||||
nprocesses = snakemake.config['atlite'].get('nprocesses', 4)
|
||||
|
||||
cutout.prepare(nprocesses=nprocesses)
|
||||
|
@ -60,7 +60,6 @@ Description
|
||||
"""
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
from _helpers import configure_logging
|
||||
|
||||
import os
|
||||
@ -68,6 +67,8 @@ import atlite
|
||||
import geopandas as gpd
|
||||
from vresutils import hydro as vhydro
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
if __name__ == "__main__":
|
||||
if 'snakemake' not in globals():
|
||||
from _helpers import mock_snakemake
|
||||
@ -75,8 +76,8 @@ if __name__ == "__main__":
|
||||
configure_logging(snakemake)
|
||||
|
||||
config = snakemake.config['renewable']['hydro']
|
||||
cutout = atlite.Cutout(config['cutout'],
|
||||
cutout_dir=os.path.dirname(snakemake.input.cutout))
|
||||
cutout_dir = os.path.dirname(snakemake.input.cutout)
|
||||
cutout = atlite.Cutout(config['cutout'], cutout_dir=cutout_dir)
|
||||
|
||||
countries = snakemake.config['countries']
|
||||
country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index('name')['geometry'].reindex(countries)
|
||||
@ -84,9 +85,9 @@ if __name__ == "__main__":
|
||||
|
||||
eia_stats = vhydro.get_eia_annual_hydro_generation(snakemake.input.eia_hydro_generation).reindex(columns=countries)
|
||||
inflow = cutout.runoff(shapes=country_shapes,
|
||||
smooth=True,
|
||||
lower_threshold_quantile=True,
|
||||
normalize_using_yearly=eia_stats)
|
||||
smooth=True,
|
||||
lower_threshold_quantile=True,
|
||||
normalize_using_yearly=eia_stats)
|
||||
|
||||
if 'clip_min_inflow' in config:
|
||||
inflow.values[inflow.values < config['clip_min_inflow']] = 0.
|
||||
|
@ -1,4 +1,7 @@
|
||||
# coding: utf-8
|
||||
# SPDX-FileCopyrightText: : 2020 @JanFrederickUnnewehr, The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
"""
|
||||
|
||||
This rule downloads the load data from `Open Power System Data Time series <https://data.open-power-system-data.org/time_series/>`_. For all countries in the network, the per country load timeseries with suffix ``_load_actual_entsoe_transparency`` are extracted from the dataset. After filling small gaps linearly and large gaps by copying time-slice of a given period, the load data is exported to a ``.csv`` file.
|
||||
|
@ -41,6 +41,7 @@ Description
|
||||
|
||||
import logging
|
||||
from _helpers import configure_logging
|
||||
|
||||
import atlite
|
||||
import geokit as gk
|
||||
from pathlib import Path
|
||||
@ -58,7 +59,7 @@ def determine_cutout_xXyY(cutout_name):
|
||||
if __name__ == "__main__":
|
||||
if 'snakemake' not in globals():
|
||||
from _helpers import mock_snakemake
|
||||
snakemake = mock_snakemake('build_natura_raster') #has to be enabled
|
||||
snakemake = mock_snakemake('build_natura_raster')
|
||||
configure_logging(snakemake)
|
||||
|
||||
cutout_dir = Path(snakemake.input.cutouts[0]).parent.resolve()
|
||||
|
@ -72,16 +72,18 @@ The configuration options ``electricity: powerplants_filter`` and ``electricity:
|
||||
"""
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
from _helpers import configure_logging
|
||||
|
||||
from scipy.spatial import cKDTree as KDTree
|
||||
|
||||
import pypsa
|
||||
import powerplantmatching as pm
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
|
||||
from scipy.spatial import cKDTree as KDTree
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def add_custom_powerplants(ppl):
|
||||
custom_ppl_query = snakemake.config['electricity']['custom_powerplants']
|
||||
if not custom_ppl_query:
|
||||
@ -94,7 +96,6 @@ def add_custom_powerplants(ppl):
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
if 'snakemake' not in globals():
|
||||
from _helpers import mock_snakemake
|
||||
snakemake = mock_snakemake('build_powerplants')
|
||||
|
@ -181,27 +181,28 @@ node (`p_nom_max`): ``simple`` and ``conservative``:
|
||||
|
||||
"""
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
from _helpers import configure_logging
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
import os
|
||||
import atlite
|
||||
|
||||
import numpy as np
|
||||
import xarray as xr
|
||||
import pandas as pd
|
||||
import multiprocessing as mp
|
||||
import matplotlib.pyplot as plt
|
||||
import progressbar as pgb
|
||||
|
||||
from scipy.sparse import csr_matrix, vstack
|
||||
|
||||
from pypsa.geo import haversine
|
||||
from vresutils import landuse as vlanduse
|
||||
from vresutils.array import spdiag
|
||||
|
||||
import progressbar as pgb
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
bounds = dx = dy = config = paths = gebco = clc = natura = None
|
||||
|
||||
|
||||
def init_globals(bounds_xXyY, n_dx, n_dy, n_config, n_paths):
|
||||
# Late import so that the GDAL Context is only created in the new processes
|
||||
global gl, gk, gdal
|
||||
@ -227,6 +228,7 @@ def init_globals(bounds_xXyY, n_dx, n_dy, n_config, n_paths):
|
||||
|
||||
natura = gk.raster.loadRaster(paths["natura"])
|
||||
|
||||
|
||||
def downsample_to_coarse_grid(bounds, dx, dy, mask, data):
|
||||
# The GDAL warp function with the 'average' resample algorithm needs a band of zero values of at least
|
||||
# the size of one coarse cell around the original raster or it produces erroneous results
|
||||
@ -238,6 +240,7 @@ def downsample_to_coarse_grid(bounds, dx, dy, mask, data):
|
||||
assert gdal.Warp(average, padded, resampleAlg='average') == 1, "gdal warp failed: %s" % gdal.GetLastErrorMsg()
|
||||
return average
|
||||
|
||||
|
||||
def calculate_potential(gid, save_map=None):
|
||||
feature = gk.vector.extractFeature(paths["regions"], where=gid)
|
||||
ec = gl.ExclusionCalculator(feature.geom)
|
||||
|
@ -92,6 +92,7 @@ def _get_country(target, **keys):
|
||||
except (KeyError, AttributeError):
|
||||
return np.nan
|
||||
|
||||
|
||||
def _simplify_polys(polys, minarea=0.1, tolerance=0.01, filterremote=True):
|
||||
if isinstance(polys, MultiPolygon):
|
||||
polys = sorted(polys, key=attrgetter('area'), reverse=True)
|
||||
@ -105,6 +106,7 @@ def _simplify_polys(polys, minarea=0.1, tolerance=0.01, filterremote=True):
|
||||
polys = mainpoly
|
||||
return polys.simplify(tolerance=tolerance)
|
||||
|
||||
|
||||
def countries():
|
||||
cntries = snakemake.config['countries']
|
||||
if 'RS' in cntries: cntries.append('KV')
|
||||
@ -121,6 +123,7 @@ def countries():
|
||||
|
||||
return s
|
||||
|
||||
|
||||
def eez(country_shapes):
|
||||
df = gpd.read_file(snakemake.input.eez)
|
||||
df = df.loc[df['ISO_3digit'].isin([_get_country('alpha_3', alpha_2=c) for c in snakemake.config['countries']])]
|
||||
@ -130,6 +133,7 @@ def eez(country_shapes):
|
||||
s.index.name = "name"
|
||||
return s
|
||||
|
||||
|
||||
def country_cover(country_shapes, eez_shapes=None):
|
||||
shapes = list(country_shapes)
|
||||
if eez_shapes is not None:
|
||||
@ -140,6 +144,7 @@ def country_cover(country_shapes, eez_shapes=None):
|
||||
europe_shape = max(europe_shape, key=attrgetter('area'))
|
||||
return Polygon(shell=europe_shape.exterior)
|
||||
|
||||
|
||||
def nuts3(country_shapes):
|
||||
df = gpd.read_file(snakemake.input.nuts3)
|
||||
df = df.loc[df['STAT_LEVL_'] == 3]
|
||||
@ -158,7 +163,6 @@ def nuts3(country_shapes):
|
||||
.applymap(lambda x: pd.to_numeric(x, errors='coerce'))
|
||||
.fillna(method='bfill', axis=1))['2014']
|
||||
|
||||
# Swiss data
|
||||
cantons = pd.read_csv(snakemake.input.ch_cantons)
|
||||
cantons = cantons.set_index(cantons['HASC'].str[3:])['NUTS']
|
||||
cantons = cantons.str.pad(5, side='right', fillchar='0')
|
||||
@ -197,6 +201,7 @@ def nuts3(country_shapes):
|
||||
|
||||
return df
|
||||
|
||||
|
||||
def save_to_geojson(df, fn):
|
||||
if os.path.exists(fn):
|
||||
os.unlink(fn)
|
||||
@ -206,20 +211,23 @@ def save_to_geojson(df, fn):
|
||||
schema = {**gpd.io.file.infer_schema(df), 'geometry': 'Unknown'}
|
||||
df.to_file(fn, driver='GeoJSON', schema=schema)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if 'snakemake' not in globals():
|
||||
from _helpers import mock_snakemake
|
||||
snakemake = mock_snakemake('build_shapes')
|
||||
configure_logging(snakemake)
|
||||
|
||||
out = snakemake.output
|
||||
|
||||
country_shapes = countries()
|
||||
save_to_geojson(country_shapes, snakemake.output.country_shapes)
|
||||
save_to_geojson(country_shapes, out.country_shapes)
|
||||
|
||||
offshore_shapes = eez(country_shapes)
|
||||
save_to_geojson(offshore_shapes, snakemake.output.offshore_shapes)
|
||||
save_to_geojson(offshore_shapes, out.offshore_shapes)
|
||||
|
||||
europe_shape = country_cover(country_shapes, offshore_shapes)
|
||||
save_to_geojson(gpd.GeoSeries(europe_shape), snakemake.output.europe_shape)
|
||||
save_to_geojson(gpd.GeoSeries(europe_shape), out.europe_shape)
|
||||
|
||||
nuts3_shapes = nuts3(country_shapes)
|
||||
save_to_geojson(nuts3_shapes, snakemake.output.nuts3_shapes)
|
||||
save_to_geojson(nuts3_shapes, out.nuts3_shapes)
|
||||
|
@ -31,28 +31,28 @@ Relevant Settings
|
||||
Inputs
|
||||
------
|
||||
|
||||
- ``resources/regions_onshore_{network}_s{simpl}.geojson``: confer :ref:`simplify`
|
||||
- ``resources/regions_offshore_{network}_s{simpl}.geojson``: confer :ref:`simplify`
|
||||
- ``resources/busmap_{network}_s{simpl}.csv``: confer :ref:`simplify`
|
||||
- ``networks/{network}_s{simpl}.nc``: confer :ref:`simplify`
|
||||
- ``data/custom_busmap_{network}_s{simpl}_{clusters}.csv``: optional input
|
||||
- ``resources/regions_onshore_elec_s{simpl}.geojson``: confer :ref:`simplify`
|
||||
- ``resources/regions_offshore_elec_s{simpl}.geojson``: confer :ref:`simplify`
|
||||
- ``resources/busmap_elec_s{simpl}.csv``: confer :ref:`simplify`
|
||||
- ``networks/elec_s{simpl}.nc``: confer :ref:`simplify`
|
||||
- ``data/custom_busmap_elec_s{simpl}_{clusters}.csv``: optional input
|
||||
|
||||
Outputs
|
||||
-------
|
||||
|
||||
- ``resources/regions_onshore_{network}_s{simpl}_{clusters}.geojson``:
|
||||
- ``resources/regions_onshore_elec_s{simpl}_{clusters}.geojson``:
|
||||
|
||||
.. image:: ../img/regions_onshore_elec_s_X.png
|
||||
:scale: 33 %
|
||||
|
||||
- ``resources/regions_offshore_{network}_s{simpl}_{clusters}.geojson``:
|
||||
- ``resources/regions_offshore_elec_s{simpl}_{clusters}.geojson``:
|
||||
|
||||
.. image:: ../img/regions_offshore_elec_s_X.png
|
||||
:scale: 33 %
|
||||
|
||||
- ``resources/busmap_{network}_s{simpl}_{clusters}.csv``: Mapping of buses from ``networks/elec_s{simpl}.nc`` to ``networks/elec_s{simpl}_{clusters}.nc``;
|
||||
- ``resources/linemap_{network}_s{simpl}_{clusters}.csv``: Mapping of lines from ``networks/elec_s{simpl}.nc`` to ``networks/elec_s{simpl}_{clusters}.nc``;
|
||||
- ``networks/{network}_s{simpl}_{clusters}.nc``:
|
||||
- ``resources/busmap_elec_s{simpl}_{clusters}.csv``: Mapping of buses from ``networks/elec_s{simpl}.nc`` to ``networks/elec_s{simpl}_{clusters}.nc``;
|
||||
- ``resources/linemap_elec_s{simpl}_{clusters}.csv``: Mapping of lines from ``networks/elec_s{simpl}.nc`` to ``networks/elec_s{simpl}_{clusters}.nc``;
|
||||
- ``networks/elec_s{simpl}_{clusters}.nc``:
|
||||
|
||||
.. image:: ../img/elec_s_X.png
|
||||
:scale: 40 %
|
||||
@ -117,36 +117,38 @@ Exemplary unsolved network clustered to 37 nodes:
|
||||
|
||||
.. image:: ../img/elec_s_37.png
|
||||
:scale: 40 %
|
||||
:align: center
|
||||
:align: center
|
||||
|
||||
"""
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
from _helpers import configure_logging
|
||||
|
||||
import pandas as pd
|
||||
idx = pd.IndexSlice
|
||||
|
||||
import pypsa
|
||||
import os
|
||||
import shapely
|
||||
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
import geopandas as gpd
|
||||
import shapely
|
||||
import pyomo.environ as po
|
||||
import matplotlib.pyplot as plt
|
||||
import seaborn as sns
|
||||
|
||||
from six.moves import reduce
|
||||
|
||||
import pyomo.environ as po
|
||||
|
||||
import pypsa
|
||||
from pypsa.networkclustering import (busmap_by_kmeans, busmap_by_spectral_clustering,
|
||||
_make_consense, get_clustering_from_busmap)
|
||||
|
||||
from add_electricity import load_costs
|
||||
|
||||
def normed(x):
|
||||
return (x/x.sum()).fillna(0.)
|
||||
idx = pd.IndexSlice
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def normed(x): return (x/x.sum()).fillna(0.)
|
||||
|
||||
|
||||
def weighting_for_country(n, x):
|
||||
conv_carriers = {'OCGT','CCGT','PHS', 'hydro'}
|
||||
@ -164,22 +166,13 @@ def weighting_for_country(n, x):
|
||||
g = normed(gen.reindex(b_i, fill_value=0))
|
||||
l = normed(load.reindex(b_i, fill_value=0))
|
||||
|
||||
w= g + l
|
||||
w = g + l
|
||||
return (w * (100. / w.max())).clip(lower=1.).astype(int)
|
||||
|
||||
|
||||
## Plot weighting for Germany
|
||||
|
||||
def plot_weighting(n, country, country_shape=None):
|
||||
n.plot(bus_sizes=(2*weighting_for_country(n.buses.loc[n.buses.country == country])).reindex(n.buses.index, fill_value=1))
|
||||
if country_shape is not None:
|
||||
plt.xlim(country_shape.bounds[0], country_shape.bounds[2])
|
||||
plt.ylim(country_shape.bounds[1], country_shape.bounds[3])
|
||||
|
||||
|
||||
# # Determining the number of clusters per country
|
||||
|
||||
def distribute_clusters(n, n_clusters, focus_weights=None, solver_name=None):
|
||||
"""Determine the number of clusters per country"""
|
||||
|
||||
if solver_name is None:
|
||||
solver_name = snakemake.config['solving']['solver']['name']
|
||||
|
||||
@ -191,7 +184,7 @@ def distribute_clusters(n, n_clusters, focus_weights=None, solver_name=None):
|
||||
N = n.buses.groupby(['country', 'sub_network']).size()
|
||||
|
||||
assert n_clusters >= len(N) and n_clusters <= N.sum(), \
|
||||
"Number of clusters must be {} <= n_clusters <= {} for this selection of countries.".format(len(N), N.sum())
|
||||
f"Number of clusters must be {len(N)} <= n_clusters <= {N.sum()} for this selection of countries."
|
||||
|
||||
if focus_weights is not None:
|
||||
|
||||
@ -207,7 +200,7 @@ def distribute_clusters(n, n_clusters, focus_weights=None, solver_name=None):
|
||||
|
||||
logger.warning('Using custom focus weights for determining number of clusters.')
|
||||
|
||||
assert np.isclose(L.sum(), 1.0, rtol=1e-3), "Country weights L must sum up to 1.0 when distributing clusters. Is {}.".format(L.sum())
|
||||
assert np.isclose(L.sum(), 1.0, rtol=1e-3), f"Country weights L must sum up to 1.0 when distributing clusters. Is {L.sum()}."
|
||||
|
||||
m = po.ConcreteModel()
|
||||
def n_bounds(model, *n_id):
|
||||
@ -223,10 +216,11 @@ def distribute_clusters(n, n_clusters, focus_weights=None, solver_name=None):
|
||||
opt = po.SolverFactory('ipopt')
|
||||
|
||||
results = opt.solve(m)
|
||||
assert results['Solver'][0]['Status'] == 'ok', "Solver returned non-optimally: {}".format(results)
|
||||
assert results['Solver'][0]['Status'] == 'ok', f"Solver returned non-optimally: {results}"
|
||||
|
||||
return pd.Series(m.n.get_values(), index=L.index).astype(int)
|
||||
|
||||
|
||||
def busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights=None, algorithm="kmeans", **algorithm_kwds):
|
||||
if algorithm == "kmeans":
|
||||
algorithm_kwds.setdefault('n_init', 1000)
|
||||
@ -245,7 +239,7 @@ def busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights=None, algori
|
||||
|
||||
def busmap_for_country(x):
|
||||
prefix = x.name[0] + x.name[1] + ' '
|
||||
logger.debug("Determining busmap for country {}".format(prefix[:-1]))
|
||||
logger.debug(f"Determining busmap for country {prefix[:-1]}")
|
||||
if len(x) == 1:
|
||||
return pd.Series(prefix + '0', index=x.index)
|
||||
weight = weighting_for_country(n, x)
|
||||
@ -262,12 +256,6 @@ def busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights=None, algori
|
||||
return (n.buses.groupby(['country', 'sub_network'], group_keys=False)
|
||||
.apply(busmap_for_country).squeeze().rename('busmap'))
|
||||
|
||||
def plot_busmap_for_n_clusters(n, n_clusters=50):
|
||||
busmap = busmap_for_n_clusters(n, n_clusters)
|
||||
cs = busmap.unique()
|
||||
cr = sns.color_palette("hls", len(cs))
|
||||
n.plot(bus_colors=busmap.map(dict(zip(cs, cr))))
|
||||
del cs, cr
|
||||
|
||||
def clustering_for_n_clusters(n, n_clusters, custom_busmap=False, aggregate_carriers=None,
|
||||
line_length_factor=1.25, potential_mode='simple', solver_name="cbc",
|
||||
@ -278,8 +266,7 @@ def clustering_for_n_clusters(n, n_clusters, custom_busmap=False, aggregate_carr
|
||||
elif potential_mode == 'conservative':
|
||||
p_nom_max_strategy = np.min
|
||||
else:
|
||||
raise AttributeError("potential_mode should be one of 'simple' or 'conservative', "
|
||||
"but is '{}'".format(potential_mode))
|
||||
raise AttributeError(f"potential_mode should be one of 'simple' or 'conservative' but is '{potential_mode}'")
|
||||
|
||||
if custom_busmap:
|
||||
busmap = pd.read_csv(snakemake.input.custom_busmap, index_col=0, squeeze=True)
|
||||
@ -309,6 +296,7 @@ def clustering_for_n_clusters(n, n_clusters, custom_busmap=False, aggregate_carr
|
||||
|
||||
return clustering
|
||||
|
||||
|
||||
def save_to_geojson(s, fn):
|
||||
if os.path.exists(fn):
|
||||
os.unlink(fn)
|
||||
@ -316,6 +304,7 @@ def save_to_geojson(s, fn):
|
||||
schema = {**gpd.io.file.infer_schema(df), 'geometry': 'Unknown'}
|
||||
df.to_file(fn, driver='GeoJSON', schema=schema)
|
||||
|
||||
|
||||
def cluster_regions(busmaps, input=None, output=None):
|
||||
if input is None: input = snakemake.input
|
||||
if output is None: output = snakemake.output
|
||||
@ -329,6 +318,17 @@ def cluster_regions(busmaps, input=None, output=None):
|
||||
regions_c.index.name = 'name'
|
||||
save_to_geojson(regions_c, getattr(output, which))
|
||||
|
||||
|
||||
def plot_busmap_for_n_clusters(n, n_clusters, fn=None):
|
||||
busmap = busmap_for_n_clusters(n, n_clusters)
|
||||
cs = busmap.unique()
|
||||
cr = sns.color_palette("hls", len(cs))
|
||||
n.plot(bus_colors=busmap.map(dict(zip(cs, cr))))
|
||||
if fn is not None:
|
||||
plt.savefig(fn, bbox_inches='tight')
|
||||
del cs, cr
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if 'snakemake' not in globals():
|
||||
from _helpers import mock_snakemake
|
||||
|
@ -54,22 +54,22 @@ Replacing '/summaries/' with '/plots/' creates nice colored maps of the results.
|
||||
"""
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
from _helpers import configure_logging
|
||||
|
||||
import os
|
||||
|
||||
from six import iteritems
|
||||
import pypsa
|
||||
import pandas as pd
|
||||
|
||||
import pypsa
|
||||
|
||||
from six import iteritems
|
||||
from add_electricity import load_costs, update_transmission_costs
|
||||
|
||||
idx = pd.IndexSlice
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
opt_name = {"Store": "e", "Line" : "s", "Transformer" : "s"}
|
||||
|
||||
|
||||
def _add_indexed_rows(df, raw_index):
|
||||
new_index = df.index|pd.MultiIndex.from_product(raw_index)
|
||||
if isinstance(new_index, pd.Index):
|
||||
@ -77,6 +77,7 @@ def _add_indexed_rows(df, raw_index):
|
||||
|
||||
return df.reindex(new_index)
|
||||
|
||||
|
||||
def assign_carriers(n):
|
||||
|
||||
if "carrier" not in n.loads:
|
||||
@ -97,7 +98,8 @@ def assign_carriers(n):
|
||||
if "EU gas store" in n.stores.index and n.stores.loc["EU gas Store","carrier"] == "":
|
||||
n.stores.loc["EU gas Store","carrier"] = "gas Store"
|
||||
|
||||
def calculate_costs(n,label,costs):
|
||||
|
||||
def calculate_costs(n, label, costs):
|
||||
|
||||
for c in n.iterate_components(n.branch_components|n.controllable_one_port_components^{"Load"}):
|
||||
capital_costs = c.df.capital_cost*c.df[opt_name.get(c.name,"p") + "_nom_opt"]
|
||||
@ -130,7 +132,7 @@ def calculate_costs(n,label,costs):
|
||||
|
||||
return costs
|
||||
|
||||
def calculate_curtailment(n,label,curtailment):
|
||||
def calculate_curtailment(n, label, curtailment):
|
||||
|
||||
avail = n.generators_t.p_max_pu.multiply(n.generators.p_nom_opt).sum().groupby(n.generators.carrier).sum()
|
||||
used = n.generators_t.p.sum().groupby(n.generators.carrier).sum()
|
||||
@ -139,7 +141,7 @@ def calculate_curtailment(n,label,curtailment):
|
||||
|
||||
return curtailment
|
||||
|
||||
def calculate_energy(n,label,energy):
|
||||
def calculate_energy(n, label, energy):
|
||||
|
||||
for c in n.iterate_components(n.one_port_components|n.branch_components):
|
||||
|
||||
@ -159,6 +161,7 @@ def include_in_summary(summary, multiindexprefix, label, item):
|
||||
summary = _add_indexed_rows(summary, raw_index)
|
||||
|
||||
summary.loc[idx[raw_index], label] = item.values
|
||||
|
||||
return summary
|
||||
|
||||
def calculate_capacity(n,label,capacity):
|
||||
@ -178,7 +181,7 @@ def calculate_capacity(n,label,capacity):
|
||||
|
||||
return capacity
|
||||
|
||||
def calculate_supply(n,label,supply):
|
||||
def calculate_supply(n, label, supply):
|
||||
"""calculate the max dispatch of each component at the buses where the loads are attached"""
|
||||
|
||||
load_types = n.loads.carrier.value_counts().index
|
||||
@ -224,7 +227,8 @@ def calculate_supply(n,label,supply):
|
||||
|
||||
return supply
|
||||
|
||||
def calculate_supply_energy(n,label,supply_energy):
|
||||
|
||||
def calculate_supply_energy(n, label, supply_energy):
|
||||
"""calculate the total dispatch of each component at the buses where the loads are attached"""
|
||||
|
||||
load_types = n.loads.carrier.value_counts().index
|
||||
@ -269,6 +273,7 @@ def calculate_supply_energy(n,label,supply_energy):
|
||||
|
||||
return supply_energy
|
||||
|
||||
|
||||
def calculate_metrics(n,label,metrics):
|
||||
|
||||
metrics = metrics.reindex(metrics.index|pd.Index(["line_volume","line_volume_limit","line_volume_AC","line_volume_DC","line_volume_shadow","co2_shadow"]))
|
||||
@ -295,16 +300,15 @@ def calculate_prices(n,label,prices):
|
||||
|
||||
prices = prices.reindex(prices.index|bus_type.value_counts().index)
|
||||
|
||||
#WARNING: this is time-averaged, should really be load-weighted average
|
||||
logger.warning("Prices are time-averaged, not load-weighted")
|
||||
prices[label] = n.buses_t.marginal_price.mean().groupby(bus_type).mean()
|
||||
|
||||
return prices
|
||||
|
||||
|
||||
|
||||
def calculate_weighted_prices(n,label,weighted_prices):
|
||||
# Warning: doesn't include storage units as loads
|
||||
|
||||
logger.warning("Weighted prices don't include storage units as loads")
|
||||
|
||||
weighted_prices = weighted_prices.reindex(pd.Index(["electricity","heat","space heat","urban heat","space urban heat","gas","H2"]))
|
||||
|
||||
@ -347,7 +351,7 @@ def calculate_weighted_prices(n,label,weighted_prices):
|
||||
|
||||
load += n.links_t.p0[names].groupby(n.links.loc[names,"bus0"],axis=1).sum(axis=1)
|
||||
|
||||
#Add H2 Store when charging
|
||||
# Add H2 Store when charging
|
||||
if carrier == "H2":
|
||||
stores = n.stores_t.p[buses+ " Store"].groupby(n.stores.loc[buses+ " Store","bus"],axis=1).sum(axis=1)
|
||||
stores[stores > 0.] = 0.
|
||||
@ -361,62 +365,6 @@ def calculate_weighted_prices(n,label,weighted_prices):
|
||||
return weighted_prices
|
||||
|
||||
|
||||
|
||||
# BROKEN don't use
|
||||
#
|
||||
# def calculate_market_values(n, label, market_values):
|
||||
# # Warning: doesn't include storage units
|
||||
|
||||
# n.buses["suffix"] = n.buses.index.str[2:]
|
||||
# suffix = ""
|
||||
# buses = n.buses.index[n.buses.suffix == suffix]
|
||||
|
||||
# ## First do market value of generators ##
|
||||
# generators = n.generators.index[n.buses.loc[n.generators.bus,"suffix"] == suffix]
|
||||
# techs = n.generators.loc[generators,"carrier"].value_counts().index
|
||||
# market_values = market_values.reindex(market_values.index | techs)
|
||||
|
||||
# for tech in techs:
|
||||
# gens = generators[n.generators.loc[generators,"carrier"] == tech]
|
||||
# dispatch = n.generators_t.p[gens].groupby(n.generators.loc[gens,"bus"],axis=1).sum().reindex(columns=buses,fill_value=0.)
|
||||
# revenue = dispatch*n.buses_t.marginal_price[buses]
|
||||
# market_values.at[tech,label] = revenue.sum().sum()/dispatch.sum().sum()
|
||||
|
||||
# ## Now do market value of links ##
|
||||
|
||||
# for i in ["0","1"]:
|
||||
# all_links = n.links.index[n.buses.loc[n.links["bus"+i],"suffix"] == suffix]
|
||||
# techs = n.links.loc[all_links,"carrier"].value_counts().index
|
||||
# market_values = market_values.reindex(market_values.index | techs)
|
||||
|
||||
# for tech in techs:
|
||||
# links = all_links[n.links.loc[all_links,"carrier"] == tech]
|
||||
# dispatch = n.links_t["p"+i][links].groupby(n.links.loc[links,"bus"+i],axis=1).sum().reindex(columns=buses,fill_value=0.)
|
||||
# revenue = dispatch*n.buses_t.marginal_price[buses]
|
||||
# market_values.at[tech,label] = revenue.sum().sum()/dispatch.sum().sum()
|
||||
|
||||
# return market_values
|
||||
|
||||
|
||||
# OLD CODE must be adapted
|
||||
|
||||
# def calculate_price_statistics(n, label, price_statistics):
|
||||
|
||||
|
||||
# price_statistics = price_statistics.reindex(price_statistics.index|pd.Index(["zero_hours","mean","standard_deviation"]))
|
||||
# n.buses["suffix"] = n.buses.index.str[2:]
|
||||
# suffix = ""
|
||||
# buses = n.buses.index[n.buses.suffix == suffix]
|
||||
|
||||
# threshold = 0.1 #higher than phoney marginal_cost of wind/solar
|
||||
# df = pd.DataFrame(data=0.,columns=buses,index=n.snapshots)
|
||||
# df[n.buses_t.marginal_price[buses] < threshold] = 1.
|
||||
# price_statistics.at["zero_hours", label] = df.sum().sum()/(df.shape[0]*df.shape[1])
|
||||
# price_statistics.at["mean", label] = n.buses_t.marginal_price[buses].unstack().mean()
|
||||
# price_statistics.at["standard_deviation", label] = n.buses_t.marginal_price[buses].unstack().std()
|
||||
# return price_statistics
|
||||
|
||||
|
||||
outputs = ["costs",
|
||||
"curtailment",
|
||||
"energy",
|
||||
@ -425,11 +373,10 @@ outputs = ["costs",
|
||||
"supply_energy",
|
||||
"prices",
|
||||
"weighted_prices",
|
||||
# "price_statistics",
|
||||
# "market_values",
|
||||
"metrics",
|
||||
]
|
||||
|
||||
|
||||
def make_summaries(networks_dict, country='all'):
|
||||
|
||||
columns = pd.MultiIndex.from_tuples(networks_dict.keys(),names=["simpl","clusters","ll","opts"])
|
||||
@ -454,7 +401,7 @@ def make_summaries(networks_dict, country='all'):
|
||||
if country != 'all':
|
||||
n = n[n.buses.country == country]
|
||||
|
||||
Nyears = n.snapshot_weightings.sum()/8760.
|
||||
Nyears = n.snapshot_weightings.sum() / 8760.
|
||||
costs = load_costs(Nyears, snakemake.input[0],
|
||||
snakemake.config['costs'], snakemake.config['electricity'])
|
||||
update_transmission_costs(n, costs, simple_hvdc_costs=False)
|
||||
@ -484,7 +431,6 @@ if __name__ == "__main__":
|
||||
network_dir = os.path.join('results', 'networks')
|
||||
configure_logging(snakemake)
|
||||
|
||||
|
||||
def expand_from_wildcard(key):
|
||||
w = getattr(snakemake.wildcards, key)
|
||||
return snakemake.config["scenario"][key] if w == "all" else [w]
|
||||
@ -504,8 +450,6 @@ if __name__ == "__main__":
|
||||
for l in ll
|
||||
for opts in expand_from_wildcard("opts")}
|
||||
|
||||
print(networks_dict)
|
||||
|
||||
dfs = make_summaries(networks_dict, country=snakemake.wildcards.country)
|
||||
|
||||
to_csv(dfs)
|
||||
|
@ -20,7 +20,6 @@ Description
|
||||
"""
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
from _helpers import (load_network_for_plots, aggregate_p, aggregate_costs,
|
||||
configure_logging)
|
||||
|
||||
@ -35,6 +34,9 @@ from matplotlib.patches import Circle, Ellipse
|
||||
from matplotlib.legend_handler import HandlerPatch
|
||||
to_rgba = mpl.colors.colorConverter.to_rgba
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def make_handler_map_to_scale_circles_as_in(ax, dont_resize_actively=False):
|
||||
fig = ax.get_figure()
|
||||
def axes2pt():
|
||||
@ -57,9 +59,11 @@ def make_handler_map_to_scale_circles_as_in(ax, dont_resize_actively=False):
|
||||
return e
|
||||
return {Circle: HandlerPatch(patch_func=legend_circle_handler)}
|
||||
|
||||
|
||||
def make_legend_circles_for(sizes, scale=1.0, **kw):
|
||||
return [Circle((0,0), radius=(s/scale)**0.5, **kw) for s in sizes]
|
||||
|
||||
|
||||
def set_plot_style():
|
||||
plt.style.use(['classic', 'seaborn-white',
|
||||
{'axes.grid': False, 'grid.linestyle': '--', 'grid.color': u'0.6',
|
||||
@ -69,9 +73,9 @@ def set_plot_style():
|
||||
'legend.fontsize': 'medium',
|
||||
'lines.linewidth': 1.5,
|
||||
'pdf.fonttype': 42,
|
||||
# 'font.family': 'Times New Roman'
|
||||
}])
|
||||
|
||||
|
||||
def plot_map(n, ax=None, attribute='p_nom', opts={}):
|
||||
if ax is None:
|
||||
ax = plt.gca()
|
||||
@ -114,16 +118,11 @@ def plot_map(n, ax=None, attribute='p_nom', opts={}):
|
||||
bus_sizes=0,
|
||||
bus_colors=tech_colors,
|
||||
boundaries=map_boundaries,
|
||||
geomap=True, # TODO : Turn to False, after the release of PyPSA 0.14.2 (refer to https://github.com/PyPSA/PyPSA/issues/75)
|
||||
geomap=False,
|
||||
ax=ax)
|
||||
ax.set_aspect('equal')
|
||||
ax.axis('off')
|
||||
|
||||
# x1, y1, x2, y2 = map_boundaries
|
||||
# ax.set_xlim(x1, x2)
|
||||
# ax.set_ylim(y1, y2)
|
||||
|
||||
|
||||
# Rasterize basemap
|
||||
# TODO : Check if this also works with cartopy
|
||||
for c in ax.collections[:2]: c.set_rasterized(True)
|
||||
@ -176,13 +175,9 @@ def plot_map(n, ax=None, attribute='p_nom', opts={}):
|
||||
|
||||
return fig
|
||||
|
||||
#n = load_network_for_plots(snakemake.input.network, opts, combine_hydro_ps=False)
|
||||
|
||||
|
||||
def plot_total_energy_pie(n, ax=None):
|
||||
"""Add total energy pie plot"""
|
||||
if ax is None:
|
||||
ax = plt.gca()
|
||||
if ax is None: ax = plt.gca()
|
||||
|
||||
ax.set_title('Energy per technology', fontdict=dict(fontsize="medium"))
|
||||
|
||||
@ -190,7 +185,7 @@ def plot_total_energy_pie(n, ax=None):
|
||||
|
||||
patches, texts, autotexts = ax.pie(e_primary,
|
||||
startangle=90,
|
||||
labels = e_primary.rename(opts['nice_names_n']).index,
|
||||
labels = e_primary.rename(opts['nice_names']).index,
|
||||
autopct='%.0f%%',
|
||||
shadow=False,
|
||||
colors = [opts['tech_colors'][tech] for tech in e_primary.index])
|
||||
@ -200,9 +195,7 @@ def plot_total_energy_pie(n, ax=None):
|
||||
t2.remove()
|
||||
|
||||
def plot_total_cost_bar(n, ax=None):
|
||||
"""Add average system cost bar plot"""
|
||||
if ax is None:
|
||||
ax = plt.gca()
|
||||
if ax is None: ax = plt.gca()
|
||||
|
||||
total_load = (n.snapshot_weightings * n.loads_t.p.sum(axis=1)).sum()
|
||||
tech_colors = opts['tech_colors']
|
||||
@ -240,14 +233,13 @@ def plot_total_cost_bar(n, ax=None):
|
||||
if abs(data[-1]) < 5:
|
||||
continue
|
||||
|
||||
text = ax.text(1.1,(bottom-0.5*data)[-1]-3,opts['nice_names_n'].get(ind,ind))
|
||||
text = ax.text(1.1,(bottom-0.5*data)[-1]-3,opts['nice_names'].get(ind,ind))
|
||||
texts.append(text)
|
||||
|
||||
ax.set_ylabel("Average system cost [Eur/MWh]")
|
||||
ax.set_ylim([0, 80]) # opts['costs_max']])
|
||||
ax.set_ylim([0, opts.get('costs_max', 80)])
|
||||
ax.set_xlim([0, 1])
|
||||
#ax.set_xticks([0.5])
|
||||
ax.set_xticklabels([]) #["w/o\nEp", "w/\nEp"])
|
||||
ax.set_xticklabels([])
|
||||
ax.grid(True, axis="y", color='k', linestyle='dotted')
|
||||
|
||||
|
||||
@ -280,8 +272,6 @@ if __name__ == "__main__":
|
||||
ax2 = fig.add_axes([-0.075, 0.1, 0.1, 0.45])
|
||||
plot_total_cost_bar(n, ax2)
|
||||
|
||||
#fig.tight_layout()
|
||||
|
||||
ll = snakemake.wildcards.ll
|
||||
ll_type = ll[0]
|
||||
ll_factor = ll[1:]
|
||||
|
@ -19,19 +19,19 @@ Description
|
||||
|
||||
"""
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
from _helpers import configure_logging
|
||||
|
||||
import pypsa
|
||||
|
||||
import pandas as pd
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def cum_p_nom_max(net, tech, country=None):
|
||||
carrier_b = net.generators.carrier == tech
|
||||
|
||||
generators = \
|
||||
pd.DataFrame(dict(
|
||||
generators = pd.DataFrame(dict(
|
||||
p_nom_max=net.generators.loc[carrier_b, 'p_nom_max'],
|
||||
p_max_pu=net.generators_t.p_max_pu.loc[:,carrier_b].mean(),
|
||||
country=net.generators.loc[carrier_b, 'bus'].map(net.buses.country)
|
||||
|
@ -21,41 +21,19 @@ Description
|
||||
|
||||
import os
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
from _helpers import configure_logging
|
||||
|
||||
import pandas as pd
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
#consolidate and rename
|
||||
def rename_techs(label):
|
||||
if label.startswith("central "):
|
||||
label = label[len("central "):]
|
||||
elif label.startswith("urban "):
|
||||
label = label[len("urban "):]
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
if "retrofitting" in label:
|
||||
label = "building retrofitting"
|
||||
elif "H2" in label:
|
||||
|
||||
def rename_techs(label):
|
||||
if "H2" in label:
|
||||
label = "hydrogen storage"
|
||||
elif "CHP" in label:
|
||||
label = "CHP"
|
||||
elif "water tank" in label:
|
||||
label = "water tanks"
|
||||
elif label == "water tanks":
|
||||
label = "hot water storage"
|
||||
elif "gas" in label and label != "gas boiler":
|
||||
label = "natural gas"
|
||||
elif "solar thermal" in label:
|
||||
label = "solar thermal"
|
||||
elif label == "solar":
|
||||
label = "solar PV"
|
||||
elif label == "heat pump":
|
||||
label = "air heat pump"
|
||||
elif label == "Sabatier":
|
||||
label = "methanation"
|
||||
elif label == "offwind":
|
||||
label = "offshore wind"
|
||||
elif label == "offwind-ac":
|
||||
label = "offshore wind ac"
|
||||
elif label == "offwind-dc":
|
||||
@ -68,15 +46,14 @@ def rename_techs(label):
|
||||
label = "hydroelectricity"
|
||||
elif label == "PHS":
|
||||
label = "hydroelectricity"
|
||||
elif label == "co2 Store":
|
||||
label = "DAC"
|
||||
elif "battery" in label:
|
||||
label = "battery storage"
|
||||
|
||||
return label
|
||||
|
||||
|
||||
preferred_order = pd.Index(["transmission lines","hydroelectricity","hydro reservoir","run of river","pumped hydro storage","onshore wind","offshore wind ac", "offshore wind dc","solar PV","solar thermal","building retrofitting","ground heat pump","air heat pump","resistive heater","CHP","OCGT","gas boiler","gas","natural gas","methanation","hydrogen storage","battery storage","hot water storage"])
|
||||
preferred_order = pd.Index(["transmission lines","hydroelectricity","hydro reservoir","run of river","pumped hydro storage","onshore wind","offshore wind ac", "offshore wind dc","solar PV","solar thermal","OCGT","hydrogen storage","battery storage"])
|
||||
|
||||
|
||||
def plot_costs(infn, fn=None):
|
||||
|
||||
|
@ -37,11 +37,26 @@ Description
|
||||
"""
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
from _helpers import configure_logging
|
||||
|
||||
import pandas as pd
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def multiply(s):
|
||||
return s.str[0].astype(float) * s.str[1].astype(float)
|
||||
|
||||
|
||||
def extract_coordinates(s):
|
||||
regex = (r"(\d{1,2})°(\d{1,2})′(\d{1,2})″(N|S) "
|
||||
r"(\d{1,2})°(\d{1,2})′(\d{1,2})″(E|W)")
|
||||
e = s.str.extract(regex, expand=True)
|
||||
lat = (e[0].astype(float) + (e[1].astype(float) + e[2].astype(float)/60.)/60.)*e[3].map({'N': +1., 'S': -1.})
|
||||
lon = (e[4].astype(float) + (e[5].astype(float) + e[6].astype(float)/60.)/60.)*e[7].map({'E': +1., 'W': -1.})
|
||||
return lon, lat
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if 'snakemake' not in globals():
|
||||
from _helpers import mock_snakemake #rule must be enabled in config
|
||||
@ -50,19 +65,11 @@ if __name__ == "__main__":
|
||||
|
||||
links_p_nom = pd.read_html('https://en.wikipedia.org/wiki/List_of_HVDC_projects', header=0, match="SwePol")[0]
|
||||
|
||||
def extract_coordinates(s):
|
||||
regex = (r"(\d{1,2})°(\d{1,2})′(\d{1,2})″(N|S) "
|
||||
r"(\d{1,2})°(\d{1,2})′(\d{1,2})″(E|W)")
|
||||
e = s.str.extract(regex, expand=True)
|
||||
lat = (e[0].astype(float) + (e[1].astype(float) + e[2].astype(float)/60.)/60.)*e[3].map({'N': +1., 'S': -1.})
|
||||
lon = (e[4].astype(float) + (e[5].astype(float) + e[6].astype(float)/60.)/60.)*e[7].map({'E': +1., 'W': -1.})
|
||||
return lon, lat
|
||||
mw = "Power (MW)"
|
||||
m_b = links_p_nom[mw].str.contains('x').fillna(False)
|
||||
|
||||
m_b = links_p_nom["Power (MW)"].str.contains('x').fillna(False)
|
||||
def multiply(s): return s.str[0].astype(float) * s.str[1].astype(float)
|
||||
|
||||
links_p_nom.loc[m_b, "Power (MW)"] = links_p_nom.loc[m_b, "Power (MW)"].str.split('x').pipe(multiply)
|
||||
links_p_nom["Power (MW)"] = links_p_nom["Power (MW)"].str.extract("[-/]?([\d.]+)", expand=False).astype(float)
|
||||
links_p_nom.loc[m_b, mw] = links_p_nom.loc[m_b, mw].str.split('x').pipe(multiply)
|
||||
links_p_nom[mw] = links_p_nom[mw].str.extract("[-/]?([\d.]+)", expand=False).astype(float)
|
||||
|
||||
links_p_nom['x1'], links_p_nom['y1'] = extract_coordinates(links_p_nom['Converterstation 1'])
|
||||
links_p_nom['x2'], links_p_nom['y2'] = extract_coordinates(links_p_nom['Converterstation 2'])
|
||||
|
@ -38,12 +38,12 @@ Inputs
|
||||
------
|
||||
|
||||
- ``data/costs.csv``: The database of cost assumptions for all included technologies for specific years from various sources; e.g. discount rate, lifetime, investment (CAPEX), fixed operation and maintenance (FOM), variable operation and maintenance (VOM), fuel costs, efficiency, carbon-dioxide intensity.
|
||||
- ``networks/{network}_s{simpl}_{clusters}.nc``: confer :ref:`cluster`
|
||||
- ``networks/elec_s{simpl}_{clusters}.nc``: confer :ref:`cluster`
|
||||
|
||||
Outputs
|
||||
-------
|
||||
|
||||
- ``networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: Complete PyPSA network that will be handed to the ``solve_network`` rule.
|
||||
- ``networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: Complete PyPSA network that will be handed to the ``solve_network`` rule.
|
||||
|
||||
Description
|
||||
-----------
|
||||
@ -56,19 +56,21 @@ Description
|
||||
"""
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
from _helpers import configure_logging
|
||||
|
||||
from add_electricity import load_costs, update_transmission_costs
|
||||
from six import iteritems
|
||||
|
||||
import numpy as np
|
||||
import re
|
||||
import pypsa
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
from six import iteritems
|
||||
|
||||
from add_electricity import load_costs, update_transmission_costs
|
||||
|
||||
idx = pd.IndexSlice
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def add_co2limit(n, Nyears=1., factor=None):
|
||||
|
||||
if factor is not None:
|
||||
@ -129,8 +131,8 @@ def set_transmission_limit(n, ll_type, factor, Nyears=1):
|
||||
n.add('GlobalConstraint', f'l{ll_type}_limit',
|
||||
type=f'transmission_{con_type}_limit',
|
||||
sense='<=', constant=rhs, carrier_attribute='AC, DC')
|
||||
return n
|
||||
|
||||
return n
|
||||
|
||||
|
||||
def average_every_nhours(n, offset):
|
||||
@ -222,7 +224,7 @@ if __name__ == "__main__":
|
||||
opts = snakemake.wildcards.opts.split('-')
|
||||
|
||||
n = pypsa.Network(snakemake.input[0])
|
||||
Nyears = n.snapshot_weightings.sum()/8760.
|
||||
Nyears = n.snapshot_weightings.sum() / 8760.
|
||||
|
||||
set_line_s_max_pu(n)
|
||||
|
||||
@ -245,6 +247,7 @@ if __name__ == "__main__":
|
||||
add_co2limit(n, Nyears, float(m[0]))
|
||||
else:
|
||||
add_co2limit(n, Nyears)
|
||||
break
|
||||
|
||||
for o in opts:
|
||||
oo = o.split("+")
|
||||
|
@ -33,14 +33,15 @@ The :ref:`tutorial` uses a smaller `data bundle <https://zenodo.org/record/35179
|
||||
"""
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
from _helpers import progress_retrieve, configure_logging
|
||||
|
||||
from pathlib import Path
|
||||
import tarfile
|
||||
from pathlib import Path
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
# Detect running outside of snakemake and mock snakemake for testing
|
||||
if 'snakemake' not in globals():
|
||||
from _helpers import mock_snakemake
|
||||
snakemake = mock_snakemake('retrieve_databundle')
|
||||
|
@ -30,10 +30,11 @@ This rule, as a substitute for :mod:`build_natura_raster`, downloads an already
|
||||
"""
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
from _helpers import progress_retrieve, configure_logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
if __name__ == "__main__":
|
||||
if 'snakemake' not in globals():
|
||||
from _helpers import mock_snakemake
|
||||
|
@ -47,23 +47,23 @@ Inputs
|
||||
- ``data/costs.csv``: The database of cost assumptions for all included technologies for specific years from various sources; e.g. discount rate, lifetime, investment (CAPEX), fixed operation and maintenance (FOM), variable operation and maintenance (VOM), fuel costs, efficiency, carbon-dioxide intensity.
|
||||
- ``resources/regions_onshore.geojson``: confer :ref:`busregions`
|
||||
- ``resources/regions_offshore.geojson``: confer :ref:`busregions`
|
||||
- ``networks/{network}.nc``: confer :ref:`electricity`
|
||||
- ``networks/elec.nc``: confer :ref:`electricity`
|
||||
|
||||
Outputs
|
||||
-------
|
||||
|
||||
- ``resources/regions_onshore_{network}_s{simpl}.geojson``:
|
||||
- ``resources/regions_onshore_elec_s{simpl}.geojson``:
|
||||
|
||||
.. image:: ../img/regions_onshore_elec_s.png
|
||||
:scale: 33 %
|
||||
|
||||
- ``resources/regions_offshore_{network}_s{simpl}.geojson``:
|
||||
- ``resources/regions_offshore_elec_s{simpl}.geojson``:
|
||||
|
||||
.. image:: ../img/regions_offshore_elec_s .png
|
||||
:scale: 33 %
|
||||
|
||||
- ``resources/busmap_{network}_s{simpl}.csv``: Mapping of buses from ``networks/elec.nc`` to ``networks/elec_s{simpl}.nc``;
|
||||
- ``networks/{network}_s{simpl}.nc``:
|
||||
- ``resources/busmap_elec_s{simpl}.csv``: Mapping of buses from ``networks/elec.nc`` to ``networks/elec_s{simpl}.nc``;
|
||||
- ``networks/elec_s{simpl}.nc``:
|
||||
|
||||
.. image:: ../img/elec_s.png
|
||||
:scale: 33 %
|
||||
@ -83,7 +83,6 @@ The rule :mod:`simplify_network` does up to four things:
|
||||
"""
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
from _helpers import configure_logging
|
||||
|
||||
from cluster_network import clustering_for_n_clusters, cluster_regions
|
||||
@ -101,7 +100,8 @@ import pypsa
|
||||
from pypsa.io import import_components_from_dataframe, import_series_from_dataframe
|
||||
from pypsa.networkclustering import busmap_by_stubs, aggregategenerators, aggregateoneport
|
||||
|
||||
idx = pd.IndexSlice
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def simplify_network_to_380(n):
|
||||
## All goes to v_nom == 380
|
||||
@ -138,6 +138,7 @@ def simplify_network_to_380(n):
|
||||
|
||||
return n, trafo_map
|
||||
|
||||
|
||||
def _prepare_connection_costs_per_link(n):
|
||||
if n.links.empty: return {}
|
||||
|
||||
@ -156,6 +157,7 @@ def _prepare_connection_costs_per_link(n):
|
||||
|
||||
return connection_costs_per_link
|
||||
|
||||
|
||||
def _compute_connection_costs_to_bus(n, busmap, connection_costs_per_link=None, buses=None):
|
||||
if connection_costs_per_link is None:
|
||||
connection_costs_per_link = _prepare_connection_costs_per_link(n)
|
||||
@ -175,6 +177,7 @@ def _compute_connection_costs_to_bus(n, busmap, connection_costs_per_link=None,
|
||||
|
||||
return connection_costs_to_bus
|
||||
|
||||
|
||||
def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus):
|
||||
for tech in connection_costs_to_bus:
|
||||
tech_b = n.generators.carrier == tech
|
||||
@ -184,6 +187,7 @@ def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus):
|
||||
logger.info("Displacing {} generator(s) and adding connection costs to capital_costs: {} "
|
||||
.format(tech, ", ".join("{:.0f} Eur/MW/a for `{}`".format(d, b) for b, d in costs.iteritems())))
|
||||
|
||||
|
||||
def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate_one_ports={"Load", "StorageUnit"}):
|
||||
def replace_components(n, c, df, pnl):
|
||||
n.mremove(c, n.df(c).index)
|
||||
@ -208,6 +212,7 @@ def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate
|
||||
df = n.df(c)
|
||||
n.mremove(c, df.index[df.bus0.isin(buses_to_del) | df.bus1.isin(buses_to_del)])
|
||||
|
||||
|
||||
def simplify_links(n):
|
||||
## Complex multi-node links are folded into end-points
|
||||
logger.info("Simplifying connected link components")
|
||||
@ -303,6 +308,7 @@ def simplify_links(n):
|
||||
_aggregate_and_move_components(n, busmap, connection_costs_to_bus)
|
||||
return n, busmap
|
||||
|
||||
|
||||
def remove_stubs(n):
|
||||
logger.info("Removing stubs")
|
||||
|
||||
@ -314,6 +320,7 @@ def remove_stubs(n):
|
||||
|
||||
return n, busmap
|
||||
|
||||
|
||||
def cluster(n, n_clusters):
|
||||
logger.info(f"Clustering to {n_clusters} buses")
|
||||
|
||||
@ -334,6 +341,7 @@ def cluster(n, n_clusters):
|
||||
|
||||
return clustering.network, clustering.busmap
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if 'snakemake' not in globals():
|
||||
from _helpers import mock_snakemake
|
||||
|
@ -40,12 +40,12 @@ Relevant Settings
|
||||
Inputs
|
||||
------
|
||||
|
||||
- ``networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: confer :ref:`prepare`
|
||||
- ``networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: confer :ref:`prepare`
|
||||
|
||||
Outputs
|
||||
-------
|
||||
|
||||
- ``results/networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: Solved PyPSA network including optimisation results
|
||||
- ``results/networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: Solved PyPSA network including optimisation results
|
||||
|
||||
.. image:: ../img/results.png
|
||||
:scale: 40 %
|
||||
@ -85,7 +85,6 @@ Details (and errors made through this heuristic) are discussed in the paper
|
||||
"""
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
from _helpers import configure_logging
|
||||
|
||||
import numpy as np
|
||||
@ -95,9 +94,13 @@ import re
|
||||
import pypsa
|
||||
from pypsa.linopf import (get_var, define_constraints, linexpr, join_exprs,
|
||||
network_lopf, ilopf)
|
||||
|
||||
from pathlib import Path
|
||||
from vresutils.benchmark import memory_logger
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def prepare_network(n, solve_opts):
|
||||
|
||||
if 'clip_p_max_pu' in solve_opts:
|
||||
@ -249,15 +252,16 @@ def extra_functionality(n, snapshots):
|
||||
def solve_network(n, config, solver_log=None, opts='', **kwargs):
|
||||
solver_options = config['solving']['solver'].copy()
|
||||
solver_name = solver_options.pop('name')
|
||||
track_iterations = config['solving']['options'].get('track_iterations', False)
|
||||
min_iterations = config['solving']['options'].get('min_iterations', 4)
|
||||
max_iterations = config['solving']['options'].get('max_iterations', 6)
|
||||
cf_solving = config['solving']['options']
|
||||
track_iterations = cf_solving.get('track_iterations', False)
|
||||
min_iterations = cf_solving.get('min_iterations', 4)
|
||||
max_iterations = cf_solving.get('max_iterations', 6)
|
||||
|
||||
# add to network for extra_functionality
|
||||
n.config = config
|
||||
n.opts = opts
|
||||
|
||||
if config['solving']['options'].get('skip_iterations', False):
|
||||
if cf_solving.get('skip_iterations', False):
|
||||
network_lopf(n, solver_name=solver_name, solver_options=solver_options,
|
||||
extra_functionality=extra_functionality, **kwargs)
|
||||
else:
|
||||
@ -282,8 +286,8 @@ if __name__ == "__main__":
|
||||
opts = snakemake.wildcards.opts.split('-')
|
||||
solve_opts = snakemake.config['solving']['options']
|
||||
|
||||
with memory_logger(filename=getattr(snakemake.log, 'memory', None),
|
||||
interval=30.) as mem:
|
||||
fn = getattr(snakemake.log, 'memory', None)
|
||||
with memory_logger(filename=fn, interval=30.) as mem:
|
||||
n = pypsa.Network(snakemake.input[0])
|
||||
n = prepare_network(n, solve_opts)
|
||||
n = solve_network(n, config=snakemake.config, solver_dir=tmpdir,
|
||||
|
@ -32,13 +32,13 @@ Relevant Settings
|
||||
Inputs
|
||||
------
|
||||
|
||||
- ``networks/{network}_s{simpl}_{clusters}.nc``: confer :ref:`cluster`
|
||||
- ``results/networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: confer :ref:`solve`
|
||||
- ``networks/elec_s{simpl}_{clusters}.nc``: confer :ref:`cluster`
|
||||
- ``results/networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: confer :ref:`solve`
|
||||
|
||||
Outputs
|
||||
-------
|
||||
|
||||
- ``results/networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_op.nc``: Solved PyPSA network for optimal dispatch including optimisation results
|
||||
- ``results/networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op.nc``: Solved PyPSA network for optimal dispatch including optimisation results
|
||||
|
||||
Description
|
||||
-----------
|
||||
@ -46,7 +46,6 @@ Description
|
||||
"""
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
from _helpers import configure_logging
|
||||
|
||||
import pypsa
|
||||
@ -56,6 +55,8 @@ from pathlib import Path
|
||||
from vresutils.benchmark import memory_logger
|
||||
from solve_network import solve_network, prepare_network
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def set_parameters_from_optimized(n, n_optim):
|
||||
lines_typed_i = n.lines.index[n.lines.type != '']
|
||||
n.lines.loc[lines_typed_i, 'num_parallel'] = \
|
||||
@ -107,7 +108,8 @@ if __name__ == "__main__":
|
||||
opts = snakemake.wildcards.opts.split('-')
|
||||
config['solving']['options']['skip_iterations'] = False
|
||||
|
||||
with memory_logger(filename=getattr(snakemake.log, 'memory', None), interval=30.) as mem:
|
||||
fn = getattr(snakemake.log, 'memory', None)
|
||||
with memory_logger(filename=fn, interval=30.) as mem:
|
||||
n = prepare_network(n, solve_opts=snakemake.config['solving']['options'])
|
||||
n = solve_network(n, config, solver_dir=tmpdir,
|
||||
solver_log=snakemake.log.solver, opts=opts)
|
||||
|
@ -11,7 +11,6 @@ logging:
|
||||
summary_dir: results
|
||||
|
||||
scenario:
|
||||
sectors: [E]
|
||||
simpl: ['']
|
||||
ll: ['copt']
|
||||
clusters: [5]
|
||||
@ -242,67 +241,18 @@ plotting:
|
||||
'waste' : '#68896b'
|
||||
'geothermal' : '#ba91b1'
|
||||
"OCGT" : "#d35050"
|
||||
"OCGT marginal" : "#d35050"
|
||||
"OCGT-heat" : "#d35050"
|
||||
"gas boiler" : "#d35050"
|
||||
"gas boilers" : "#d35050"
|
||||
"gas boiler marginal" : "#d35050"
|
||||
"gas-to-power/heat" : "#d35050"
|
||||
"gas" : "#d35050"
|
||||
"natural gas" : "#d35050"
|
||||
"CCGT" : "#b20101"
|
||||
"CCGT marginal" : "#b20101"
|
||||
"Nuclear" : "#ff9000"
|
||||
"Nuclear marginal" : "#ff9000"
|
||||
"nuclear" : "#ff9000"
|
||||
"coal" : "#707070"
|
||||
"Coal" : "#707070"
|
||||
"Coal marginal" : "#707070"
|
||||
"lignite" : "#9e5a01"
|
||||
"Lignite" : "#9e5a01"
|
||||
"Lignite marginal" : "#9e5a01"
|
||||
"Oil" : "#262626"
|
||||
"oil" : "#262626"
|
||||
"H2" : "#ea048a"
|
||||
"hydrogen storage" : "#ea048a"
|
||||
"Sabatier" : "#a31597"
|
||||
"methanation" : "#a31597"
|
||||
"helmeth" : "#a31597"
|
||||
"DAC" : "#d284ff"
|
||||
"co2 stored" : "#e5e5e5"
|
||||
"CO2 sequestration" : "#e5e5e5"
|
||||
"battery" : "#b8ea04"
|
||||
"battery storage" : "#b8ea04"
|
||||
"Li ion" : "#b8ea04"
|
||||
"BEV charger" : "#e2ff7c"
|
||||
"V2G" : "#7a9618"
|
||||
"transport fuel cell" : "#e884be"
|
||||
"retrofitting" : "#e0d6a8"
|
||||
"building retrofitting" : "#e0d6a8"
|
||||
"heat pumps" : "#ff9768"
|
||||
"heat pump" : "#ff9768"
|
||||
"air heat pump" : "#ffbea0"
|
||||
"ground heat pump" : "#ff7a3d"
|
||||
"power-to-heat" : "#a59e7c"
|
||||
"power-to-gas" : "#db8585"
|
||||
"power-to-liquid" : "#a9acd1"
|
||||
"Fischer-Tropsch" : "#a9acd1"
|
||||
"resistive heater" : "#aa4925"
|
||||
"water tanks" : "#401f75"
|
||||
"hot water storage" : "#401f75"
|
||||
"hot water charging" : "#351c5e"
|
||||
"hot water discharging" : "#683ab2"
|
||||
"CHP" : "#d80a56"
|
||||
"CHP heat" : "#d80a56"
|
||||
"CHP electric" : "#d80a56"
|
||||
"district heating" : "#93864b"
|
||||
"Ambient" : "#262626"
|
||||
"Electric load" : "#f9d002"
|
||||
"electricity" : "#f9d002"
|
||||
"Heat load" : "#d35050"
|
||||
"heat" : "#d35050"
|
||||
"Transport load" : "#235ebc"
|
||||
"transport" : "#235ebc"
|
||||
"lines" : "#70af1d"
|
||||
"transmission lines" : "#70af1d"
|
||||
"AC-AC" : "#70af1d"
|
||||
@ -322,17 +272,5 @@ plotting:
|
||||
hydro: "Reservoir & Dam"
|
||||
battery: "Battery Storage"
|
||||
H2: "Hydrogen Storage"
|
||||
lines: "Transmission lines"
|
||||
ror: "Run of river"
|
||||
nice_names_n:
|
||||
OCGT: "Open-Cycle\nGas"
|
||||
CCGT: "Combined-Cycle\nGas"
|
||||
offwind-ac: "Offshore\nWind (AC)"
|
||||
offwind-dc: "Offshore\nWind (DC)"
|
||||
onwind: "Onshore\nWind"
|
||||
battery: "Battery\nStorage"
|
||||
H2: "Hydrogen\nStorage"
|
||||
lines: "Transmission\nlines"
|
||||
ror: "Run of\nriver"
|
||||
PHS: "Pumped Hydro\nStorage"
|
||||
hydro: "Reservoir\n& Dam"
|
||||
lines: "Transmission Lines"
|
||||
ror: "Run of River"
|
||||
|
Loading…
Reference in New Issue
Block a user