Merge branch 'master' into misc/precommit-ci-2

This commit is contained in:
Fabian Hofmann 2022-09-16 14:38:49 +02:00 committed by GitHub
commit affa1c2b88
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 300 additions and 161 deletions

View File

@ -6,7 +6,7 @@ cff-version: 1.1.0
message: "If you use this package, please cite the corresponding manuscript in Energy Strategy Reviews." message: "If you use this package, please cite the corresponding manuscript in Energy Strategy Reviews."
title: "PyPSA-Eur: An open optimisation model of the European transmission system" title: "PyPSA-Eur: An open optimisation model of the European transmission system"
repository: https://github.com/pypsa/pypsa-eur repository: https://github.com/pypsa/pypsa-eur
version: 0.5.0 version: 0.6.0
license: MIT license: MIT
journal: Energy Strategy Reviews journal: Energy Strategy Reviews
doi: 10.1016/j.esr.2018.08.012 doi: 10.1016/j.esr.2018.08.012

257
Snakefile
View File

@ -13,9 +13,14 @@ if not exists("config.yaml"):
configfile: "config.yaml" configfile: "config.yaml"
COSTS="resources/costs.csv" run = config.get("run", {})
RDIR = run["name"] + "/" if run.get("name") else ""
CDIR = RDIR if not run.get("shared_cutouts") else ""
COSTS = "resources/" + RDIR + "costs.csv"
ATLITE_NPROCESSES = config['atlite'].get('nprocesses', 4) ATLITE_NPROCESSES = config['atlite'].get('nprocesses', 4)
wildcard_constraints: wildcard_constraints:
simpl="[a-zA-Z0-9]*|all", simpl="[a-zA-Z0-9]*|all",
clusters="[0-9]+m?|all", clusters="[0-9]+m?|all",
@ -24,25 +29,25 @@ wildcard_constraints:
rule cluster_all_networks: rule cluster_all_networks:
input: expand("networks/elec_s{simpl}_{clusters}.nc", **config['scenario']) input: expand("networks/" + RDIR + "elec_s{simpl}_{clusters}.nc", **config['scenario'])
rule extra_components_all_networks: rule extra_components_all_networks:
input: expand("networks/elec_s{simpl}_{clusters}_ec.nc", **config['scenario']) input: expand("networks/" + RDIR + "elec_s{simpl}_{clusters}_ec.nc", **config['scenario'])
rule prepare_all_networks: rule prepare_all_networks:
input: expand("networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", **config['scenario']) input: expand("networks/" + RDIR + "elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", **config['scenario'])
rule solve_all_networks: rule solve_all_networks:
input: expand("results/networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", **config['scenario']) input: expand("results/networks/" + RDIR + "elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", **config['scenario'])
if config['enable'].get('prepare_links_p_nom', False): if config['enable'].get('prepare_links_p_nom', False):
rule prepare_links_p_nom: rule prepare_links_p_nom:
output: 'data/links_p_nom.csv' output: 'data/links_p_nom.csv'
log: 'logs/prepare_links_p_nom.log' log: "logs/" + RDIR + "prepare_links_p_nom.log"
threads: 1 threads: 1
resources: mem_mb=500 resources: mem_mb=500
script: 'scripts/prepare_links_p_nom.py' script: 'scripts/prepare_links_p_nom.py'
@ -62,7 +67,7 @@ if not config.get('tutorial', False):
if config['enable'].get('retrieve_databundle', True): if config['enable'].get('retrieve_databundle', True):
rule retrieve_databundle: rule retrieve_databundle:
output: expand('data/bundle/{file}', file=datafiles) output: expand('data/bundle/{file}', file=datafiles)
log: "logs/retrieve_databundle.log" log: "logs/" + RDIR + "retrieve_databundle.log"
resources: mem_mb=1000 resources: mem_mb=1000
script: 'scripts/retrieve_databundle.py' script: 'scripts/retrieve_databundle.py'
@ -77,17 +82,17 @@ rule retrieve_load_data:
rule build_load_data: rule build_load_data:
input: "data/load_raw.csv" input: "data/load_raw.csv"
output: "resources/load.csv" output: "resources/" + RDIR + "load.csv"
log: "logs/build_load_data.log" log: "logs/" + RDIR + "build_load_data.log"
resources: mem_mb=5000 resources: mem_mb=5000
script: 'scripts/build_load_data.py' script: 'scripts/build_load_data.py'
rule build_powerplants: rule build_powerplants:
input: input:
base_network="networks/base.nc", base_network="networks/" + RDIR + "base.nc",
custom_powerplants="data/custom_powerplants.csv" custom_powerplants="data/custom_powerplants.csv"
output: "resources/powerplants.csv" output: "resources/" + RDIR + "powerplants.csv"
log: "logs/build_powerplants.log" log: "logs/" + RDIR + "build_powerplants.log"
threads: 1 threads: 1
resources: mem_mb=5000 resources: mem_mb=5000
script: "scripts/build_powerplants.py" script: "scripts/build_powerplants.py"
@ -103,12 +108,12 @@ rule base_network:
parameter_corrections='data/parameter_corrections.yaml', parameter_corrections='data/parameter_corrections.yaml',
links_p_nom='data/links_p_nom.csv', links_p_nom='data/links_p_nom.csv',
links_tyndp='data/links_tyndp.csv', links_tyndp='data/links_tyndp.csv',
country_shapes='resources/country_shapes.geojson', country_shapes="resources/" + RDIR + "country_shapes.geojson",
offshore_shapes='resources/offshore_shapes.geojson', offshore_shapes="resources/" + RDIR + "offshore_shapes.geojson",
europe_shape='resources/europe_shape.geojson' europe_shape="resources/" + RDIR + "europe_shape.geojson"
output: "networks/base.nc" output: "networks/" + RDIR + "base.nc"
log: "logs/base_network.log" log: "logs/" + RDIR + "base_network.log"
benchmark: "benchmarks/base_network" benchmark: "benchmarks/" + RDIR + "base_network"
threads: 1 threads: 1
resources: mem_mb=500 resources: mem_mb=500
script: "scripts/base_network.py" script: "scripts/base_network.py"
@ -124,11 +129,11 @@ rule build_shapes:
ch_cantons='data/bundle/ch_cantons.csv', ch_cantons='data/bundle/ch_cantons.csv',
ch_popgdp='data/bundle/je-e-21.03.02.xls' ch_popgdp='data/bundle/je-e-21.03.02.xls'
output: output:
country_shapes='resources/country_shapes.geojson', country_shapes="resources/" + RDIR + "country_shapes.geojson",
offshore_shapes='resources/offshore_shapes.geojson', offshore_shapes="resources/" + RDIR + "offshore_shapes.geojson",
europe_shape='resources/europe_shape.geojson', europe_shape="resources/" + RDIR + "europe_shape.geojson",
nuts3_shapes='resources/nuts3_shapes.geojson' nuts3_shapes="resources/" + RDIR + "nuts3_shapes.geojson"
log: "logs/build_shapes.log" log: "logs/" + RDIR + "build_shapes.log"
threads: 1 threads: 1
resources: mem_mb=500 resources: mem_mb=500
script: "scripts/build_shapes.py" script: "scripts/build_shapes.py"
@ -136,13 +141,13 @@ rule build_shapes:
rule build_bus_regions: rule build_bus_regions:
input: input:
country_shapes='resources/country_shapes.geojson', country_shapes="resources/" + RDIR + "country_shapes.geojson",
offshore_shapes='resources/offshore_shapes.geojson', offshore_shapes="resources/" + RDIR + "offshore_shapes.geojson",
base_network="networks/base.nc" base_network="networks/" + RDIR + "base.nc"
output: output:
regions_onshore="resources/regions_onshore.geojson", regions_onshore="resources/" + RDIR + "regions_onshore.geojson",
regions_offshore="resources/regions_offshore.geojson" regions_offshore="resources/" + RDIR + "regions_offshore.geojson"
log: "logs/build_bus_regions.log" log: "logs/" + RDIR + "build_bus_regions.log"
threads: 1 threads: 1
resources: mem_mb=1000 resources: mem_mb=1000
script: "scripts/build_bus_regions.py" script: "scripts/build_bus_regions.py"
@ -150,11 +155,11 @@ rule build_bus_regions:
if config['enable'].get('build_cutout', False): if config['enable'].get('build_cutout', False):
rule build_cutout: rule build_cutout:
input: input:
regions_onshore="resources/regions_onshore.geojson", regions_onshore="resources/" + RDIR + "regions_onshore.geojson",
regions_offshore="resources/regions_offshore.geojson" regions_offshore="resources/" + RDIR + "regions_offshore.geojson"
output: "cutouts/{cutout}.nc" output: "cutouts/" + CDIR + "{cutout}.nc"
log: "logs/build_cutout/{cutout}.log" log: "logs/" + CDIR + "build_cutout/{cutout}.log"
benchmark: "benchmarks/build_cutout_{cutout}" benchmark: "benchmarks/" + CDIR + "build_cutout_{cutout}"
threads: ATLITE_NPROCESSES threads: ATLITE_NPROCESSES
resources: mem_mb=ATLITE_NPROCESSES * 1000 resources: mem_mb=ATLITE_NPROCESSES * 1000
script: "scripts/build_cutout.py" script: "scripts/build_cutout.py"
@ -163,8 +168,8 @@ if config['enable'].get('build_cutout', False):
if config['enable'].get('retrieve_cutout', True): if config['enable'].get('retrieve_cutout', True):
rule retrieve_cutout: rule retrieve_cutout:
input: HTTP.remote("zenodo.org/record/6382570/files/{cutout}.nc", keep_local=True, static=True) input: HTTP.remote("zenodo.org/record/6382570/files/{cutout}.nc", keep_local=True, static=True)
output: "cutouts/{cutout}.nc" output: "cutouts/" + CDIR + "{cutout}.nc"
log: "logs/retrieve_cutout_{cutout}.log" log: "logs/" + CDIR + "retrieve_cutout_{cutout}.log"
resources: mem_mb=5000 resources: mem_mb=5000
run: run:
move(input[0], output[0]) move(input[0], output[0])
@ -173,7 +178,7 @@ if config['enable'].get('retrieve_cost_data', True):
rule retrieve_cost_data: rule retrieve_cost_data:
input: HTTP.remote(f"raw.githubusercontent.com/PyPSA/technology-data/{config['costs']['version']}/outputs/costs_{config['costs']['year']}.csv", keep_local=True) input: HTTP.remote(f"raw.githubusercontent.com/PyPSA/technology-data/{config['costs']['version']}/outputs/costs_{config['costs']['year']}.csv", keep_local=True)
output: COSTS output: COSTS
log: "logs/retrieve_cost_data.log" log: "logs/" + RDIR + "retrieve_cost_data.log"
resources: mem_mb=5000 resources: mem_mb=5000
run: run:
move(input[0], output[0]) move(input[0], output[0])
@ -182,17 +187,17 @@ if config['enable'].get('build_natura_raster', False):
rule build_natura_raster: rule build_natura_raster:
input: input:
natura="data/bundle/natura/Natura2000_end2015.shp", natura="data/bundle/natura/Natura2000_end2015.shp",
cutouts=expand("cutouts/{cutouts}.nc", **config['atlite']) cutouts=expand("cutouts/" + CDIR + "{cutouts}.nc", **config['atlite'])
output: "resources/natura.tiff" output: "resources/" + RDIR + "natura.tiff"
resources: mem_mb=5000 resources: mem_mb=5000
log: "logs/build_natura_raster.log" log: "logs/" + RDIR + "build_natura_raster.log"
script: "scripts/build_natura_raster.py" script: "scripts/build_natura_raster.py"
if config['enable'].get('retrieve_natura_raster', True): if config['enable'].get('retrieve_natura_raster', True):
rule retrieve_natura_raster: rule retrieve_natura_raster:
input: HTTP.remote("zenodo.org/record/4706686/files/natura.tiff", keep_local=True, static=True) input: HTTP.remote("zenodo.org/record/4706686/files/natura.tiff", keep_local=True, static=True)
output: "resources/natura.tiff" output: "resources/" + RDIR + "natura.tiff"
resources: mem_mb=5000 resources: mem_mb=5000
run: run:
move(input[0], output[0]) move(input[0], output[0])
@ -209,36 +214,36 @@ rule retrieve_ship_raster:
rule build_ship_raster: rule build_ship_raster:
input: input:
ship_density="data/shipdensity_global.zip", ship_density="data/shipdensity_global.zip",
cutouts=expand("cutouts/{cutouts}.nc", **config['atlite']) cutouts=expand("cutouts/" + CDIR + "{cutouts}.nc", **config['atlite'])
output: "resources/shipdensity_raster.nc" output: "resources/" + RDIR + "shipdensity_raster.nc"
log: "logs/build_ship_raster.log" log: "logs/" + RDIR + "build_ship_raster.log"
resources: mem_mb=5000 resources: mem_mb=5000
benchmark: "benchmarks/build_ship_raster" benchmark: "benchmarks/" + RDIR + "build_ship_raster"
script: "scripts/build_ship_raster.py" script: "scripts/build_ship_raster.py"
rule build_renewable_profiles: rule build_renewable_profiles:
input: input:
base_network="networks/base.nc", base_network="networks/" + RDIR + "base.nc",
corine="data/bundle/corine/g250_clc06_V18_5.tif", corine="data/bundle/corine/g250_clc06_V18_5.tif",
natura=lambda w: ("resources/natura.tiff" natura=lambda w: ("resources/" + RDIR + "natura.tiff"
if config["renewable"][w.technology]["natura"] if config["renewable"][w.technology]["natura"]
else []), else []),
gebco=lambda w: ("data/bundle/GEBCO_2014_2D.nc" gebco=lambda w: ("data/bundle/GEBCO_2014_2D.nc"
if "max_depth" in config["renewable"][w.technology].keys() if "max_depth" in config["renewable"][w.technology].keys()
else []), else []),
ship_density= lambda w: ("resources/shipdensity_raster.nc" ship_density= lambda w: ("resources/" + RDIR + "shipdensity_raster.nc"
if "ship_threshold" in config["renewable"][w.technology].keys() if "ship_threshold" in config["renewable"][w.technology].keys()
else []), else []),
country_shapes='resources/country_shapes.geojson', country_shapes="resources/" + RDIR + "country_shapes.geojson",
offshore_shapes='resources/offshore_shapes.geojson', offshore_shapes="resources/" + RDIR + "offshore_shapes.geojson",
regions=lambda w: ("resources/regions_onshore.geojson" regions=lambda w: ("resources/" + RDIR + "regions_onshore.geojson"
if w.technology in ('onwind', 'solar') if w.technology in ('onwind', 'solar')
else "resources/regions_offshore.geojson"), else "resources/" + RDIR + "regions_offshore.geojson"),
cutout=lambda w: "cutouts/" + config["renewable"][w.technology]['cutout'] + ".nc" cutout=lambda w: "cutouts/" + CDIR + config["renewable"][w.technology]['cutout'] + ".nc"
output: profile="resources/profile_{technology}.nc", output: profile="resources/" + RDIR + "profile_{technology}.nc",
log: "logs/build_renewable_profile_{technology}.log" log: "logs/" + RDIR + "build_renewable_profile_{technology}.log"
benchmark: "benchmarks/build_renewable_profiles_{technology}" benchmark: "benchmarks/" + RDIR + "build_renewable_profiles_{technology}"
threads: ATLITE_NPROCESSES threads: ATLITE_NPROCESSES
resources: mem_mb=ATLITE_NPROCESSES * 5000 resources: mem_mb=ATLITE_NPROCESSES * 5000
wildcard_constraints: technology="(?!hydro).*" # Any technology other than hydro wildcard_constraints: technology="(?!hydro).*" # Any technology other than hydro
@ -247,31 +252,33 @@ rule build_renewable_profiles:
rule build_hydro_profile: rule build_hydro_profile:
input: input:
country_shapes='resources/country_shapes.geojson', country_shapes="resources/" + RDIR + "country_shapes.geojson",
eia_hydro_generation='data/eia_hydro_annual_generation.csv', eia_hydro_generation='data/eia_hydro_annual_generation.csv',
cutout=f"cutouts/{config['renewable']['hydro']['cutout']}.nc" if "hydro" in config["renewable"] else "config['renewable']['hydro']['cutout'] not configured", cutout=f"cutouts/" + CDIR + "{config['renewable']['hydro']['cutout']}.nc" if "hydro" in config["renewable"] else "config['renewable']['hydro']['cutout'] not configured",
output: 'resources/profile_hydro.nc' output: "resources/" + RDIR + "profile_hydro.nc"
log: "logs/build_hydro_profile.log" log: "logs/" + RDIR + "build_hydro_profile.log"
resources: mem_mb=5000 resources: mem_mb=5000
script: 'scripts/build_hydro_profile.py' script: 'scripts/build_hydro_profile.py'
rule add_electricity: rule add_electricity:
input: input:
base_network='networks/base.nc', base_network="networks/" + RDIR + "base.nc",
tech_costs=COSTS, tech_costs=COSTS,
regions="resources/regions_onshore.geojson", regions="resources/" + RDIR + "regions_onshore.geojson",
powerplants='resources/powerplants.csv', powerplants="resources/" + RDIR + "powerplants.csv",
hydro_capacities='data/bundle/hydro_capacities.csv', hydro_capacities='data/bundle/hydro_capacities.csv',
geth_hydro_capacities='data/geth2015_hydro_capacities.csv', geth_hydro_capacities='data/geth2015_hydro_capacities.csv',
load='resources/load.csv', load="resources/" + RDIR + "load.csv",
nuts3_shapes='resources/nuts3_shapes.geojson', nuts3_shapes="resources/" + RDIR + "nuts3_shapes.geojson",
**{f"profile_{tech}": f"resources/profile_{tech}.nc" **{f"profile_{tech}": "resources/" + RDIR + f"profile_{tech}.nc"
for tech in config['renewable']}, for tech in config['renewable']},
**{f"conventional_{carrier}_{attr}": fn for carrier, d in config.get('conventional', {None: {}}).items() for attr, fn in d.items() if str(fn).startswith("data/")}, **{f"conventional_{carrier}_{attr}": fn
output: "networks/elec.nc" for carrier, d in config.get('conventional', {None: {}}).items()
log: "logs/add_electricity.log" for attr, fn in d.items() if str(fn).startswith("data/")},
benchmark: "benchmarks/add_electricity" output: "networks/" + RDIR + "elec.nc"
log: "logs/" + RDIR + "add_electricity.log"
benchmark: "benchmarks/" + RDIR + "add_electricity"
threads: 1 threads: 1
resources: mem_mb=5000 resources: mem_mb=5000
script: "scripts/add_electricity.py" script: "scripts/add_electricity.py"
@ -279,18 +286,18 @@ rule add_electricity:
rule simplify_network: rule simplify_network:
input: input:
network='networks/elec.nc', network="networks/" + RDIR + "elec.nc",
tech_costs=COSTS, tech_costs=COSTS,
regions_onshore="resources/regions_onshore.geojson", regions_onshore="resources/" + RDIR + "regions_onshore.geojson",
regions_offshore="resources/regions_offshore.geojson" regions_offshore="resources/" + RDIR + "regions_offshore.geojson"
output: output:
network='networks/elec_s{simpl}.nc', network="networks/" + RDIR + "elec_s{simpl}.nc",
regions_onshore="resources/regions_onshore_elec_s{simpl}.geojson", regions_onshore="resources/" + RDIR + "regions_onshore_elec_s{simpl}.geojson",
regions_offshore="resources/regions_offshore_elec_s{simpl}.geojson", regions_offshore="resources/" + RDIR + "regions_offshore_elec_s{simpl}.geojson",
busmap='resources/busmap_elec_s{simpl}.csv', busmap="resources/" + RDIR + "busmap_elec_s{simpl}.csv",
connection_costs='resources/connection_costs_s{simpl}.csv' connection_costs="resources/" + RDIR + "connection_costs_s{simpl}.csv"
log: "logs/simplify_network/elec_s{simpl}.log" log: "logs/" + RDIR + "simplify_network/elec_s{simpl}.log"
benchmark: "benchmarks/simplify_network/elec_s{simpl}" benchmark: "benchmarks/" + RDIR + "simplify_network/elec_s{simpl}"
threads: 1 threads: 1
resources: mem_mb=4000 resources: mem_mb=4000
script: "scripts/simplify_network.py" script: "scripts/simplify_network.py"
@ -298,21 +305,21 @@ rule simplify_network:
rule cluster_network: rule cluster_network:
input: input:
network='networks/elec_s{simpl}.nc', network="networks/" + RDIR + "elec_s{simpl}.nc",
regions_onshore="resources/regions_onshore_elec_s{simpl}.geojson", regions_onshore="resources/" + RDIR + "regions_onshore_elec_s{simpl}.geojson",
regions_offshore="resources/regions_offshore_elec_s{simpl}.geojson", regions_offshore="resources/" + RDIR + "regions_offshore_elec_s{simpl}.geojson",
busmap=ancient('resources/busmap_elec_s{simpl}.csv'), busmap=ancient("resources/" + RDIR + "busmap_elec_s{simpl}.csv"),
custom_busmap=("data/custom_busmap_elec_s{simpl}_{clusters}.csv" custom_busmap=("data/custom_busmap_elec_s{simpl}_{clusters}.csv"
if config["enable"].get("custom_busmap", False) else []), if config["enable"].get("custom_busmap", False) else []),
tech_costs=COSTS tech_costs=COSTS
output: output:
network='networks/elec_s{simpl}_{clusters}.nc', network="networks/" + RDIR + "elec_s{simpl}_{clusters}.nc",
regions_onshore="resources/regions_onshore_elec_s{simpl}_{clusters}.geojson", regions_onshore="resources/" + RDIR + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
regions_offshore="resources/regions_offshore_elec_s{simpl}_{clusters}.geojson", regions_offshore="resources/" + RDIR + "regions_offshore_elec_s{simpl}_{clusters}.geojson",
busmap="resources/busmap_elec_s{simpl}_{clusters}.csv", busmap="resources/" + RDIR + "busmap_elec_s{simpl}_{clusters}.csv",
linemap="resources/linemap_elec_s{simpl}_{clusters}.csv" linemap="resources/" + RDIR + "linemap_elec_s{simpl}_{clusters}.csv"
log: "logs/cluster_network/elec_s{simpl}_{clusters}.log" log: "logs/" + RDIR + "cluster_network/elec_s{simpl}_{clusters}.log"
benchmark: "benchmarks/cluster_network/elec_s{simpl}_{clusters}" benchmark: "benchmarks/" + RDIR + "cluster_network/elec_s{simpl}_{clusters}"
threads: 1 threads: 1
resources: mem_mb=6000 resources: mem_mb=6000
script: "scripts/cluster_network.py" script: "scripts/cluster_network.py"
@ -320,21 +327,21 @@ rule cluster_network:
rule add_extra_components: rule add_extra_components:
input: input:
network='networks/elec_s{simpl}_{clusters}.nc', network="networks/" + RDIR + "elec_s{simpl}_{clusters}.nc",
tech_costs=COSTS, tech_costs=COSTS,
output: 'networks/elec_s{simpl}_{clusters}_ec.nc' output: "networks/" + RDIR + "elec_s{simpl}_{clusters}_ec.nc"
log: "logs/add_extra_components/elec_s{simpl}_{clusters}.log" log: "logs/" + RDIR + "add_extra_components/elec_s{simpl}_{clusters}.log"
benchmark: "benchmarks/add_extra_components/elec_s{simpl}_{clusters}_ec" benchmark: "benchmarks/" + RDIR + "add_extra_components/elec_s{simpl}_{clusters}_ec"
threads: 1 threads: 1
resources: mem_mb=3000 resources: mem_mb=3000
script: "scripts/add_extra_components.py" script: "scripts/add_extra_components.py"
rule prepare_network: rule prepare_network:
input: 'networks/elec_s{simpl}_{clusters}_ec.nc', tech_costs=COSTS input: "networks/" + RDIR + "elec_s{simpl}_{clusters}_ec.nc", tech_costs=COSTS,
output: 'networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc' output: "networks/" + RDIR + "elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
log: "logs/prepare_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.log" log: "logs/" + RDIR + "prepare_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.log"
benchmark: "benchmarks/prepare_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}" benchmark: "benchmarks/" + RDIR + "prepare_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}"
threads: 1 threads: 1
resources: mem_mb=4000 resources: mem_mb=4000
script: "scripts/prepare_network.py" script: "scripts/prepare_network.py"
@ -361,13 +368,13 @@ def memory(w):
rule solve_network: rule solve_network:
input: "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc" input: "networks/" + RDIR + "elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
output: "results/networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc" output: "results/networks/" + RDIR + "elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
log: log:
solver=normpath("logs/solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_solver.log"), solver=normpath("logs/" + RDIR + "solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_solver.log"),
python="logs/solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_python.log", python="logs/" + RDIR + "solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_python.log",
memory="logs/solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_memory.log" memory="logs/" + RDIR + "solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_memory.log"
benchmark: "benchmarks/solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}" benchmark: "benchmarks/" + RDIR + "solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}"
threads: 4 threads: 4
resources: mem_mb=memory resources: mem_mb=memory
shadow: "minimal" shadow: "minimal"
@ -376,14 +383,14 @@ rule solve_network:
rule solve_operations_network: rule solve_operations_network:
input: input:
unprepared="networks/elec_s{simpl}_{clusters}_ec.nc", unprepared="networks/" + RDIR + "elec_s{simpl}_{clusters}_ec.nc",
optimized="results/networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc" optimized="results/networks/" + RDIR + "elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
output: "results/networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op.nc" output: "results/networks/" + RDIR + "elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op.nc"
log: log:
solver=normpath("logs/solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_solver.log"), solver=normpath("logs/" + RDIR + "solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_solver.log"),
python="logs/solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_python.log", python="logs/" + RDIR + "solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_python.log",
memory="logs/solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_memory.log" memory="logs/" + RDIR + "solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_memory.log"
benchmark: "benchmarks/solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}" benchmark: "benchmarks/" + RDIR + "solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}"
threads: 4 threads: 4
resources: mem_mb=(lambda w: 5000 + 372 * int(w.clusters)) resources: mem_mb=(lambda w: 5000 + 372 * int(w.clusters))
shadow: "minimal" shadow: "minimal"
@ -392,12 +399,12 @@ rule solve_operations_network:
rule plot_network: rule plot_network:
input: input:
network="results/networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", network="results/networks/" + RDIR + "elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
tech_costs=COSTS tech_costs=COSTS
output: output:
only_map="results/plots/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{attr}.{ext}", only_map="results/plots/" + RDIR + "elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{attr}.{ext}",
ext="results/plots/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{attr}_ext.{ext}" ext="results/plots/" + RDIR + "elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{attr}_ext.{ext}"
log: "logs/plot_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{attr}_{ext}.log" log: "logs/" + RDIR + "plot_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{attr}_{ext}.log"
script: "scripts/plot_network.py" script: "scripts/plot_network.py"
@ -410,7 +417,7 @@ def input_make_summary(w):
else: else:
ll = w.ll ll = w.ll
return ([COSTS] + return ([COSTS] +
expand("results/networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", expand("results/networks/" + RDIR + "elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
ll=ll, ll=ll,
**{k: config["scenario"][k] if getattr(w, k) == "all" else getattr(w, k) **{k: config["scenario"][k] if getattr(w, k) == "all" else getattr(w, k)
for k in ["simpl", "clusters", "opts"]})) for k in ["simpl", "clusters", "opts"]}))
@ -418,30 +425,30 @@ def input_make_summary(w):
rule make_summary: rule make_summary:
input: input_make_summary input: input_make_summary
output: directory("results/summaries/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}") output: directory("results/summaries/" + RDIR + "elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}")
log: "logs/make_summary/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}.log", log: "logs/" + RDIR + "make_summary/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}.log",
resources: mem_mb=500 resources: mem_mb=500
script: "scripts/make_summary.py" script: "scripts/make_summary.py"
rule plot_summary: rule plot_summary:
input: "results/summaries/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}" input: "results/summaries/" + RDIR + "elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}"
output: "results/plots/summary_{summary}_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}.{ext}" output: "results/plots/" + RDIR + "summary_{summary}_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}.{ext}"
log: "logs/plot_summary/{summary}_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}_{ext}.log" log: "logs/" + RDIR + "plot_summary/{summary}_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}_{ext}.log"
resources: mem_mb=500 resources: mem_mb=500
script: "scripts/plot_summary.py" script: "scripts/plot_summary.py"
def input_plot_p_nom_max(w): def input_plot_p_nom_max(w):
return [("results/networks/elec_s{simpl}{maybe_cluster}.nc" return [("results/networks/" + RDIR + "elec_s{simpl}{maybe_cluster}.nc"
.format(maybe_cluster=('' if c == 'full' else ('_' + c)), **w)) .format(maybe_cluster=('' if c == 'full' else ('_' + c)), **w))
for c in w.clusts.split(",")] for c in w.clusts.split(",")]
rule plot_p_nom_max: rule plot_p_nom_max:
input: input_plot_p_nom_max input: input_plot_p_nom_max
output: "results/plots/elec_s{simpl}_cum_p_nom_max_{clusts}_{techs}_{country}.{ext}" output: "results/plots/" + RDIR + "elec_s{simpl}_cum_p_nom_max_{clusts}_{techs}_{country}.{ext}"
log: "logs/plot_p_nom_max/elec_s{simpl}_{clusts}_{techs}_{country}_{ext}.log" log: "logs/" + RDIR + "plot_p_nom_max/elec_s{simpl}_{clusts}_{techs}_{country}_{ext}.log"
resources: mem_mb=500 resources: mem_mb=500
script: "scripts/plot_p_nom_max.py" script: "scripts/plot_p_nom_max.py"

View File

@ -2,13 +2,18 @@
# #
# SPDX-License-Identifier: CC0-1.0 # SPDX-License-Identifier: CC0-1.0
version: 0.5.0 version: 0.6.0
tutorial: false tutorial: false
logging: logging:
level: INFO level: INFO
format: '%(levelname)s:%(name)s:%(message)s' format: '%(levelname)s:%(name)s:%(message)s'
run:
name: "" # use this to keep track of runs with different settings
shared_cutouts: false # set to true to share the default cutout(s) across runs
scenario: scenario:
simpl: [''] simpl: ['']
ll: ['copt'] ll: ['copt']

View File

@ -2,13 +2,15 @@
# #
# SPDX-License-Identifier: CC0-1.0 # SPDX-License-Identifier: CC0-1.0
version: 0.5.0 version: 0.6.0
tutorial: true tutorial: true
logging: logging:
level: INFO level: INFO
format: '%(levelname)s:%(name)s:%(message)s' format: '%(levelname)s:%(name)s:%(message)s'
run:
name: ""
scenario: scenario:
simpl: [''] simpl: ['']

View File

@ -74,9 +74,9 @@ author = u'Jonas Hoersch (KIT, FIAS), Fabian Hofmann (TUB, FIAS), David Schlacht
# built documents. # built documents.
# #
# The short X.Y version. # The short X.Y version.
version = u'0.3' version = u'0.6'
# The full version, including alpha/beta/rc tags. # The full version, including alpha/beta/rc tags.
release = u'0.4.0' release = u'0.6.0'
# The language for content autogenerated by Sphinx. Refer to documentation # The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages. # for a list of supported languages.

View File

@ -28,13 +28,24 @@ Top-level configuration
.. _scenario: .. _scenario:
``scenario`` ``run``
============ =======
It is common conduct to analyse energy system optimisation models for **multiple scenarios** for a variety of reasons, It is common conduct to analyse energy system optimisation models for **multiple scenarios** for a variety of reasons,
e.g. assessing their sensitivity towards changing the temporal and/or geographical resolution or investigating how e.g. assessing their sensitivity towards changing the temporal and/or geographical resolution or investigating how
investment changes as more ambitious greenhouse-gas emission reduction targets are applied. investment changes as more ambitious greenhouse-gas emission reduction targets are applied.
The ``run`` section is used for running and storing scenarios with different configurations which are not covered by :ref:`wildcards`. It determines the path at which resources, networks and results are stored. Therefore the user can run different configurations within the same directory. If a run with a non-empty name should use cutouts shared across runs, set ``shared_cutouts`` to ``true``.
.. literalinclude:: ../config.default.yaml
:language: yaml
:start-at: run:
:end-before: scenario:
``scenario``
============
The ``scenario`` section is an extraordinary section of the config file The ``scenario`` section is an extraordinary section of the config file
that is strongly connected to the :ref:`wildcards` and is designed to that is strongly connected to the :ref:`wildcards` and is designed to
facilitate running multiple scenarios through a single command facilitate running multiple scenarios through a single command
@ -96,7 +107,7 @@ Specifies the temporal range to build an energy system model for as arguments to
``atlite`` ``atlite``
========== ==========
Define and specify the ``atlite.Cutout`` used for calculating renewable potentials and time-series. All options except for ``features`` are directly used as `cutout parameters <https://atlite.readthedocs.io/en/latest/ref_api.html#cutout>`_. Define and specify the ``atlite.Cutout`` used for calculating renewable potentials and time-series. All options except for ``features`` are directly used as `cutout parameters <https://atlite.readthedocs.io/en/latest/ref_api.html#cutout>`_.
.. literalinclude:: ../config.default.yaml .. literalinclude:: ../config.default.yaml
:language: yaml :language: yaml

View File

@ -10,9 +10,31 @@ Release Notes
Upcoming Release Upcoming Release
================ ================
* Add functionality to consider shipping routes when calculating the available area for offshore technologies. Data for the shipping density comes from the `Global Shipping Traffic Density dataset <https://datacatalog.worldbank.org/search/dataset/0037580/Global-Shipping-Traffic-Density>`_ * new feature
* When fixing line voltages to 380kV, the workflow now preserves the transmission capacity instead of the electrical impedance and reactance.
PyPSA-Eur 0.6.0 (10th September 2022)
=====================================
* Functionality to consider shipping routes when calculating the available area
  for offshore technologies was added. Data for the shipping density comes from
the `Global Shipping Traffic Density dataset
<https://datacatalog.worldbank.org/search/dataset/0037580/Global-Shipping-Traffic-Density>`_.
* When transforming all transmission lines to a unified voltage level of 380kV,
the workflow now preserves the transmission capacity rather than electrical
impedance and reactance.
* Memory resources are now specified for all rules.
* Filtering of power plant data was adjusted to new versions of
``powerplantmatching``.
* The resolution of land exclusion calculation is now a configurable option. See
setting ``excluder_resolution``.
* The software now supports running the workflow with different settings within the same directory. A new config section ``run`` was created that specifies under which scenario ``name`` the created resources, networks and results should be stored. If ``name`` is not specified, the workflow uses the default paths. The entry ``shared_cutouts`` specifies whether the run should use cutouts from the default root directory or use run-specific cutouts.
PyPSA-Eur 0.5.0 (27th July 2022) PyPSA-Eur 0.5.0 (27th July 2022)
===================================== =====================================

View File

@ -11,17 +11,33 @@ channels:
dependencies: dependencies:
- _libgcc_mutex=0.1 - _libgcc_mutex=0.1
- _openmp_mutex=4.5 - _openmp_mutex=4.5
- abseil-cpp=20210324.2
- affine=2.3.1 - affine=2.3.1
- alsa-lib=1.2.3.2 - alsa-lib=1.2.3.2
- altair=4.2.0
- ampl-mp=3.1.0
- amply=0.1.5 - amply=0.1.5
- anyio=3.6.1
- appdirs=1.4.4 - appdirs=1.4.4
- argon2-cffi=21.3.0
- argon2-cffi-bindings=21.2.0
- arrow-cpp=8.0.0
- asttokens=2.0.5 - asttokens=2.0.5
- atlite=0.2.7 - atlite=0.2.9
- attrs=21.4.0 - attrs=21.4.0
- aws-c-cal=0.5.11
- aws-c-common=0.6.2
- aws-c-event-stream=0.2.7
- aws-c-io=0.10.5
- aws-checksums=0.1.11
- aws-sdk-cpp=1.8.186
- babel=2.10.3
- backcall=0.2.0 - backcall=0.2.0
- backports=1.0 - backports=1.0
- backports.functools_lru_cache=1.6.4 - backports.functools_lru_cache=1.6.4
- beautifulsoup4=4.11.1 - beautifulsoup4=4.11.1
- bleach=5.0.1
- blinker=1.4
- blosc=1.21.1 - blosc=1.21.1
- bokeh=2.4.3 - bokeh=2.4.3
- boost-cpp=1.74.0 - boost-cpp=1.74.0
@ -32,16 +48,17 @@ dependencies:
- brotlipy=0.7.0 - brotlipy=0.7.0
- bzip2=1.0.8 - bzip2=1.0.8
- c-ares=1.18.1 - c-ares=1.18.1
- ca-certificates=2022.6.15 - ca-certificates=2022.6.15.1
- cachetools=5.0.0
- cairo=1.16.0 - cairo=1.16.0
- cartopy=0.20.1 - cartopy=0.20.1
- cdsapi=0.5.1 - cdsapi=0.5.1
- certifi=2022.6.15 - certifi=2022.6.15.1
- cffi=1.15.1 - cffi=1.15.1
- cfitsio=4.0.0 - cfitsio=4.0.0
- cftime=1.6.1 - cftime=1.6.1
- charset-normalizer=2.1.0 - charset-normalizer=2.1.0
- click=8.1.3 - click=8.0.4
- click-plugins=1.1.1 - click-plugins=1.1.1
- cligj=0.7.2 - cligj=0.7.2
- cloudpickle=2.1.0 - cloudpickle=2.1.0
@ -52,6 +69,8 @@ dependencies:
- coin-or-utils=2.11.6 - coin-or-utils=2.11.6
- coincbc=2.10.8 - coincbc=2.10.8
- colorama=0.4.5 - colorama=0.4.5
- colorcet=3.0.0
- commonmark=0.9.1
- configargparse=1.5.3 - configargparse=1.5.3
- connection_pool=0.0.3 - connection_pool=0.0.3
- country_converter=0.7.4 - country_converter=0.7.4
@ -59,23 +78,28 @@ dependencies:
- curl=7.83.1 - curl=7.83.1
- cycler=0.11.0 - cycler=0.11.0
- cytoolz=0.12.0 - cytoolz=0.12.0
- dask=2022.7.1 - dask=2022.7.0
- dask-core=2022.7.1 - dask-core=2022.7.0
- dataclasses=0.8
- datrie=0.8.2 - datrie=0.8.2
- dbus=1.13.6 - dbus=1.13.6
- debugpy=1.6.0
- decorator=5.1.1 - decorator=5.1.1
- defusedxml=0.7.1
- deprecation=2.1.0 - deprecation=2.1.0
- descartes=1.1.0 - descartes=1.1.0
- distributed=2022.7.1 - distributed=2022.7.0
- distro=1.6.0 - distro=1.6.0
- docutils=0.19 - docutils=0.19
- dpath=2.0.6 - dpath=2.0.6
- entrypoints=0.4
- entsoe-py=0.5.4 - entsoe-py=0.5.4
- et_xmlfile=1.0.1 - et_xmlfile=1.0.1
- executing=0.9.1 - executing=0.8.3
- expat=2.4.8 - expat=2.4.8
- filelock=3.7.1 - filelock=3.7.1
- fiona=1.8.20 - fiona=1.8.20
- flit-core=3.7.1
- folium=0.12.1.post1 - folium=0.12.1.post1
- font-ttf-dejavu-sans-mono=2.37 - font-ttf-dejavu-sans-mono=2.37
- font-ttf-inconsolata=3.000 - font-ttf-inconsolata=3.000
@ -88,19 +112,24 @@ dependencies:
- freetype=2.10.4 - freetype=2.10.4
- freexl=1.0.6 - freexl=1.0.6
- fsspec=2022.5.0 - fsspec=2022.5.0
- future=0.18.2
- gdal=3.3.3 - gdal=3.3.3
- geographiclib=1.52 - geographiclib=1.52
- geojson-rewind=1.0.2 - geojson-rewind=1.0.2
- geopandas=0.11.1 - geopandas=0.11.0
- geopandas-base=0.11.1 - geopandas-base=0.11.0
- geopy=2.2.0 - geopy=2.2.0
- geos=3.10.0 - geos=3.10.0
- geotiff=1.7.0 - geotiff=1.7.0
- gettext=0.19.8.1 - gettext=0.19.8.1
- gflags=2.2.2
- giflib=5.2.1 - giflib=5.2.1
- gitdb=4.0.9 - gitdb=4.0.9
- gitpython=3.1.27 - gitpython=3.1.27
- glog=0.6.0
- gmp=6.2.1
- graphite2=1.3.13 - graphite2=1.3.13
- grpc-cpp=1.45.2
- gst-plugins-base=1.18.5 - gst-plugins-base=1.18.5
- gstreamer=1.18.5 - gstreamer=1.18.5
- harfbuzz=2.9.1 - harfbuzz=2.9.1
@ -110,16 +139,23 @@ dependencies:
- icu=68.2 - icu=68.2
- idna=3.3 - idna=3.3
- importlib-metadata=4.11.4 - importlib-metadata=4.11.4
- importlib_resources=5.9.0 - importlib_metadata=4.11.4
- importlib_resources=5.8.0
- iniconfig=1.1.1 - iniconfig=1.1.1
- ipykernel=6.15.1
- ipython=8.4.0 - ipython=8.4.0
- ipython_genutils=0.2.0
- ipywidgets=7.7.1
- jedi=0.18.1 - jedi=0.18.1
- jinja2=3.1.2 - jinja2=3.1.2
- joblib=1.1.0 - joblib=1.1.0
- jpeg=9e - jpeg=9e
- json-c=0.15 - json-c=0.15
- json5=0.9.5
- jsonschema=4.7.2 - jsonschema=4.7.2
- jupyter_core=4.11.1 - jupyter_client=7.3.4
- jupyter_core=4.10.0
- jupyter_server=1.18.1
- kealib=1.4.15 - kealib=1.4.15
- keyutils=1.6.1 - keyutils=1.6.1
- kiwisolver=1.4.4 - kiwisolver=1.4.4
@ -133,6 +169,7 @@ dependencies:
- libbrotlienc=1.0.9 - libbrotlienc=1.0.9
- libcblas=3.9.0 - libcblas=3.9.0
- libclang=11.1.0 - libclang=11.1.0
- libcrc32c=1.1.2
- libcurl=7.83.1 - libcurl=7.83.1
- libdap4=3.20.6 - libdap4=3.20.6
- libdeflate=1.12 - libdeflate=1.12
@ -146,6 +183,7 @@ dependencies:
- libgfortran5=12.1.0 - libgfortran5=12.1.0
- libglib=2.72.1 - libglib=2.72.1
- libgomp=12.1.0 - libgomp=12.1.0
- libgoogle-cloud=1.40.2
- libiconv=1.16 - libiconv=1.16
- libkml=1.3.0 - libkml=1.3.0
- liblapack=3.9.0 - liblapack=3.9.0
@ -159,16 +197,20 @@ dependencies:
- libopus=1.3.1 - libopus=1.3.1
- libpng=1.6.37 - libpng=1.6.37
- libpq=13.5 - libpq=13.5
- libprotobuf=3.20.1
- librttopo=1.1.0 - librttopo=1.1.0
- libsodium=1.0.18
- libspatialindex=1.9.3 - libspatialindex=1.9.3
- libspatialite=5.0.1 - libspatialite=5.0.1
- libssh2=1.10.0 - libssh2=1.10.0
- libstdcxx-ng=12.1.0 - libstdcxx-ng=12.1.0
- libthrift=0.16.0
- libtiff=4.4.0 - libtiff=4.4.0
- libutf8proc=2.7.0
- libuuid=2.32.1 - libuuid=2.32.1
- libvorbis=1.3.7 - libvorbis=1.3.7
- libwebp=1.2.3 - libwebp=1.2.2
- libwebp-base=1.2.3 - libwebp-base=1.2.2
- libxcb=1.13 - libxcb=1.13
- libxkbcommon=1.0.3 - libxkbcommon=1.0.3
- libxml2=2.9.12 - libxml2=2.9.12
@ -181,21 +223,34 @@ dependencies:
- lz4-c=1.9.3 - lz4-c=1.9.3
- lzo=2.10 - lzo=2.10
- mapclassify=2.4.3 - mapclassify=2.4.3
- markdown=3.4.1
- markupsafe=2.1.1 - markupsafe=2.1.1
- matplotlib=3.5.2 - matplotlib=3.5.2
- matplotlib-base=3.5.2 - matplotlib-base=3.5.2
- matplotlib-inline=0.1.3 - matplotlib-inline=0.1.3
- memory_profiler=0.60.0 - memory_profiler=0.60.0
- metis=5.1.0
- mistune=0.8.4
- msgpack-python=1.0.4 - msgpack-python=1.0.4
- mumps-include=5.2.1
- mumps-seq=5.2.1
- munch=2.5.0 - munch=2.5.0
- munkres=1.1.4 - munkres=1.1.4
- mysql-common=8.0.29 - mysql-common=8.0.29
- mysql-libs=8.0.29 - mysql-libs=8.0.29
- nbclassic=0.4.3
- nbclient=0.6.6
- nbconvert=6.5.0
- nbconvert-core=6.5.0
- nbconvert-pandoc=6.5.0
- nbformat=5.4.0 - nbformat=5.4.0
- ncurses=6.3 - ncurses=6.3
- nest-asyncio=1.5.5
- netcdf4=1.6.0 - netcdf4=1.6.0
- networkx=2.8.5 - networkx=2.8.4
- nomkl=1.0 - nomkl=1.0
- notebook=6.4.12
- notebook-shim=0.1.0
- nspr=4.32 - nspr=4.32
- nss=3.78 - nss=3.78
- numexpr=2.8.3 - numexpr=2.8.3
@ -204,8 +259,12 @@ dependencies:
- openjpeg=2.4.0 - openjpeg=2.4.0
- openpyxl=3.0.9 - openpyxl=3.0.9
- openssl=1.1.1q - openssl=1.1.1q
- orc=1.7.5
- packaging=21.3 - packaging=21.3
- pandas=1.4.3 - pandas=1.4.3
- pandoc=2.18
- pandocfilters=1.5.0
- parquet-cpp=1.5.1
- parso=0.8.3 - parso=0.8.3
- partd=1.2.0 - partd=1.2.0
- patsy=0.5.2 - patsy=0.5.2
@ -213,7 +272,7 @@ dependencies:
- pexpect=4.8.0 - pexpect=4.8.0
- pickleshare=0.7.5 - pickleshare=0.7.5
- pillow=9.2.0 - pillow=9.2.0
- pip=22.2 - pip=22.1.2
- pixman=0.40.0 - pixman=0.40.0
- plac=1.3.5 - plac=1.3.5
- pluggy=1.0.0 - pluggy=1.0.0
@ -221,19 +280,26 @@ dependencies:
- poppler=21.09.0 - poppler=21.09.0
- poppler-data=0.4.11 - poppler-data=0.4.11
- postgresql=13.5 - postgresql=13.5
- powerplantmatching=0.5.3 - powerplantmatching=0.5.4
- progressbar2=4.0.0 - progressbar2=4.0.0
- proj=8.1.1 - proj=8.1.1
- prometheus_client=0.14.1
- prompt-toolkit=3.0.30 - prompt-toolkit=3.0.30
- protobuf=3.20.1
- psutil=5.9.1 - psutil=5.9.1
- pthread-stubs=0.4 - pthread-stubs=0.4
- ptyprocess=0.7.0 - ptyprocess=0.7.0
- pulp=2.6.0 - pulp=2.6.0
- pure_eval=0.2.2 - pure_eval=0.2.2
- py=1.11.0 - py=1.11.0
- pyarrow=8.0.0
- pycountry=20.7.3 - pycountry=20.7.3
- pycparser=2.21 - pycparser=2.21
- pyct=0.4.6
- pyct-core=0.4.6
- pydeck=0.7.1
- pygments=2.12.0 - pygments=2.12.0
- pympler=0.9
- pyomo=6.4.1 - pyomo=6.4.1
- pyopenssl=22.0.0 - pyopenssl=22.0.0
- pyparsing=3.0.9 - pyparsing=3.0.9
@ -252,22 +318,32 @@ dependencies:
- python=3.9.13 - python=3.9.13
- python-dateutil=2.8.2 - python-dateutil=2.8.2
- python-fastjsonschema=2.16.1 - python-fastjsonschema=2.16.1
- python-tzdata=2022.1
- python-utils=3.3.3 - python-utils=3.3.3
- python_abi=3.9 - python_abi=3.9
- pytz=2022.1 - pytz=2022.1
- pytz-deprecation-shim=0.1.0.post0
- pyviz_comms=2.2.0
- pyxlsb=1.0.9 - pyxlsb=1.0.9
- pyyaml=6.0 - pyyaml=6.0
- pyzmq=23.2.0
- qt=5.12.9 - qt=5.12.9
- rasterio=1.2.9 - rasterio=1.2.9
- ratelimiter=1.2.0 - ratelimiter=1.2.0
- re2=2022.06.01
- readline=8.1.2 - readline=8.1.2
- requests=2.28.1 - requests=2.28.1
- retry=0.9.2 - retry=0.9.2
- rich=12.5.1
- rtree=1.0.0 - rtree=1.0.0
- s2n=1.0.10
- scikit-learn=1.1.1 - scikit-learn=1.1.1
- scipy=1.8.1 - scipy=1.8.1
- scotch=6.0.9
- seaborn=0.11.2 - seaborn=0.11.2
- seaborn-base=0.11.2 - seaborn-base=0.11.2
- semver=2.13.0
- send2trash=1.8.0
- setuptools=63.2.0 - setuptools=63.2.0
- setuptools-scm=7.0.5 - setuptools-scm=7.0.5
- setuptools_scm=7.0.5 - setuptools_scm=7.0.5
@ -275,21 +351,27 @@ dependencies:
- six=1.16.0 - six=1.16.0
- smart_open=6.0.0 - smart_open=6.0.0
- smmap=3.0.5 - smmap=3.0.5
- snakemake-minimal=7.9.0 - snakemake-minimal=7.8.5
- snappy=1.1.9 - snappy=1.1.9
- sniffio=1.2.0
- snuggs=1.4.7 - snuggs=1.4.7
- sortedcontainers=2.4.0 - sortedcontainers=2.4.0
- soupsieve=2.3.2.post1 - soupsieve=2.3.1
- sqlite=3.39.2 - sqlite=3.39.1
- stack_data=0.3.0 - stack_data=0.3.0
- statsmodels=0.13.2 - statsmodels=0.13.2
- stopit=1.1.2 - stopit=1.1.2
- streamlit=1.10.0
- tabula-py=2.2.0 - tabula-py=2.2.0
- tabulate=0.8.10 - tabulate=0.8.10
- tblib=1.7.0 - tblib=1.7.0
- tenacity=8.0.1
- terminado=0.15.0
- threadpoolctl=3.1.0 - threadpoolctl=3.1.0
- tiledb=2.3.4 - tiledb=2.3.4
- tinycss2=1.1.1
- tk=8.6.12 - tk=8.6.12
- toml=0.10.2
- tomli=2.0.1 - tomli=2.0.1
- toolz=0.12.0 - toolz=0.12.0
- toposort=1.7 - toposort=1.7
@ -300,13 +382,20 @@ dependencies:
- typing_extensions=4.3.0 - typing_extensions=4.3.0
- tzcode=2022a - tzcode=2022a
- tzdata=2022a - tzdata=2022a
- tzlocal=4.2
- unicodedata2=14.0.0 - unicodedata2=14.0.0
- unidecode=1.3.4 - unidecode=1.3.4
- urllib3=1.26.11 - unixodbc=2.3.10
- urllib3=1.26.10
- validators=0.18.2
- watchdog=2.1.9
- wcwidth=0.2.5 - wcwidth=0.2.5
- webencodings=0.5.1
- websocket-client=1.3.3
- wheel=0.37.1 - wheel=0.37.1
- widgetsnbextension=3.6.1
- wrapt=1.14.1 - wrapt=1.14.1
- xarray=2022.6.0 - xarray=2022.3.0
- xerces-c=3.2.3 - xerces-c=3.2.3
- xlrd=2.0.1 - xlrd=2.0.1
- xorg-fixesproto=5.0 - xorg-fixesproto=5.0
@ -330,6 +419,7 @@ dependencies:
- xz=5.2.5 - xz=5.2.5
- yaml=0.2.5 - yaml=0.2.5
- yte=1.5.1 - yte=1.5.1
- zeromq=4.3.4
- zict=2.2.0 - zict=2.2.0
- zipp=3.8.0 - zipp=3.8.0
- zlib=1.2.12 - zlib=1.2.12

View File

@ -11,7 +11,7 @@ dependencies:
- pip - pip
- pypsa>=0.20 - pypsa>=0.20
- atlite>=0.2.6 - atlite>=0.2.9
- dask - dask
# Dependencies of the workflow itself # Dependencies of the workflow itself
@ -24,7 +24,7 @@ dependencies:
- yaml - yaml
- pytables - pytables
- lxml - lxml
- powerplantmatching>=0.5.3 - powerplantmatching>=0.5.4
- numpy - numpy
- pandas - pandas
- geopandas>=0.11.0 - geopandas>=0.11.0
@ -32,7 +32,7 @@ dependencies:
- netcdf4 - netcdf4
- networkx - networkx
- scipy - scipy
- shapely - shapely<2.0 # need to address deprecations
- progressbar2 - progressbar2
- pyomo - pyomo
- matplotlib - matplotlib

View File

@ -2,12 +2,14 @@
# #
# SPDX-License-Identifier: CC0-1.0 # SPDX-License-Identifier: CC0-1.0
version: 0.5.0 version: 0.6.0
tutorial: true tutorial: true
logging: logging:
level: INFO level: INFO
format: '%(levelname)s:%(name)s:%(message)s' format: '%(levelname)s:%(name)s:%(message)s'
run:
name: ""
scenario: scenario:
simpl: [''] simpl: ['']