From ea8e643dc4916e0150e888b0df3152c664f81f98 Mon Sep 17 00:00:00 2001 From: eb5194 Date: Tue, 19 Jan 2021 15:20:58 +0100 Subject: [PATCH 001/102] add_electricity.py Resolve FutureWarning 771 Index.__or__ operating as set operation is deprecated --- scripts/add_electricity.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index 8fc8ad5c..3f151977 100755 --- a/scripts/add_electricity.py +++ b/scripts/add_electricity.py @@ -332,7 +332,7 @@ def attach_hydro(n, costs, ppl): country = ppl['bus'].map(n.buses.country).rename("country") - inflow_idx = ror.index | hydro.index + inflow_idx = ror.index.union(hydro.index) if not inflow_idx.empty: dist_key = ppl.loc[inflow_idx, 'p_nom'].groupby(country).transform(normed) From b9fdacad4d428855071a5a221baa3dc175afd73d Mon Sep 17 00:00:00 2001 From: eb5194 Date: Wed, 20 Jan 2021 20:39:22 +0100 Subject: [PATCH 002/102] simplify: delete columns with incorrect/accurate entries --- scripts/simplify_network.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index b05d59aa..8663faa3 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -362,6 +362,7 @@ if __name__ == "__main__": n, cluster_map = cluster(n, int(snakemake.wildcards.simpl)) busmaps.append(cluster_map) + n.buses = n.buses.drop(['substation_lv', 'substation_off', 'under_construction'], axis=1) n.export_to_netcdf(snakemake.output.network) busmap_s = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0]) From b8a76859317d26ca9b8189ef2ee5db21380e83a7 Mon Sep 17 00:00:00 2001 From: FabianHofmann Date: Tue, 27 Apr 2021 17:58:31 +0200 Subject: [PATCH 003/102] Atlite availability (#224) * adjust buil_cutout.py and Snakefile * try adjusting build_renewable_profiles, currently crashing due to weird pyproj error * build_renewable_profiles: -remove printing gid * build_renewable_profiles: use dask for paralellization, use dense 
functions * build_renewable_profiles: - revise imports - add logging for long calculation - revise explaining comment - revise distance calculation * build profiles: adjust to cutout.grid * * fix area to square km * rename potmatrix -> capacity_potential * rename available to availibility * config.default update cutout params build_renewable_potentials: major refactoring and simplification hydro_profiles: update code * build profiles: fix weight output dimensions * build profiles: fix typo, fix selection of buses * build profiles: reinsert paths variable * follow up * build profiles: move to dask calculation only * CI: set build cutout to true (add CDSAPI) * build profiles: use pyproj, test with gleas and geokit upstream * environment.yaml fix atlite version * build profiles: use dask 'processes' for more than 25 regions * build profiles: specify dask scheduler according to number of regions * backpedal a bit, only allow scheduler='processes' * follow up, code style and fixup * build profiles: add logger info for underwater fraction calc * config adjust cutout parameters Snakefile fixup * config.default.yaml: adjust resolution * config: use one cutout in total build_cutout: automatic detetection of geographical boundaries * env: add python>=3.8 requirement build_cutout: fixup for region bound * config: allow base cutout * folllow up, fix up * follow up II * clean up * clean up II * build profiles: move back to multiprocessing due to performance issues * small code style corrections * move in pool context * swqitch to ratsterio * switch to rasterio for availibility calculation * tiny fixup * * build continental raster for offshore distance calculation * adjust Snakefile to new script build_raster * rename continental raster to onshore raster add projected_mask function (not yet tested) add docstrings, modularize * Snakefile: remove build_onhore_raster rule, build mask directly from geometry instead build_natura_raster: adjust code, add function for exporting 
build_profiles: * add buffer to shore distance to init_globals function * update docstrings * improve handling of nodata grid codes * add geometry mask if natura raster not activated (the 255 value is an 'eligible' value for the corine data base, do this for excluding data outside the shape) * build_profiles: adjust docstrings * update environment * build profiles: fixup reproject woth padding * follow up, small fixups * fix resampling method checkpoint: reproduces solar profile in tut data * reintegrate plot map code style * config: rename cutout into "base" * build profiles: adjust to new atlite code * natura raster: small fixup * build natura raster: compress tiff file * config: adjust cutout names * build profiles: cover case if no or partial overlap between natura raster and cutout * config-tutorial: adjust cutout params * buid-profifiles: fixup in gebco filter * follow up * update config files * build profiles: select layoutmatrix != 0 * build profiles: speed up average_distance and underwaterfraction * build profiles: fix typo * update release notes build_cutout: only build needed features * update envs * config: add temperature to sarah features * temporary fix for atlite v0.2.1 and new xarray version release * env: remove xarray specification * * remove rule build_country_flh * build profiles: remove sneaked in line * doc: update configuration.rst (section atlite) and corresponding csv table * release notes: fix quotes * build profiles: use 3035 for area calculation * Update envs/environment.docs.yaml * Update scripts/build_cutout.py * Update doc/release_notes.rst Co-authored-by: euronion <42553970+euronion@users.noreply.github.com> * Update doc/configuration.rst Co-authored-by: euronion <42553970+euronion@users.noreply.github.com> * Update scripts/build_cutout.py Co-authored-by: euronion <42553970+euronion@users.noreply.github.com> * update release notes * release notes: add deprecation of 'keep_all_available_areas' build profiles: remove warning for 
'keep_all_available_areas' * build cutout: rearrage code, set buffer correctly * Rename tutorial cutout to remove name clash with real cutout. * Update release_notes.rst: Rename tutorial cutout. * retrieve: update cutouts and downloads (alternative) (#237) * retrieve: update cutouts and downloads * retrieve: remove unnecessary import * use snakemake remote file functionality * Snakefile: update zenodo link * update natura remote link (closes #234) * env: update atlite version to 0.2.2 * env: fix dask version due to memory issues * test: retrieve cutout instead of build * test: use tutorial cutout for CI Co-authored-by: euronion <42553970+euronion@users.noreply.github.com> Co-authored-by: Fabian Neumann --- .travis.yml | 3 + Snakefile | 58 ++---- config.default.yaml | 28 ++- config.tutorial.yaml | 17 +- doc/configtables/atlite.csv | 11 +- doc/configuration.rst | 5 +- doc/plotting.rst | 44 ----- doc/preparation/retrieve.rst | 54 ++++- doc/release_notes.rst | 6 +- doc/wildcards.rst | 3 +- envs/environment.docs.yaml | 21 +- envs/environment.yaml | 25 +-- scripts/build_country_flh.py | 243 ----------------------- scripts/build_cutout.py | 35 ++-- scripts/build_hydro_profile.py | 12 +- scripts/build_natura_raster.py | 43 ++-- scripts/build_renewable_profiles.py | 295 +++++++++++----------------- scripts/retrieve_cutout.py | 75 ------- scripts/retrieve_natura_raster.py | 49 ----- test/config.test1.yaml | 21 +- 20 files changed, 306 insertions(+), 742 deletions(-) delete mode 100644 scripts/build_country_flh.py delete mode 100644 scripts/retrieve_cutout.py delete mode 100644 scripts/retrieve_natura_raster.py diff --git a/.travis.yml b/.travis.yml index 43b25200..79826a64 100644 --- a/.travis.yml +++ b/.travis.yml @@ -29,6 +29,9 @@ before_install: # list packages for easier debugging - conda list +before_script: + - 'echo -ne "url: ${CDSAPI_URL}\nkey: ${CDSAPI_TOKEN}\n" > ~/.cdsapirc' + script: - cp ./test/config.test1.yaml ./config.yaml - snakemake -j all 
solve_all_networks diff --git a/Snakefile b/Snakefile index 817c905e..2702fd3d 100644 --- a/Snakefile +++ b/Snakefile @@ -5,6 +5,9 @@ from os.path import normpath, exists from shutil import copyfile +from snakemake.remote.HTTP import RemoteProvider as HTTPRemoteProvider +HTTP = HTTPRemoteProvider() + if not exists("config.yaml"): copyfile("config.default.yaml", "config.yaml") @@ -135,10 +138,12 @@ rule build_bus_regions: resources: mem=1000 script: "scripts/build_bus_regions.py" - if config['enable'].get('build_cutout', False): rule build_cutout: - output: directory("cutouts/{cutout}") + input: + regions_onshore="resources/regions_onshore.geojson", + regions_offshore="resources/regions_offshore.geojson" + output: "cutouts/{cutout}.nc" log: "logs/build_cutout/{cutout}.log" benchmark: "benchmarks/build_cutout_{cutout}" threads: ATLITE_NPROCESSES @@ -148,16 +153,16 @@ if config['enable'].get('build_cutout', False): if config['enable'].get('retrieve_cutout', True): rule retrieve_cutout: - output: directory(expand("cutouts/{cutouts}", **config['atlite'])), - log: "logs/retrieve_cutout.log" - script: 'scripts/retrieve_cutout.py' + input: HTTP.remote("zenodo.org/record/4709858/files/{cutout}.nc", keep_local=True) + output: "cutouts/{cutout}.nc" + shell: "mv {input} {output}" if config['enable'].get('build_natura_raster', False): rule build_natura_raster: input: natura="data/bundle/natura/Natura2000_end2015.shp", - cutouts=expand("cutouts/{cutouts}", **config['atlite']) + cutouts=expand("cutouts/{cutouts}.nc", **config['atlite']) output: "resources/natura.tiff" log: "logs/build_natura_raster.log" script: "scripts/build_natura_raster.py" @@ -165,9 +170,9 @@ if config['enable'].get('build_natura_raster', False): if config['enable'].get('retrieve_natura_raster', True): rule retrieve_natura_raster: + input: HTTP.remote("zenodo.org/record/4706686/files/natura.tiff", keep_local=True) output: "resources/natura.tiff" - log: "logs/retrieve_natura_raster.log" - script: 
'scripts/retrieve_natura_raster.py' + shell: "mv {input} {output}" rule build_renewable_profiles: @@ -181,11 +186,10 @@ rule build_renewable_profiles: country_shapes='resources/country_shapes.geojson', offshore_shapes='resources/offshore_shapes.geojson', regions=lambda w: ("resources/regions_onshore.geojson" - if w.technology in ('onwind', 'solar') - else "resources/regions_offshore.geojson"), - cutout=lambda w: "cutouts/" + config["renewable"][w.technology]['cutout'] - output: - profile="resources/profile_{technology}.nc", + if w.technology in ('onwind', 'solar') + else "resources/regions_offshore.geojson"), + cutout=lambda w: "cutouts/" + config["renewable"][w.technology]['cutout'] + ".nc" + output: profile="resources/profile_{technology}.nc", log: "logs/build_renewable_profile_{technology}.log" benchmark: "benchmarks/build_renewable_profiles_{technology}" threads: ATLITE_NPROCESSES @@ -198,7 +202,7 @@ if 'hydro' in config['renewable'].keys(): input: country_shapes='resources/country_shapes.geojson', eia_hydro_generation='data/bundle/EIA_hydro_generation_2000_2014.csv', - cutout="cutouts/" + config["renewable"]['hydro']['cutout'] + cutout="cutouts/" + config["renewable"]['hydro']['cutout'] + ".nc" output: 'resources/profile_hydro.nc' log: "logs/build_hydro_profile.log" resources: mem=5000 @@ -388,29 +392,3 @@ rule plot_p_nom_max: log: "logs/plot_p_nom_max/elec_s{simpl}_{clusts}_{techs}_{country}_{ext}.log" script: "scripts/plot_p_nom_max.py" - -rule build_country_flh: - input: - base_network="networks/base.nc", - corine="data/bundle/corine/g250_clc06_V18_5.tif", - natura="resources/natura.tiff", - gebco=lambda w: ("data/bundle/GEBCO_2014_2D.nc" - if "max_depth" in config["renewable"][w.technology].keys() - else []), - country_shapes='resources/country_shapes.geojson', - offshore_shapes='resources/offshore_shapes.geojson', - pietzker="data/pietzker2014.xlsx", - regions=lambda w: ("resources/country_shapes.geojson" - if w.technology in ('onwind', 'solar') - else 
"resources/offshore_shapes.geojson"), - cutout=lambda w: "cutouts/" + config["renewable"][w.technology]['cutout'] - output: - area="resources/country_flh_area_{technology}.csv", - aggregated="resources/country_flh_aggregated_{technology}.csv", - uncorrected="resources/country_flh_uncorrected_{technology}.csv", - plot="resources/country_flh_{technology}.pdf", - exclusion=directory("resources/country_exclusion_{technology}") - log: "logs/build_country_flh_{technology}.log" - resources: mem=10000 - benchmark: "benchmarks/build_country_flh_{technology}" - script: "scripts/build_country_flh.py" diff --git a/config.default.yaml b/config.default.yaml index 9c3fa508..b1111d5a 100755 --- a/config.default.yaml +++ b/config.default.yaml @@ -62,18 +62,28 @@ electricity: atlite: nprocesses: 4 cutouts: + # use 'base' to determine geographical bounds and time span from config + # base: + # module: era5 europe-2013-era5: - module: era5 - xs: [-12., 35.] - ys: [72., 33.] - years: [2013, 2013] + module: era5 # in priority order + x: [-12., 35.] + y: [33., 72] + dx: 0.3 + dy: 0.3 + time: ['2013', '2013'] europe-2013-sarah: - module: sarah - resolution: 0.2 - xs: [-12., 42.] - ys: [65., 33.] - years: [2013, 2013] + module: [sarah, era5] # in priority order + x: [-12., 45.] + y: [33., 65] + dx: 0.2 + dy: 0.2 + time: ['2013', '2013'] + sarah_interpolate: false + sarah_dir: + features: [influx, temperature] + renewable: onwind: cutout: europe-2013-era5 diff --git a/config.tutorial.yaml b/config.tutorial.yaml index 5cc23e72..1dfde199 100755 --- a/config.tutorial.yaml +++ b/config.tutorial.yaml @@ -54,16 +54,15 @@ electricity: atlite: nprocesses: 4 cutouts: - europe-2013-era5: + europe-2013-era5-tutorial: module: era5 - xs: [4., 15.] - ys: [56., 46.] - months: [3, 3] - years: [2013, 2013] + x: [4., 15.] + y: [46., 56.] 
+ time: ["2013-03", "2013-03"] renewable: onwind: - cutout: europe-2013-era5 + cutout: europe-2013-era5-tutorial resource: method: wind turbine: Vestas_V112_3MW @@ -80,7 +79,7 @@ renewable: potential: simple # or conservative clip_p_max_pu: 1.e-2 offwind-ac: - cutout: europe-2013-era5 + cutout: europe-2013-era5-tutorial resource: method: wind turbine: NREL_ReferenceTurbine_5MW_offshore @@ -92,7 +91,7 @@ renewable: potential: simple # or conservative clip_p_max_pu: 1.e-2 offwind-dc: - cutout: europe-2013-era5 + cutout: europe-2013-era5-tutorial resource: method: wind turbine: NREL_ReferenceTurbine_5MW_offshore @@ -105,7 +104,7 @@ renewable: potential: simple # or conservative clip_p_max_pu: 1.e-2 solar: - cutout: europe-2013-era5 + cutout: europe-2013-era5-tutorial resource: method: pv panel: CSi diff --git a/doc/configtables/atlite.csv b/doc/configtables/atlite.csv index 5f21bb05..7bb56040 100644 --- a/doc/configtables/atlite.csv +++ b/doc/configtables/atlite.csv @@ -1,8 +1,9 @@ ,Unit,Values,Description nprocesses,--,int,"Number of parallel processes in cutout preparation" cutouts,,, --- {name},--,"Convention is to name cutouts like ``--`` (e.g. ``europe-2013-era5``).","Directory to write cutout data to. The user may specify multiple cutouts under configuration ``atlite: cutouts:``. Reference is used in configuration ``renewable: {technology}: cutout:``" --- -- module,--,"One of {'era5','sarah'}","Source of the reanalysis weather dataset (e.g. `ERA5 `_ or `SARAH-2 `_)" --- -- xs,°,"Float interval within [-180, 180]","Range of longitudes to download weather data for." --- -- ys,°,"Float interval within [-90, 90]","Range of latitudes to download weather data for." --- -- years,--,"Integer interval within [1979,2018]","Range of years to download weather data for." +-- {name},--,"Convention is to name cutouts like ``--`` (e.g. ``europe-2013-era5``).","Name of the cutout netcdf file. The user may specify multiple cutouts under configuration ``atlite: cutouts:``. 
Reference is used in configuration ``renewable: {technology}: cutout:``. The cutout ``base`` may be used to automatically calculate temporal and spatial bounds of the network." +-- -- module,--,"Subset of {'era5','sarah'}","Source of the reanalysis weather dataset (e.g. `ERA5 `_ or `SARAH-2 `_)" +-- -- x,°,"Float interval within [-180, 180]","Range of longitudes to download weather data for. If not defined, it defaults to the spatial bounds of all bus shapes." +-- -- y,°,"Float interval within [-90, 90]","Range of latitudes to download weather data for. If not defined, it defaults to the spatial bounds of all bus shapes." +-- -- time,,"Time interval within ['1979', '2018'] (with valid pandas date time strings)","Time span to download weather data for. If not defined, it defaults to the time interval spanned by the snapshots." +-- -- features,,"String or list of strings with valid cutout features ('inlfux', 'wind').","When freshly building a cutout, retrieve data only for those features. If not defined, it defaults to all available features." diff --git a/doc/configuration.rst b/doc/configuration.rst index 1a42c70a..a75669cd 100644 --- a/doc/configuration.rst +++ b/doc/configuration.rst @@ -95,9 +95,12 @@ Specifies the temporal range to build an energy system model for as arguments to ``atlite`` ============= +Define and specify the ``atlite.Cutout`` used for calculating renewable potentials and time-series. All options except for ``features`` are directly used as `cutout parameters `_. + .. literalinclude:: ../config.default.yaml :language: yaml - :lines: 62-75 + :start-at: atlite: + :end-before: renewable: .. csv-table:: :header-rows: 1 diff --git a/doc/plotting.rst b/doc/plotting.rst index cd404226..6b76a28c 100644 --- a/doc/plotting.rst +++ b/doc/plotting.rst @@ -9,50 +9,6 @@ Plotting and Summary .. warning:: The corresponding code is currently under revision and has only minimal documentation. -.. 
_flh: - -Rule ``build_country_flh`` -============================= - -.. graphviz:: - :align: center - - digraph snakemake_dag { - graph [bgcolor=white, - margin=0, - size="8,5" - ]; - node [fontname=sans, - fontsize=10, - penwidth=2, - shape=box, - style=rounded - ]; - edge [color=grey, - penwidth=2 - ]; - 0 [color="0.31 0.6 0.85", - fillcolor=gray, - label=build_country_flh, - style=filled]; - 1 [color="0.06 0.6 0.85", - label=base_network]; - 1 -> 0; - 2 [color="0.42 0.6 0.85", - label=build_natura_raster]; - 2 -> 0; - 3 [color="0.58 0.6 0.85", - label=build_shapes]; - 3 -> 0; - 4 [color="0.14 0.6 0.85", - label=build_cutout]; - 4 -> 0; - } - -| - -.. automodule:: build_country_flh - .. _plot_potentials: Rule ``plot_p_nom_max`` diff --git a/doc/preparation/retrieve.rst b/doc/preparation/retrieve.rst index ea8ecc3e..26f152c5 100644 --- a/doc/preparation/retrieve.rst +++ b/doc/preparation/retrieve.rst @@ -21,9 +21,59 @@ Rule ``retrieve_databundle`` Rule ``retrieve_cutout`` ------------------------ -.. automodule:: retrieve_cutout +.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.3517949.svg + :target: https://doi.org/10.5281/zenodo.3517949 + +Cutouts are spatio-temporal subsets of the European weather data from the `ECMWF ERA5 `_ reanalysis dataset and the `CMSAF SARAH-2 `_ solar surface radiation dataset for the year 2013. +They have been prepared by and are for use with the `atlite `_ tool. You can either generate them yourself using the ``build_cutouts`` rule or retrieve them directly from `zenodo `_ through the rule ``retrieve_cutout``. +The :ref:`tutorial` uses a smaller cutout than required for the full model (30 MB), which is also automatically downloaded. + +.. note:: + To download cutouts yourself from the `ECMWF ERA5 `_ you need to `set up the CDS API `_. + + +**Relevant Settings** + +.. code:: yaml + + tutorial: + enable: + build_cutout: + +.. 
seealso:: + Documentation of the configuration file ``config.yaml`` at + :ref:`toplevel_cf` + +**Outputs** + +- ``cutouts/{cutout}``: weather data from either the `ERA5 `_ reanalysis weather dataset or `SARAH-2 `_ satellite-based historic weather data. + +.. seealso:: + For details see :mod:`build_cutout` and read the `atlite documentation `_. + Rule ``retrieve_natura_raster`` ------------------------------- -.. automodule:: retrieve_natura_raster +.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.4706686.svg + :target: https://doi.org/10.5281/zenodo.4706686 + +This rule, as a substitute for :mod:`build_natura_raster`, downloads an already rasterized version (`natura.tiff `_) of `Natura 2000 `_ natural protection areas to reduce computation times. The file is placed into the ``resources`` sub-directory. + +**Relevant Settings** + +.. code:: yaml + + enable: + build_natura_raster: + +.. seealso:: + Documentation of the configuration file ``config.yaml`` at + :ref:`toplevel_cf` + +**Outputs** + +- ``resources/natura.tiff``: Rasterized version of `Natura 2000 `_ natural protection areas to reduce computation times. + +.. seealso:: + For details see :mod:`build_natura_raster`. diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 8233a1f3..a1b54396 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -11,8 +11,12 @@ Release Notes Upcoming Release ================ +* Switch to new major release, ``>=v0.2.1`` of ``atlite``. The version upgrade comes along with significant speed up for the rule ``build_renewable_profiles.py`` (~factor 2). A lot of the code which calculated the landuse availability is now outsourced and does not rely on ``glaes``, ``geokit`` anymore. This facilitates the environment building and version compatibility of ``gdal``, ``libgdal`` with other packages. +* The minimum python version was set to ``3.8``. +* The rule and script ``build_country_flh`` are removed as they're no longer used or maintained. 
+* The flag ``keep_all_available_areas`` in the configuration for renewable potentials (config.yaml -> renewable -> {technology}) was deprecated and now defaults to ``True``. +* The tutorial cutout was renamed from ``cutouts/europe-2013-era5.nc`` to ``cutouts/europe-2013-era5-tutorial.nc`` to accomodate tutorial and productive cutouts side-by-side. * Fix: Value for ``co2base`` in ``config.yaml`` adjusted to 1.487e9 t CO2-eq (from 3.1e9 t CO2-eq). The new value represents emissions related to the electricity sector for EU+UK. The old value was ~2x too high and used when the emissions wildcard in ``{opts}`` was used. - * Add option to include marginal costs of links representing fuel cells, electrolysis, and battery inverters [`#232 `_]. diff --git a/doc/wildcards.rst b/doc/wildcards.rst index 227997d1..b3267c23 100644 --- a/doc/wildcards.rst +++ b/doc/wildcards.rst @@ -130,8 +130,7 @@ It can take the values ``onwind``, ``offwind-ac``, ``offwind-dc``, and ``solar`` The wildcard can moreover be used to create technology specific figures and summaries. For instance ``{technology}`` can be used to plot regionally disaggregated potentials -with the rule :mod:`plot_p_nom_max` or to summarize a particular technology's -full load hours in various countries with the rule :mod:`build_country_flh`. +with the rule :mod:`plot_p_nom_max`. .. 
_attr: diff --git a/envs/environment.docs.yaml b/envs/environment.docs.yaml index 0c937e43..772583d4 100755 --- a/envs/environment.docs.yaml +++ b/envs/environment.docs.yaml @@ -9,7 +9,8 @@ dependencies: - python<=3.7 - pip - pypsa>=0.17.1 - - atlite=0.0.3 + - atlite>=0.2.2 + - dask<=2021.3.1 # until https://github.com/dask/dask/issues/7583 is solved - pre-commit # Dependencies of the workflow itself @@ -19,27 +20,13 @@ dependencies: - memory_profiler - yaml - pytables - - powerplantmatching>=0.4.3 - - # Second order dependencies which should really be deps of atlite - - xarray - - progressbar2 - - pyyaml>=5.1.0 + - powerplantmatching>=0.4.8 # GIS dependencies have to come all from conda-forge - cartopy - - fiona - - proj - - pyshp - - geopandas - - rasterio - - shapely - - libgdal + - descartes - pip: - vresutils==0.3.1 - - git+https://github.com/PyPSA/glaes.git#egg=glaes - - git+https://github.com/PyPSA/geokit.git#egg=geokit - - cdsapi - sphinx - sphinx_rtd_theme diff --git a/envs/environment.yaml b/envs/environment.yaml index 7c5faef3..790aec26 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -8,12 +8,13 @@ channels: - bioconda - http://conda.anaconda.org/gurobi dependencies: - - python + - python>=3.8 - pip - mamba # esp for windows build - - pypsa>=0.17.1 - - atlite=0.0.3 + - pypsa>=0.17.1 + - atlite>=0.2.2 + - dask<=2021.3.1 # until https://github.com/dask/dask/issues/7583 is solved # Dependencies of the workflow itself - xlrd @@ -29,32 +30,14 @@ dependencies: - powerplantmatching>=0.4.8 - numpy<=1.19.0 # otherwise macos fails - # Second order dependencies which should really be deps of atlite - - xarray - - netcdf4 - - bottleneck - - toolz - - dask - - progressbar2 - - pyyaml>=5.1.0 # Keep in conda environment when calling ipython - ipython # GIS dependencies: - cartopy - - fiona - - proj - - pyshp - - geopandas - - rasterio - - shapely - - libgdal<=3.0.4 - descartes - pip: - vresutils==0.3.1 - tsam>=1.1.0 - - 
git+https://github.com/PyPSA/glaes.git#egg=glaes - - git+https://github.com/PyPSA/geokit.git#egg=geokit - - cdsapi diff --git a/scripts/build_country_flh.py b/scripts/build_country_flh.py deleted file mode 100644 index 459b8f38..00000000 --- a/scripts/build_country_flh.py +++ /dev/null @@ -1,243 +0,0 @@ -#!/usr/bin/env python - -# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors -# -# SPDX-License-Identifier: GPL-3.0-or-later - -""" -Create ``.csv`` files and plots for comparing per country full load hours of renewable time series. - -Relevant Settings ------------------ - -.. code:: yaml - - snapshots: - - renewable: - {technology}: - cutout: - resource: - correction_factor: - -.. seealso:: - Documentation of the configuration file ``config.yaml`` at - :ref:`snapshots_cf`, :ref:`renewable_cf` - -Inputs ------- - -- ``data/bundle/corine/g250_clc06_V18_5.tif``: `CORINE Land Cover (CLC) `_ inventory on `44 classes `_ of land use (e.g. forests, arable land, industrial, urban areas). - - .. image:: img/corine.png - :scale: 33 % - -- ``data/bundle/GEBCO_2014_2D.nc``: A `bathymetric `_ data set with a global terrain model for ocean and land at 15 arc-second intervals by the `General Bathymetric Chart of the Oceans (GEBCO) `_. - - .. image:: img/gebco_2019_grid_image.jpg - :scale: 50 % - - **Source:** `GEBCO `_ - -- ``data/pietzker2014.xlsx``: `Supplementary material 2 `_ from `Pietzcker et al. `_; not part of the data bundle; download and place here yourself. 
-- ``resources/natura.tiff``: confer :ref:`natura` -- ``resources/country_shapes.geojson``: confer :ref:`shapes` -- ``resources/offshore_shapes.geojson``: confer :ref:`shapes` -- ``resources/regions_onshore.geojson``: (if not offshore wind), confer :ref:`busregions` -- ``resources/regions_offshore.geojson``: (if offshore wind), :ref:`busregions` -- ``"cutouts/" + config["renewable"][{technology}]['cutout']``: :ref:`cutout` -- ``networks/base.nc``: :ref:`base` - -Outputs -------- - -- ``resources/country_flh_area_{technology}.csv``: -- ``resources/country_flh_aggregated_{technology}.csv``: -- ``resources/country_flh_uncorrected_{technology}.csv``: -- ``resources/country_flh_{technology}.pdf``: -- ``resources/country_exclusion_{technology}``: - -Description ------------ - -""" - -import logging -from _helpers import configure_logging - -import os -import atlite -import numpy as np -import xarray as xr -import pandas as pd - -import geokit as gk -from scipy.sparse import vstack -import pycountry as pyc -import matplotlib.pyplot as plt - -from vresutils import landuse as vlanduse -from vresutils.array import spdiag - -import progressbar as pgb - -from build_renewable_profiles import init_globals, calculate_potential - -logger = logging.getLogger(__name__) - - -def build_area(flh, countries, areamatrix, breaks, fn): - area_unbinned = xr.DataArray(areamatrix.todense(), [countries, capacity_factor.coords['spatial']]) - bins = xr.DataArray(pd.cut(flh.to_series(), bins=breaks), flh.coords, name="bins") - area = area_unbinned.groupby(bins).sum(dim="spatial").to_pandas() - area.loc[:,slice(*area.sum()[lambda s: s > 0].index[[0,-1]])].to_csv(fn) - area.columns = area.columns.map(lambda s: s.left) - return area - - -def plot_area_not_solar(area, countries): - # onshore wind/offshore wind - a = area.T - - fig, axes = plt.subplots(nrows=len(countries), sharex=True) - for c, ax in zip(countries, axes): - d = a[[c]] / 1e3 - d.plot.bar(ax=ax, legend=False, align='edge', width=1.) 
- ax.set_ylabel(f"Potential {c} / GW") - ax.set_title(c) - ax.legend() - ax.set_xlabel("Full-load hours") - fig.savefig(snakemake.output.plot, transparent=True, bbox_inches='tight') - -def plot_area_solar(area, p_area, countries): - # onshore wind/offshore wind - p = p_area.T - a = area.T - - fig, axes = plt.subplots(nrows=len(countries), sharex=True, squeeze=False) - for c, ax in zip(countries, axes.flat): - d = pd.concat([a[c], p[c]], keys=['PyPSA-Eur', 'Pietzker'], axis=1) / 1e3 - d.plot.bar(ax=ax, legend=False, align='edge', width=1.) - # ax.set_ylabel(f"Potential {c} / GW") - ax.set_title(c) - ax.legend() - ax.set_xlabel("Full-load hours") - - fig.savefig(snakemake.output.plot, transparent=True, bbox_inches='tight') - - -def build_aggregate(flh, countries, areamatrix, breaks, p_area, fn): - agg_a = pd.Series(np.ravel((areamatrix / areamatrix.sum(axis=1)).dot(flh.values)), - countries, name="PyPSA-Eur") - - if p_area is None: - agg_a['Overall'] = float((np.asarray((areamatrix.sum(axis=0) / areamatrix.sum()) - .dot(flh.values)).squeeze())) - - agg = pd.DataFrame({'PyPSA-Eur': agg_a}) - else: - # Determine indices of countries which are also in Pietzcker - inds = pd.Index(countries).get_indexer(p_area.index) - areamatrix = areamatrix[inds] - - agg_a['Overall'] = float((np.asarray((areamatrix.sum(axis=0) / areamatrix.sum()) - .dot(flh.values)).squeeze())) - - midpoints = (breaks[1:] + breaks[:-1])/2. 
- p = p_area.T - - # Per-country FLH comparison - agg_p = pd.Series((p / p.sum()).multiply(midpoints, axis=0).sum(), name="Pietzker") - agg_p['Overall'] = float((p.sum(axis=1) / p.sum().sum()).multiply(midpoints, axis=0).sum()) - - agg = pd.DataFrame({'PyPSA-Eur': agg_a, 'Pietzcker': agg_p, 'Ratio': agg_p / agg_a}) - - agg.to_csv(fn) - -if __name__ == '__main__': - if 'snakemake' not in globals(): - from _helpers import mock_snakemake - snakemake = mock_snakemake('build_country_flh', technology='solar') - configure_logging(snakemake) - - pgb.streams.wrap_stderr() - - - config = snakemake.config['renewable'][snakemake.wildcards.technology] - - time = pd.date_range(freq='m', **snakemake.config['snapshots']) - params = dict(years=slice(*time.year[[0, -1]]), months=slice(*time.month[[0, -1]])) - - cutout = atlite.Cutout(config['cutout'], - cutout_dir=os.path.dirname(snakemake.input.cutout), - **params) - - minx, maxx, miny, maxy = cutout.extent - dx = (maxx - minx) / (cutout.shape[1] - 1) - dy = (maxy - miny) / (cutout.shape[0] - 1) - bounds = gk.Extent.from_xXyY((minx - dx/2., maxx + dx/2., - miny - dy/2., maxy + dy/2.)) - - # Use GLAES to compute available potentials and the transition matrix - paths = dict(snakemake.input) - - init_globals(bounds.xXyY, dx, dy, config, paths) - regions = gk.vector.extractFeatures(paths["regions"], onlyAttr=True) - countries = pd.Index(regions["name"], name="country") - - widgets = [ - pgb.widgets.Percentage(), - ' ', pgb.widgets.SimpleProgress(format='(%s)' % pgb.widgets.SimpleProgress.DEFAULT_FORMAT), - ' ', pgb.widgets.Bar(), - ' ', pgb.widgets.Timer(), - ' ', pgb.widgets.ETA() - ] - progressbar = pgb.ProgressBar(prefix='Compute GIS potentials: ', widgets=widgets, max_value=len(countries)) - - if not os.path.isdir(snakemake.output.exclusion): - os.makedirs(snakemake.output.exclusion) - - matrix = vstack([calculate_potential(i, save_map=os.path.join(snakemake.output.exclusion, countries[i])) - for i in progressbar(regions.index)]) - 
- areamatrix = matrix * spdiag(vlanduse._cutout_cell_areas(cutout).ravel()) - areamatrix.data[areamatrix.data < 1.] = 0 # ignore weather cells where only less than 1 km^2 can be installed - areamatrix.eliminate_zeros() - - resource = config['resource'] - func = getattr(cutout, resource.pop('method')) - correction_factor = config.get('correction_factor', 1.) - - capacity_factor = func(capacity_factor=True, show_progress='Compute capacity factors: ', **resource).stack(spatial=('y', 'x')) - flh_uncorr = capacity_factor * 8760 - flh_corr = correction_factor * flh_uncorr - - if snakemake.wildcards.technology == 'solar': - pietzcker = pd.read_excel(snakemake.input.pietzker, sheet_name="PV on all area", skiprows=2, header=[0,1]).iloc[1:177] - p_area1_50 = pietzcker['Usable Area at given FLh in 1-50km distance to settlement '].dropna(axis=1) - p_area1_50.columns = p_area1_50.columns.str.split(' ').str[0] - - p_area50_100 = pietzcker['Usable Area at given FLh in 50-100km distance to settlement '] - - p_area = p_area1_50 + p_area50_100 - cols = p_area.columns - breaks = cols.str.split('-').str[0].append(pd.Index([cols[-1].split('-')[1]])).astype(int) - p_area.columns = breaks[:-1] - - p_area = p_area.reindex(countries.map(lambda c: pyc.countries.get(alpha_2=c).name)) - p_area.index = countries - p_area = p_area.dropna() # Pietzcker does not have data for CZ and MK - else: - breaks = np.r_[0:8000:50] - p_area = None - - - area = build_area(flh_corr, countries, areamatrix, breaks, snakemake.output.area) - - if snakemake.wildcards.technology == 'solar': - plot_area_solar(area, p_area, p_area.index) - else: - plot_area_not_solar(area, countries) - - build_aggregate(flh_uncorr, countries, areamatrix, breaks, p_area, snakemake.output.uncorrected) - build_aggregate(flh_corr, countries, areamatrix, breaks, p_area, snakemake.output.aggregated) diff --git a/scripts/build_cutout.py b/scripts/build_cutout.py index 1e55faf5..e3490b13 100644 --- a/scripts/build_cutout.py +++ 
b/scripts/build_cutout.py @@ -1,7 +1,3 @@ -# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors -# -# SPDX-License-Identifier: GPL-3.0-or-later - """ Create cutouts with `atlite `_. @@ -92,10 +88,11 @@ Description """ import logging +import atlite +import geopandas as gpd +import pandas as pd from _helpers import configure_logging -import os -import atlite logger = logging.getLogger(__name__) @@ -106,14 +103,24 @@ if __name__ == "__main__": configure_logging(snakemake) cutout_params = snakemake.config['atlite']['cutouts'][snakemake.wildcards.cutout] - for p in ('xs', 'ys', 'years', 'months'): - if p in cutout_params: - cutout_params[p] = slice(*cutout_params[p]) - cutout = atlite.Cutout(snakemake.wildcards.cutout, - cutout_dir=os.path.dirname(snakemake.output[0]), - **cutout_params) + snapshots = pd.date_range(freq='h', **snakemake.config['snapshots']) + time = [snapshots[0], snapshots[-1]] + cutout_params['time'] = slice(*cutout_params.get('time', time)) - nprocesses = snakemake.config['atlite'].get('nprocesses', 4) + if {'x', 'y', 'bounds'}.isdisjoint(cutout_params): + # Determine the bounds from bus regions with a buffer of two grid cells + onshore = gpd.read_file(snakemake.input.regions_onshore) + offshore = gpd.read_file(snakemake.input.regions_offshore) + regions = onshore.append(offshore) + d = max(cutout_params.get('dx', 0.25), cutout_params.get('dy', 0.25))*2 + cutout_params['bounds'] = regions.total_bounds + [-d, -d, d, d] + elif {'x', 'y'}.issubset(cutout_params): + cutout_params['x'] = slice(*cutout_params['x']) + cutout_params['y'] = slice(*cutout_params['y']) - cutout.prepare(nprocesses=nprocesses) + + logging.info(f"Preparing cutout with parameters {cutout_params}.") + features = cutout_params.pop('features', None) + cutout = atlite.Cutout(snakemake.output[0], **cutout_params) + cutout.prepare(features=features) diff --git a/scripts/build_hydro_profile.py b/scripts/build_hydro_profile.py index 339fccaf..395753c0 100644 --- 
a/scripts/build_hydro_profile.py +++ b/scripts/build_hydro_profile.py @@ -62,7 +62,6 @@ Description import logging from _helpers import configure_logging -import os import atlite import geopandas as gpd from vresutils import hydro as vhydro @@ -76,20 +75,21 @@ if __name__ == "__main__": configure_logging(snakemake) config = snakemake.config['renewable']['hydro'] - cutout_dir = os.path.dirname(snakemake.input.cutout) - cutout = atlite.Cutout(config['cutout'], cutout_dir=cutout_dir) + cutout = atlite.Cutout(snakemake.input.cutout) countries = snakemake.config['countries'] - country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index('name')['geometry'].reindex(countries) + country_shapes = (gpd.read_file(snakemake.input.country_shapes) + .set_index('name')['geometry'].reindex(countries)) country_shapes.index.name = 'countries' - eia_stats = vhydro.get_eia_annual_hydro_generation(snakemake.input.eia_hydro_generation).reindex(columns=countries) + eia_stats = vhydro.get_eia_annual_hydro_generation( + snakemake.input.eia_hydro_generation).reindex(columns=countries) inflow = cutout.runoff(shapes=country_shapes, smooth=True, lower_threshold_quantile=True, normalize_using_yearly=eia_stats) if 'clip_min_inflow' in config: - inflow.values[inflow.values < config['clip_min_inflow']] = 0. 
+ inflow = inflow.where(inflow > config['clip_min_inflow'], 0) inflow.to_netcdf(snakemake.output[0]) diff --git a/scripts/build_natura_raster.py b/scripts/build_natura_raster.py index 39667ca0..63b311e9 100644 --- a/scripts/build_natura_raster.py +++ b/scripts/build_natura_raster.py @@ -43,30 +43,49 @@ import logging from _helpers import configure_logging import atlite -import geokit as gk -from pathlib import Path +import geopandas as gpd +import rasterio as rio +from rasterio.features import geometry_mask +from rasterio.warp import transform_bounds logger = logging.getLogger(__name__) + def determine_cutout_xXyY(cutout_name): - cutout = atlite.Cutout(cutout_name, cutout_dir=cutout_dir) + cutout = atlite.Cutout(cutout_name) + assert cutout.crs.to_epsg() == 4326 x, X, y, Y = cutout.extent - dx = (X - x) / (cutout.shape[1] - 1) - dy = (Y - y) / (cutout.shape[0] - 1) + dx, dy = cutout.dx, cutout.dy return [x - dx/2., X + dx/2., y - dy/2., Y + dy/2.] +def get_transform_and_shape(bounds, res): + left, bottom = [(b // res)* res for b in bounds[:2]] + right, top = [(b // res + 1) * res for b in bounds[2:]] + shape = int((top - bottom) // res), int((right - left) / res) + transform = rio.Affine(res, 0, left, 0, -res, top) + return transform, shape + + if __name__ == "__main__": if 'snakemake' not in globals(): from _helpers import mock_snakemake snakemake = mock_snakemake('build_natura_raster') configure_logging(snakemake) - cutout_dir = Path(snakemake.input.cutouts[0]).parent.resolve() - cutout_names = {res['cutout'] for res in snakemake.config['renewable'].values()} - xs, Xs, ys, Ys = zip(*(determine_cutout_xXyY(cutout) for cutout in cutout_names)) - xXyY = min(xs), max(Xs), min(ys), max(Ys) - natura = gk.vector.loadVector(snakemake.input.natura) - extent = gk.Extent.from_xXyY(xXyY).castTo(3035).fit(100) - extent.rasterize(natura, pixelWidth=100, pixelHeight=100, output=snakemake.output[0]) + cutouts = snakemake.input.cutouts + xs, Xs, ys, Ys = 
zip(*(determine_cutout_xXyY(cutout) for cutout in cutouts)) + bounds = transform_bounds(4326, 3035, min(xs), min(ys), max(Xs), max(Ys)) + transform, out_shape = get_transform_and_shape(bounds, res=100) + + # adjusted boundaries + shapes = gpd.read_file(snakemake.input.natura).to_crs(3035) + raster = ~geometry_mask(shapes.geometry, out_shape[::-1], transform) + raster = raster.astype(rio.uint8) + + with rio.open(snakemake.output[0], 'w', driver='GTiff', dtype=rio.uint8, + count=1, transform=transform, crs=3035, compress='lzw', + width=raster.shape[1], height=raster.shape[0]) as dst: + dst.write(raster, indexes=1) + diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index 71adb66e..f7e1bc7f 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -60,7 +60,6 @@ Inputs **Source:** `GEBCO `_ - ``resources/natura.tiff``: confer :ref:`natura` -- ``resources/country_shapes.geojson``: confer :ref:`shapes` - ``resources/offshore_shapes.geojson``: confer :ref:`shapes` - ``resources/regions_onshore.geojson``: (if not offshore wind), confer :ref:`busregions` - ``resources/regions_offshore.geojson``: (if offshore wind), :ref:`busregions` @@ -180,220 +179,154 @@ node (`p_nom_max`): ``simple`` and ``conservative``: reached. 
""" +import progressbar as pgb +import geopandas as gpd +import xarray as xr +import numpy as np +import atlite import logging +from pypsa.geo import haversine +from shapely.geometry import LineString +import time + from _helpers import configure_logging -import os -import atlite - -import numpy as np -import xarray as xr -import pandas as pd -import multiprocessing as mp -import matplotlib.pyplot as plt -import progressbar as pgb - -from scipy.sparse import csr_matrix, vstack -from pypsa.geo import haversine -from vresutils import landuse as vlanduse -from vresutils.array import spdiag - logger = logging.getLogger(__name__) -bounds = dx = dy = config = paths = gebco = clc = natura = None - - -def init_globals(bounds_xXyY, n_dx, n_dy, n_config, n_paths): - # Late import so that the GDAL Context is only created in the new processes - global gl, gk, gdal - import glaes as gl - import geokit as gk - from osgeo import gdal as gdal - - # global in each process of the multiprocessing.Pool - global bounds, dx, dy, config, paths, gebco, clc, natura - - bounds = gk.Extent.from_xXyY(bounds_xXyY) - dx = n_dx - dy = n_dy - config = n_config - paths = n_paths - - if "max_depth" in config: - gebco = gk.raster.loadRaster(paths["gebco"]) - gebco.SetProjection(gk.srs.loadSRS(4326).ExportToWkt()) - - clc = gk.raster.loadRaster(paths["corine"]) - clc.SetProjection(gk.srs.loadSRS(3035).ExportToWkt()) - - natura = gk.raster.loadRaster(paths["natura"]) - - -def downsample_to_coarse_grid(bounds, dx, dy, mask, data): - # The GDAL warp function with the 'average' resample algorithm needs a band of zero values of at least - # the size of one coarse cell around the original raster or it produces erroneous results - orig = mask.createRaster(data=data) - padded_extent = mask.extent.castTo(bounds.srs).pad(max(dx, dy)).castTo(mask.srs) - padded = padded_extent.fit((mask.pixelWidth, mask.pixelHeight)).warp(orig, mask.pixelWidth, mask.pixelHeight) - orig = None # free original raster - average = 
bounds.createRaster(dx, dy, dtype=gdal.GDT_Float32) - assert gdal.Warp(average, padded, resampleAlg='average') == 1, "gdal warp failed: %s" % gdal.GetLastErrorMsg() - return average - - -def calculate_potential(gid, save_map=None): - feature = gk.vector.extractFeature(paths["regions"], where=gid) - ec = gl.ExclusionCalculator(feature.geom) - - corine = config.get("corine", {}) - if isinstance(corine, list): - corine = {'grid_codes': corine} - if "grid_codes" in corine: - ec.excludeRasterType(clc, value=corine["grid_codes"], invert=True) - if corine.get("distance", 0.) > 0.: - ec.excludeRasterType(clc, value=corine["distance_grid_codes"], buffer=corine["distance"]) - - if config.get("natura", False): - ec.excludeRasterType(natura, value=1) - if "max_depth" in config: - ec.excludeRasterType(gebco, (None, -config["max_depth"])) - - # TODO compute a distance field as a raster beforehand - if 'max_shore_distance' in config: - ec.excludeVectorType(paths["country_shapes"], buffer=config['max_shore_distance'], invert=True) - if 'min_shore_distance' in config: - ec.excludeVectorType(paths["country_shapes"], buffer=config['min_shore_distance']) - - if save_map is not None: - ec.draw() - plt.savefig(save_map, transparent=True) - plt.close() - - availability = downsample_to_coarse_grid(bounds, dx, dy, ec.region, np.where(ec.region.mask, ec._availability, 0)) - - return csr_matrix(gk.raster.extractMatrix(availability).flatten() / 100.) 
- if __name__ == '__main__': if 'snakemake' not in globals(): from _helpers import mock_snakemake snakemake = mock_snakemake('build_renewable_profiles', technology='solar') configure_logging(snakemake) - pgb.streams.wrap_stderr() - + paths = snakemake.input + nprocesses = snakemake.config['atlite'].get('nprocesses') + noprogress = not snakemake.config['atlite'].get('show_progress', True) config = snakemake.config['renewable'][snakemake.wildcards.technology] - - time = pd.date_range(freq='m', **snakemake.config['snapshots']) - params = dict(years=slice(*time.year[[0, -1]]), months=slice(*time.month[[0, -1]])) - - cutout = atlite.Cutout(config['cutout'], - cutout_dir=os.path.dirname(snakemake.input.cutout), - **params) - - minx, maxx, miny, maxy = cutout.extent - dx = (maxx - minx) / (cutout.shape[1] - 1) - dy = (maxy - miny) / (cutout.shape[0] - 1) - bounds_xXyY = (minx - dx/2., maxx + dx/2., miny - dy/2., maxy + dy/2.) - - # Use GLAES to compute available potentials and the transition matrix - paths = dict(snakemake.input) - - # Use the following for testing the default windows method on linux - # mp.set_start_method('spawn') - with mp.Pool(initializer=init_globals, initargs=(bounds_xXyY, dx, dy, config, paths), - maxtasksperchild=20, processes=snakemake.config['atlite'].get('nprocesses', 2)) as pool: - - # The GDAL library creates a GDAL context on module import, which may not be shared over multiple - # processes or the PROJ4 library has a hickup, so we import only after forking. 
- import geokit as gk - - regions = gk.vector.extractFeatures(paths["regions"], onlyAttr=True) - buses = pd.Index(regions['name'], name="bus") - widgets = [ - pgb.widgets.Percentage(), - ' ', pgb.widgets.SimpleProgress(format='(%s)' % pgb.widgets.SimpleProgress.DEFAULT_FORMAT), - ' ', pgb.widgets.Bar(), - ' ', pgb.widgets.Timer(), - ' ', pgb.widgets.ETA() - ] - progressbar = pgb.ProgressBar(prefix='Compute GIS potentials: ', widgets=widgets, max_value=len(regions)) - matrix = vstack(list(progressbar(pool.imap(calculate_potential, regions.index)))) - - potentials = config['capacity_per_sqkm'] * vlanduse._cutout_cell_areas(cutout) - potmatrix = matrix * spdiag(potentials.ravel()) - if not config.get('keep_all_available_areas', False): - potmatrix.data[potmatrix.data < 1.] = 0 # ignore weather cells where only less than 1 MW can be installed - potmatrix.eliminate_zeros() - - resource = config['resource'] - func = getattr(cutout, resource.pop('method')) + resource = config['resource'] # pv panel config / wind turbine config correction_factor = config.get('correction_factor', 1.) 
- if correction_factor != 1.: - logger.warning('correction_factor is set as {}'.format(correction_factor)) - capacity_factor = correction_factor * func(capacity_factor=True, show_progress='Compute capacity factors: ', **resource).stack(spatial=('y', 'x')).values - layoutmatrix = potmatrix * spdiag(capacity_factor) - - profile, capacities = func(matrix=layoutmatrix, index=buses, per_unit=True, - return_capacity=True, show_progress='Compute profiles: ', - **resource) - + capacity_per_sqkm = config['capacity_per_sqkm'] p_nom_max_meth = config.get('potential', 'conservative') - if p_nom_max_meth == 'simple': - p_nom_max = xr.DataArray(np.asarray(potmatrix.sum(axis=1)).squeeze(), [buses]) - elif p_nom_max_meth == 'conservative': - # p_nom_max has to be calculated for each bus and is the minimal ratio - # (min over all weather grid cells of the bus region) between the available - # potential (potmatrix) and the used normalised layout (layoutmatrix / - # capacities), so we would like to calculate i.e. potmatrix / (layoutmatrix / - # capacities). Since layoutmatrix = potmatrix * capacity_factor, this - # corresponds to capacities/max(capacity factor in the voronoi cell) - p_nom_max = xr.DataArray([1./np.max(capacity_factor[inds]) if len(inds) else 0. 
- for inds in np.split(potmatrix.indices, potmatrix.indptr[1:-1])], [buses]) * capacities + if isinstance(config.get("corine", {}), list): + config['corine'] = {'grid_codes': config['corine']} + + if correction_factor != 1.: + logger.info(f'correction_factor is set as {correction_factor}') + + + cutout = atlite.Cutout(paths['cutout']) + regions = gpd.read_file(paths.regions).set_index('name').rename_axis('bus') + buses = regions.index + + excluder = atlite.ExclusionContainer(crs=3035, res=100) + + if config['natura']: + excluder.add_raster(paths.natura, nodata=0, allow_no_overlap=True) + + corine = config.get("corine", {}) + if "grid_codes" in corine: + codes = corine["grid_codes"] + excluder.add_raster(paths.corine, codes=codes, invert=True, crs=3035) + if corine.get("distance", 0.) > 0.: + codes = corine["distance_grid_codes"] + buffer = corine["distance"] + excluder.add_raster(paths.corine, codes=codes, buffer=buffer, crs=3035) + + if "max_depth" in config: + func = lambda v: v <= -config['max_depth'] + excluder.add_raster(paths.gebco, codes=func, crs=4236, nodata=-1000) + + if 'min_shore_distance' in config: + buffer = config['min_shore_distance'] + excluder.add_geometry(paths.country_shapes, buffer=buffer) + + if 'max_shore_distance' in config: + buffer = config['max_shore_distance'] + excluder.add_geometry(paths.country_shapes, buffer=buffer, invert=True) + + kwargs = dict(nprocesses=nprocesses, disable_progressbar=noprogress) + if noprogress: + logger.info('Calculate landuse availabilities...') + start = time.time() + availability = cutout.availabilitymatrix(regions, excluder, **kwargs) + duration = time.time() - start + logger.info(f'Completed availability calculation ({duration:2.2f}s)') else: - raise AssertionError('Config key `potential` should be one of "simple" (default) or "conservative",' - ' not "{}"'.format(p_nom_max_meth)) + availability = cutout.availabilitymatrix(regions, excluder, **kwargs) - layout = 
xr.DataArray(np.asarray(potmatrix.sum(axis=0)).reshape(cutout.shape), - [cutout.meta.indexes[ax] for ax in ['y', 'x']]) + area = cutout.grid.to_crs(3035).area / 1e6 + area = xr.DataArray(area.values.reshape(cutout.shape), + [cutout.coords['y'], cutout.coords['x']]) - # Determine weighted average distance from substation - cell_coords = cutout.grid_coordinates() + potential = capacity_per_sqkm * availability.sum('bus') * area + func = getattr(cutout, resource.pop('method')) + resource['dask_kwargs'] = {'num_workers': nprocesses} + capacity_factor = correction_factor * func(capacity_factor=True, **resource) + layout = capacity_factor * area * capacity_per_sqkm + profile, capacities = func(matrix=availability.stack(spatial=['y','x']), + layout=layout, index=buses, + per_unit=True, return_capacity=True, **resource) + + logger.info(f"Calculating maximal capacity per bus (method '{p_nom_max_meth}')") + if p_nom_max_meth == 'simple': + p_nom_max = capacity_per_sqkm * availability @ area + elif p_nom_max_meth == 'conservative': + max_cap_factor = capacity_factor.where(availability!=0).max(['x', 'y']) + p_nom_max = capacities / max_cap_factor + else: + raise AssertionError('Config key `potential` should be one of "simple" ' + f'(default) or "conservative", not "{p_nom_max_meth}"') + + + + logger.info('Calculate average distances.') + layoutmatrix = (layout * availability).stack(spatial=['y','x']) + + coords = cutout.grid[['x', 'y']] + bus_coords = regions[['x', 'y']] average_distance = [] - for i in regions.index: - row = layoutmatrix[i] - distances = haversine(regions.loc[i, ['x', 'y']], cell_coords[row.indices])[0] - average_distance.append((distances * (row.data / row.data.sum())).sum()) + centre_of_mass = [] + for bus in buses: + row = layoutmatrix.sel(bus=bus).data + nz_b = row != 0 + row = row[nz_b] + co = coords[nz_b] + distances = haversine(bus_coords.loc[bus], co) + average_distance.append((distances * (row / row.sum())).sum()) + centre_of_mass.append(co.values.T @ 
(row / row.sum())) average_distance = xr.DataArray(average_distance, [buses]) + centre_of_mass = xr.DataArray(centre_of_mass, [buses, ('spatial', ['x', 'y'])]) + ds = xr.merge([(correction_factor * profile).rename('profile'), - capacities.rename('weight'), - p_nom_max.rename('p_nom_max'), - layout.rename('potential'), - average_distance.rename('average_distance')]) + capacities.rename('weight'), + p_nom_max.rename('p_nom_max'), + potential.rename('potential'), + average_distance.rename('average_distance')]) + if snakemake.wildcards.technology.startswith("offwind"): - import geopandas as gpd - from shapely.geometry import LineString - - offshore_shape = gpd.read_file(snakemake.input.offshore_shapes).unary_union + logger.info('Calculate underwater fraction of connections.') + offshore_shape = gpd.read_file(paths['offshore_shapes']).unary_union underwater_fraction = [] - for i in regions.index: - row = layoutmatrix[i] - centre_of_mass = (cell_coords[row.indices] * (row.data / row.data.sum())[:,np.newaxis]).sum(axis=0) - line = LineString([centre_of_mass, regions.loc[i, ['x', 'y']]]) - underwater_fraction.append(line.intersection(offshore_shape).length / line.length) + for bus in buses: + p = centre_of_mass.sel(bus=bus).data + line = LineString([p, regions.loc[bus, ['x', 'y']]]) + frac = line.intersection(offshore_shape).length/line.length + underwater_fraction.append(frac) ds['underwater_fraction'] = xr.DataArray(underwater_fraction, [buses]) # select only buses with some capacity and minimal capacity factor ds = ds.sel(bus=((ds['profile'].mean('time') > config.get('min_p_max_pu', 0.)) & - (ds['p_nom_max'] > config.get('min_p_nom_max', 0.)))) + (ds['p_nom_max'] > config.get('min_p_nom_max', 0.)))) if 'clip_p_max_pu' in config: - ds['profile'].values[ds['profile'].values < config['clip_p_max_pu']] = 0. 
+ min_p_max_pu = config['clip_p_max_pu'] + ds['profile'] = ds['profile'].where(ds['profile'] >= min_p_max_pu, 0) ds.to_netcdf(snakemake.output.profile) diff --git a/scripts/retrieve_cutout.py b/scripts/retrieve_cutout.py deleted file mode 100644 index 719a32fc..00000000 --- a/scripts/retrieve_cutout.py +++ /dev/null @@ -1,75 +0,0 @@ -# SPDX-FileCopyrightText: 2019-2020 Fabian Hofmann (FIAS) -# -# SPDX-License-Identifier: GPL-3.0-or-later - -""" -.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.3517949.svg - :target: https://doi.org/10.5281/zenodo.3517949 - -Cutouts are spatiotemporal subsets of the European weather data from the `ECMWF ERA5 `_ reanalysis dataset and the `CMSAF SARAH-2 `_ solar surface radiation dataset for the year 2013 (3.9 GB). -They have been prepared by and are for use with the `atlite `_ tool. You can either generate them yourself using the ``build_cutouts`` rule or retrieve them directly from `zenodo `_ through the rule ``retrieve_cutout`` described here. - -.. note:: - To download cutouts yourself from the `ECMWF ERA5 `_ you need to `set up the CDS API `_. - -The :ref:`tutorial` uses smaller `cutouts `_ than required for the full model (19 MB) - -.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.3518020.svg - :target: https://doi.org/10.5281/zenodo.3518020 - - -**Relevant Settings** - -.. code:: yaml - - tutorial: - enable: - build_cutout: - -.. seealso:: - Documentation of the configuration file ``config.yaml`` at - :ref:`toplevel_cf` - -**Outputs** - -- ``cutouts/{cutout}``: weather data from either the `ERA5 `_ reanalysis weather dataset or `SARAH-2 `_ satellite-based historic weather data. - -.. seealso:: - For details see :mod:`build_cutout` and read the `atlite documentation `_. 
- -""" - -import logging -logger = logging.getLogger(__name__) - -from pathlib import Path -import tarfile -from _helpers import progress_retrieve, configure_logging - -if __name__ == "__main__": - if 'snakemake' not in globals(): - from _helpers import mock_snakemake - snakemake = mock_snakemake('retrieve_cutout') - rootpath = '..' - else: - rootpath = '.' - - configure_logging(snakemake) # TODO Make logging compatible with progressbar (see PR #102) - - if snakemake.config['tutorial']: - url = "https://zenodo.org/record/3518020/files/pypsa-eur-tutorial-cutouts.tar.xz" - else: - url = "https://zenodo.org/record/3517949/files/pypsa-eur-cutouts.tar.xz" - - # Save location - tarball_fn = Path(f"{rootpath}/cutouts.tar.xz") - - logger.info(f"Downloading cutouts from '{url}'.") - progress_retrieve(url, tarball_fn) - - logger.info(f"Extracting cutouts.") - tarfile.open(tarball_fn).extractall(path=rootpath) - - tarball_fn.unlink() - - logger.info(f"Cutouts available in '{Path(tarball_fn.stem).stem}'.") diff --git a/scripts/retrieve_natura_raster.py b/scripts/retrieve_natura_raster.py deleted file mode 100644 index b179b46a..00000000 --- a/scripts/retrieve_natura_raster.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright 2019-2020 Fabian Hofmann (FIAS) -# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors -# -# SPDX-License-Identifier: GPL-3.0-or-later - -""" -.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.3518215.svg - :target: https://doi.org/10.5281/zenodo.3518215 - -This rule, as a substitute for :mod:`build_natura_raster`, downloads an already rasterized version (`natura.tiff `_) of `Natura 2000 `_ natural protection areas to reduce computation times. The file is placed into the ``resources`` sub-directory. - -**Relevant Settings** - -.. code:: yaml - - enable: - build_natura_raster: - -.. 
seealso:: - Documentation of the configuration file ``config.yaml`` at - :ref:`toplevel_cf` - -**Outputs** - -- ``resources/natura.tiff``: Rasterized version of `Natura 2000 `_ natural protection areas to reduce computation times. - -.. seealso:: - For details see :mod:`build_natura_raster`. - -""" - -import logging - -from _helpers import progress_retrieve, configure_logging - -logger = logging.getLogger(__name__) - -if __name__ == "__main__": - if 'snakemake' not in globals(): - from _helpers import mock_snakemake - snakemake = mock_snakemake('retrieve_natura_raster') - configure_logging(snakemake) # TODO Make logging compatible with progressbar (see PR #102) - - url = "https://zenodo.org/record/3518215/files/natura.tiff" - - logger.info(f"Downloading natura raster from '{url}'.") - progress_retrieve(url, snakemake.output[0]) - - logger.info(f"Natura raster available as '{snakemake.output[0]}'.") diff --git a/test/config.test1.yaml b/test/config.test1.yaml index 2a91aaf0..d13a6844 100755 --- a/test/config.test1.yaml +++ b/test/config.test1.yaml @@ -53,16 +53,15 @@ electricity: atlite: nprocesses: 4 cutouts: - europe-2013-era5: + europe-2013-era5-tutorial: module: era5 - xs: [4., 15.] - ys: [56., 46.] - months: [3, 3] - years: [2013, 2013] + x: [4., 15.] + y: [46., 56.] 
+ time: ["2013-03", "2013-03"] renewable: onwind: - cutout: europe-2013-era5 + cutout: europe-2013-era5-tutorial resource: method: wind turbine: Vestas_V112_3MW @@ -79,7 +78,7 @@ renewable: potential: simple # or conservative clip_p_max_pu: 1.e-2 offwind-ac: - cutout: europe-2013-era5 + cutout: europe-2013-era5-tutorial resource: method: wind turbine: NREL_ReferenceTurbine_5MW_offshore @@ -91,7 +90,7 @@ renewable: potential: simple # or conservative clip_p_max_pu: 1.e-2 offwind-dc: - cutout: europe-2013-era5 + cutout: europe-2013-era5-tutorial resource: method: wind turbine: NREL_ReferenceTurbine_5MW_offshore @@ -104,7 +103,7 @@ renewable: potential: simple # or conservative clip_p_max_pu: 1.e-2 solar: - cutout: europe-2013-era5 + cutout: europe-2013-era5-tutorial resource: method: pv panel: CSi @@ -147,9 +146,9 @@ transformers: load: url: https://data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv - power_statistics: True # only for files from <2019; set false in order to get ENTSOE transparency data + power_statistics: True # only for files from <2019; set false in order to get ENTSOE transparency data interpolate_limit: 3 # data gaps up until this size are interpolated linearly - time_shift_for_large_gaps: 1w # data gaps up until this size are copied by copying from + time_shift_for_large_gaps: 1w # data gaps up until this size are copied by copying from manual_adjustments: true # false scaling_factor: 1.0 From 035bcf99df2e7f16483262697b381a3ee0e412f4 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 28 Apr 2021 09:25:27 +0200 Subject: [PATCH 004/102] restore REUSE compliance [skip travis] --- scripts/build_cutout.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/scripts/build_cutout.py b/scripts/build_cutout.py index e3490b13..79be84fc 100644 --- a/scripts/build_cutout.py +++ b/scripts/build_cutout.py @@ -1,3 +1,7 @@ +# SPDX-FileCopyrightText: : 2017-2021 The PyPSA-Eur Authors +# +# SPDX-License-Identifier: 
GPL-3.0-or-later + """ Create cutouts with `atlite `_. From cf55f00656bde956d408567404132aafe8b81115 Mon Sep 17 00:00:00 2001 From: Chiara Anselmetti <40397544+chiaroo@users.noreply.github.com> Date: Thu, 6 May 2021 15:56:49 +0200 Subject: [PATCH 005/102] Delete capital costs at battery discharge link (#240) Hey guys, please correct me if I'm wrong, but I think pricing both the charge and discharge of the battery store component with the inverter's capital costs results in duplicating costs since a bi-directional inverter is considered (I checked the source of the cost assumption, Budischak 2013). Before working with PyPSA-EUR, I have worked with the toy model WHOBS where only the charging-link of the battery store component is priced with its capital costs (cf. https://github.com/PyPSA/WHOBS/blob/master/run_single_simulation.ipynb). The proposed change in code is equivalent. --- scripts/add_extra_components.py | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/add_extra_components.py b/scripts/add_extra_components.py index b957ca40..946c4433 100644 --- a/scripts/add_extra_components.py +++ b/scripts/add_extra_components.py @@ -152,7 +152,6 @@ def attach_stores(n, costs): bus1=buses_i, carrier='battery discharger', efficiency=costs.at['battery inverter','efficiency'], - capital_cost=costs.at['battery inverter', 'capital_cost'], p_nom_extendable=True, marginal_cost=costs.at["battery inverter", "marginal_cost"]) From f3f587e3f8ea7211c6605725fe47ae05fa380216 Mon Sep 17 00:00:00 2001 From: martacki Date: Fri, 21 May 2021 13:54:38 +0200 Subject: [PATCH 006/102] simplify to substations - initial draft --- config.default.yaml | 2 ++ config.tutorial.yaml | 2 ++ scripts/simplify_network.py | 47 +++++++++++++++++++++++++++++++++++-- 3 files changed, 49 insertions(+), 2 deletions(-) diff --git a/config.default.yaml b/config.default.yaml index b1111d5a..e210320f 100755 --- a/config.default.yaml +++ b/config.default.yaml @@ -59,6 +59,8 @@ electricity: # Wind: [onwind, 
offwind-ac, offwind-dc] # Solar: [solar] +simplify_to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections) + atlite: nprocesses: 4 cutouts: diff --git a/config.tutorial.yaml b/config.tutorial.yaml index 1dfde199..97c5d9ca 100755 --- a/config.tutorial.yaml +++ b/config.tutorial.yaml @@ -51,6 +51,8 @@ electricity: custom_powerplants: false # use pandas query strings here, e.g. Country in ['Germany'] conventional_carriers: [coal, CCGT] # [nuclear, oil, OCGT, CCGT, coal, lignite, geothermal, biomass] +simplify_to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections) + atlite: nprocesses: 4 cutouts: diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index b05d59aa..4572e664 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -98,7 +98,7 @@ from six.moves import reduce import pypsa from pypsa.io import import_components_from_dataframe, import_series_from_dataframe -from pypsa.networkclustering import busmap_by_stubs, aggregategenerators, aggregateoneport +from pypsa.networkclustering import busmap_by_stubs, aggregategenerators, aggregateoneport, get_clustering_from_busmap, _make_consense logger = logging.getLogger(__name__) @@ -308,7 +308,6 @@ def simplify_links(n): _aggregate_and_move_components(n, busmap, connection_costs_to_bus) return n, busmap - def remove_stubs(n): logger.info("Removing stubs") @@ -320,6 +319,46 @@ def remove_stubs(n): return n, busmap +def aggregate_to_substations(n): + logger.info("Aggregating buses that are no substations or have a no valid offshore connection")# + + busmap = n.buses.index.to_series() + + no_substations = list(set(n.buses.index)-set(n.generators.bus)-set(n.loads.bus)) + + + index = [np.append(["Line" for c in range(len(n.lines))], + ["Link" for c in range(len(n.links))]), + np.append(n.lines.index, n.links.index)] + 
#under_construction lines should be last choice, but weight should be < inf in case no other node is reachable, hence 1e-3 + weight = pd.Series(np.append((n.lines.length/n.lines.s_nom.apply(lambda b: b if b>0 else 1e-3)).values, + (n.links.length/n.links.p_nom.apply(lambda b: b if b>0 else 1e-3)).values), + index=index) + + adj = n.adjacency_matrix(branch_components=['Line', 'Link'], weights=weight) + + dist = dijkstra(adj, directed=False, indices=n.buses.index.get_indexer(no_substations)) + dist[:, n.buses.index.get_indexer(no_substations)] = np.inf #no_substations should not be assigned to other no_substations + + #restrict to same country: + for bus in no_substations: + country_buses = n.buses[~n.buses.country.isin([n.buses.loc[bus].country])].index + dist[n.buses.loc[no_substations].index.get_indexer([bus]),n.buses.index.get_indexer(country_buses)] = np.inf + + assign_to = dist.argmin(axis=1) + busmap.loc[no_substations] = n.buses.iloc[assign_to].index + + clustering = get_clustering_from_busmap(n, busmap, + bus_strategies=dict(country=_make_consense("Bus", "country")), + aggregate_generators_weighted=True, + aggregate_generators_carriers=None, + aggregate_one_ports=["Load", "StorageUnit"], + line_length_factor=1.0, + generator_strategies={'p_nom_max': 'sum'}, + scale_link_capital_costs=False) + + return clustering.network, busmap + def cluster(n, n_clusters): logger.info(f"Clustering to {n_clusters} buses") @@ -358,6 +397,10 @@ if __name__ == "__main__": busmaps = [trafo_map, simplify_links_map, stub_map] + if snakemake.config['simplify_to_substations']: + n, substation_map = aggregate_to_substations(n) + busmaps.append(substation_map) + if snakemake.wildcards.simpl: n, cluster_map = cluster(n, int(snakemake.wildcards.simpl)) busmaps.append(cluster_map) From 11c29ac6cc9078c8376d17dd470d27eeed473896 Mon Sep 17 00:00:00 2001 From: Chiara Anselmetti <40397544+chiaroo@users.noreply.github.com> Date: Fri, 21 May 2021 15:27:34 +0200 Subject: [PATCH 007/102] Adding 
focus_weights to pre-clustering (#241) * Add focus_weights to pre-clustering Hey guys, another quick fix since I noticed it wasn't implemented yet: When pre-clustering the network, the focus_weights have not yet been considered. This may distort clustering results when pre-clustering to a low resolution. * Update release_notes.rst --- doc/release_notes.rst | 1 + scripts/simplify_network.py | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index a1b54396..a378d5b3 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -19,6 +19,7 @@ Upcoming Release * Fix: Value for ``co2base`` in ``config.yaml`` adjusted to 1.487e9 t CO2-eq (from 3.1e9 t CO2-eq). The new value represents emissions related to the electricity sector for EU+UK. The old value was ~2x too high and used when the emissions wildcard in ``{opts}`` was used. * Add option to include marginal costs of links representing fuel cells, electrolysis, and battery inverters [`#232 `_]. +* The ``focus_weights`` are now also considered when pre-clustering in the :mod:`simplify_network` rule [`#241 `_]. 
PyPSA-Eur 0.3.0 (7th December 2020) ================================== diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index b05d59aa..530204ef 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -324,6 +324,8 @@ def remove_stubs(n): def cluster(n, n_clusters): logger.info(f"Clustering to {n_clusters} buses") + focus_weights = snakemake.config.get('focus_weights', None) + renewable_carriers = pd.Index([tech for tech in n.generators.carrier.unique() if tech.split('-', 2)[0] in snakemake.config['renewable']]) @@ -337,7 +339,8 @@ def cluster(n, n_clusters): for tech in renewable_carriers])) if len(renewable_carriers) > 0 else 'conservative') clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap=False, potential_mode=potential_mode, - solver_name=snakemake.config['solving']['solver']['name']) + solver_name=snakemake.config['solving']['solver']['name'], + focus_weights=focus_weights) return clustering.network, clustering.busmap From f5a0d566d98368687969fda9f659dace98c84433 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martha=20Frysztacki=20=5Bfr=C9=A8=CA=82tat=CD=A1sk=CA=B2?= =?UTF-8?q?=5D?= Date: Fri, 21 May 2021 15:31:50 +0200 Subject: [PATCH 008/102] solve_operations_network: integrate all extendable links, not only DC (#244) * add_electricity.py Resolve FutureWarning 771 Index.__or__ operating as set operation is deprecated * solve_operations_network: bug fix * release notes * Update doc/release_notes.rst Co-authored-by: Fabian Neumann --- doc/release_notes.rst | 2 ++ scripts/solve_operations_network.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index a378d5b3..02b79cd7 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -19,6 +19,7 @@ Upcoming Release * Fix: Value for ``co2base`` in ``config.yaml`` adjusted to 1.487e9 t CO2-eq (from 3.1e9 t CO2-eq). 
The new value represents emissions related to the electricity sector for EU+UK. The old value was ~2x too high and used when the emissions wildcard in ``{opts}`` was used. * Add option to include marginal costs of links representing fuel cells, electrolysis, and battery inverters [`#232 `_]. +* Bugfix in :mod:`solve_operations_network`: optimised capacities are now fixed for all extendable links, not only HVDC links [`#244 `_]. * The ``focus_weights`` are now also considered when pre-clustering in the :mod:`simplify_network` rule [`#241 `_]. PyPSA-Eur 0.3.0 (7th December 2020) @@ -45,6 +46,7 @@ Using the ``{opts}`` wildcard for scenarios: uses the `tsam `_ package [`#186 `_]. + More OPSD integration: * Add renewable power plants from `OPSD `_ to the network for specified technologies. diff --git a/scripts/solve_operations_network.py b/scripts/solve_operations_network.py index c65e6889..864afa77 100644 --- a/scripts/solve_operations_network.py +++ b/scripts/solve_operations_network.py @@ -71,7 +71,7 @@ def set_parameters_from_optimized(n, n_optim): n_optim.lines[attr].reindex(lines_untyped_i, fill_value=0.) n.lines['s_nom_extendable'] = False - links_dc_i = n.links.index[n.links.carrier == 'DC'] + links_dc_i = n.links.index[n.links.p_nom_extendable] n.links.loc[links_dc_i, 'p_nom'] = \ n_optim.links['p_nom_opt'].reindex(links_dc_i, fill_value=0.) 
n.links.loc[links_dc_i, 'p_nom_extendable'] = False From 11af828c394faca3de4c3050b531e95605b678d4 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 25 May 2021 11:29:47 +0200 Subject: [PATCH 009/102] remove six dependency (#245) --- scripts/_helpers.py | 5 ++--- scripts/base_network.py | 7 +++---- scripts/build_shapes.py | 2 +- scripts/cluster_network.py | 2 +- scripts/make_summary.py | 5 ++--- scripts/plot_network.py | 1 - scripts/prepare_network.py | 3 +-- scripts/simplify_network.py | 9 ++++----- 8 files changed, 14 insertions(+), 20 deletions(-) diff --git a/scripts/_helpers.py b/scripts/_helpers.py index 807c439f..996baf73 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -156,7 +156,6 @@ def aggregate_p_curtailed(n): ]) def aggregate_costs(n, flatten=False, opts=None, existing_only=False): - from six import iterkeys, itervalues components = dict(Link=("p_nom", "p0"), Generator=("p_nom", "p"), @@ -167,8 +166,8 @@ def aggregate_costs(n, flatten=False, opts=None, existing_only=False): costs = {} for c, (p_nom, p_attr) in zip( - n.iterate_components(iterkeys(components), skip_empty=False), - itervalues(components) + n.iterate_components(components.keys(), skip_empty=False), + components.values() ): if c.df.empty: continue if not existing_only: p_nom += "_opt" diff --git a/scripts/base_network.py b/scripts/base_network.py index e43c4baf..c35b6858 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -74,7 +74,6 @@ import scipy as sp import networkx as nx from scipy.sparse import csgraph -from six import iteritems from itertools import product from shapely.geometry import Point, LineString @@ -268,13 +267,13 @@ def _apply_parameter_corrections(n): if corrections is None: return - for component, attrs in iteritems(corrections): + for component, attrs in corrections.items(): df = n.df(component) oid = _get_oid(df) if attrs is None: continue - for attr, repls in iteritems(attrs): - for i, r in iteritems(repls): + for attr, repls in 
attrs.items(): + for i, r in repls.items(): if i == 'oid': r = oid.map(repls["oid"]).dropna() elif i == 'index': diff --git a/scripts/build_shapes.py b/scripts/build_shapes.py index 2651837b..96d4a60f 100644 --- a/scripts/build_shapes.py +++ b/scripts/build_shapes.py @@ -73,7 +73,7 @@ from _helpers import configure_logging import os import numpy as np from operator import attrgetter -from six.moves import reduce +from functools import reduce from itertools import takewhile import pandas as pd diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index a01f682f..8356f83b 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -135,7 +135,7 @@ import pyomo.environ as po import matplotlib.pyplot as plt import seaborn as sns -from six.moves import reduce +from functools import reduce from pypsa.networkclustering import (busmap_by_kmeans, busmap_by_spectral_clustering, _make_consense, get_clustering_from_busmap) diff --git a/scripts/make_summary.py b/scripts/make_summary.py index 4d3e9ee5..f815d5f0 100644 --- a/scripts/make_summary.py +++ b/scripts/make_summary.py @@ -60,7 +60,6 @@ import os import pypsa import pandas as pd -from six import iteritems from add_electricity import load_costs, update_transmission_costs idx = pd.IndexSlice @@ -386,7 +385,7 @@ def make_summaries(networks_dict, country='all'): for output in outputs: dfs[output] = pd.DataFrame(columns=columns,dtype=float) - for label, filename in iteritems(networks_dict): + for label, filename in networks_dict.items(): print(label, filename) if not os.path.exists(filename): print("does not exist!!") @@ -417,7 +416,7 @@ def make_summaries(networks_dict, country='all'): def to_csv(dfs): dir = snakemake.output[0] os.makedirs(dir, exist_ok=True) - for key, df in iteritems(dfs): + for key, df in dfs.items(): df.to_csv(os.path.join(dir, f"{key}.csv")) diff --git a/scripts/plot_network.py b/scripts/plot_network.py index e55b5de0..810f6284 100755 --- a/scripts/plot_network.py +++ 
b/scripts/plot_network.py @@ -25,7 +25,6 @@ from _helpers import (load_network_for_plots, aggregate_p, aggregate_costs, import pandas as pd import numpy as np -from six.moves import zip import cartopy.crs as ccrs import matplotlib.pyplot as plt diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index fc5c6e77..4caa5703 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -62,7 +62,6 @@ import re import pypsa import numpy as np import pandas as pd -from six import iteritems from add_electricity import load_costs, update_transmission_costs @@ -145,7 +144,7 @@ def average_every_nhours(n, offset): for c in n.iterate_components(): pnl = getattr(m, c.list_name+"_t") - for k, df in iteritems(c.pnl): + for k, df in c.pnl.items(): if not df.empty: pnl[k] = df.resample(offset).mean() diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index 530204ef..5e89b6bf 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -93,8 +93,7 @@ import numpy as np import scipy as sp from scipy.sparse.csgraph import connected_components, dijkstra -from six import iteritems -from six.moves import reduce +from functools import reduce import pypsa from pypsa.io import import_components_from_dataframe, import_series_from_dataframe @@ -193,7 +192,7 @@ def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate n.mremove(c, n.df(c).index) import_components_from_dataframe(n, df, c) - for attr, df in iteritems(pnl): + for attr, df in pnl.items(): if not df.empty: import_series_from_dataframe(n, df, c, attr) @@ -237,7 +236,7 @@ def simplify_links(n): if len(G.adj[m]) > 2 or (set(G.adj[m]) - nodes)} for u in supernodes: - for m, ls in iteritems(G.adj[u]): + for m, ls in G.adj[u].items(): if m not in nodes or m in seen: continue buses = [u, m] @@ -245,7 +244,7 @@ def simplify_links(n): while m not in (supernodes | seen): seen.add(m) - for m2, ls in iteritems(G.adj[m]): + for m2, ls in G.adj[m].items(): 
if m2 in seen or m2 == u: continue buses.append(m2) links.append(list(ls)) # [name for name in ls]) From 1bd3a5a8061c61306fd99997fa8c98621f88a58f Mon Sep 17 00:00:00 2001 From: martacki Date: Tue, 25 May 2021 12:58:23 +0200 Subject: [PATCH 010/102] simplify: drop inaccurate columns only if no simpl wildcard is used --- scripts/simplify_network.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index 8663faa3..4830b87b 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -361,8 +361,8 @@ if __name__ == "__main__": if snakemake.wildcards.simpl: n, cluster_map = cluster(n, int(snakemake.wildcards.simpl)) busmaps.append(cluster_map) - - n.buses = n.buses.drop(['substation_lv', 'substation_off', 'under_construction'], axis=1) + else: + n.buses = n.buses.drop(['symbol', 'tags', 'under_construction', 'substation_lv', 'substation_off'], axis=1) n.export_to_netcdf(snakemake.output.network) busmap_s = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0]) From 14cf5815bb9d6f774b493ce5482b56ab3ca60a29 Mon Sep 17 00:00:00 2001 From: martacki Date: Tue, 25 May 2021 13:02:58 +0200 Subject: [PATCH 011/102] clear arrangement --- scripts/simplify_network.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index 4830b87b..26bf96e3 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -363,6 +363,7 @@ if __name__ == "__main__": busmaps.append(cluster_map) else: n.buses = n.buses.drop(['symbol', 'tags', 'under_construction', 'substation_lv', 'substation_off'], axis=1) + n.export_to_netcdf(snakemake.output.network) busmap_s = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0]) From e0215bc5a9bfa2341504873d57a2d7019329eadf Mon Sep 17 00:00:00 2001 From: Koen van Greevenbroek <74298901+koen-vg@users.noreply.github.com> Date: Tue, 25 May 2021 15:55:23 +0200 Subject: [PATCH 012/102] Propagate the 
solver log file name to the solver (#247) * Propagate the solver log file name to the solver Previously, the PyPSA network solving functions were not told about the solver logfile specified in the Snakemake file. * Pass solver_logfile on as kwargs The `solve_network` function passes any additional arguments on to the pypsa `network_lopf` and `ilopf` functions. Now we also pass `solver_logfile` on as part of kwargs. --- scripts/solve_network.py | 7 ++++--- scripts/solve_operations_network.py | 5 +++-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index db64e576..24cd0464 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -241,7 +241,7 @@ def extra_functionality(n, snapshots): add_battery_constraints(n) -def solve_network(n, config, solver_log=None, opts='', **kwargs): +def solve_network(n, config, opts='', **kwargs): solver_options = config['solving']['solver'].copy() solver_name = solver_options.pop('name') cf_solving = config['solving']['options'] @@ -282,8 +282,9 @@ if __name__ == "__main__": with memory_logger(filename=fn, interval=30.) as mem: n = pypsa.Network(snakemake.input[0]) n = prepare_network(n, solve_opts) - n = solve_network(n, config=snakemake.config, solver_dir=tmpdir, - solver_log=snakemake.log.solver, opts=opts) + n = solve_network(n, config=snakemake.config, opts=opts, + solver_dir=tmpdir, + solver_logfile=snakemake.log.solver) n.export_to_netcdf(snakemake.output[0]) logger.info("Maximum memory usage: {}".format(mem.mem_usage)) diff --git a/scripts/solve_operations_network.py b/scripts/solve_operations_network.py index 864afa77..b698c2f1 100644 --- a/scripts/solve_operations_network.py +++ b/scripts/solve_operations_network.py @@ -111,8 +111,9 @@ if __name__ == "__main__": fn = getattr(snakemake.log, 'memory', None) with memory_logger(filename=fn, interval=30.) 
as mem: n = prepare_network(n, solve_opts=snakemake.config['solving']['options']) - n = solve_network(n, config, solver_dir=tmpdir, - solver_log=snakemake.log.solver, opts=opts) + n = solve_network(n, config=config, opts=opts, + solver_dir=tmpdir, + solver_logfile=snakemake.log.solver) n.export_to_netcdf(snakemake.output[0]) logger.info("Maximum memory usage: {}".format(mem.mem_usage)) From bfeb429c27b6ea1e9456d264568c9a8ed9a7ab18 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 25 May 2021 15:55:39 +0200 Subject: [PATCH 013/102] base: add escape if all TYNDP links already in network (#246) --- doc/release_notes.rst | 1 + scripts/base_network.py | 1 + 2 files changed, 2 insertions(+) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 02b79cd7..2251c853 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -19,6 +19,7 @@ Upcoming Release * Fix: Value for ``co2base`` in ``config.yaml`` adjusted to 1.487e9 t CO2-eq (from 3.1e9 t CO2-eq). The new value represents emissions related to the electricity sector for EU+UK. The old value was ~2x too high and used when the emissions wildcard in ``{opts}`` was used. * Add option to include marginal costs of links representing fuel cells, electrolysis, and battery inverters [`#232 `_]. +* Fix: Add escape in :mod:`base_network` if all TYNDP links are already contained in the network [`#246 `_]. * Bugfix in :mod:`solve_operations_network`: optimised capacities are now fixed for all extendable links, not only HVDC links [`#244 `_]. * The ``focus_weights`` are now also considered when pre-clustering in the :mod:`simplify_network` rule [`#241 `_]. 
diff --git a/scripts/base_network.py b/scripts/base_network.py index c35b6858..778f8dc4 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -212,6 +212,7 @@ def _add_links_from_tyndp(buses, links): if links_tyndp["j"].notnull().any(): logger.info("TYNDP links already in the dataset (skipping): " + ", ".join(links_tyndp.loc[links_tyndp["j"].notnull(), "Name"])) links_tyndp = links_tyndp.loc[links_tyndp["j"].isnull()] + if links_tyndp.empty: return buses, links tree = sp.spatial.KDTree(buses[['x', 'y']]) _, ind0 = tree.query(links_tyndp[["x1", "y1"]]) From c53d49d8f82dd027138755e560282e8ffdbcc9ab Mon Sep 17 00:00:00 2001 From: martacki Date: Fri, 28 May 2021 14:53:00 +0200 Subject: [PATCH 014/102] adapt config for test --- test/config.test1.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/config.test1.yaml b/test/config.test1.yaml index d13a6844..75752c4d 100755 --- a/test/config.test1.yaml +++ b/test/config.test1.yaml @@ -50,6 +50,8 @@ electricity: custom_powerplants: false # use pandas query strings here, e.g. Country in ['Germany'] conventional_carriers: [coal, CCGT] # [nuclear, oil, OCGT, CCGT, coal, lignite, geothermal, biomass] +simplify_to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. 
substations or offwind connections) + atlite: nprocesses: 4 cutouts: From d6830d3c482a4eb6c3f23f671e81081d740e3a50 Mon Sep 17 00:00:00 2001 From: martacki Date: Tue, 1 Jun 2021 10:55:26 +0200 Subject: [PATCH 015/102] rename no_substations --- scripts/simplify_network.py | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index 4572e664..72d0dbb7 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -319,14 +319,16 @@ def remove_stubs(n): return n, busmap -def aggregate_to_substations(n): - logger.info("Aggregating buses that are no substations or have a no valid offshore connection")# +def aggregate_to_substations(n, buses_i=None): + # can be used to aggregate a selection of buses to electrically closest neighbors + # if no buses are given, nodes that are no substations or without offshore connection are aggregated + + if buses_i is None: + logger.info("Aggregating buses that are no substations or have no valid offshore connection") + buses_i = list(set(n.buses.index)-set(n.generators.bus)-set(n.loads.bus)) busmap = n.buses.index.to_series() - no_substations = list(set(n.buses.index)-set(n.generators.bus)-set(n.loads.bus)) - - index = [np.append(["Line" for c in range(len(n.lines))], ["Link" for c in range(len(n.links))]), np.append(n.lines.index, n.links.index)] @@ -337,16 +339,16 @@ def aggregate_to_substations(n): adj = n.adjacency_matrix(branch_components=['Line', 'Link'], weights=weight) - dist = dijkstra(adj, directed=False, indices=n.buses.index.get_indexer(no_substations)) - dist[:, n.buses.index.get_indexer(no_substations)] = np.inf #no_substations should not be assigned to other no_substations + dist = dijkstra(adj, directed=False, indices=n.buses.index.get_indexer(buses_i)) + dist[:, n.buses.index.get_indexer(buses_i)] = np.inf #bus in buses_i should not be assigned to different bus in buses_i #restrict to same country: - for bus in 
no_substations: + for bus in buses_i: country_buses = n.buses[~n.buses.country.isin([n.buses.loc[bus].country])].index - dist[n.buses.loc[no_substations].index.get_indexer([bus]),n.buses.index.get_indexer(country_buses)] = np.inf + dist[n.buses.loc[buses_i].index.get_indexer([bus]),n.buses.index.get_indexer(country_buses)] = np.inf assign_to = dist.argmin(axis=1) - busmap.loc[no_substations] = n.buses.iloc[assign_to].index + busmap.loc[buses_i] = n.buses.iloc[assign_to].index clustering = get_clustering_from_busmap(n, busmap, bus_strategies=dict(country=_make_consense("Bus", "country")), From eef8e3fe41095ceb0ffa7d3815ef81389a35023b Mon Sep 17 00:00:00 2001 From: martacki Date: Tue, 1 Jun 2021 11:00:31 +0200 Subject: [PATCH 016/102] different line in configuration settings --- config.default.yaml | 4 ++-- config.tutorial.yaml | 4 ++-- test/config.test1.yaml | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/config.default.yaml b/config.default.yaml index e210320f..6df2c446 100755 --- a/config.default.yaml +++ b/config.default.yaml @@ -19,6 +19,8 @@ scenario: countries: ['AL', 'AT', 'BA', 'BE', 'BG', 'CH', 'CZ', 'DE', 'DK', 'EE', 'ES', 'FI', 'FR', 'GB', 'GR', 'HR', 'HU', 'IE', 'IT', 'LT', 'LU', 'LV', 'ME', 'MK', 'NL', 'NO', 'PL', 'PT', 'RO', 'RS', 'SE', 'SI', 'SK'] +simplify_to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections) + snapshots: start: "2013-01-01" end: "2014-01-01" @@ -59,8 +61,6 @@ electricity: # Wind: [onwind, offwind-ac, offwind-dc] # Solar: [solar] -simplify_to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. 
substations or offwind connections) - atlite: nprocesses: 4 cutouts: diff --git a/config.tutorial.yaml b/config.tutorial.yaml index 97c5d9ca..4ddb6830 100755 --- a/config.tutorial.yaml +++ b/config.tutorial.yaml @@ -19,6 +19,8 @@ scenario: countries: ['DE'] +simplify_to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections) + snapshots: start: "2013-03-01" end: "2013-04-01" @@ -51,8 +53,6 @@ electricity: custom_powerplants: false # use pandas query strings here, e.g. Country in ['Germany'] conventional_carriers: [coal, CCGT] # [nuclear, oil, OCGT, CCGT, coal, lignite, geothermal, biomass] -simplify_to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections) - atlite: nprocesses: 4 cutouts: diff --git a/test/config.test1.yaml b/test/config.test1.yaml index 75752c4d..39b2ef8e 100755 --- a/test/config.test1.yaml +++ b/test/config.test1.yaml @@ -18,6 +18,8 @@ scenario: countries: ['DE'] +simplify_to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections) + snapshots: start: "2013-03-01" end: "2014-04-01" @@ -50,8 +52,6 @@ electricity: custom_powerplants: false # use pandas query strings here, e.g. Country in ['Germany'] conventional_carriers: [coal, CCGT] # [nuclear, oil, OCGT, CCGT, coal, lignite, geothermal, biomass] -simplify_to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. 
substations or offwind connections) - atlite: nprocesses: 4 cutouts: From 9825fa32b24a1018472555d66cb8a0615278353c Mon Sep 17 00:00:00 2001 From: martacki Date: Tue, 1 Jun 2021 12:00:55 +0200 Subject: [PATCH 017/102] change layout of configuration settings --- config.default.yaml | 4 +++- config.tutorial.yaml | 4 +++- scripts/simplify_network.py | 2 +- test/config.test1.yaml | 4 +++- 4 files changed, 10 insertions(+), 4 deletions(-) diff --git a/config.default.yaml b/config.default.yaml index 6df2c446..51721729 100755 --- a/config.default.yaml +++ b/config.default.yaml @@ -19,7 +19,9 @@ scenario: countries: ['AL', 'AT', 'BA', 'BE', 'BG', 'CH', 'CZ', 'DE', 'DK', 'EE', 'ES', 'FI', 'FR', 'GB', 'GR', 'HR', 'HU', 'IE', 'IT', 'LT', 'LU', 'LV', 'ME', 'MK', 'NL', 'NO', 'PL', 'PT', 'RO', 'RS', 'SE', 'SI', 'SK'] -simplify_to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections) +clustering: + simplify: + to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections) snapshots: start: "2013-01-01" diff --git a/config.tutorial.yaml b/config.tutorial.yaml index 4ddb6830..e551e460 100755 --- a/config.tutorial.yaml +++ b/config.tutorial.yaml @@ -19,7 +19,9 @@ scenario: countries: ['DE'] -simplify_to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections) +clustering: + simplify: + to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. 
substations or offwind connections) snapshots: start: "2013-03-01" diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index 72d0dbb7..309e94ff 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -399,7 +399,7 @@ if __name__ == "__main__": busmaps = [trafo_map, simplify_links_map, stub_map] - if snakemake.config['simplify_to_substations']: + if snakemake.config['clustering']['simplify']['to_substations']: n, substation_map = aggregate_to_substations(n) busmaps.append(substation_map) diff --git a/test/config.test1.yaml b/test/config.test1.yaml index 39b2ef8e..a5dadc65 100755 --- a/test/config.test1.yaml +++ b/test/config.test1.yaml @@ -18,7 +18,9 @@ scenario: countries: ['DE'] -simplify_to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections) +clustering: + simplify: + to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections) snapshots: start: "2013-03-01" From cec9dcc41ce96034c6c11f78d154b06b04be759f Mon Sep 17 00:00:00 2001 From: martacki Date: Tue, 1 Jun 2021 12:02:43 +0200 Subject: [PATCH 018/102] adapt configuration options --- doc/configtables/clustering.csv | 3 +++ doc/configuration.rst | 6 +++--- doc/tutorial.rst | 12 ++++++------ 3 files changed, 12 insertions(+), 9 deletions(-) create mode 100644 doc/configtables/clustering.csv diff --git a/doc/configtables/clustering.csv b/doc/configtables/clustering.csv new file mode 100644 index 00000000..2f63f955 --- /dev/null +++ b/doc/configtables/clustering.csv @@ -0,0 +1,3 @@ +,Unit,Values,Description +simplify,,, +-- to_substations,bool,"{'true','false'}","Aggregates all nodes without power injection (positive or negative, i.e. 
demand or generation) to electrically closest ones" diff --git a/doc/configuration.rst b/doc/configuration.rst index a75669cd..e7c31793 100644 --- a/doc/configuration.rst +++ b/doc/configuration.rst @@ -256,7 +256,7 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia .. literalinclude:: ../config.default.yaml :language: yaml - :lines: 192-202 + :lines: 208-217 .. csv-table:: :header-rows: 1 @@ -268,7 +268,7 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia .. literalinclude:: ../config.default.yaml :language: yaml - :lines: 192,203-219 + :lines: 218-227 .. csv-table:: :header-rows: 1 @@ -282,7 +282,7 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia .. literalinclude:: ../config.default.yaml :language: yaml - :lines: 221-299 + :lines: 236-314 .. csv-table:: :header-rows: 1 diff --git a/doc/tutorial.rst b/doc/tutorial.rst index 507b1485..d2fb8433 100644 --- a/doc/tutorial.rst +++ b/doc/tutorial.rst @@ -53,41 +53,41 @@ Likewise, the example's temporal scope can be restricted (e.g. to a single month .. literalinclude:: ../config.tutorial.yaml :language: yaml - :lines: 22-25 + :lines: 24-27 It is also possible to allow less or more carbon-dioxide emissions. Here, we limit the emissions of Germany 100 Megatonnes per year. .. literalinclude:: ../config.tutorial.yaml :language: yaml - :lines: 36,38 + :lines: 38,40 PyPSA-Eur also includes a database of existing conventional powerplants. We can select which types of powerplants we like to be included with fixed capacities: .. literalinclude:: ../config.tutorial.yaml :language: yaml - :lines: 36,52 + :lines: 38,54 To accurately model the temporal and spatial availability of renewables such as wind and solar energy, we rely on historical weather data. It is advisable to adapt the required range of coordinates to the selection of countries. .. 
literalinclude:: ../config.tutorial.yaml :language: yaml - :lines: 54-62 + :lines: 56-63 We can also decide which weather data source should be used to calculate potentials and capacity factor time-series for each carrier. For example, we may want to use the ERA-5 dataset for solar and not the default SARAH-2 dataset. .. literalinclude:: ../config.tutorial.yaml :language: yaml - :lines: 64,107-108 + :lines: 65,108-109 Finally, it is possible to pick a solver. For instance, this tutorial uses the open-source solvers CBC and Ipopt and does not rely on the commercial solvers Gurobi or CPLEX (for which free academic licenses are available). .. literalinclude:: ../config.tutorial.yaml :language: yaml - :lines: 170,180-181 + :lines: 171,181-182 .. note:: From 0f5934f534c857fbbfb9b90f87ca6b8606f061fd Mon Sep 17 00:00:00 2001 From: martacki Date: Tue, 1 Jun 2021 12:06:56 +0200 Subject: [PATCH 019/102] release notes --- doc/release_notes.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index a1b54396..5330128c 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -19,6 +19,7 @@ Upcoming Release * Fix: Value for ``co2base`` in ``config.yaml`` adjusted to 1.487e9 t CO2-eq (from 3.1e9 t CO2-eq). The new value represents emissions related to the electricity sector for EU+UK. The old value was ~2x too high and used when the emissions wildcard in ``{opts}`` was used. * Add option to include marginal costs of links representing fuel cells, electrolysis, and battery inverters [`#232 `_]. +* Add option to pre-aggregate nodes without power injections (positive or negative, i.e. generation or demand) to electrically closest nodes or neighbors in ``simplify_network``. Defaults to ``False``. This affects nodes that are no substations or have no offshore connection. 
PyPSA-Eur 0.3.0 (7th December 2020) ================================== From 7b68e8be0caa131cbbf3fdea19ff158da9e74400 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 22 Jun 2021 11:01:33 +0200 Subject: [PATCH 020/102] GitHub actions CI (#252) * add github action ci * only one environment update call * line break in echo did not work * fix syntax * fix version syntax * switch to glpk * reduce time from month to week * list environment * use new ipopt version https://github.com/conda-forge/ipopt-feedstock/issues/55 * remove accidental additions * request ipopt lower than 3.13.3 https://github.com/conda-forge/ipopt-feedstock/issues/64 * add badges and release notes * add badge to readme and make ci.yaml cc-0 --- .github/workflows/ci.yaml | 47 +++++++++++++++++++++++++++++++++++++++ .travis.yml | 39 -------------------------------- README.md | 2 +- doc/index.rst | 4 ++-- doc/release_notes.rst | 1 + test/config.test1.yaml | 4 ++-- 6 files changed, 53 insertions(+), 44 deletions(-) create mode 100644 .github/workflows/ci.yaml delete mode 100644 .travis.yml diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 00000000..9ea810b4 --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,47 @@ +# SPDX-FileCopyrightText: : 2021 The PyPSA-Eur Authors +# +# SPDX-License-Identifier: CC0-1.0 + +name: CI + +on: [push] + +jobs: + build: + + runs-on: ${{ matrix.os }} + strategy: + max-parallel: 5 + matrix: + os: + - ubuntu-latest + - macos-latest + - windows-latest + + defaults: + run: + shell: bash -l {0} + + steps: + + - uses: actions/checkout@v2 + + - name: Setup Miniconda + uses: conda-incubator/setup-miniconda@v2.1.1 + with: # checks out environment 'test' by default + mamba-version: "*" + channels: conda-forge,defaults + channel-priority: true + + - name: Install dependencies + run: | + echo -ne "url: ${CDSAPI_URL}\nkey: ${CDSAPI_TOKEN}\n" > ~/.cdsapirc + echo -e " - glpk\n - ipopt<3.13.3" >> envs/environment.yaml + mamba env 
update -f envs/environment.yaml --name test + + - name: Test snakemake workflow + run: | + conda list + cp test/config.test1.yaml config.yaml + snakemake -j all solve_all_networks + rm -rf resources/*.nc resources/*.geojson resources/*.h5 networks results diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 79826a64..00000000 --- a/.travis.yml +++ /dev/null @@ -1,39 +0,0 @@ -# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors -# -# SPDX-License-Identifier: GPL-3.0-or-later - -branches: - only: - - master - -os: - - windows - - linux - - osx - -language: bash - -before_install: - # install conda - - wget https://raw.githubusercontent.com/trichter/conda4travis/latest/conda4travis.sh -O conda4travis.sh - - source conda4travis.sh - - # install conda environment - - conda install -c conda-forge mamba - - mamba env create -f ./envs/environment.yaml - - conda activate pypsa-eur - - # install open-source solver - - mamba install -c conda-forge glpk ipopt'<3.13.3' - - # list packages for easier debugging - - conda list - -before_script: - - 'echo -ne "url: ${CDSAPI_URL}\nkey: ${CDSAPI_TOKEN}\n" > ~/.cdsapirc' - -script: - - cp ./test/config.test1.yaml ./config.yaml - - snakemake -j all solve_all_networks - - rm -rf resources/*.nc resources/*.geojson resources/*.h5 networks results - # could repeat for more configurations in future diff --git a/README.md b/README.md index dc6b4791..15f979a7 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ SPDX-License-Identifier: CC-BY-4.0 --> ![GitHub release (latest by date including pre-releases)](https://img.shields.io/github/v/release/pypsa/pypsa-eur?include_prereleases) -[![Build Status](https://travis-ci.org/PyPSA/pypsa-eur.svg?branch=master)](https://travis-ci.org/PyPSA/pypsa-eur) +[![Build Status](https://github.com/pypsa/pypsa-eur/actions/workflows/ci.yaml/badge.svg)](https://github.com/PyPSA/pypsa-eur/actions) 
[![Documentation](https://readthedocs.org/projects/pypsa-eur/badge/?version=latest)](https://pypsa-eur.readthedocs.io/en/latest/?badge=latest) ![Size](https://img.shields.io/github/repo-size/pypsa/pypsa-eur) [![Zenodo](https://zenodo.org/badge/DOI/10.5281/zenodo.3520874.svg)](https://doi.org/10.5281/zenodo.3520874) diff --git a/doc/index.rst b/doc/index.rst index 02b02ce2..e7dabdf4 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -9,8 +9,8 @@ PyPSA-Eur: An Open Optimisation Model of the European Transmission System .. image:: https://img.shields.io/github/v/release/pypsa/pypsa-eur?include_prereleases :alt: GitHub release (latest by date including pre-releases) -.. image:: https://travis-ci.org/PyPSA/pypsa-eur.svg?branch=master - :target: https://travis-ci.org/PyPSA/pypsa-eur +.. image:: https://github.com/pypsa/pypsa-eur/actions/workflows/ci.yaml/badge.svg + :target: https://github.com/PyPSA/pypsa-eur/actions .. image:: https://readthedocs.org/projects/pypsa-eur/badge/?version=latest :target: https://pypsa-eur.readthedocs.io/en/latest/?badge=latest diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 2251c853..0294d2d0 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -22,6 +22,7 @@ Upcoming Release * Fix: Add escape in :mod:`base_network` if all TYNDP links are already contained in the network [`#246 `_]. * Bugfix in :mod:`solve_operations_network`: optimised capacities are now fixed for all extendable links, not only HVDC links [`#244 `_]. * The ``focus_weights`` are now also considered when pre-clustering in the :mod:`simplify_network` rule [`#241 `_]. +* Continuous integration testing switches to Github Actions from Travis CI [`#252 `_]. 
PyPSA-Eur 0.3.0 (7th December 2020) ================================== diff --git a/test/config.test1.yaml b/test/config.test1.yaml index d13a6844..3ed02082 100755 --- a/test/config.test1.yaml +++ b/test/config.test1.yaml @@ -20,7 +20,7 @@ countries: ['DE'] snapshots: start: "2013-03-01" - end: "2014-04-01" + end: "2013-03-08" closed: 'left' # end is not inclusive enable: @@ -57,7 +57,7 @@ atlite: module: era5 x: [4., 15.] y: [46., 56.] - time: ["2013-03", "2013-03"] + time: ["2013-03-01", "2013-03-08"] renewable: onwind: From 4e5ac53c649fb7d7516b404cae026bc60188e162 Mon Sep 17 00:00:00 2001 From: euronion <42553970+euronion@users.noreply.github.com> Date: Thu, 24 Jun 2021 11:01:51 +0200 Subject: [PATCH 021/102] Fix snakemake CLA in GH action. (#256) * Update ci.yaml * Update ci.yaml --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9ea810b4..a3268243 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -43,5 +43,5 @@ jobs: run: | conda list cp test/config.test1.yaml config.yaml - snakemake -j all solve_all_networks + snakemake --cores all solve_all_networks rm -rf resources/*.nc resources/*.geojson resources/*.h5 networks results From 094afc8c5029cc3cc63ed0e8d67f180c380fba9b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martha=20Frysztacki=20=5Bfr=C9=A8=CA=82tat=CD=A1sk=CA=B2?= =?UTF-8?q?=5D?= Date: Mon, 28 Jun 2021 15:52:37 +0200 Subject: [PATCH 022/102] config: co2base interpreted by snakemake as str without + (#258) --- config.default.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config.default.yaml b/config.default.yaml index b1111d5a..d8f3697a 100755 --- a/config.default.yaml +++ b/config.default.yaml @@ -36,7 +36,7 @@ enable: electricity: voltages: [220., 300., 380.] 
co2limit: 7.75e+7 # 0.05 * 3.1e9*0.5 - co2base: 1.487e9 + co2base: 1.487e+9 agg_p_nom_limits: data/agg_p_nom_minmax.csv extendable_carriers: From 86540775197d9861a0a1fa8e143ce144ec2a6f16 Mon Sep 17 00:00:00 2001 From: euronion <42553970+euronion@users.noreply.github.com> Date: Mon, 28 Jun 2021 16:20:29 +0200 Subject: [PATCH 023/102] Fix creation of renewable profiles for offshore wind. (#255) * Update build_renewable_profiles.py * Update release_notes.rst * Update release_notes.rst * Update release_notes.rst * Update scripts/build_renewable_profiles.py Co-authored-by: FabianHofmann * Adjust doc string Co-authored-by: FabianHofmann Co-authored-by: Fabian --- doc/release_notes.rst | 3 ++- scripts/build_renewable_profiles.py | 6 +++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 0294d2d0..230fc67d 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -1,5 +1,5 @@ .. - SPDX-FileCopyrightText: 2019-2020 The PyPSA-Eur Authors + SPDX-FileCopyrightText: 2019-2021 The PyPSA-Eur Authors SPDX-License-Identifier: CC-BY-4.0 @@ -23,6 +23,7 @@ Upcoming Release * Bugfix in :mod:`solve_operations_network`: optimised capacities are now fixed for all extendable links, not only HVDC links [`#244 `_]. * The ``focus_weights`` are now also considered when pre-clustering in the :mod:`simplify_network` rule [`#241 `_]. * Continuous integration testing switches to Github Actions from Travis CI [`#252 `_]. +* Bugfix in :mod:`build_renewable_profile` where offshore wind profiles could no longer be created [`#249 `_]. 
PyPSA-Eur 0.3.0 (7th December 2020) ================================== diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index f7e1bc7f..111eb772 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -183,6 +183,7 @@ import progressbar as pgb import geopandas as gpd import xarray as xr import numpy as np +import functools import atlite import logging from pypsa.geo import haversine @@ -235,7 +236,10 @@ if __name__ == '__main__': excluder.add_raster(paths.corine, codes=codes, buffer=buffer, crs=3035) if "max_depth" in config: - func = lambda v: v <= -config['max_depth'] + # lambda not supported for atlite + multiprocessing + # use named function np.greater with partially frozen argument instead + # and exclude areas where: -max_depth > grid cell depth + func = functools.partial(np.greater,-config['max_depth']) excluder.add_raster(paths.gebco, codes=func, crs=4236, nodata=-1000) if 'min_shore_distance' in config: From d094119d47d7b8819966532df174cf4a6b1d50d1 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 29 Jun 2021 08:45:09 +0200 Subject: [PATCH 024/102] List first-order dependencies and add pypsa-eur-sec specialties (#257) * env: list first-order dependencies and add pypsa-eur-sec specialties * limit numpy version and require atlite 0.2.5 * fix accidental 0 major numpy version * add future dependency country_converter --- envs/environment.yaml | 23 +++++++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/envs/environment.yaml b/envs/environment.yaml index 790aec26..3d0a3400 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -13,13 +13,12 @@ dependencies: - mamba # esp for windows build - pypsa>=0.17.1 - - atlite>=0.2.2 + - atlite>=0.2.4 - dask<=2021.3.1 # until https://github.com/dask/dask/issues/7583 is solved # Dependencies of the workflow itself - xlrd - openpyxl - - scikit-learn - pycountry - seaborn - snakemake-minimal @@ -28,8 
+27,17 @@ dependencies: - pytables - lxml - powerplantmatching>=0.4.8 - - numpy<=1.19.0 # otherwise macos fails - + - numpy<=1.19 # until new PyPSA after 27-06-21 + - pandas + - geopandas + - xarray + - netcdf4 + - networkx + - scipy + - shapely + - progressbar2 + - pyomo + - matplotlib # Keep in conda environment when calling ipython - ipython @@ -37,6 +45,13 @@ dependencies: # GIS dependencies: - cartopy - descartes + - rasterio + + # PyPSA-Eur-Sec Dependencies + - geopy + - tqdm + - pytz + - country_converter - pip: - vresutils==0.3.1 From b10317ee3c15c7fc2c1be4810bea0fd6808defed Mon Sep 17 00:00:00 2001 From: martacki Date: Wed, 30 Jun 2021 21:07:38 +0200 Subject: [PATCH 025/102] fix p_nom_min values for extendable carriers with positive p_nom --- scripts/_helpers.py | 9 +++++++++ scripts/add_electricity.py | 5 ++++- scripts/cluster_network.py | 6 ++++-- scripts/simplify_network.py | 6 ++++-- 4 files changed, 21 insertions(+), 5 deletions(-) diff --git a/scripts/_helpers.py b/scripts/_helpers.py index 996baf73..622a3133 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -125,6 +125,15 @@ def load_network_for_plots(fn, tech_costs, config, combine_hydro_ps=True): return n +def update_p_nom_max(n): + # if extendable carriers (solar/onwind/...) have capacity >= 0, + # e.g. existing assets from the OPSD project are included to the network, + # the installed capacity might exceed the expansion limit. + # Hence, we update the assumptions. 
+ + n.generators.p_nom_max = (n.generators + .apply(lambda b: b[['p_nom_min','p_nom_max']].max(), axis=1)) + def aggregate_p_nom(n): return pd.concat([ n.generators.groupby("carrier").p_nom_opt.sum(), diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index 45d537c4..bf80e8aa 100755 --- a/scripts/add_electricity.py +++ b/scripts/add_electricity.py @@ -84,7 +84,7 @@ It further adds extendable ``generators`` with **zero** capacity for """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, update_p_nom_max import pypsa import pandas as pd @@ -501,6 +501,7 @@ def attach_OPSD_renewables(n): caps = caps / gens_per_bus.reindex(caps.index, fill_value=1) n.generators.p_nom.update(gens.bus.map(caps).dropna()) + n.generators.p_nom_min.update(gens.bus.map(caps).dropna()) @@ -536,6 +537,7 @@ def estimate_renewable_capacities(n, tech_map=None): .groupby(n.generators.bus.map(n.buses.country)) .transform(lambda s: normed(s) * tech_capacities.at[s.name]) .where(lambda s: s>0.1, 0.)) # only capacities above 100kW + n.generators.loc[tech_i, 'p_nom_min'] = n.generators.loc[tech_i, 'p_nom'] def add_nice_carrier_names(n, config=None): @@ -575,6 +577,7 @@ if __name__ == "__main__": estimate_renewable_capacities(n) attach_OPSD_renewables(n) + update_p_nom_max(n) add_nice_carrier_names(n) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 8356f83b..3503bd9b 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -122,7 +122,7 @@ Exemplary unsolved network clustered to 37 nodes: """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, update_p_nom_max import pypsa import os @@ -282,7 +282,7 @@ def clustering_for_n_clusters(n, n_clusters, custom_busmap=False, aggregate_carr aggregate_generators_carriers=aggregate_carriers, aggregate_one_ports=["Load", "StorageUnit"], line_length_factor=line_length_factor, - generator_strategies={'p_nom_max': 
p_nom_max_strategy}, + generator_strategies={'p_nom_max': p_nom_max_strategy, 'p_nom_min': np.sum}, scale_link_capital_costs=False) if not n.links.empty: @@ -379,6 +379,8 @@ if __name__ == "__main__": extended_link_costs=hvac_overhead_cost, focus_weights=focus_weights) + update_p_nom_max(n) + clustering.network.export_to_netcdf(snakemake.output.network) for attr in ('busmap', 'linemap'): #also available: linemap_positive, linemap_negative getattr(clustering, attr).to_csv(snakemake.output[attr]) diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index 5e89b6bf..2cdcd095 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -83,7 +83,7 @@ The rule :mod:`simplify_network` does up to four things: """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, update_p_nom_max from cluster_network import clustering_for_n_clusters, cluster_regions from add_electricity import load_costs @@ -198,7 +198,7 @@ def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus) - generators, generators_pnl = aggregategenerators(n, busmap) + generators, generators_pnl = aggregategenerators(n, busmap, custom_strategies={'p_nom_min': np.sum}) replace_components(n, "Generator", generators, generators_pnl) for one_port in aggregate_one_ports: @@ -364,6 +364,8 @@ if __name__ == "__main__": n, cluster_map = cluster(n, int(snakemake.wildcards.simpl)) busmaps.append(cluster_map) + update_p_nom_max(n) + n.export_to_netcdf(snakemake.output.network) busmap_s = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0]) From cf2eff4127f27287d2b22e9cac2bec2f15116b8a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martha=20Frysztacki=20=5Bfr=C9=A8=CA=82tat=CD=A1sk=CA=B2?= =?UTF-8?q?=5D?= Date: Wed, 30 Jun 2021 21:29:08 +0200 Subject: [PATCH 026/102] style Co-authored-by: FabianHofmann --- scripts/_helpers.py | 3 +-- 1 file changed, 1 
insertion(+), 2 deletions(-) diff --git a/scripts/_helpers.py b/scripts/_helpers.py index 622a3133..0c6a4658 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -131,8 +131,7 @@ def update_p_nom_max(n): # the installed capacity might exceed the expansion limit. # Hence, we update the assumptions. - n.generators.p_nom_max = (n.generators - .apply(lambda b: b[['p_nom_min','p_nom_max']].max(), axis=1)) + n.generators.p_nom_max = n.generators[['p_nom_min', 'p_nom_max']].max(1) def aggregate_p_nom(n): return pd.concat([ From 89631b240ae39bfd1575953891113deaba56b020 Mon Sep 17 00:00:00 2001 From: martacki Date: Fri, 2 Jul 2021 10:04:22 +0200 Subject: [PATCH 027/102] correct placement of if/else --- scripts/simplify_network.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index 6a922019..4678f796 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -363,11 +363,10 @@ if __name__ == "__main__": if snakemake.wildcards.simpl: n, cluster_map = cluster(n, int(snakemake.wildcards.simpl)) busmaps.append(cluster_map) - - update_p_nom_max(n) - else: n.buses = n.buses.drop(['symbol', 'tags', 'under_construction', 'substation_lv', 'substation_off'], axis=1) + + update_p_nom_max(n) n.export_to_netcdf(snakemake.output.network) From 34578bf16bebd7884730a478cf95916650a1512b Mon Sep 17 00:00:00 2001 From: martacki Date: Fri, 2 Jul 2021 10:24:30 +0200 Subject: [PATCH 028/102] update release_notes --- doc/release_notes.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 230fc67d..bc09f957 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -24,6 +24,7 @@ Upcoming Release * The ``focus_weights`` are now also considered when pre-clustering in the :mod:`simplify_network` rule [`#241 `_]. * Continuous integration testing switches to Github Actions from Travis CI [`#252 `_]. 
* Bugfix in :mod:`build_renewable_profile` where offshore wind profiles could no longer be created [`#249 `_]. +* Bugfix: Lower expansion limit of extendable carriers is now set to the existing capacity, i.e. p_nom_min = p_nom (0 before). Simultaneously, the upper limit (p_nom_max) is now the maximum of the installed capacity (p_nom) and the previous estimate based on land availability (p_nom_max). PyPSA-Eur 0.3.0 (7th December 2020) ================================== From 6983be337ac7f4a6a8499dadf39a74644cfe31ee Mon Sep 17 00:00:00 2001 From: martacki Date: Fri, 2 Jul 2021 10:26:46 +0200 Subject: [PATCH 029/102] release_notes: add PR reference --- doc/release_notes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index bc09f957..c001464d 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -24,7 +24,7 @@ Upcoming Release * The ``focus_weights`` are now also considered when pre-clustering in the :mod:`simplify_network` rule [`#241 `_]. * Continuous integration testing switches to Github Actions from Travis CI [`#252 `_]. * Bugfix in :mod:`build_renewable_profile` where offshore wind profiles could no longer be created [`#249 `_]. -* Bugfix: Lower expansion limit of extendable carriers is now set to the existing capacity, i.e. p_nom_min = p_nom (0 before). Simultaneously, the upper limit (p_nom_max) is now the maximum of the installed capacity (p_nom) and the previous estimate based on land availability (p_nom_max). +* Bugfix: Lower expansion limit of extendable carriers is now set to the existing capacity, i.e. p_nom_min = p_nom (0 before). Simultaneously, the upper limit (p_nom_max) is now the maximum of the installed capacity (p_nom) and the previous estimate based on land availability (p_nom_max) [`#260 `_]. 
PyPSA-Eur 0.3.0 (7th December 2020) ================================== From 15ebe63b38738673ef095c3127bf8e3e21516aa0 Mon Sep 17 00:00:00 2001 From: martacki Date: Fri, 2 Jul 2021 10:43:32 +0200 Subject: [PATCH 030/102] release_notes: style --- doc/release_notes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index c001464d..b8aaeba2 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -24,7 +24,7 @@ Upcoming Release * The ``focus_weights`` are now also considered when pre-clustering in the :mod:`simplify_network` rule [`#241 `_]. * Continuous integration testing switches to Github Actions from Travis CI [`#252 `_]. * Bugfix in :mod:`build_renewable_profile` where offshore wind profiles could no longer be created [`#249 `_]. -* Bugfix: Lower expansion limit of extendable carriers is now set to the existing capacity, i.e. p_nom_min = p_nom (0 before). Simultaneously, the upper limit (p_nom_max) is now the maximum of the installed capacity (p_nom) and the previous estimate based on land availability (p_nom_max) [`#260 `_]. +* Bugfix: Lower expansion limit of extendable carriers is now set to the existing capacity, i.e. ``p_nom_min = p_nom`` (0 before). Simultaneously, the upper limit (``p_nom_max``) is now the maximum of the installed capacity (``p_nom``) and the previous estimate based on land availability (``p_nom_max``) [`#260 `_]. 
PyPSA-Eur 0.3.0 (7th December 2020) ================================== From 989d8c6abfe6602ea2a3ae70683ba5097f54fe06 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Fri, 9 Jul 2021 10:22:40 +0200 Subject: [PATCH 032/102] specify when to run CI (#262) --- .github/workflows/ci.yaml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index a3268243..b0699d74 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -4,7 +4,15 @@ name: CI -on: [push] +on: + push: + branches: + - master + pull_request: + branches: + - master + schedule: + - cron: "0 5 * * TUE" jobs: build: From e2c3118f08bccddbd2eb47002b9d89d1f9eb3087 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 12 Jul 2021 08:50:37 +0200 Subject: [PATCH 033/102] don't allow pandas 1.3.0 yet --- envs/environment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/envs/environment.yaml b/envs/environment.yaml index 3d0a3400..979c83df 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -28,7 +28,7 @@ dependencies: - lxml - powerplantmatching>=0.4.8 - numpy<=1.19 # until new PyPSA after 27-06-21 - - pandas + - pandas<1.3 # until new PyPSA after 2-7-21, https://github.com/PyPSA/pypsa-eur/pull/261 - geopandas - xarray - netcdf4 From 2aa59a39051edb600dbc0c4087ea4f22a36423fe Mon Sep 17 00:00:00 2001 From: huckebrink <68848357+huckebrink@users.noreply.github.com> Date: Wed, 4 Aug 2021 18:19:23 +0200 Subject: [PATCH 034/102] exporting additional costs, and compatibility adjustments (#261) * added export for connection_cost adjustment * removed lambda function from multiprocessing changed true/false_values to lists * removed double import of functools * moved added costs to df column * Update scripts/simplify_network.py Co-authored-by: Leonie Plaga Co-authored-by: Fabian Neumann --- Snakefile | 3 ++- scripts/base_network.py | 10 +++++----- scripts/simplify_network.py | 4 ++++ 3 files 
changed, 11 insertions(+), 6 deletions(-) diff --git a/Snakefile b/Snakefile index 2702fd3d..7f5be34d 100644 --- a/Snakefile +++ b/Snakefile @@ -239,7 +239,8 @@ rule simplify_network: network='networks/elec_s{simpl}.nc', regions_onshore="resources/regions_onshore_elec_s{simpl}.geojson", regions_offshore="resources/regions_offshore_elec_s{simpl}.geojson", - busmap='resources/busmap_elec_s{simpl}.csv' + busmap='resources/busmap_elec_s{simpl}.csv', + connection_costs='resources/connection_costs_s{simpl}.csv' log: "logs/simplify_network/elec_s{simpl}.log" benchmark: "benchmarks/simplify_network/elec_s{simpl}" threads: 1 diff --git a/scripts/base_network.py b/scripts/base_network.py index 778f8dc4..4c2ed2c5 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -114,7 +114,7 @@ def _find_closest_links(links, new_links, distance_upper_bound=1.5): def _load_buses_from_eg(): buses = (pd.read_csv(snakemake.input.eg_buses, quotechar="'", - true_values='t', false_values='f', + true_values=['t'], false_values=['f'], dtype=dict(bus_id="str")) .set_index("bus_id") .drop(['station_id'], axis=1) @@ -136,7 +136,7 @@ def _load_buses_from_eg(): def _load_transformers_from_eg(buses): transformers = (pd.read_csv(snakemake.input.eg_transformers, quotechar="'", - true_values='t', false_values='f', + true_values=['t'], false_values=['f'], dtype=dict(transformer_id='str', bus0='str', bus1='str')) .set_index('transformer_id')) @@ -147,7 +147,7 @@ def _load_transformers_from_eg(buses): def _load_converters_from_eg(buses): converters = (pd.read_csv(snakemake.input.eg_converters, quotechar="'", - true_values='t', false_values='f', + true_values=['t'], false_values=['f'], dtype=dict(converter_id='str', bus0='str', bus1='str')) .set_index('converter_id')) @@ -159,7 +159,7 @@ def _load_converters_from_eg(buses): def _load_links_from_eg(buses): - links = (pd.read_csv(snakemake.input.eg_links, quotechar="'", true_values='t', false_values='f', + links = 
(pd.read_csv(snakemake.input.eg_links, quotechar="'", true_values=['t'], false_values=['f'], dtype=dict(link_id='str', bus0='str', bus1='str', under_construction="bool")) .set_index('link_id')) @@ -249,7 +249,7 @@ def _add_links_from_tyndp(buses, links): def _load_lines_from_eg(buses): - lines = (pd.read_csv(snakemake.input.eg_lines, quotechar="'", true_values='t', false_values='f', + lines = (pd.read_csv(snakemake.input.eg_lines, quotechar="'", true_values=['t'], false_values=['f'], dtype=dict(line_id='str', bus0='str', bus1='str', underground="bool", under_construction="bool")) .set_index('line_id') diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index 4678f796..c1840760 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -178,6 +178,7 @@ def _compute_connection_costs_to_bus(n, busmap, connection_costs_per_link=None, def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus): + connection_costs = {} for tech in connection_costs_to_bus: tech_b = n.generators.carrier == tech costs = n.generators.loc[tech_b, "bus"].map(connection_costs_to_bus[tech]).loc[lambda s: s>0] @@ -185,6 +186,9 @@ def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus): n.generators.loc[costs.index, "capital_cost"] += costs logger.info("Displacing {} generator(s) and adding connection costs to capital_costs: {} " .format(tech, ", ".join("{:.0f} Eur/MW/a for `{}`".format(d, b) for b, d in costs.iteritems()))) + connection_costs[tech] = costs + pd.DataFrame(connection_costs).to_csv(snakemake.output.connection_costs) + def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate_one_ports={"Load", "StorageUnit"}): From 4cc5e49ca85dbef1fcc6e7b7916bf5bed60e2c6d Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Fri, 6 Aug 2021 15:43:12 +0200 Subject: [PATCH 035/102] Adapt to new snapshot weightings (#259) * for now use n.snapshot_weightings.generators * require pypsa master; use .objective 
for Nyears * implement suggestions from code review * add release note --- doc/release_notes.rst | 1 + envs/environment.docs.yaml | 3 ++- envs/environment.yaml | 3 ++- scripts/_helpers.py | 2 +- scripts/add_electricity.py | 2 +- scripts/add_extra_components.py | 2 +- scripts/cluster_network.py | 3 ++- scripts/make_summary.py | 16 +++++++++------- scripts/plot_network.py | 2 +- scripts/prepare_network.py | 3 ++- scripts/simplify_network.py | 3 ++- scripts/solve_network.py | 10 +++++----- 12 files changed, 29 insertions(+), 21 deletions(-) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index b8aaeba2..e35d3cf6 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -24,6 +24,7 @@ Upcoming Release * The ``focus_weights`` are now also considered when pre-clustering in the :mod:`simplify_network` rule [`#241 `_]. * Continuous integration testing switches to Github Actions from Travis CI [`#252 `_]. * Bugfix in :mod:`build_renewable_profile` where offshore wind profiles could no longer be created [`#249 `_]. +* Implements changes to ``n.snapshot_weightings`` in upcoming PyPSA version (cf. `PyPSA/PyPSA/#227 `_) [`#259 `_]. * Bugfix: Lower expansion limit of extendable carriers is now set to the existing capacity, i.e. ``p_nom_min = p_nom`` (0 before). Simultaneously, the upper limit (``p_nom_max``) is now the maximum of the installed capacity (``p_nom``) and the previous estimate based on land availability (``p_nom_max``) [`#260 `_]. 
PyPSA-Eur 0.3.0 (7th December 2020) diff --git a/envs/environment.docs.yaml b/envs/environment.docs.yaml index 772583d4..9edf0118 100755 --- a/envs/environment.docs.yaml +++ b/envs/environment.docs.yaml @@ -8,7 +8,7 @@ channels: dependencies: - python<=3.7 - pip - - pypsa>=0.17.1 + #- pypsa>=0.17.1 - atlite>=0.2.2 - dask<=2021.3.1 # until https://github.com/dask/dask/issues/7583 is solved - pre-commit @@ -27,6 +27,7 @@ dependencies: - descartes - pip: + - git+https://github.com/pypsa/pypsa.git#egg=pypsa - vresutils==0.3.1 - sphinx - sphinx_rtd_theme diff --git a/envs/environment.yaml b/envs/environment.yaml index 979c83df..039bfd63 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -12,7 +12,7 @@ dependencies: - pip - mamba # esp for windows build - - pypsa>=0.17.1 + #- pypsa>=0.17.1 - atlite>=0.2.4 - dask<=2021.3.1 # until https://github.com/dask/dask/issues/7583 is solved @@ -54,5 +54,6 @@ dependencies: - country_converter - pip: + - git+https://github.com/pypsa/pypsa.git#egg=pypsa - vresutils==0.3.1 - tsam>=1.1.0 diff --git a/scripts/_helpers.py b/scripts/_helpers.py index 0c6a4658..ae28f808 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -119,7 +119,7 @@ def load_network_for_plots(fn, tech_costs, config, combine_hydro_ps=True): # bus_carrier = n.storage_units.bus.map(n.buses.carrier) # n.storage_units.loc[bus_carrier == "heat","carrier"] = "water tanks" - Nyears = n.snapshot_weightings.sum() / 8760. + Nyears = n.snapshot_weightings.objective.sum() / 8760. costs = load_costs(Nyears, tech_costs, config['costs'], config['electricity']) update_transmission_costs(n, costs) diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index bf80e8aa..8f721652 100755 --- a/scripts/add_electricity.py +++ b/scripts/add_electricity.py @@ -561,7 +561,7 @@ if __name__ == "__main__": configure_logging(snakemake) n = pypsa.Network(snakemake.input.base_network) - Nyears = n.snapshot_weightings.sum() / 8760. 
+ Nyears = n.snapshot_weightings.objective.sum() / 8760. costs = load_costs(Nyears) ppl = load_powerplants() diff --git a/scripts/add_extra_components.py b/scripts/add_extra_components.py index 946c4433..ae581382 100644 --- a/scripts/add_extra_components.py +++ b/scripts/add_extra_components.py @@ -197,7 +197,7 @@ if __name__ == "__main__": configure_logging(snakemake) n = pypsa.Network(snakemake.input.network) - Nyears = n.snapshot_weightings.sum() / 8760. + Nyears = n.snapshot_weightings.objective.sum() / 8760. costs = load_costs(Nyears, tech_costs=snakemake.input.tech_costs, config=snakemake.config['costs'], elec_config=snakemake.config['electricity']) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 3503bd9b..d74745d0 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -357,7 +357,8 @@ if __name__ == "__main__": clustering = pypsa.networkclustering.Clustering(n, busmap, linemap, linemap, pd.Series(dtype='O')) else: line_length_factor = snakemake.config['lines']['length_factor'] - hvac_overhead_cost = (load_costs(n.snapshot_weightings.sum()/8760, + Nyears = n.snapshot_weightings.objective.sum()/8760 + hvac_overhead_cost = (load_costs(Nyears, tech_costs=snakemake.input.tech_costs, config=snakemake.config['costs'], elec_config=snakemake.config['electricity']) diff --git a/scripts/make_summary.py b/scripts/make_summary.py index f815d5f0..e26db34c 100644 --- a/scripts/make_summary.py +++ b/scripts/make_summary.py @@ -111,15 +111,15 @@ def calculate_costs(n, label, costs): costs.loc[idx[raw_index],label] = capital_costs_grouped.values if c.name == "Link": - p = c.pnl.p0.multiply(n.snapshot_weightings,axis=0).sum() + p = c.pnl.p0.multiply(n.snapshot_weightings.generators,axis=0).sum() elif c.name == "Line": continue elif c.name == "StorageUnit": - p_all = c.pnl.p.multiply(n.snapshot_weightings,axis=0) + p_all = c.pnl.p.multiply(n.snapshot_weightings.generators,axis=0) p_all[p_all < 0.] = 0. 
p = p_all.sum() else: - p = c.pnl.p.multiply(n.snapshot_weightings,axis=0).sum() + p = c.pnl.p.multiply(n.snapshot_weightings.generators,axis=0).sum() marginal_costs = p*c.df.marginal_cost @@ -144,10 +144,12 @@ def calculate_energy(n, label, energy): for c in n.iterate_components(n.one_port_components|n.branch_components): - if c.name in n.one_port_components: - c_energies = c.pnl.p.multiply(n.snapshot_weightings,axis=0).sum().multiply(c.df.sign).groupby(c.df.carrier).sum() + if c.name in {'Generator', 'Load', 'ShuntImpedance'}: + c_energies = c.pnl.p.multiply(n.snapshot_weightings.generators,axis=0).sum().multiply(c.df.sign).groupby(c.df.carrier).sum() + elif c.name in {'StorageUnit', 'Store'}: + c_energies = c.pnl.p.multiply(n.snapshot_weightings.stores,axis=0).sum().multiply(c.df.sign).groupby(c.df.carrier).sum() else: - c_energies = (-c.pnl.p1.multiply(n.snapshot_weightings,axis=0).sum() - c.pnl.p0.multiply(n.snapshot_weightings,axis=0).sum()).groupby(c.df.carrier).sum() + c_energies = (-c.pnl.p1.multiply(n.snapshot_weightings.generators,axis=0).sum() - c.pnl.p0.multiply(n.snapshot_weightings.generators,axis=0).sum()).groupby(c.df.carrier).sum() energy = include_in_summary(energy, [c.list_name], label, c_energies) @@ -400,7 +402,7 @@ def make_summaries(networks_dict, country='all'): if country != 'all': n = n[n.buses.country == country] - Nyears = n.snapshot_weightings.sum() / 8760. + Nyears = n.snapshot_weightings.objective.sum() / 8760. 
costs = load_costs(Nyears, snakemake.input[0], snakemake.config['costs'], snakemake.config['electricity']) update_transmission_costs(n, costs, simple_hvdc_costs=False) diff --git a/scripts/plot_network.py b/scripts/plot_network.py index 810f6284..61a2ac9b 100755 --- a/scripts/plot_network.py +++ b/scripts/plot_network.py @@ -196,7 +196,7 @@ def plot_total_energy_pie(n, ax=None): def plot_total_cost_bar(n, ax=None): if ax is None: ax = plt.gca() - total_load = (n.snapshot_weightings * n.loads_t.p.sum(axis=1)).sum() + total_load = (n.snapshot_weightings.generators * n.loads_t.p.sum(axis=1)).sum() tech_colors = opts['tech_colors'] def split_costs(n): diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index 4caa5703..86afef2f 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -150,6 +150,7 @@ def average_every_nhours(n, offset): return m + def apply_time_segmentation(n, segments): logger.info(f"Aggregating time series to {segments} segments.") try: @@ -223,7 +224,7 @@ if __name__ == "__main__": opts = snakemake.wildcards.opts.split('-') n = pypsa.Network(snakemake.input[0]) - Nyears = n.snapshot_weightings.sum() / 8760. + Nyears = n.snapshot_weightings.objective.sum() / 8760. 
set_line_s_max_pu(n) diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index c1840760..f37899b9 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -141,7 +141,8 @@ def simplify_network_to_380(n): def _prepare_connection_costs_per_link(n): if n.links.empty: return {} - costs = load_costs(n.snapshot_weightings.sum() / 8760, snakemake.input.tech_costs, + Nyears = n.snapshot_weightings.objective.sum() / 8760 + costs = load_costs(Nyears, snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity']) connection_costs_per_link = {} diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 24cd0464..f8146b43 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -127,7 +127,7 @@ def prepare_network(n, solve_opts): if solve_opts.get('nhours'): nhours = solve_opts['nhours'] n.set_snapshots(n.snapshots[:nhours]) - n.snapshot_weightings[:] = 8760./nhours + n.snapshot_weightings[:] = 8760. / nhours return n @@ -174,16 +174,16 @@ def add_EQ_constraints(n, o, scaling=1e-1): ggrouper = n.generators.bus lgrouper = n.loads.bus sgrouper = n.storage_units.bus - load = n.snapshot_weightings @ \ + load = n.snapshot_weightings.generators @ \ n.loads_t.p_set.groupby(lgrouper, axis=1).sum() - inflow = n.snapshot_weightings @ \ + inflow = n.snapshot_weightings.stores @ \ n.storage_units_t.inflow.groupby(sgrouper, axis=1).sum() inflow = inflow.reindex(load.index).fillna(0.) 
rhs = scaling * ( level * load - inflow ) - lhs_gen = linexpr((n.snapshot_weightings * scaling, + lhs_gen = linexpr((n.snapshot_weightings.generators * scaling, get_var(n, "Generator", "p").T) ).T.groupby(ggrouper, axis=1).apply(join_exprs) - lhs_spill = linexpr((-n.snapshot_weightings * scaling, + lhs_spill = linexpr((-n.snapshot_weightings.stores * scaling, get_var(n, "StorageUnit", "spill").T) ).T.groupby(sgrouper, axis=1).apply(join_exprs) lhs_spill = lhs_spill.reindex(lhs_gen.index).fillna("") From 7a8ef1a2d44a77de240bb7144438dd35f51bff6f Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 9 Aug 2021 14:37:51 +0200 Subject: [PATCH 036/102] Add tabula-py to dependencies (for PyPSA-Eur-Sec) --- envs/environment.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/envs/environment.yaml b/envs/environment.yaml index 039bfd63..3dcbeea0 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -52,6 +52,7 @@ dependencies: - tqdm - pytz - country_converter + - tabula-py - pip: - git+https://github.com/pypsa/pypsa.git#egg=pypsa From b82c55543bc345581a5205e95e8da6499c8ce70e Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Fri, 13 Aug 2021 10:11:35 +0200 Subject: [PATCH 037/102] Readthedocs documentation with pip (#267) * Use pip with requirements.txt file for setting up doc environment * Try w/o cartopy * Try w/o cartopy take 2 * Try w/o cartopy take 3 * try pip * try pip ii * python 3.8 * fix links * remove conda docs and fix line references * remove conda on .readthedocs.yml * correct ambiguous line reference Co-authored-by: Jonas Hoersch --- .readthedocs.yml | 7 ++++-- doc/configuration.rst | 47 +++++++++++++++++++++++++------------- doc/requirements.txt | 17 ++++++++++++++ envs/environment.docs.yaml | 33 -------------------------- 4 files changed, 53 insertions(+), 51 deletions(-) create mode 100644 doc/requirements.txt delete mode 100755 envs/environment.docs.yaml diff --git a/.readthedocs.yml b/.readthedocs.yml index 173d21d7..d6b81a40 100644 
--- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -4,5 +4,8 @@ version: 2 -conda: - environment: envs/environment.docs.yaml +python: + version: 3.8 + install: + - requirements: doc/requirements.txt + system_packages: true diff --git a/doc/configuration.rst b/doc/configuration.rst index a75669cd..a6683046 100644 --- a/doc/configuration.rst +++ b/doc/configuration.rst @@ -50,7 +50,8 @@ An exemplary dependency graph (starting from the simplification rules) then look .. literalinclude:: ../config.default.yaml :language: yaml - :lines: 14-18 + :start-at: scenario: + :end-before: countries: .. csv-table:: :header-rows: 1 @@ -66,7 +67,8 @@ Specifies the temporal range to build an energy system model for as arguments to .. literalinclude:: ../config.default.yaml :language: yaml - :lines: 22-25 + :start-at: snapshots: + :end-before: enable: .. csv-table:: :header-rows: 1 @@ -80,7 +82,8 @@ Specifies the temporal range to build an energy system model for as arguments to .. literalinclude:: ../config.default.yaml :language: yaml - :lines: 36-60 + :start-at: electricity: + :end-before: atlite: .. csv-table:: :header-rows: 1 @@ -117,7 +120,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia .. literalinclude:: ../config.default.yaml :language: yaml - :lines: 77-94 + :start-at: renewable: + :end-before: offwind-ac: .. csv-table:: :header-rows: 1 @@ -129,7 +133,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia .. literalinclude:: ../config.default.yaml :language: yaml - :lines: 77,95-107 + :start-at: offwind-ac: + :end-before: offwind-dc: .. csv-table:: :header-rows: 1 @@ -141,7 +146,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia .. literalinclude:: ../config.default.yaml :language: yaml - :lines: 77,108-121 + :start-at: offwind-dc: + :end-before: solar: .. 
csv-table:: :header-rows: 1 @@ -153,7 +159,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia .. literalinclude:: ../config.default.yaml :language: yaml - :lines: 77,122-141 + :start-at: solar: + :end-before: hydro: .. csv-table:: :header-rows: 1 @@ -165,7 +172,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia .. literalinclude:: ../config.default.yaml :language: yaml - :lines: 77,142-147 + :start-at: hydro: + :end-before: lines: .. csv-table:: :header-rows: 1 @@ -179,7 +187,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia .. literalinclude:: ../config.default.yaml :language: yaml - :lines: 149-157 + :start-at: lines: + :end-before: links: .. csv-table:: :header-rows: 1 @@ -193,7 +202,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia .. literalinclude:: ../config.default.yaml :language: yaml - :lines: 159-163 + :start-at: links: + :end-before: transformers: .. csv-table:: :header-rows: 1 @@ -207,7 +217,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia .. literalinclude:: ../config.default.yaml :language: yaml - :lines: 165-168 + :start-at: transformers: + :end-before: load: .. csv-table:: :header-rows: 1 @@ -221,7 +232,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia .. literalinclude:: ../config.default.yaml :language: yaml - :lines: 170-176 + :start-at: load: + :end-before: costs: .. csv-table:: :header-rows: 1 @@ -235,7 +247,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia .. literalinclude:: ../config.default.yaml :language: yaml - :lines: 178-190 + :start-after: scaling_factor: + :end-before: solving: .. csv-table:: :header-rows: 1 @@ -256,7 +269,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia .. 
literalinclude:: ../config.default.yaml :language: yaml - :lines: 192-202 + :start-at: solving: + :end-before: solver: .. csv-table:: :header-rows: 1 @@ -268,7 +282,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia .. literalinclude:: ../config.default.yaml :language: yaml - :lines: 192,203-219 + :start-at: solver: + :end-before: plotting: .. csv-table:: :header-rows: 1 @@ -282,7 +297,7 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia .. literalinclude:: ../config.default.yaml :language: yaml - :lines: 221-299 + :start-at: plotting: .. csv-table:: :header-rows: 1 diff --git a/doc/requirements.txt b/doc/requirements.txt new file mode 100644 index 00000000..890eda95 --- /dev/null +++ b/doc/requirements.txt @@ -0,0 +1,17 @@ +sphinx +sphinx_rtd_theme + +pypsa +vresutils>=0.3.1 +powerplantmatching>=0.4.8 +atlite>=0.2.2 +dask<=2021.3.1 + +# cartopy +scikit-learn +pycountry +pyyaml +seaborn +memory_profiler +tables +descartes \ No newline at end of file diff --git a/envs/environment.docs.yaml b/envs/environment.docs.yaml deleted file mode 100755 index 9edf0118..00000000 --- a/envs/environment.docs.yaml +++ /dev/null @@ -1,33 +0,0 @@ -# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors -# -# SPDX-License-Identifier: GPL-3.0-or-later - -name: pypsa-eur-docs -channels: - - conda-forge -dependencies: - - python<=3.7 - - pip - #- pypsa>=0.17.1 - - atlite>=0.2.2 - - dask<=2021.3.1 # until https://github.com/dask/dask/issues/7583 is solved - - pre-commit - - # Dependencies of the workflow itself - - scikit-learn - - pycountry - - seaborn - - memory_profiler - - yaml - - pytables - - powerplantmatching>=0.4.8 - - # GIS dependencies have to come all from conda-forge - - cartopy - - descartes - - - pip: - - git+https://github.com/pypsa/pypsa.git#egg=pypsa - - vresutils==0.3.1 - - sphinx - - sphinx_rtd_theme From 0fa3888b0ebdf393208483ce8c4e62b69898b599 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: 
Fri, 13 Aug 2021 10:15:06 +0200 Subject: [PATCH 038/102] restore REUSE compliance [skip ci] --- doc/requirements.txt | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/doc/requirements.txt b/doc/requirements.txt index 890eda95..2b461718 100644 --- a/doc/requirements.txt +++ b/doc/requirements.txt @@ -1,3 +1,7 @@ +# SPDX-FileCopyrightText: : 2019-2021 The PyPSA-Eur Authors +# +# SPDX-License-Identifier: CC0-1.0 + sphinx sphinx_rtd_theme From 400317d0622ca67e4ff02d00af96d739ccd3b354 Mon Sep 17 00:00:00 2001 From: martacki Date: Mon, 16 Aug 2021 17:53:56 +0200 Subject: [PATCH 039/102] update plot_network and make_summary scripts to latest pypsa/-eur versions --- Snakefile | 1 - scripts/make_summary.py | 2 +- scripts/plot_network.py | 33 ++++++++++++++++++++------------- 3 files changed, 21 insertions(+), 15 deletions(-) diff --git a/Snakefile b/Snakefile index 7f5be34d..2f8eea3b 100644 --- a/Snakefile +++ b/Snakefile @@ -361,7 +361,6 @@ def input_make_summary(w): ll = w.ll return ([COSTS] + expand("results/networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", - network=w.network, ll=ll, **{k: config["scenario"][k] if getattr(w, k) == "all" else getattr(w, k) for k in ["simpl", "clusters", "opts"]})) diff --git a/scripts/make_summary.py b/scripts/make_summary.py index e26db34c..53482c48 100644 --- a/scripts/make_summary.py +++ b/scripts/make_summary.py @@ -444,7 +444,7 @@ if __name__ == "__main__": ll = [snakemake.wildcards.ll] networks_dict = {(simpl,clusters,l,opts) : - os.path.join(network_dir, f'{snakemake.wildcards.network}_s{simpl}_' + os.path.join(network_dir, f'elec_s{simpl}_' f'{clusters}_ec_l{l}_{opts}.nc') for simpl in expand_from_wildcard("simpl") for clusters in expand_from_wildcard("clusters") diff --git a/scripts/plot_network.py b/scripts/plot_network.py index 61a2ac9b..571f5bad 100755 --- a/scripts/plot_network.py +++ b/scripts/plot_network.py @@ -88,36 +88,43 @@ def plot_map(n, ax=None, attribute='p_nom', opts={}): # bus_sizes = 
n.generators_t.p.sum().loc[n.generators.carrier == "load"].groupby(n.generators.bus).sum() bus_sizes = pd.concat((n.generators.query('carrier != "load"').groupby(['bus', 'carrier']).p_nom_opt.sum(), n.storage_units.groupby(['bus', 'carrier']).p_nom_opt.sum())) - line_widths_exp = dict(Line=n.lines.s_nom_opt, Link=n.links.p_nom_opt) - line_widths_cur = dict(Line=n.lines.s_nom_min, Link=n.links.p_nom_min) + line_widths_exp = n.lines.s_nom_opt + line_widths_cur = n.lines.s_nom_min + link_widths_exp = n.links.p_nom_opt + link_widths_cur = n.links.p_nom_min else: raise 'plotting of {} has not been implemented yet'.format(attribute) line_colors_with_alpha = \ - dict(Line=(line_widths_cur['Line'] / n.lines.s_nom > 1e-3) - .map({True: line_colors['cur'], False: to_rgba(line_colors['cur'], 0.)}), - Link=(line_widths_cur['Link'] / n.links.p_nom > 1e-3) + ((line_widths_cur / n.lines.s_nom > 1e-3) + .map({True: line_colors['cur'], False: to_rgba(line_colors['cur'], 0.)})) + link_colors_with_alpha = \ + ((link_widths_cur / n.links.p_nom > 1e-3) .map({True: line_colors['cur'], False: to_rgba(line_colors['cur'], 0.)})) + ## FORMAT linewidth_factor = opts['map'][attribute]['linewidth_factor'] bus_size_factor = opts['map'][attribute]['bus_size_factor'] ## PLOT - n.plot(line_widths=pd.concat(line_widths_exp)/linewidth_factor, - line_colors=dict(Line=line_colors['exp'], Link=line_colors['exp']), + n.plot(line_widths=line_widths_exp/linewidth_factor, + link_widths=link_widths_exp/linewidth_factor, + line_colors=line_colors['exp'], + link_colors=line_colors['exp'], bus_sizes=bus_sizes/bus_size_factor, bus_colors=tech_colors, boundaries=map_boundaries, - geomap=True, + color_geomap=True, geomap=True, ax=ax) - n.plot(line_widths=pd.concat(line_widths_cur)/linewidth_factor, - line_colors=pd.concat(line_colors_with_alpha), + n.plot(line_widths=line_widths_cur/linewidth_factor, + link_widths=link_widths_cur/linewidth_factor, + line_colors=line_colors_with_alpha, + 
link_colors=link_colors_with_alpha, bus_sizes=0, - bus_colors=tech_colors, boundaries=map_boundaries, - geomap=False, + color_geomap=True, geomap=False, ax=ax) ax.set_aspect('equal') ax.axis('off') @@ -138,7 +145,7 @@ def plot_map(n, ax=None, attribute='p_nom', opts={}): loc="upper left", bbox_to_anchor=(0.24, 1.01), frameon=False, labelspacing=0.8, handletextpad=1.5, - title='Transmission Exist./Exp. ') + title='Transmission Exp./Exist. ') ax.add_artist(l1_1) handles = [] From 1c173567b5fdb6c4ebdc2ff1ccb802fc95439879 Mon Sep 17 00:00:00 2001 From: Martha Frysztacki Date: Wed, 18 Aug 2021 10:38:51 +0200 Subject: [PATCH 040/102] solve_operations_network: include optimized stores to operation (#269) * remove stores-buses from load generators * include optimized stores to operation network * add release notes * Update doc/release_notes.rst Co-authored-by: Fabian Neumann * Update doc/release_notes.rst Co-authored-by: Fabian Neumann * Update release_notes.rst * Update scripts/solve_network.py Co-authored-by: Fabian Neumann --- doc/release_notes.rst | 2 ++ scripts/solve_network.py | 5 +++-- scripts/solve_operations_network.py | 13 +++++++++---- 3 files changed, 14 insertions(+), 6 deletions(-) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index e35d3cf6..af9a58f6 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -26,6 +26,8 @@ Upcoming Release * Bugfix in :mod:`build_renewable_profile` where offshore wind profiles could no longer be created [`#249 `_]. * Implements changes to ``n.snapshot_weightings`` in upcoming PyPSA version (cf. `PyPSA/PyPSA/#227 `_) [`#259 `_]. * Bugfix: Lower expansion limit of extendable carriers is now set to the existing capacity, i.e. ``p_nom_min = p_nom`` (0 before). Simultaneously, the upper limit (``p_nom_max``) is now the maximum of the installed capacity (``p_nom``) and the previous estimate based on land availability (``p_nom_max``) [`#260 `_]. 
+* Bugfix: Solving an operations network now includes optimized store capacities as well. Before only lines, links, generators and storage units were considered. +* Bugfix: With ``load_shedding: true`` in the solving options of ``config.yaml`` load shedding generators are only added at the AC buses, excluding buses for H2 and battery stores. PyPSA-Eur 0.3.0 (7th December 2020) ================================== diff --git a/scripts/solve_network.py b/scripts/solve_network.py index f8146b43..d874d335 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -101,8 +101,9 @@ def prepare_network(n, solve_opts): if solve_opts.get('load_shedding'): n.add("Carrier", "Load") - n.madd("Generator", n.buses.index, " load", - bus=n.buses.index, + buses_i = n.buses.query("carrier == 'AC'").index + n.madd("Generator", buses_i, " load", + bus=buses_i, carrier='load', sign=1e-3, # Adjust sign to measure p and p_nom in kW instead of MW marginal_cost=1e2, # Eur/kWh diff --git a/scripts/solve_operations_network.py b/scripts/solve_operations_network.py index b698c2f1..9f97754a 100644 --- a/scripts/solve_operations_network.py +++ b/scripts/solve_operations_network.py @@ -81,10 +81,15 @@ def set_parameters_from_optimized(n, n_optim): n_optim.generators['p_nom_opt'].reindex(gen_extend_i, fill_value=0.) n.generators.loc[gen_extend_i, 'p_nom_extendable'] = False - stor_extend_i = n.storage_units.index[n.storage_units.p_nom_extendable] - n.storage_units.loc[stor_extend_i, 'p_nom'] = \ - n_optim.storage_units['p_nom_opt'].reindex(stor_extend_i, fill_value=0.) - n.storage_units.loc[stor_extend_i, 'p_nom_extendable'] = False + stor_units_extend_i = n.storage_units.index[n.storage_units.p_nom_extendable] + n.storage_units.loc[stor_units_extend_i, 'p_nom'] = \ + n_optim.storage_units['p_nom_opt'].reindex(stor_units_extend_i, fill_value=0.) 
+ n.storage_units.loc[stor_units_extend_i, 'p_nom_extendable'] = False + + stor_extend_i = n.stores.index[n.stores.e_nom_extendable] + n.stores.loc[stor_extend_i, 'e_nom'] = \ + n_optim.stores['e_nom_opt'].reindex(stor_extend_i, fill_value=0.) + n.stores.loc[stor_extend_i, 'e_nom_extendable'] = False return n From a6d2a0a99d42d6647565e05675a99fae081e289b Mon Sep 17 00:00:00 2001 From: Martha Frysztacki Date: Thu, 26 Aug 2021 16:00:08 +0200 Subject: [PATCH 041/102] style Co-authored-by: Fabian Hofmann --- scripts/simplify_network.py | 30 ++++++++++++------------------ 1 file changed, 12 insertions(+), 18 deletions(-) diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index 82e89eff..17c23a43 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -331,28 +331,22 @@ def aggregate_to_substations(n, buses_i=None): logger.info("Aggregating buses that are no substations or have no valid offshore connection") buses_i = list(set(n.buses.index)-set(n.generators.bus)-set(n.loads.bus)) - busmap = n.buses.index.to_series() - - index = [np.append(["Line" for c in range(len(n.lines))], - ["Link" for c in range(len(n.links))]), - np.append(n.lines.index, n.links.index)] - #under_construction lines should be last choice, but weight should be < inf in case no other node is reachable, hence 1e-3 - weight = pd.Series(np.append((n.lines.length/n.lines.s_nom.apply(lambda b: b if b>0 else 1e-3)).values, - (n.links.length/n.links.p_nom.apply(lambda b: b if b>0 else 1e-3)).values), - index=index) + weight = pd.concat({'Line': n.lines.length/n.lines.s_nom.clip(1e-3), + 'Link': n.links.length/n.links.p_nom.clip(1e-3)}) adj = n.adjacency_matrix(branch_components=['Line', 'Link'], weights=weight) - dist = dijkstra(adj, directed=False, indices=n.buses.index.get_indexer(buses_i)) - dist[:, n.buses.index.get_indexer(buses_i)] = np.inf #bus in buses_i should not be assigned to different bus in buses_i + bus_indexer = n.buses.index.get_indexer(buses_i) + 
dist = pd.DataFrame(dijkstra(adj, directed=False, indices=bus_indexer), buses_i, n.buses.index) - #restrict to same country: - for bus in buses_i: - country_buses = n.buses[~n.buses.country.isin([n.buses.loc[bus].country])].index - dist[n.buses.loc[buses_i].index.get_indexer([bus]),n.buses.index.get_indexer(country_buses)] = np.inf - - assign_to = dist.argmin(axis=1) - busmap.loc[buses_i] = n.buses.iloc[assign_to].index + dist[buses_i] = np.inf # bus in buses_i should not be assigned to different bus in buses_i + + for c in n.buses.country.unique(): + incountry_b = n.buses.country == c + dist.loc[incountry_b, ~incountry_b] = np.inf + + busmap = n.buses.index.to_series() + busmap.loc[buses_i] = dist.idxmin(1) clustering = get_clustering_from_busmap(n, busmap, bus_strategies=dict(country=_make_consense("Bus", "country")), From b9bbefa09f32f35f41c75eb85e29a592db4a1f55 Mon Sep 17 00:00:00 2001 From: Fabian Hofmann Date: Fri, 27 Aug 2021 10:34:27 +0200 Subject: [PATCH 042/102] simplify_network.py fix dict getting --- scripts/simplify_network.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index 17c23a43..6e12e5e8 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -400,7 +400,7 @@ if __name__ == "__main__": busmaps = [trafo_map, simplify_links_map, stub_map] - if snakemake.config['clustering']['simplify']['to_substations']: + if snakemake.config.get('clustering', {}).get('simplify', {}).get('to_substations', False): n, substation_map = aggregate_to_substations(n) busmaps.append(substation_map) From ea50abab6d6debdfd594e8d0387a83fde2507161 Mon Sep 17 00:00:00 2001 From: Fabian Date: Fri, 27 Aug 2021 12:30:29 +0200 Subject: [PATCH 043/102] doc: fix build warnings --- doc/cloudcomputing.rst | 1 + doc/configtables/electricity.csv | 24 ++++++++++++------------ doc/configtables/offwind-dc.csv | 2 +- doc/configtables/onwind.csv | 2 +- doc/configtables/opts.csv | 10 
+++++----- doc/configtables/solar.csv | 2 +- doc/configuration.rst | 5 +++-- doc/index.rst | 2 +- doc/plotting.rst | 4 ++-- doc/preparation.rst | 1 - doc/preparation/retrieve.rst | 2 +- doc/release_notes.rst | 2 +- doc/tutorial.rst | 2 +- scripts/build_shapes.py | 4 ++-- 14 files changed, 32 insertions(+), 31 deletions(-) diff --git a/doc/cloudcomputing.rst b/doc/cloudcomputing.rst index f20e1b23..f751d624 100644 --- a/doc/cloudcomputing.rst +++ b/doc/cloudcomputing.rst @@ -108,6 +108,7 @@ Make sure that your instance is operating for the next steps. - Option 1. Click on the Tools button and "Install Public Key into Server..". Somewhere in your folder structure must be a public key. I found it with the following folder syntax on my local windows computer -> :\Users\...\.ssh (there should be a PKK file). - Option 2. Click on the Tools button and "Generate new key pair...". Save the private key at a folder you remember and add it to the "private key file" field in WinSCP. Upload the public key to the metadeta of your instance. - Click ok and save. Then click Login. If successfull WinSCP will open on the left side your local computer folder structure and on the right side the folder strucutre of your VM. (If you followed Option 2 and its not initially working. Stop your instance, refresh the website, reopen the WinSCP field. Afterwards your your Login should be successfull) + If you had struggle with the above steps, you could also try `this video `_. .. 
note:: diff --git a/doc/configtables/electricity.csv b/doc/configtables/electricity.csv index aaeab239..aef35350 100644 --- a/doc/configtables/electricity.csv +++ b/doc/configtables/electricity.csv @@ -1,19 +1,19 @@ -,Unit,Values,Description, -voltages,kV,"Any subset of {220., 300., 380.}",Voltage levels to consider when, -co2limit,:math:`t_{CO_2-eq}/a`,float,Cap on total annual system carbon dioxide emissions, -co2base,:math:`t_{CO_2-eq}/a`,float,Reference value of total annual system carbon dioxide emissions if relative emission reduction target is specified in ``{opts}`` wildcard., +,Unit,Values,Description +voltages,kV,"Any subset of {220., 300., 380.}",Voltage levels to consider when +co2limit,:math:`t_{CO_2-eq}/a`,float,Cap on total annual system carbon dioxide emissions +co2base,:math:`t_{CO_2-eq}/a`,float,Reference value of total annual system carbon dioxide emissions if relative emission reduction target is specified in ``{opts}`` wildcard. agg_p_nom_limits,file,path,Reference to ``.csv`` file specifying per carrier generator nominal capacity constraints for individual countries if ``'CCL'`` is in ``{opts}`` wildcard. Defaults to ``data/agg_p_nom_minmax.csv``. -extendable_carriers,,,, +extendable_carriers,,, -- Generator,--,"Any subset of {'OCGT','CCGT'}",Places extendable conventional power plants (OCGT and/or CCGT) where gas power plants are located today without capacity limits. -- StorageUnit,--,"Any subset of {'battery','H2'}",Adds extendable storage units (battery and/or hydrogen) at every node/bus after clustering without capacity limits and with zero initial capacity. -- Store,--,"Any subset of {'battery','H2'}",Adds extendable storage units (battery and/or hydrogen) at every node/bus after clustering without capacity limits and with zero initial capacity. -- Link,--,Any subset of {'H2 pipeline'},Adds extendable links (H2 pipelines only) at every connection where there are lines or HVDC links without capacity limits and with zero initial capacity. 
Hydrogen pipelines require hydrogen storage to be modelled as ``Store``. -max_hours,,,, +max_hours,,, -- battery,h,float,Maximum state of charge capacity of the battery in terms of hours at full output capacity ``p_nom``. Cf. `PyPSA documentation `_. -- H2,h,float,Maximum state of charge capacity of the hydrogen storage in terms of hours at full output capacity ``p_nom``. Cf. `PyPSA documentation `_. -powerplants_filter,--,"use `pandas.query `_ strings here, e.g. Country not in ['Germany']",Filter query for the default powerplant database., -custom_powerplants,--,"use `pandas.query `_ strings here, e.g. Country in ['Germany']",Filter query for the custom powerplant database., -conventional_carriers,--,"Any subset of {nuclear, oil, OCGT, CCGT, coal, lignite, geothermal, biomass}",List of conventional power plants to include in the model from ``resources/powerplants.csv``., -renewable_capacities_from_OPSD,,"[solar, onwind, offwind]",List of carriers (offwind-ac and offwind-dc are included in offwind) whose capacities 'p_nom' are aligned to the `OPSD renewable power plant list `_, -estimate_renewable_capacities_from_capacitiy_stats,,,, -"-- Fueltype [ppm], e.g. Wind",,"list of fueltypes strings in PyPSA-Eur, e.g. [onwind, offwind-ac, offwind-dc]",converts ppm Fueltype to PyPSA-EUR Fueltype, +powerplants_filter,--,"use `pandas.query `_ strings here, e.g. Country not in ['Germany']",Filter query for the default powerplant database. +custom_powerplants,--,"use `pandas.query `_ strings here, e.g. Country in ['Germany']",Filter query for the custom powerplant database. +conventional_carriers,--,"Any subset of {nuclear, oil, OCGT, CCGT, coal, lignite, geothermal, biomass}",List of conventional power plants to include in the model from ``resources/powerplants.csv``. 
+renewable_capacities_from_OPSD,,"[solar, onwind, offwind]",List of carriers (offwind-ac and offwind-dc are included in offwind) whose capacities 'p_nom' are aligned to the `OPSD renewable power plant list `_ +estimate_renewable_capacities_from_capacitiy_stats,,, +"-- Fueltype [ppm], e.g. Wind",,"list of fueltypes strings in PyPSA-Eur, e.g. [onwind, offwind-ac, offwind-dc]",converts ppm Fueltype to PyPSA-EUR Fueltype diff --git a/doc/configtables/offwind-dc.csv b/doc/configtables/offwind-dc.csv index e5bbc847..06b82ba0 100644 --- a/doc/configtables/offwind-dc.csv +++ b/doc/configtables/offwind-dc.csv @@ -2,7 +2,7 @@ cutout,--,"Should be a folder listed in the configuration ``atlite: cutouts:`` (e.g. 'europe-2013-era5') or reference an existing folder in the directory ``cutouts``. Source module must be ERA5.","Specifies the directory where the relevant weather data ist stored." resource,,, -- method,--,"Must be 'wind'","A superordinate technology type." --- turbine,--,"One of turbine types included in `atlite `_","Specifies the turbine type and its characteristic power curve." +-- turbine,--,"One of turbine types included in `atlite `__","Specifies the turbine type and its characteristic power curve." capacity_per_sqkm,:math:`MW/km^2`,float,"Allowable density of wind turbine placement." corine,--,"Any *realistic* subset of the `CORINE Land Cover code list `_","Specifies areas according to CORINE Land Cover codes which are generally eligible for AC-connected offshore wind turbine placement." natura,bool,"{true, false}","Switch to exclude `Natura 2000 `_ natural protection areas. Area is excluded if ``true``." diff --git a/doc/configtables/onwind.csv b/doc/configtables/onwind.csv index 149dc0c4..31884183 100644 --- a/doc/configtables/onwind.csv +++ b/doc/configtables/onwind.csv @@ -2,7 +2,7 @@ cutout,--,"Should be a folder listed in the configuration ``atlite: cutouts:`` (e.g. 'europe-2013-era5') or reference an existing folder in the directory ``cutouts``. 
Source module must be ERA5.","Specifies the directory where the relevant weather data ist stored." resource,,, -- method,--,"Must be 'wind'","A superordinate technology type." --- turbine,--,"One of turbine types included in `atlite `_","Specifies the turbine type and its characteristic power curve." +-- turbine,--,"One of turbine types included in `atlite `__","Specifies the turbine type and its characteristic power curve." capacity_per_sqkm,:math:`MW/km^2`,float,"Allowable density of wind turbine placement." corine,,, -- grid_codes,--,"Any subset of the `CORINE Land Cover code list `_","Specifies areas according to CORINE Land Cover codes which are generally eligible for wind turbine placement." diff --git a/doc/configtables/opts.csv b/doc/configtables/opts.csv index da673ac8..918d0d17 100644 --- a/doc/configtables/opts.csv +++ b/doc/configtables/opts.csv @@ -1,11 +1,11 @@ Trigger, Description, Definition, Status -``nH``; i.e. ``2H``-``6H``, Resample the time-resolution by averaging over every ``n`` snapshots, ``prepare_network``: `average_every_nhours() `_ and its `caller `_), In active use +``nH``; i.e. ``2H``-``6H``, Resample the time-resolution by averaging over every ``n`` snapshots, ``prepare_network``: `average_every_nhours() `_ and its `caller `__), In active use ``nSEG``; e.g. ``4380SEG``, "Apply time series segmentation with `tsam `_ package to ``n`` adjacent snapshots of varying lengths based on capacity factors of varying renewables, hydro inflow and load.", ``prepare_network``: apply_time_segmentation(), In active use -``Co2L``, Add an overall absolute carbon-dioxide emissions limit configured in ``electricity: co2limit``. If a float is appended an overall emission limit relative to the emission level given in ``electricity: co2base`` is added (e.g. 
``Co2L0.05`` limits emissisions to 5% of what is given in ``electricity: co2base``), ``prepare_network``: `add_co2limit() `_ and its `caller `_, In active use -``Ep``, Add cost for a carbon-dioxide price configured in ``costs: emission_prices: co2`` to ``marginal_cost`` of generators (other emission types listed in ``network.carriers`` possible as well), ``prepare_network``: `add_emission_prices() `_ and its `caller `_, In active use +``Co2L``, Add an overall absolute carbon-dioxide emissions limit configured in ``electricity: co2limit``. If a float is appended an overall emission limit relative to the emission level given in ``electricity: co2base`` is added (e.g. ``Co2L0.05`` limits emissisions to 5% of what is given in ``electricity: co2base``), ``prepare_network``: `add_co2limit() `_ and its `caller `__, In active use +``Ep``, Add cost for a carbon-dioxide price configured in ``costs: emission_prices: co2`` to ``marginal_cost`` of generators (other emission types listed in ``network.carriers`` possible as well), ``prepare_network``: `add_emission_prices() `_ and its `caller `__, In active use ``CCL``, Add minimum and maximum levels of generator nominal capacity per carrier for individual countries. These can be specified in the file linked at ``electricity: agg_p_nom_limits`` in the configuration. File defaults to ``data/agg_p_nom_minmax.csv``., ``solve_network``, In active use ``EQ``, "Require each country or node to on average produce a minimal share of its total consumption itself. Example: ``EQ0.5c`` demands each country to produce on average at least 50% of its consumption; ``EQ0.5`` demands each node to produce on average at least 50% of its consumption.", ``solve_network``, In active use ``ATK``, "Require each node to be autarkic. Example: ``ATK`` removes all lines and links. ``ATKc`` removes all cross-border lines and links.", ``prepare_network``, In active use -``BAU``, Add a per-``carrier`` minimal overall capacity; i.e. 
at least ``40GW`` of ``OCGT`` in Europe; configured in ``electricity: BAU_mincapacities``, ``solve_network``: `add_opts_constraints() `_, Untested -``SAFE``, Add a capacity reserve margin of a certain fraction above the peak demand to which renewable generators and storage do *not* contribute. Ignores network., ``solve_network`` `add_opts_constraints() `_, Untested +``BAU``, Add a per-``carrier`` minimal overall capacity; i.e. at least ``40GW`` of ``OCGT`` in Europe; configured in ``electricity: BAU_mincapacities``, ``solve_network``: `add_opts_constraints() `__, Untested +``SAFE``, Add a capacity reserve margin of a certain fraction above the peak demand to which renewable generators and storage do *not* contribute. Ignores network., ``solve_network`` `add_opts_constraints() `__, Untested ``carrier+{c|p}factor``, "Alter the capital cost (``c``) or installable potential (``p``) of a carrier by a factor. Example: ``solar+c0.5`` reduces the capital cost of solar to 50\% of original values.", ``prepare_network``, In active use diff --git a/doc/configtables/solar.csv b/doc/configtables/solar.csv index 8e57e066..7be39c04 100644 --- a/doc/configtables/solar.csv +++ b/doc/configtables/solar.csv @@ -2,7 +2,7 @@ cutout,--,"Should be a folder listed in the configuration ``atlite: cutouts:`` (e.g. 'europe-2013-era5') or reference an existing folder in the directory ``cutouts``. Source module can be ERA5 or SARAH-2.","Specifies the directory where the relevant weather data ist stored that is specified at ``atlite/cutouts`` configuration. Both ``sarah`` and ``era5`` work." resource,,, -- method,--,"Must be 'pv'","A superordinate technology type." --- panel,--,"One of {'Csi', 'CdTe', 'KANENA'} as defined in `atlite `_","Specifies the solar panel technology and its characteristic attributes." +-- panel,--,"One of {'Csi', 'CdTe', 'KANENA'} as defined in `atlite `__","Specifies the solar panel technology and its characteristic attributes." 
-- orientation,,, -- -- slope,°,"Realistically any angle in [0., 90.]","Specifies the tilt angle (or slope) of the solar panel. A slope of zero corresponds to the face of the panel aiming directly overhead. A positive tilt angle steers the panel towards the equator." -- -- azimuth,°,"Any angle in [0., 360.]","Specifies the `azimuth `_ orientation of the solar panel. South corresponds to 180.°." diff --git a/doc/configuration.rst b/doc/configuration.rst index a6683046..a448f817 100644 --- a/doc/configuration.rst +++ b/doc/configuration.rst @@ -18,7 +18,8 @@ Top-level configuration .. literalinclude:: ../config.default.yaml :language: yaml - :lines: 5-12,20,27-34 + :lines: 5-12,20,31-38 + .. csv-table:: :header-rows: 1 @@ -96,7 +97,7 @@ Specifies the temporal range to build an energy system model for as arguments to .. _atlite_cf: ``atlite`` -============= +========== Define and specify the ``atlite.Cutout`` used for calculating renewable potentials and time-series. All options except for ``features`` are directly used as `cutout parameters `_. diff --git a/doc/index.rst b/doc/index.rst index e7dabdf4..aace99cb 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -141,7 +141,7 @@ If you are (relatively) new to energy system modelling and optimisation and plan to use PyPSA-Eur, the following resources are *one way* to get started in addition to reading this documentation. -- Documentation of `PyPSA `_, the package for +- Documentation of `PyPSA `__, the package for simulating and optimising modern power systems which PyPSA-Eur uses under the hood. - Course on `Energy System Modelling `_, Karlsruhe Institute of Technology (KIT), `Dr. Tom Brown `_ diff --git a/doc/plotting.rst b/doc/plotting.rst index 6b76a28c..6b0ce392 100644 --- a/doc/plotting.rst +++ b/doc/plotting.rst @@ -84,8 +84,8 @@ Rule ``make_summary`` Rule ``plot_summary`` ======================== -.. graphviz:: - :align: center +.. .. graphviz:: +.. 
:align: center diff --git a/doc/preparation.rst b/doc/preparation.rst index 9e986580..dba5e981 100644 --- a/doc/preparation.rst +++ b/doc/preparation.rst @@ -45,7 +45,6 @@ together into a detailed PyPSA network stored in ``networks/elec.nc``. preparation/prepare_links_p_nom preparation/base_network preparation/build_bus_regions - preparation/build_natura_raster preparation/build_powerplants preparation/build_renewable_profiles preparation/build_hydro_profile diff --git a/doc/preparation/retrieve.rst b/doc/preparation/retrieve.rst index 26f152c5..42479284 100644 --- a/doc/preparation/retrieve.rst +++ b/doc/preparation/retrieve.rst @@ -25,7 +25,7 @@ Rule ``retrieve_cutout`` :target: https://doi.org/10.5281/zenodo.3517949 Cutouts are spatio-temporal subsets of the European weather data from the `ECMWF ERA5 `_ reanalysis dataset and the `CMSAF SARAH-2 `_ solar surface radiation dataset for the year 2013. -They have been prepared by and are for use with the `atlite `_ tool. You can either generate them yourself using the ``build_cutouts`` rule or retrieve them directly from `zenodo `_ through the rule ``retrieve_cutout``. +They have been prepared by and are for use with the `atlite `_ tool. You can either generate them yourself using the ``build_cutouts`` rule or retrieve them directly from `zenodo `__ through the rule ``retrieve_cutout``. The :ref:`tutorial` uses a smaller cutout than required for the full model (30 MB), which is also automatically downloaded. .. note:: diff --git a/doc/release_notes.rst b/doc/release_notes.rst index db46bea0..1e9e4bc3 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -31,7 +31,7 @@ Upcoming Release * Bugfix: With ``load_shedding: true`` in the solving options of ``config.yaml`` load shedding generators are only added at the AC buses, excluding buses for H2 and battery stores. 
PyPSA-Eur 0.3.0 (7th December 2020) -================================== +=================================== **New Features** diff --git a/doc/tutorial.rst b/doc/tutorial.rst index d2fb8433..17d4e3c1 100644 --- a/doc/tutorial.rst +++ b/doc/tutorial.rst @@ -14,7 +14,7 @@ Tutorial Before getting started with **PyPSA-Eur** it makes sense to be familiar -with its general modelling framework `PyPSA `_. +with its general modelling framework `PyPSA `__. Running the tutorial requires limited computational resources compared to the full model, which allows the user to explore most of its functionalities on a local machine. diff --git a/scripts/build_shapes.py b/scripts/build_shapes.py index 96d4a60f..59603f96 100644 --- a/scripts/build_shapes.py +++ b/scripts/build_shapes.py @@ -34,8 +34,8 @@ Inputs .. image:: ../img/nuts3.png :scale: 33 % -- ``data/bundle/nama_10r_3popgdp.tsv.gz``: Average annual population by NUTS3 region (`eurostat `_) -- ``data/bundle/nama_10r_3gdp.tsv.gz``: Gross domestic product (GDP) by NUTS 3 regions (`eurostat `_) +- ``data/bundle/nama_10r_3popgdp.tsv.gz``: Average annual population by NUTS3 region (`eurostat `__) +- ``data/bundle/nama_10r_3gdp.tsv.gz``: Gross domestic product (GDP) by NUTS 3 regions (`eurostat `__) - ``data/bundle/ch_cantons.csv``: Mapping between Swiss Cantons and NUTS3 regions - ``data/bundle/je-e-21.03.02.xls``: Population and GDP data per Canton (`BFS - Swiss Federal Statistical Office `_ ) From 8bec0cc423ca5b570f10a8ab343fe114a9868ae8 Mon Sep 17 00:00:00 2001 From: Martha Frysztacki Date: Tue, 7 Sep 2021 16:28:01 +0200 Subject: [PATCH 044/102] simplify: drop incorrect cols if they exist (avoid error) (#272) --- scripts/simplify_network.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index 6e12e5e8..48f0ebe6 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -407,8 +407,11 @@ if __name__ == "__main__": if 
snakemake.wildcards.simpl: n, cluster_map = cluster(n, int(snakemake.wildcards.simpl)) busmaps.append(cluster_map) - else: - n.buses = n.buses.drop(['symbol', 'tags', 'under_construction', 'substation_lv', 'substation_off'], axis=1) + + # some entries in n.buses are not updated in previous functions, therefore can be wrong. as they are not needed + # and are lost when clustering (for example with the simpl wildcard), we remove them for consistency: + buses_c = {'symbol', 'tags', 'under_construction', 'substation_lv', 'substation_off'}.intersection(n.buses.columns) + n.buses = n.buses.drop(buses_c, axis=1) update_p_nom_max(n) From 547080420ef0ea2279dab2e8ab683d035ad47a41 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 8 Sep 2021 11:12:41 +0200 Subject: [PATCH 045/102] add citation.cff --- CITATION.cff | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 CITATION.cff diff --git a/CITATION.cff b/CITATION.cff new file mode 100644 index 00000000..54a78722 --- /dev/null +++ b/CITATION.cff @@ -0,0 +1,30 @@ +cff-version: 1.1.0 +message: "If you use this package, please cite the corresponding manuscript in Energy Strategy Reviews." 
+title: "PyPSA-Eur: An open optimisation model of the European transmission system" +repository: https://github.com/pypsa/pypsa-eur +version: 0.3.0 +license: GPLv3 +journal: Energy Strategy Reviews +doi: 10.1016/j.esr.2018.08.012 +authors: + - family-names: Hörsch + given-names: Jonas + orcid: https://orcid.org/0000-0001-9438-767X + - family-names: Brown + given-names: Tom + orcid: https://orcid.org/0000-0001-5898-1911 + - family-names: Hofmann + given-names: Fabian + orcid: https://orcid.org/0000-0002-6604-5450 + - family-names: Neumann + given-names: Fabian + orcid: https://orcid.org/0000-0001-8551-1480 + - family-names: Frysztacki + given-names: Martha + orcid: https://orcid.org/0000-0002-0788-1328 + - family-names: Hampp + given-names: Johannes + orcid: https://orcid.org/0000-0002-1776-116X + - family-names: Schlachtberger + given-names: David + orcid: https://orcid.org/0000-0002-8167-8213 From 1d68a300e500cb648f792521c0c4e7b19470ec93 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 14 Sep 2021 14:14:31 +0200 Subject: [PATCH 046/102] update environment.yaml for pypsa 0.18.0 (#268) * update environment.yaml * test pandas<1.3 Co-authored-by: Fabian Hofmann --- envs/environment.yaml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/envs/environment.yaml b/envs/environment.yaml index 3dcbeea0..d2d85e97 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -12,8 +12,8 @@ dependencies: - pip - mamba # esp for windows build - #- pypsa>=0.17.1 - - atlite>=0.2.4 + - pypsa>=0.18 + - atlite>=0.2.5 - dask<=2021.3.1 # until https://github.com/dask/dask/issues/7583 is solved # Dependencies of the workflow itself @@ -27,8 +27,8 @@ dependencies: - pytables - lxml - powerplantmatching>=0.4.8 - - numpy<=1.19 # until new PyPSA after 27-06-21 - - pandas<1.3 # until new PyPSA after 2-7-21, https://github.com/PyPSA/pypsa-eur/pull/261 + - numpy + - pandas<1.3 - geopandas - xarray - netcdf4 @@ -55,6 +55,5 @@ dependencies: - tabula-py - pip: 
- - git+https://github.com/pypsa/pypsa.git#egg=pypsa - vresutils==0.3.1 - tsam>=1.1.0 From b88322587f4caf2b66a1ccd5a2a4d3eef2b948bc Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 14 Sep 2021 16:15:25 +0200 Subject: [PATCH 047/102] Release 0.4 (#274) * update release notes [skip ci] * update fixed versions environment.yaml * format release notes --- CITATION.cff | 2 +- config.default.yaml | 2 +- config.tutorial.yaml | 2 +- doc/conf.py | 2 +- doc/release_notes.rst | 131 +++++++++++-- envs/environment.fixed.yaml | 374 ++++++++++++++++++++---------------- test/config.test1.yaml | 2 +- 7 files changed, 327 insertions(+), 188 deletions(-) diff --git a/CITATION.cff b/CITATION.cff index 54a78722..b10377e7 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -2,7 +2,7 @@ cff-version: 1.1.0 message: "If you use this package, please cite the corresponding manuscript in Energy Strategy Reviews." title: "PyPSA-Eur: An open optimisation model of the European transmission system" repository: https://github.com/pypsa/pypsa-eur -version: 0.3.0 +version: 0.4.0 license: GPLv3 journal: Energy Strategy Reviews doi: 10.1016/j.esr.2018.08.012 diff --git a/config.default.yaml b/config.default.yaml index 27ca63d0..a20e4d04 100755 --- a/config.default.yaml +++ b/config.default.yaml @@ -2,7 +2,7 @@ # # SPDX-License-Identifier: CC0-1.0 -version: 0.3.0 +version: 0.4.0 tutorial: false logging: diff --git a/config.tutorial.yaml b/config.tutorial.yaml index e551e460..c199712a 100755 --- a/config.tutorial.yaml +++ b/config.tutorial.yaml @@ -2,7 +2,7 @@ # # SPDX-License-Identifier: CC0-1.0 -version: 0.3.0 +version: 0.4.0 tutorial: true logging: diff --git a/doc/conf.py b/doc/conf.py index b2b220a0..00084a48 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -76,7 +76,7 @@ author = u'Jonas Hoersch (KIT, FIAS), Fabian Hofmann (FIAS), David Schlachtberge # The short X.Y version. version = u'0.3' # The full version, including alpha/beta/rc tags. 
-release = u'0.3.0' +release = u'0.4.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 1e9e4bc3..3c59f79f 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -11,24 +11,121 @@ Release Notes Upcoming Release ================ -* Switch to new major release, ``>=v0.2.1`` of ``atlite``. The version upgrade comes along with significant speed up for the rule ``build_renewable_profiles.py`` (~factor 2). A lot of the code which calculated the landuse availability is now outsourced and does not rely on ``glaes``, ``geokit`` anymore. This facilitates the environment building and version compatibility of ``gdal``, ``libgdal`` with other packages. -* The minimum python version was set to ``3.8``. -* The rule and script ``build_country_flh`` are removed as they're no longer used or maintained. -* The flag ``keep_all_available_areas`` in the configuration for renewable potentials (config.yaml -> renewable -> {technology}) was deprecated and now defaults to ``True``. -* The tutorial cutout was renamed from ``cutouts/europe-2013-era5.nc`` to ``cutouts/europe-2013-era5-tutorial.nc`` to accomodate tutorial and productive cutouts side-by-side. -* Fix: Value for ``co2base`` in ``config.yaml`` adjusted to 1.487e9 t CO2-eq (from 3.1e9 t CO2-eq). The new value represents emissions related to the electricity sector for EU+UK. The old value was ~2x too high and used when the emissions wildcard in ``{opts}`` was used. -* Add option to include marginal costs of links representing fuel cells, electrolysis, and battery inverters +* add new features and bugfixes here + + +PyPSA-Eur 0.4.0 (15th September 2021) +===================================== + +**New Features and Changes** + + +* Switch to the new major ``atlite`` release v0.2. The version upgrade comes + along with significant speed up for the rule ``build_renewable_profiles.py`` + (~factor 2). 
A lot of the code which calculated the land-use availability is now + outsourced and does not rely on ``glaes``, ``geokit`` anymore. This facilitates + the environment building and version compatibility of ``gdal``, ``libgdal`` with + other packages [`#224 `_]. + +* Implemented changes to ``n.snapshot_weightings`` in new PyPSA version v0.18 + (cf. `PyPSA/PyPSA/#227 `_) + [`#259 `_]. + +* Add option to pre-aggregate nodes without power injections (positive or + negative, i.e. generation or demand) to electrically closest nodes or neighbors + in ``simplify_network``. Defaults to ``False``. This affects nodes that are no + substations or have no offshore connection. + +* In :mod:`simplify_network`, bus columns with no longer correct entries are + removed (symbol, tags, under_construction, substation_lv, substation_off) + [`#219 `_] + +* Add option to include marginal costs of links representing fuel cells, + electrolysis, and battery inverters [`#232 `_]. -* Add option to pre-aggregate nodes without power injections (positive or negative, i.e. generation or demand) to electrically closest nodes or neighbors in ``simplify_network``. Defaults to ``False``. This affects nodes that are no substations or have no offshore connection. -* Fix: Add escape in :mod:`base_network` if all TYNDP links are already contained in the network [`#246 `_]. -* Bugfix in :mod:`solve_operations_network`: optimised capacities are now fixed for all extendable links, not only HVDC links [`#244 `_]. -* The ``focus_weights`` are now also considered when pre-clustering in the :mod:`simplify_network` rule [`#241 `_]. -* Continuous integration testing switches to Github Actions from Travis CI [`#252 `_]. -* Bugfix in :mod:`build_renewable_profile` where offshore wind profiles could no longer be created [`#249 `_]. -* Implements changes to ``n.snapshot_weightings`` in upcoming PyPSA version (cf. `PyPSA/PyPSA/#227 `_) [`#259 `_]. 
-* Bugfix: Lower expansion limit of extendable carriers is now set to the existing capacity, i.e. ``p_nom_min = p_nom`` (0 before). Simultaneously, the upper limit (``p_nom_max``) is now the maximum of the installed capacity (``p_nom``) and the previous estimate based on land availability (``p_nom_max``) [`#260 `_]. -* Bugfix: Solving an operations network now includes optimized store capacities as well. Before only lines, links, generators and storage units were considered. -* Bugfix: With ``load_shedding: true`` in the solving options of ``config.yaml`` load shedding generators are only added at the AC buses, excluding buses for H2 and battery stores. + +* The rule and script ``build_country_flh`` are removed as they are no longer + used or maintained. + +* The connection cost of generators in :mod:`simplify_network` are now reported + in ``resources/connection_costs_s{simpl}.csv`` + [`#261 `_]. + +* The tutorial cutout was renamed from ``cutouts/europe-2013-era5.nc`` to + ``cutouts/europe-2013-era5-tutorial.nc`` to accomodate tutorial and productive + cutouts side-by-side. + +* The flag ``keep_all_available_areas`` in the configuration for renewable + potentials was deprecated and now defaults to ``True``. + +* Update dependencies in ``envs/environment.yaml`` + [`#257 `_] + +* Continuous integration testing switches to Github Actions from Travis CI + [`#252 `_]. + +* Documentation on readthedocs.io is now built with ``pip`` only and no longer + requires ``conda`` [`#267 `_]. + +* Use ``Citation.cff`` [`#273 `_]. + +**Bugs and Compatibility** + + +* Support for PyPSA v0.18 [`#268 `_]. + +* Minimum Python version set to ``3.8``. + +* Removed ``six`` dependency [`#245 `_]. + +* Update :mod:`plot_network` and :mod:`make_summary` rules to latest PyPSA + versions [`#270 `_]. + +* Bugfix: Keep converter links to store components when using the ``ATK`` + wildcard and only remove DC links [`#214 `_]. 
+ +* Bugfix: Value for ``co2base`` in ``config.yaml`` adjusted to 1.487e9 t CO2-eq + (from 3.1e9 t CO2-eq). The new value represents emissions related to the + electricity sector for EU+UK+Balkan. The old value was too high and used when + the emissions wildcard in ``{opts}`` was used + [`#233 `_]. + +* Bugfix: Add escape in :mod:`base_network` if all TYNDP links are already + contained in the network + [`#246 `_]. + +* Bugfix: In :mod:`solve_operations_network` the optimised capacities are now + fixed for all extendable links, not only HVDC links + [`#244 `_]. + +* Bugfix: The ``focus_weights`` are now also considered when pre-clustering in + the :mod:`simplify_network` rule + [`#241 `_]. + +* Bugfix: in :mod:`build_renewable_profile` where offshore wind profiles could + no longer be created [`#249 `_]. + +* Bugfix: Lower expansion limit of extendable carriers is now set to the + existing capacity, i.e. ``p_nom_min = p_nom`` (0 before). Simultaneously, the + upper limit (``p_nom_max``) is now the maximum of the installed capacity + (``p_nom``) and the previous estimate based on land availability (``p_nom_max``) + [`#260 `_]. + +* Bugfix: Solving an operations network now includes optimized store capacities + as well. Before only lines, links, generators and storage units were considered + [`#269 `_]. + +* Bugfix: With ``load_shedding: true`` in the solving options of ``config.yaml`` + load shedding generators are only added at the AC buses, excluding buses for H2 + and battery stores [`#269 `_]. + +* Bugfix: Delete duplicated capital costs at battery discharge link + [`#240 `_]. + +* Bugfix: Propagate the solver log file name to the solver. 
Previously, the + PyPSA network solving functions were not told about the solver logfile specified + in the Snakemake file [`#247 `_] + PyPSA-Eur 0.3.0 (7th December 2020) =================================== diff --git a/envs/environment.fixed.yaml b/envs/environment.fixed.yaml index 558db60e..dc5ee621 100644 --- a/envs/environment.fixed.yaml +++ b/envs/environment.fixed.yaml @@ -1,7 +1,3 @@ -# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors -# -# SPDX-License-Identifier: GPL-3.0-or-later - name: pypsa-eur channels: - bioconda @@ -11,255 +7,301 @@ dependencies: - _libgcc_mutex=0.1 - _openmp_mutex=4.5 - affine=2.3.0 + - alsa-lib=1.2.3 - amply=0.1.4 - appdirs=1.4.4 - - atlite=0.0.3 - - attrs=20.3.0 + - atlite=0.2.5 + - attrs=21.2.0 - backcall=0.2.0 - backports=1.0 - - backports.functools_lru_cache=1.6.1 - - beautifulsoup4=4.9.3 - - blosc=1.20.1 - - bokeh=2.2.3 - - boost-cpp=1.72.0 + - backports.functools_lru_cache=1.6.4 + - beautifulsoup4=4.10.0 + - blosc=1.21.0 + - bokeh=2.3.3 + - boost-cpp=1.74.0 - bottleneck=1.3.2 - brotlipy=0.7.0 - bzip2=1.0.8 - - c-ares=1.17.1 - - ca-certificates=2020.11.8 + - c-ares=1.17.2 + - ca-certificates=2021.5.30 - cairo=1.16.0 - - cartopy=0.17.0 - - certifi=2020.11.8 - - cffi=1.14.4 + - cartopy=0.19.0.post1 + - cdsapi=0.5.1 + - certifi=2021.5.30 + - cffi=1.14.6 - cfitsio=3.470 - - cftime=1.3.0 - - chardet=3.0.4 + - cftime=1.5.0 + - chardet=4.0.0 + - charset-normalizer=2.0.0 - click=7.1.2 - click-plugins=1.1.1 - - cligj=0.7.1 - - cloudpickle=1.6.0 + - cligj=0.7.2 + - cloudpickle=2.0.0 - coincbc=2.10.5 - - conda=4.9.2 - - conda-package-handling=1.7.2 - - configargparse=1.2.3 - - cryptography=3.2.1 - - curl=7.71.1 + - colorama=0.4.4 + - conda=4.10.3 + - conda-package-handling=1.7.3 + - configargparse=1.5.2 + - connection_pool=0.0.3 + - country_converter=0.7.3 + - cryptography=3.4.7 + - curl=7.78.0 - cycler=0.10.0 - cytoolz=0.11.0 - - dask=2.30.0 - - dask-core=2.30.0 + - dask=2021.3.1 + - dask-core=2021.3.1 - datrie=0.8.2 + - 
dbus=1.13.6 - decorator=4.4.2 + - deprecation=2.1.0 - descartes=1.1.0 - - distributed=2.30.1 - - docutils=0.16 - - entsoe-py=0.2.10 - - expat=2.2.9 - - fiona=1.8.13 + - distributed=2021.4.1 + - distro=1.5.0 + - docutils=0.17.1 + - entsoe-py=0.3.7 + - et_xmlfile=1.0.1 + - expat=2.4.1 + - filelock=3.0.12 + - fiona=1.8.18 - fontconfig=2.13.1 - freetype=2.10.4 - - freexl=1.0.5 - - fsspec=0.8.4 - - gdal=3.0.4 - - geographiclib=1.50 - - geopandas=0.8.1 - - geopy=2.0.0 - - geos=3.8.1 + - freexl=1.0.6 + - fsspec=2021.8.1 + - gdal=3.2.1 + - geographiclib=1.52 + - geopandas=0.9.0 + - geopandas-base=0.9.0 + - geopy=2.2.0 + - geos=3.9.1 - geotiff=1.6.0 - gettext=0.19.8.1 - giflib=5.2.1 - - gitdb=4.0.5 - - gitpython=3.1.11 - - glib=2.66.3 - - glpk=4.65 - - gmp=6.2.1 - - hdf4=4.2.13 + - gitdb=4.0.7 + - gitpython=3.1.23 + - glib=2.68.4 + - glib-tools=2.68.4 + - graphite2=1.3.13 + - gst-plugins-base=1.18.5 + - gstreamer=1.18.5 + - harfbuzz=2.9.1 + - hdf4=4.2.15 - hdf5=1.10.6 - heapdict=1.0.1 - - icu=64.2 - - idna=2.10 - - importlib-metadata=3.1.1 - - importlib_metadata=3.1.1 - - ipopt=3.13.2 - - ipython=7.19.0 + - icu=68.1 + - idna=3.1 + - importlib-metadata=4.8.1 + - iniconfig=1.1.1 + - ipython=7.27.0 - ipython_genutils=0.2.0 - - jedi=0.17.2 - - jinja2=2.11.2 - - joblib=0.17.0 + - jdcal=1.4.1 + - jedi=0.18.0 + - jinja2=3.0.1 + - joblib=1.0.1 - jpeg=9d - - json-c=0.13.1 + - json-c=0.15 - jsonschema=3.2.0 - - jupyter_core=4.7.0 + - jupyter_core=4.7.1 - kealib=1.4.14 - - kiwisolver=1.3.1 - - krb5=1.17.2 - - lcms2=2.11 - - ld_impl_linux-64=2.35.1 - - libarchive=3.3.3 + - kiwisolver=1.3.2 + - krb5=1.19.2 + - lcms2=2.12 + - ld_impl_linux-64=2.36.1 + - libarchive=3.5.1 - libblas=3.9.0 - libcblas=3.9.0 - - libcurl=7.71.1 + - libclang=11.1.0 + - libcurl=7.78.0 - libdap4=3.20.6 - libedit=3.1.20191231 - libev=4.33 + - libevent=2.1.10 - libffi=3.3 - - libgcc-ng=9.3.0 - - libgdal=3.0.4 - - libgfortran-ng=7.5.0 - - libgfortran4=7.5.0 - - libgfortran5=9.3.0 - - libglib=2.66.3 - - libgomp=9.3.0 
+ - libgcc-ng=11.1.0 + - libgdal=3.2.1 + - libgfortran-ng=11.1.0 + - libgfortran5=11.1.0 + - libglib=2.68.4 + - libgomp=11.1.0 - libiconv=1.16 - libkml=1.3.0 - liblapack=3.9.0 + - libllvm11=11.1.0 - libnetcdf=4.7.4 - - libnghttp2=1.41.0 - - libopenblas=0.3.12 + - libnghttp2=1.43.0 + - libogg=1.3.4 + - libopenblas=0.3.17 + - libopus=1.3.1 - libpng=1.6.37 - - libpq=12.3 - - libsolv=0.7.16 + - libpq=13.3 + - librttopo=1.1.0 + - libsolv=0.7.19 - libspatialindex=1.9.3 - - libspatialite=4.3.0a - - libssh2=1.9.0 - - libstdcxx-ng=9.3.0 - - libtiff=4.1.0 + - libspatialite=5.0.1 + - libssh2=1.10.0 + - libstdcxx-ng=11.1.0 + - libtiff=4.2.0 - libuuid=2.32.1 - - libwebp-base=1.1.0 + - libvorbis=1.3.7 + - libwebp-base=1.2.1 - libxcb=1.13 - - libxml2=2.9.10 + - libxkbcommon=1.0.3 + - libxml2=2.9.12 - libxslt=1.1.33 - locket=0.2.0 - - lxml=4.6.2 - - lz4-c=1.9.2 + - lxml=4.6.3 + - lz4-c=1.9.3 - lzo=2.10 - - mamba=0.7.3 - - markupsafe=1.1.1 - - matplotlib-base=3.3.3 + - mamba=0.15.3 + - mapclassify=2.4.3 + - markupsafe=2.0.1 + - matplotlib=3.4.3 + - matplotlib-base=3.4.3 + - matplotlib-inline=0.1.3 - memory_profiler=0.58.0 - - metis=5.1.0 - - mock=4.0.2 - - msgpack-python=1.0.0 + - mock=4.0.3 + - more-itertools=8.9.0 + - msgpack-python=1.0.2 - munch=2.5.0 - - nbformat=5.0.8 + - mysql-common=8.0.25 + - mysql-libs=8.0.25 + - nbformat=5.1.3 - ncurses=6.2 - - netcdf4=1.5.4 - - networkx=2.5 - - nose=1.3.7 - - numexpr=2.7.1 - - numpy=1.19.0 + - netcdf4=1.5.6 + - networkx=2.6.3 + - nspr=4.30 + - nss=3.69 + - numexpr=2.7.3 + - numpy=1.21.2 - olefile=0.46 - - openjpeg=2.3.1 - - openssl=1.1.1h - - owslib=0.20.0 - - packaging=20.7 - - pandas=1.1.4 - - parso=0.7.1 - - partd=1.1.0 + - openjdk=11.0.9.1 + - openjpeg=2.4.0 + - openpyxl=3.0.8 + - openssl=1.1.1l + - packaging=21.0 + - pandas=1.2.5 + - parso=0.8.2 + - partd=1.2.0 - patsy=0.5.1 - - pcre=8.44 + - pcre=8.45 - pexpect=4.8.0 - pickleshare=0.7.5 - - pillow=8.0.1 - - pip=20.3.1 - - pixman=0.38.0 + - pillow=8.2.0 + - pip=21.2.4 + - 
pixman=0.40.0 + - pluggy=1.0.0 - ply=3.11 - - poppler=0.87.0 - - poppler-data=0.4.10 - - postgresql=12.3 + - poppler=0.89.0 + - poppler-data=0.4.11 + - postgresql=13.3 - powerplantmatching=0.4.8 - progressbar2=3.53.1 - - proj=7.0.0 - - prompt-toolkit=3.0.8 - - psutil=5.7.3 + - proj=7.2.0 + - prompt-toolkit=3.0.20 + - psutil=5.8.0 - pthread-stubs=0.4 - - ptyprocess=0.6.0 - - pulp=2.3.1 + - ptyprocess=0.7.0 + - pulp=2.5.0 + - py=1.10.0 - pycosat=0.6.3 - pycountry=20.7.3 - pycparser=2.20 - - pyepsg=0.4.0 - - pygments=2.7.2 - - pykdtree=1.3.4 - - pyomo=5.7.1 - - pyopenssl=20.0.0 + - pygments=2.10.0 + - pyomo=6.1.2 + - pyopenssl=20.0.1 - pyparsing=2.4.7 - - pyproj=2.6.1.post1 - - pypsa=0.17.1 + - pyproj=3.1.0 + - pypsa=0.18.0 + - pyqt=5.12.3 + - pyqt-impl=5.12.3 + - pyqt5-sip=4.19.18 + - pyqtchart=5.12 + - pyqtwebengine=5.12.1 - pyrsistent=0.17.3 - - pyshp=2.1.2 + - pyshp=2.1.3 - pysocks=1.7.1 - pytables=3.6.1 - - python=3.8.6 - - python-dateutil=2.8.1 - - python-utils=2.4.0 - - python_abi=3.8 - - pytz=2020.4 - - pyutilib=6.0.0 - - pyyaml=5.3.1 - - rasterio=1.1.5 + - pytest=6.2.5 + - python=3.9.7 + - python-dateutil=2.8.2 + - python-utils=2.5.6 + - python_abi=3.9 + - pytz=2021.1 + - pyyaml=5.4.1 + - qt=5.12.9 + - rasterio=1.2.6 - ratelimiter=1.2.0 - - readline=8.0 - - reproc=14.2.1 - - reproc-cpp=14.2.1 - - requests=2.25.0 - - rtree=0.9.4 + - readline=8.1 + - reproc=14.2.3 + - reproc-cpp=14.2.3 + - requests=2.26.0 + - rtree=0.9.7 - ruamel_yaml=0.15.80 - - scikit-learn=0.23.2 - - scipy=1.5.3 - - seaborn=0.11.0 - - seaborn-base=0.11.0 - - setuptools=49.6.0 + - scikit-learn=0.24.2 + - scipy=1.7.1 + - seaborn=0.11.2 + - seaborn-base=0.11.2 + - setuptools=58.0.4 + - setuptools-scm=6.3.2 + - setuptools_scm=6.3.2 - shapely=1.7.1 - - six=1.15.0 - - smmap=3.0.4 - - snakemake-minimal=5.30.1 + - six=1.16.0 + - smart_open=5.2.1 + - smmap=3.0.5 + - snakemake-minimal=6.8.0 - snuggs=1.4.7 - - sortedcontainers=2.3.0 + - sortedcontainers=2.4.0 - soupsieve=2.0.1 - - sqlite=3.34.0 - - 
statsmodels=0.12.1 - - tbb=2020.2 - - tblib=1.6.0 - - threadpoolctl=2.1.0 - - tiledb=1.7.7 - - tk=8.6.10 + - sqlite=3.36.0 + - statsmodels=0.12.2 + - stopit=1.1.2 + - tabula-py=2.2.0 + - tabulate=0.8.9 + - tblib=1.7.0 + - threadpoolctl=2.2.0 + - tiledb=2.2.9 + - tk=8.6.11 + - toml=0.10.2 + - tomli=1.2.1 - toolz=0.11.1 - - toposort=1.5 + - toposort=1.6 - tornado=6.1 - - tqdm=4.54.1 - - traitlets=5.0.5 - - typing_extensions=3.7.4.3 - - tzcode=2020a - - urllib3=1.25.11 + - tqdm=4.62.2 + - traitlets=5.1.0 + - typing_extensions=3.10.0.0 + - tzcode=2021a + - tzdata=2021a + - urllib3=1.26.6 - wcwidth=0.2.5 - - wheel=0.36.1 + - wheel=0.37.0 - wrapt=1.12.1 - - xarray=0.16.2 - - xerces-c=3.2.2 - - xlrd=1.2.0 + - xarray=0.19.0 + - xerces-c=3.2.3 + - xlrd=2.0.1 + - xorg-fixesproto=5.0 + - xorg-inputproto=2.3.2 - xorg-kbproto=1.0.7 - xorg-libice=1.0.10 - xorg-libsm=1.2.3 - - xorg-libx11=1.6.12 + - xorg-libx11=1.7.2 - xorg-libxau=1.0.9 - xorg-libxdmcp=1.1.3 - xorg-libxext=1.3.4 + - xorg-libxfixes=5.0.3 + - xorg-libxi=1.7.10 - xorg-libxrender=0.9.10 + - xorg-libxtst=1.2.3 + - xorg-recordproto=1.14.2 - xorg-renderproto=0.11.1 - xorg-xextproto=7.3.0 - xorg-xproto=7.0.31 - xz=5.2.5 - yaml=0.2.5 - zict=2.0.0 - - zipp=3.4.0 + - zipp=3.5.0 - zlib=1.2.11 - - zstd=1.4.5 + - zstd=1.4.9 - pip: - - cdsapi==0.4.0 - countrycode==0.2 - - geokit==1.1.2 - - glaes==1.1.2 - sklearn==0.0 - - tsam==1.1.0 + - tsam==1.1.1 - vresutils==0.3.1 diff --git a/test/config.test1.yaml b/test/config.test1.yaml index 83ce38ad..aea3dbc2 100755 --- a/test/config.test1.yaml +++ b/test/config.test1.yaml @@ -2,7 +2,7 @@ # # SPDX-License-Identifier: CC0-1.0 -version: 0.3.0 +version: 0.4.0 tutorial: true logging: level: INFO From dfb929f2cf87786349a30d5273b68f5f92d3958c Mon Sep 17 00:00:00 2001 From: martacki Date: Tue, 14 Sep 2021 16:34:02 +0200 Subject: [PATCH 048/102] remove snakemake dependencies in functions, use as kwarg instead --- config.yaml~ | 317 ++++++++++++++++++++++++++++++++ scripts/add_electricity.py | 
101 +++++----- scripts/add_extra_components.py | 21 +-- scripts/build_powerplants.py | 9 +- scripts/build_shapes.py | 31 ++-- scripts/cluster_network.py | 28 ++- scripts/make_summary.py | 9 +- scripts/prepare_network.py | 48 ++--- scripts/simplify_network.py | 29 +-- 9 files changed, 445 insertions(+), 148 deletions(-) create mode 100644 config.yaml~ diff --git a/config.yaml~ b/config.yaml~ new file mode 100644 index 00000000..91f645f8 --- /dev/null +++ b/config.yaml~ @@ -0,0 +1,317 @@ +# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors +# +# SPDX-License-Identifier: CC0-1.0 + +version: 0.3.0 +tutorial: false + +logging: + level: INFO + format: '%(levelname)s:%(name)s:%(message)s' + +summary_dir: results + +scenario: + simpl: [''] + ll: ['copt'] + clusters: [37, 128, 256, 512, 1024] + opts: [Co2L-3H] + +countries: ['AL', 'AT', 'BA', 'BE', 'BG', 'CH', 'CZ', 'DE', 'DK', 'EE', 'ES', 'FI', 'FR', 'GB', 'GR', 'HR', 'HU', 'IE', 'IT', 'LT', 'LU', 'LV', 'ME', 'MK', 'NL', 'NO', 'PL', 'PT', 'RO', 'RS', 'SE', 'SI', 'SK'] + +snapshots: + start: "2013-01-01" + end: "2014-01-01" + closed: 'left' # end is not inclusive + +enable: + prepare_links_p_nom: false + retrieve_databundle: true + build_cutout: false + retrieve_cutout: true + build_natura_raster: false + retrieve_natura_raster: true + custom_busmap: false + +clustering: + algorithm: + name: kmeans #kmeans + feature: coordinates #feature not supported yet + +electricity: + voltages: [220., 300., 380.] + co2limit: 7.75e+7 # 0.05 * 3.1e9*0.5 + co2base: 1.487e+9 + agg_p_nom_limits: data/agg_p_nom_minmax.csv + + extendable_carriers: + Generator: [] + StorageUnit: [] # battery, H2 + Store: [battery, H2] + Link: [] + + max_hours: + battery: 6 + H2: 168 + + powerplants_filter: false # use pandas query strings here, e.g. Country not in ['Germany'] + custom_powerplants: false # use pandas query strings here, e.g. 
Country in ['Germany'] + conventional_carriers: [nuclear, oil, OCGT, CCGT, coal, lignite, geothermal, biomass] + renewable_capacities_from_OPSD: [] # onwind, offwind, solar + + # estimate_renewable_capacities_from_capacity_stats: + # # Wind is the Fueltype in ppm.data.Capacity_stats, onwind, offwind-{ac,dc} the carrier in PyPSA-Eur + # Wind: [onwind, offwind-ac, offwind-dc] + # Solar: [solar] + +atlite: + nprocesses: 4 + cutouts: + # use 'base' to determine geographical bounds and time span from config + # base: + # module: era5 + europe-2013-era5: + module: era5 # in priority order + x: [-12., 35.] + y: [33., 72] + dx: 0.3 + dy: 0.3 + time: ['2013', '2013'] + europe-2013-sarah: + module: [sarah, era5] # in priority order + x: [-12., 45.] + y: [33., 65] + dx: 0.2 + dy: 0.2 + time: ['2013', '2013'] + sarah_interpolate: false + sarah_dir: + features: [influx, temperature] + + +renewable: + onwind: + cutout: europe-2013-era5 + resource: + method: wind + turbine: Vestas_V112_3MW + capacity_per_sqkm: 3 # ScholzPhd Tab 4.3.1: 10MW/km^2 + # correction_factor: 0.93 + corine: + # Scholz, Y. (2012). Renewable energy based electricity supply at low costs: + # development of the REMix model and application for Europe. 
( p.42 / p.28) + grid_codes: [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, + 24, 25, 26, 27, 28, 29, 31, 32] + distance: 1000 + distance_grid_codes: [1, 2, 3, 4, 5, 6] + natura: true + potential: simple # or conservative + clip_p_max_pu: 1.e-2 + offwind-ac: + cutout: europe-2013-era5 + resource: + method: wind + turbine: NREL_ReferenceTurbine_5MW_offshore + capacity_per_sqkm: 3 + # correction_factor: 0.93 + corine: [44, 255] + natura: true + max_depth: 50 + max_shore_distance: 30000 + potential: simple # or conservative + clip_p_max_pu: 1.e-2 + offwind-dc: + cutout: europe-2013-era5 + resource: + method: wind + turbine: NREL_ReferenceTurbine_5MW_offshore + # ScholzPhd Tab 4.3.1: 10MW/km^2 + capacity_per_sqkm: 3 + # correction_factor: 0.93 + corine: [44, 255] + natura: true + max_depth: 50 + min_shore_distance: 30000 + potential: simple # or conservative + clip_p_max_pu: 1.e-2 + solar: + cutout: europe-2013-sarah + resource: + method: pv + panel: CSi + orientation: + slope: 35. + azimuth: 180. + capacity_per_sqkm: 1.7 # ScholzPhd Tab 4.3.1: 170 MW/km^2 + # Determined by comparing uncorrected area-weighted full-load hours to those + # published in Supplementary Data to + # Pietzcker, Robert Carl, et al. "Using the sun to decarbonize the power + # sector: The economic potential of photovoltaics and concentrating solar + # power." Applied Energy 135 (2014): 704-720. 
+ correction_factor: 0.854337 + corine: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, + 14, 15, 16, 17, 18, 19, 20, 26, 31, 32] + natura: true + potential: simple # or conservative + clip_p_max_pu: 1.e-2 + hydro: + cutout: europe-2013-era5 + carriers: [ror, PHS, hydro] + PHS_max_hours: 6 + hydro_max_hours: "energy_capacity_totals_by_country" # one of energy_capacity_totals_by_country, estimate_by_large_installations or a float + clip_min_inflow: 1.0 + +lines: + types: + 220.: "Al/St 240/40 2-bundle 220.0" + 300.: "Al/St 240/40 3-bundle 300.0" + 380.: "Al/St 240/40 4-bundle 380.0" + s_max_pu: 0.7 + s_nom_max: .inf + length_factor: 1.25 + under_construction: 'zero' # 'zero': set capacity to zero, 'remove': remove, 'keep': with full capacity + +links: + p_max_pu: 1.0 + p_nom_max: .inf + include_tyndp: true + under_construction: 'zero' # 'zero': set capacity to zero, 'remove': remove, 'keep': with full capacity + +transformers: + x: 0.1 + s_nom: 2000. + type: '' + +load: + url: https://data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv + power_statistics: True # only for files from <2019; set false in order to get ENTSOE transparency data + interpolate_limit: 3 # data gaps up until this size are interpolated linearly + time_shift_for_large_gaps: 1w # data gaps up until this size are copied by copying from + manual_adjustments: true # false + scaling_factor: 1.0 + +costs: + year: 2030 + discountrate: 0.07 # From a Lion Hirth paper, also reflects average of Noothout et al 2016 + USD2013_to_EUR2013: 0.7532 # [EUR/USD] ECB: https://www.ecb.europa.eu/stats/exchange/eurofxref/html/eurofxref-graph-usd.en.html + marginal_cost: # EUR/MWh + solar: 0.01 + onwind: 0.015 + offwind: 0.015 + hydro: 0. + H2: 0. + electrolysis: 0. + fuel cell: 0. + battery: 0. + battery inverter: 0. + emission_prices: # in currency per tonne emission, only used with the option Ep + co2: 0. 
+ +solving: + options: + formulation: kirchhoff + load_shedding: true + noisy_costs: true + min_iterations: 4 + max_iterations: 6 + clip_p_max_pu: 0.01 + skip_iterations: false + track_iterations: false + #nhours: 10 + solver: + name: gurobi + threads: 4 + method: 2 # barrier + crossover: 0 + BarConvTol: 1.e-5 + FeasibilityTol: 1.e-6 + AggFill: 0 + PreDual: 0 + GURO_PAR_BARDENSETHRESH: 200 + # solver: + # name: cplex + # threads: 4 + # lpmethod: 4 # barrier + # solutiontype: 2 # non basic solution, ie no crossover + # barrier_convergetol: 1.e-5 + # feasopt_tolerance: 1.e-6 + +plotting: + map: + figsize: [7, 7] + boundaries: [-10.2, 29, 35, 72] + p_nom: + bus_size_factor: 5.e+4 + linewidth_factor: 3.e+3 + + costs_max: 80 + costs_threshold: 1 + + energy_max: 15000. + energy_min: -10000. + energy_threshold: 50. + + vre_techs: ["onwind", "offwind-ac", "offwind-dc", "solar", "ror"] + conv_techs: ["OCGT", "CCGT", "Nuclear", "Coal"] + storage_techs: ["hydro+PHS", "battery", "H2"] + load_carriers: ["AC load"] + AC_carriers: ["AC line", "AC transformer"] + link_carriers: ["DC line", "Converter AC-DC"] + tech_colors: + "onwind" : "#235ebc" + "onshore wind" : "#235ebc" + 'offwind' : "#6895dd" + 'offwind-ac' : "#6895dd" + 'offshore wind' : "#6895dd" + 'offshore wind ac' : "#6895dd" + 'offwind-dc' : "#74c6f2" + 'offshore wind dc' : "#74c6f2" + "hydro" : "#08ad97" + "hydro+PHS" : "#08ad97" + "PHS" : "#08ad97" + "hydro reservoir" : "#08ad97" + 'hydroelectricity' : '#08ad97' + "ror" : "#4adbc8" + "run of river" : "#4adbc8" + 'solar' : "#f9d002" + 'solar PV' : "#f9d002" + 'solar thermal' : '#ffef60' + 'biomass' : '#0c6013' + 'solid biomass' : '#06540d' + 'biogas' : '#23932d' + 'waste' : '#68896b' + 'geothermal' : '#ba91b1' + "OCGT" : "#d35050" + "gas" : "#d35050" + "natural gas" : "#d35050" + "CCGT" : "#b20101" + "nuclear" : "#ff9000" + "coal" : "#707070" + "lignite" : "#9e5a01" + "oil" : "#262626" + "H2" : "#ea048a" + "hydrogen storage" : "#ea048a" + "battery" : "#b8ea04" + 
"Electric load" : "#f9d002" + "electricity" : "#f9d002" + "lines" : "#70af1d" + "transmission lines" : "#70af1d" + "AC-AC" : "#70af1d" + "AC line" : "#70af1d" + "links" : "#8a1caf" + "HVDC links" : "#8a1caf" + "DC-DC" : "#8a1caf" + "DC link" : "#8a1caf" + nice_names: + OCGT: "Open-Cycle Gas" + CCGT: "Combined-Cycle Gas" + offwind-ac: "Offshore Wind (AC)" + offwind-dc: "Offshore Wind (DC)" + onwind: "Onshore Wind" + solar: "Solar" + PHS: "Pumped Hydro Storage" + hydro: "Reservoir & Dam" + battery: "Battery Storage" + H2: "Hydrogen Storage" + lines: "Transmission Lines" + ror: "Run of River" diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index 8f721652..813df498 100755 --- a/scripts/add_electricity.py +++ b/scripts/add_electricity.py @@ -118,12 +118,7 @@ def _add_missing_carriers_from_costs(n, costs, carriers): n.import_components_from_dataframe(emissions, 'Carrier') -def load_costs(Nyears=1., tech_costs=None, config=None, elec_config=None): - if tech_costs is None: - tech_costs = snakemake.input.tech_costs - - if config is None: - config = snakemake.config['costs'] +def load_costs(tech_costs, config, elec_config, Nyears=1.): # set all asset costs and other parameters costs = pd.read_csv(tech_costs, index_col=list(range(3))).sort_index() @@ -169,8 +164,6 @@ def load_costs(Nyears=1., tech_costs=None, config=None, elec_config=None): marginal_cost=0., co2_emissions=0.)) - if elec_config is None: - elec_config = snakemake.config['electricity'] max_hours = elec_config['max_hours'] costs.loc["battery"] = \ costs_for_storage(costs.loc["battery storage"], costs.loc["battery inverter"], @@ -188,9 +181,7 @@ def load_costs(Nyears=1., tech_costs=None, config=None, elec_config=None): return costs -def load_powerplants(ppl_fn=None): - if ppl_fn is None: - ppl_fn = snakemake.input.powerplants +def load_powerplants(ppl_fn): carrier_dict = {'ocgt': 'OCGT', 'ccgt': 'CCGT', 'bioenergy': 'biomass', 'ccgt, thermal': 'CCGT', 'hard coal': 'coal'} return 
(pd.read_csv(ppl_fn, index_col=0, dtype={'bus': 'str'}) @@ -199,18 +190,17 @@ def load_powerplants(ppl_fn=None): .replace({'carrier': carrier_dict})) -def attach_load(n): +def attach_load(n, regions, load, nuts3_shapes, cntries = [], scaling = 1.): substation_lv_i = n.buses.index[n.buses['substation_lv']] - regions = (gpd.read_file(snakemake.input.regions).set_index('name') + regions = (gpd.read_file(regions).set_index('name') .reindex(substation_lv_i)) - opsd_load = (pd.read_csv(snakemake.input.load, index_col=0, parse_dates=True) - .filter(items=snakemake.config['countries'])) + opsd_load = (pd.read_csv(load, index_col=0, parse_dates=True) + .filter(items=cntries)) - scaling = snakemake.config.get('load', {}).get('scaling_factor', 1.0) logger.info(f"Load data scaled with scalling factor {scaling}.") opsd_load *= scaling - nuts3 = gpd.read_file(snakemake.input.nuts3_shapes).set_index('index') + nuts3 = gpd.read_file(nuts3_shapes).set_index('index') def upsample(cntry, group): l = opsd_load[cntry] @@ -263,18 +253,20 @@ def update_transmission_costs(n, costs, length_factor=1.0, simple_hvdc_costs=Fal n.links.loc[dc_b, 'capital_cost'] = costs -def attach_wind_and_solar(n, costs): - for tech in snakemake.config['renewable']: +def attach_wind_and_solar(n, costs, input_profiles, + technologies = ['onwind', 'offwind-ac', 'offwind-dc', 'solar'], + line_length_factor = 1.): + for tech in technologies: if tech == 'hydro': continue n.add("Carrier", name=tech) - with xr.open_dataset(getattr(snakemake.input, 'profile_' + tech)) as ds: + with xr.open_dataset(getattr(input_profiles, 'profile_' + tech)) as ds: if ds.indexes['bus'].empty: continue suptech = tech.split('-', 2)[0] if suptech == 'offwind': underwater_fraction = ds['underwater_fraction'].to_pandas() - connection_cost = (snakemake.config['lines']['length_factor'] * + connection_cost = (line_length_factor * ds['average_distance'].to_pandas() * (underwater_fraction * costs.at[tech + '-connection-submarine', 
'capital_cost'] + @@ -300,8 +292,8 @@ def attach_wind_and_solar(n, costs): p_max_pu=ds['profile'].transpose('time', 'bus').to_pandas()) -def attach_conventional_generators(n, costs, ppl): - carriers = snakemake.config['electricity']['conventional_carriers'] +def attach_conventional_generators(n, costs, ppl, carriers=['nuclear', 'oil', 'OCGT', 'CCGT', + 'coal', 'lignite', 'geothermal', 'biomass']): _add_missing_carriers_from_costs(n, costs, carriers) @@ -322,10 +314,9 @@ def attach_conventional_generators(n, costs, ppl): logger.warning(f'Capital costs for conventional generators put to 0 EUR/MW.') -def attach_hydro(n, costs, ppl): - if 'hydro' not in snakemake.config['renewable']: return - c = snakemake.config['renewable']['hydro'] - carriers = c.get('carriers', ['ror', 'PHS', 'hydro']) +def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, + config_hydro = {'carriers': {'ror', 'PHS', 'hydro'}}): + carriers = config_hydro.get('carriers', ['ror', 'PHS', 'hydro']) _add_missing_carriers_from_costs(n, costs, carriers) @@ -341,11 +332,11 @@ def attach_hydro(n, costs, ppl): if not inflow_idx.empty: dist_key = ppl.loc[inflow_idx, 'p_nom'].groupby(country).transform(normed) - with xr.open_dataarray(snakemake.input.profile_hydro) as inflow: + with xr.open_dataarray(profile_hydro) as inflow: inflow_countries = pd.Index(country[inflow_idx]) missing_c = (inflow_countries.unique() .difference(inflow.indexes['countries'])) - assert missing_c.empty, (f"'{snakemake.input.profile_hydro}' is missing " + assert missing_c.empty, (f"'{profile_hydro}' is missing " f"inflow time-series for at least one country: {', '.join(missing_c)}") inflow_t = (inflow.sel(countries=inflow_countries) @@ -370,7 +361,7 @@ def attach_hydro(n, costs, ppl): if 'PHS' in carriers and not phs.empty: # fill missing max hours to config value and # assume no natural inflow due to lack of data - phs = phs.replace({'max_hours': {0: c['PHS_max_hours']}}) + phs = phs.replace({'max_hours': {0: 
config_hydro['PHS_max_hours']}}) n.madd('StorageUnit', phs.index, carrier='PHS', bus=phs['bus'], @@ -382,8 +373,8 @@ def attach_hydro(n, costs, ppl): cyclic_state_of_charge=True) if 'hydro' in carriers and not hydro.empty: - hydro_max_hours = c.get('hydro_max_hours') - hydro_stats = pd.read_csv(snakemake.input.hydro_capacities, + hydro_max_hours = config_hydro.get('hydro_max_hours') + hydro_stats = pd.read_csv(hydro_capacities, comment="#", na_values='-', index_col=0) e_target = hydro_stats["E_store[TWh]"].clip(lower=0.2) * 1e6 e_installed = hydro.eval('p_nom * max_hours').groupby(hydro.country).sum() @@ -412,7 +403,7 @@ def attach_hydro(n, costs, ppl): p_nom=hydro['p_nom'], max_hours=hydro_max_hours, capital_cost=(costs.at['hydro', 'capital_cost'] - if c.get('hydro_capital_cost') else 0.), + if config_hydro.get('hydro_capital_cost') else 0.), marginal_cost=costs.at['hydro', 'marginal_cost'], p_max_pu=1., # dispatch p_min_pu=0., # store @@ -422,8 +413,7 @@ def attach_hydro(n, costs, ppl): inflow=inflow_t.loc[:, hydro.index]) -def attach_extendable_generators(n, costs, ppl): - elec_opts = snakemake.config['electricity'] +def attach_extendable_generators(n, costs, ppl, elec_opts = {'extendable_carriers': {'Generator': []}}): carriers = pd.Index(elec_opts['extendable_carriers']['Generator']) _add_missing_carriers_from_costs(n, costs, carriers) @@ -472,12 +462,11 @@ def attach_extendable_generators(n, costs, ppl): -def attach_OPSD_renewables(n): +def attach_OPSD_renewables(n, techs=[]): available = ['DE', 'FR', 'PL', 'CH', 'DK', 'CZ', 'SE', 'GB'] tech_map = {'Onshore': 'onwind', 'Offshore': 'offwind', 'Solar': 'solar'} countries = set(available) & set(n.buses.country) - techs = snakemake.config['electricity'].get('renewable_capacities_from_OPSD', []) tech_map = {k: v for k, v in tech_map.items() if v in techs} if not tech_map: @@ -505,10 +494,7 @@ def attach_OPSD_renewables(n): -def estimate_renewable_capacities(n, tech_map=None): - if tech_map is None: - tech_map = 
(snakemake.config['electricity'] - .get('estimate_renewable_capacities_from_capacity_stats', {})) +def estimate_renewable_capacities(n, tech_map={}): if len(tech_map) == 0: return @@ -540,8 +526,7 @@ def estimate_renewable_capacities(n, tech_map=None): n.generators.loc[tech_i, 'p_nom_min'] = n.generators.loc[tech_i, 'p_nom'] -def add_nice_carrier_names(n, config=None): - if config is None: config = snakemake.config +def add_nice_carrier_names(n, config): carrier_i = n.carriers.index nice_names = (pd.Series(config['plotting']['nice_names']) .reindex(carrier_i).fillna(carrier_i.to_series().str.title())) @@ -563,22 +548,32 @@ if __name__ == "__main__": n = pypsa.Network(snakemake.input.base_network) Nyears = n.snapshot_weightings.objective.sum() / 8760. - costs = load_costs(Nyears) - ppl = load_powerplants() + costs = load_costs(tech_costs = snakemake.input.tech_costs, config = snakemake.config['costs'], + elec_config = snakemake.config['electricity'], Nyears = Nyears) + ppl = load_powerplants(snakemake.input.powerplants) - attach_load(n) + attach_load(n, regions = snakemake.input.regions, load = snakemake.input.load, + nuts3_shapes = snakemake.input.nuts3_shapes, + cntries = snakemake.config['countries'], + scaling = snakemake.config.get('load', {}).get('scaling_factor', 1.0)) update_transmission_costs(n, costs) - attach_conventional_generators(n, costs, ppl) - attach_wind_and_solar(n, costs) - attach_hydro(n, costs, ppl) - attach_extendable_generators(n, costs, ppl) + attach_conventional_generators(n, costs, ppl, carriers = snakemake.config['electricity']['conventional_carriers']) + attach_wind_and_solar(n, costs, snakemake.input, technologies = snakemake.config['renewable'], + line_length_factor = snakemake.config['lines']['length_factor']) - estimate_renewable_capacities(n) - attach_OPSD_renewables(n) + if 'hydro' in snakemake.config['renewable']: + attach_hydro(n, costs, ppl, snakemake.input.profile_hydro, snakemake.input.hydro_capacities, + config_hydro = 
snakemake.config['renewable']['hydro']) + + attach_extendable_generators(n, costs, ppl, elec_opts = snakemake.config['electricity']) + + estimate_renewable_capacities(n, tech_map = (snakemake.config['electricity'] + .get('estimate_renewable_capacities_from_capacity_stats', {}))) + attach_OPSD_renewables(n, techs = snakemake.config['electricity'].get('renewable_capacities_from_OPSD', [])) update_p_nom_max(n) - add_nice_carrier_names(n) + add_nice_carrier_names(n, config = snakemake.config) n.export_to_netcdf(snakemake.output[0]) diff --git a/scripts/add_extra_components.py b/scripts/add_extra_components.py index ae581382..846fb120 100644 --- a/scripts/add_extra_components.py +++ b/scripts/add_extra_components.py @@ -64,8 +64,7 @@ idx = pd.IndexSlice logger = logging.getLogger(__name__) -def attach_storageunits(n, costs): - elec_opts = snakemake.config['electricity'] +def attach_storageunits(n, costs, elec_opts = {'extendable_carriers': {'StorageUnit': []}, 'max_hours': {'battery': 6, 'H2': 168}}): carriers = elec_opts['extendable_carriers']['StorageUnit'] max_hours = elec_opts['max_hours'] @@ -89,8 +88,7 @@ def attach_storageunits(n, costs): cyclic_state_of_charge=True) -def attach_stores(n, costs): - elec_opts = snakemake.config['electricity'] +def attach_stores(n, costs, elec_opts = {'extendable_carriers': {'Store': ['battery', 'H2']}}): carriers = elec_opts['extendable_carriers']['Store'] _add_missing_carriers_from_costs(n, costs, carriers) @@ -156,8 +154,7 @@ def attach_stores(n, costs): marginal_cost=costs.at["battery inverter", "marginal_cost"]) -def attach_hydrogen_pipelines(n, costs): - elec_opts = snakemake.config['electricity'] +def attach_hydrogen_pipelines(n, costs, elec_opts = {'extendable_carriers': {'Store': ['H2', 'battery']}}): ext_carriers = elec_opts['extendable_carriers'] as_stores = ext_carriers.get('Store', []) @@ -198,13 +195,13 @@ if __name__ == "__main__": n = pypsa.Network(snakemake.input.network) Nyears = 
n.snapshot_weightings.objective.sum() / 8760. - costs = load_costs(Nyears, tech_costs=snakemake.input.tech_costs, - config=snakemake.config['costs'], - elec_config=snakemake.config['electricity']) + costs = load_costs(tech_costs = snakemake.input.tech_costs, + config = snakemake.config['costs'], + elec_config = snakemake.config['electricity'], Nyears = Nyears) - attach_storageunits(n, costs) - attach_stores(n, costs) - attach_hydrogen_pipelines(n, costs) + attach_storageunits(n, costs, elec_opts = snakemake.config['electricity']) + attach_stores(n, costs, elec_opts = snakemake.config['electricity']) + attach_hydrogen_pipelines(n, costs, elec_opts = snakemake.config['electricity']) add_nice_carrier_names(n, config=snakemake.config) diff --git a/scripts/build_powerplants.py b/scripts/build_powerplants.py index 8b329469..e87637f9 100755 --- a/scripts/build_powerplants.py +++ b/scripts/build_powerplants.py @@ -84,11 +84,10 @@ from scipy.spatial import cKDTree as KDTree logger = logging.getLogger(__name__) -def add_custom_powerplants(ppl): - custom_ppl_query = snakemake.config['electricity']['custom_powerplants'] +def add_custom_powerplants(ppl, custom_powerplants, custom_ppl_query=False): if not custom_ppl_query: return ppl - add_ppls = pd.read_csv(snakemake.input.custom_powerplants, index_col=0, + add_ppls = pd.read_csv(custom_powerplants, index_col=0, dtype={'bus': 'str'}) if isinstance(custom_ppl_query, str): add_ppls.query(custom_ppl_query, inplace=True) @@ -119,7 +118,9 @@ if __name__ == "__main__": if isinstance(ppl_query, str): ppl.query(ppl_query, inplace=True) - ppl = add_custom_powerplants(ppl) # add carriers from own powerplant files + # add carriers from own powerplant files: + ppl = add_custom_powerplants(ppl, custom_powerplants = snakemake.input.custom_powerplants, + custom_ppl_query = snakemake.config['electricity']['custom_powerplants']) cntries_without_ppl = [c for c in countries if c not in ppl.Country.unique()] diff --git a/scripts/build_shapes.py 
b/scripts/build_shapes.py index 59603f96..7edf439a 100644 --- a/scripts/build_shapes.py +++ b/scripts/build_shapes.py @@ -107,11 +107,10 @@ def _simplify_polys(polys, minarea=0.1, tolerance=0.01, filterremote=True): return polys.simplify(tolerance=tolerance) -def countries(): - cntries = snakemake.config['countries'] +def countries(naturalearth, cntries=[]): if 'RS' in cntries: cntries.append('KV') - df = gpd.read_file(snakemake.input.naturalearth) + df = gpd.read_file(naturalearth) # Names are a hassle in naturalearth, try several fields fieldnames = (df[x].where(lambda s: s!='-99') for x in ('ISO_A2', 'WB_A2', 'ADM0_A3')) @@ -124,9 +123,9 @@ def countries(): return s -def eez(country_shapes): - df = gpd.read_file(snakemake.input.eez) - df = df.loc[df['ISO_3digit'].isin([_get_country('alpha_3', alpha_2=c) for c in snakemake.config['countries']])] +def eez(country_shapes, eez, cntries=[]): + df = gpd.read_file(eez) + df = df.loc[df['ISO_3digit'].isin([_get_country('alpha_3', alpha_2=c) for c in cntries])] df['name'] = df['ISO_3digit'].map(lambda c: _get_country('alpha_2', alpha_3=c)) s = df.set_index('name').geometry.map(lambda s: _simplify_polys(s, filterremote=False)) s = gpd.GeoSeries({k:v for k,v in s.iteritems() if v.distance(country_shapes[k]) < 1e-3}) @@ -145,29 +144,29 @@ def country_cover(country_shapes, eez_shapes=None): return Polygon(shell=europe_shape.exterior) -def nuts3(country_shapes): - df = gpd.read_file(snakemake.input.nuts3) +def nuts3(country_shapes, nuts3, nuts3pop, nuts3gdp, ch_cantons, ch_popgdp): + df = gpd.read_file(nuts3) df = df.loc[df['STAT_LEVL_'] == 3] df['geometry'] = df['geometry'].map(_simplify_polys) df = df.rename(columns={'NUTS_ID': 'id'})[['id', 'geometry']].set_index('id') - pop = pd.read_table(snakemake.input.nuts3pop, na_values=[':'], delimiter=' ?\t', engine='python') + pop = pd.read_table(nuts3pop, na_values=[':'], delimiter=' ?\t', engine='python') pop = (pop 
.set_index(pd.MultiIndex.from_tuples(pop.pop('unit,geo\\time').str.split(','))).loc['THS'] .applymap(lambda x: pd.to_numeric(x, errors='coerce')) .fillna(method='bfill', axis=1))['2014'] - gdp = pd.read_table(snakemake.input.nuts3gdp, na_values=[':'], delimiter=' ?\t', engine='python') + gdp = pd.read_table(nuts3gdp, na_values=[':'], delimiter=' ?\t', engine='python') gdp = (gdp .set_index(pd.MultiIndex.from_tuples(gdp.pop('unit,geo\\time').str.split(','))).loc['EUR_HAB'] .applymap(lambda x: pd.to_numeric(x, errors='coerce')) .fillna(method='bfill', axis=1))['2014'] - cantons = pd.read_csv(snakemake.input.ch_cantons) + cantons = pd.read_csv(ch_cantons) cantons = cantons.set_index(cantons['HASC'].str[3:])['NUTS'] cantons = cantons.str.pad(5, side='right', fillchar='0') - swiss = pd.read_excel(snakemake.input.ch_popgdp, skiprows=3, index_col=0) + swiss = pd.read_excel(ch_popgdp, skiprows=3, index_col=0) swiss.columns = swiss.columns.to_series().map(cantons) pop = pop.append(pd.to_numeric(swiss.loc['Residents in 1000', 'CH040':])) @@ -220,14 +219,16 @@ if __name__ == "__main__": out = snakemake.output - country_shapes = countries() + country_shapes = countries(snakemake.input.naturalearth, snakemake.config['countries']) save_to_geojson(country_shapes, out.country_shapes) - offshore_shapes = eez(country_shapes) + offshore_shapes = eez(country_shapes, snakemake.input.eez, cntries=snakemake.config['countries']) save_to_geojson(offshore_shapes, out.offshore_shapes) europe_shape = country_cover(country_shapes, offshore_shapes) save_to_geojson(gpd.GeoSeries(europe_shape), out.europe_shape) - nuts3_shapes = nuts3(country_shapes) + nuts3_shapes = nuts3(country_shapes, snakemake.input.nuts3, snakemake.input.nuts3pop, + snakemake.input.nuts3gdp, snakemake.input.ch_cantons, snakemake.input.ch_popgdp) + save_to_geojson(nuts3_shapes, out.nuts3_shapes) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index d74745d0..4784bb32 100644 --- 
a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -170,12 +170,9 @@ def weighting_for_country(n, x): return (w * (100. / w.max())).clip(lower=1.).astype(int) -def distribute_clusters(n, n_clusters, focus_weights=None, solver_name=None): +def distribute_clusters(n, n_clusters, focus_weights=None, solver_name="cbc"): """Determine the number of clusters per country""" - if solver_name is None: - solver_name = snakemake.config['solving']['solver']['name'] - L = (n.loads_t.p_set.mean() .groupby(n.loads.bus).sum() .groupby([n.buses.country, n.buses.sub_network]).sum() @@ -268,12 +265,10 @@ def clustering_for_n_clusters(n, n_clusters, custom_busmap=False, aggregate_carr else: raise AttributeError(f"potential_mode should be one of 'simple' or 'conservative' but is '{potential_mode}'") - if custom_busmap: - busmap = pd.read_csv(snakemake.input.custom_busmap, index_col=0, squeeze=True) - busmap.index = busmap.index.astype(str) - logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}") - else: + if custom_busmap is False: busmap = busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights, algorithm) + else: + busmap = custom_busmap clustering = get_clustering_from_busmap( n, busmap, @@ -306,8 +301,6 @@ def save_to_geojson(s, fn): def cluster_regions(busmaps, input=None, output=None): - if input is None: input = snakemake.input - if output is None: output = snakemake.output busmap = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0]) @@ -358,10 +351,9 @@ if __name__ == "__main__": else: line_length_factor = snakemake.config['lines']['length_factor'] Nyears = n.snapshot_weightings.objective.sum()/8760 - hvac_overhead_cost = (load_costs(Nyears, - tech_costs=snakemake.input.tech_costs, - config=snakemake.config['costs'], - elec_config=snakemake.config['electricity']) + hvac_overhead_cost = (load_costs(tech_costs = snakemake.input.tech_costs, + config = snakemake.config['costs'], + elec_config=snakemake.config['electricity'], Nyears = 
Nyears) .at['HVAC overhead', 'capital_cost']) def consense(x): @@ -373,6 +365,10 @@ if __name__ == "__main__": potential_mode = consense(pd.Series([snakemake.config['renewable'][tech]['potential'] for tech in renewable_carriers])) custom_busmap = snakemake.config["enable"].get("custom_busmap", False) + if custom_busmap: + custom_busmap = pd.read_csv(snakemake.input.custom_busmap, index_col=0, squeeze=True) + custom_busmap.index = custom_busmap.index.astype(str) + logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}") clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap, aggregate_carriers, line_length_factor=line_length_factor, potential_mode=potential_mode, @@ -386,4 +382,4 @@ if __name__ == "__main__": for attr in ('busmap', 'linemap'): #also available: linemap_positive, linemap_negative getattr(clustering, attr).to_csv(snakemake.output[attr]) - cluster_regions((clustering.busmap,)) + cluster_regions((clustering.busmap,), snakemake.input, snakemake.output) diff --git a/scripts/make_summary.py b/scripts/make_summary.py index 53482c48..a0778e73 100644 --- a/scripts/make_summary.py +++ b/scripts/make_summary.py @@ -403,8 +403,8 @@ def make_summaries(networks_dict, country='all'): n = n[n.buses.country == country] Nyears = n.snapshot_weightings.objective.sum() / 8760. 
- costs = load_costs(Nyears, snakemake.input[0], - snakemake.config['costs'], snakemake.config['electricity']) + costs = load_costs(tech_costs = snakemake.input[0], config = snakemake.config['costs'], + elec_config = snakemake.config['electricity'], Nyears = Nyears) update_transmission_costs(n, costs, simple_hvdc_costs=False) assign_carriers(n) @@ -415,8 +415,7 @@ def make_summaries(networks_dict, country='all'): return dfs -def to_csv(dfs): - dir = snakemake.output[0] +def to_csv(dfs, dir): os.makedirs(dir, exist_ok=True) for key, df in dfs.items(): df.to_csv(os.path.join(dir, f"{key}.csv")) @@ -453,4 +452,4 @@ if __name__ == "__main__": dfs = make_summaries(networks_dict, country=snakemake.wildcards.country) - to_csv(dfs) + to_csv(dfs, snakemake.output[0]) diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index 86afef2f..90b3a0df 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -70,21 +70,14 @@ idx = pd.IndexSlice logger = logging.getLogger(__name__) -def add_co2limit(n, Nyears=1., factor=None): - - if factor is not None: - annual_emissions = factor*snakemake.config['electricity']['co2base'] - else: - annual_emissions = snakemake.config['electricity']['co2limit'] +def add_co2limit(n, co2limit=1.487e+9, Nyears=1.): n.add("GlobalConstraint", "CO2Limit", carrier_attribute="co2_emissions", sense="<=", - constant=annual_emissions * Nyears) + constant=co2limit * Nyears) -def add_emission_prices(n, emission_prices=None, exclude_co2=False): - if emission_prices is None: - emission_prices = snakemake.config['costs']['emission_prices'] +def add_emission_prices(n, emission_prices={'co2': 0.}, exclude_co2=False): if exclude_co2: emission_prices.pop('co2') ep = (pd.Series(emission_prices).rename(lambda x: x+'_emissions') * n.carriers.filter(like='_emissions')).sum(axis=1) @@ -94,13 +87,12 @@ def add_emission_prices(n, emission_prices=None, exclude_co2=False): n.storage_units['marginal_cost'] += su_ep -def set_line_s_max_pu(n): - 
s_max_pu = snakemake.config['lines']['s_max_pu'] +def set_line_s_max_pu(n, s_max_pu = 0.7): n.lines['s_max_pu'] = s_max_pu logger.info(f"N-1 security margin of lines set to {s_max_pu}") -def set_transmission_limit(n, ll_type, factor, Nyears=1): +def set_transmission_limit(n, ll_type, factor, costs, Nyears=1): links_dc_b = n.links.carrier == 'DC' if not n.links.empty else pd.Series() _lines_s_nom = (np.sqrt(3) * n.lines.type.map(n.line_types.i_nom) * @@ -112,9 +104,6 @@ def set_transmission_limit(n, ll_type, factor, Nyears=1): ref = (lines_s_nom @ n.lines[col] + n.links.loc[links_dc_b, "p_nom"] @ n.links.loc[links_dc_b, col]) - costs = load_costs(Nyears, snakemake.input.tech_costs, - snakemake.config['costs'], - snakemake.config['electricity']) update_transmission_costs(n, costs, simple_hvdc_costs=False) if factor == 'opt' or float(factor) > 1.0: @@ -151,7 +140,7 @@ def average_every_nhours(n, offset): return m -def apply_time_segmentation(n, segments): +def apply_time_segmentation(n, segments, solver_name="cplex"): logger.info(f"Aggregating time series to {segments} segments.") try: import tsam.timeseriesaggregation as tsam @@ -170,8 +159,6 @@ def apply_time_segmentation(n, segments): raw = pd.concat([p_max_pu, load, inflow], axis=1, sort=False) - solver_name = snakemake.config["solving"]["solver"]["name"] - agg = tsam.TimeSeriesAggregation(raw, hoursPerPeriod=len(raw), noTypicalPeriods=1, noSegments=int(segments), segmentation=True, solver=solver_name) @@ -208,9 +195,7 @@ def enforce_autarky(n, only_crossborder=False): n.mremove("Line", lines_rm) n.mremove("Link", links_rm) -def set_line_nom_max(n): - s_nom_max_set = snakemake.config["lines"].get("s_nom_max,", np.inf) - p_nom_max_set = snakemake.config["links"].get("p_nom_max", np.inf) +def set_line_nom_max(n, s_nom_max_set=np.inf, p_nom_max_set=np.inf): n.lines.s_nom_max.clip(upper=s_nom_max_set, inplace=True) n.links.p_nom_max.clip(upper=p_nom_max_set, inplace=True) @@ -225,8 +210,11 @@ if __name__ == 
"__main__": n = pypsa.Network(snakemake.input[0]) Nyears = n.snapshot_weightings.objective.sum() / 8760. + costs = load_costs(tech_costs = snakemake.input.tech_costs, + config = snakemake.config['costs'], + elec_config = snakemake.config['electricity'], Nyears = Nyears) - set_line_s_max_pu(n) + set_line_s_max_pu(n, s_max_pu=snakemake.config['lines']['s_max_pu']) for o in opts: m = re.match(r'^\d+h$', o, re.IGNORECASE) @@ -237,16 +225,17 @@ if __name__ == "__main__": for o in opts: m = re.match(r'^\d+seg$', o, re.IGNORECASE) if m is not None: - n = apply_time_segmentation(n, m.group(0)[:-3]) + n = apply_time_segmentation(n, m.group(0)[:-3], solver_name=snakemake.config["solving"]["solver"]["name"]) break for o in opts: if "Co2L" in o: m = re.findall("[0-9]*\.?[0-9]+$", o) if len(m) > 0: - add_co2limit(n, Nyears, float(m[0])) + co2limit=float(m[0])*snakemake.config['electricity']['co2base'] + add_co2limit(n, Nyears, co2limit) else: - add_co2limit(n, Nyears) + add_co2limit(n, Nyears, snakemake.config['electricity']['co2limit']) break for o in opts: @@ -267,12 +256,13 @@ if __name__ == "__main__": c.df.loc[sel,attr] *= factor if 'Ep' in opts: - add_emission_prices(n) + add_emission_prices(n, emission_prices=snakemake.config['costs']['emission_prices']) ll_type, factor = snakemake.wildcards.ll[0], snakemake.wildcards.ll[1:] - set_transmission_limit(n, ll_type, factor, Nyears) + set_transmission_limit(n, ll_type, factor, costs, Nyears) - set_line_nom_max(n) + set_line_nom_max(n, s_nom_max_set=snakemake.config["lines"].get("s_nom_max,", np.inf), + p_nom_max_set=snakemake.config["links"].get("p_nom_max,", np.inf)) if "ATK" in opts: enforce_autarky(n) diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index 48f0ebe6..384025b8 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -138,13 +138,9 @@ def simplify_network_to_380(n): return n, trafo_map -def _prepare_connection_costs_per_link(n): +def 
_prepare_connection_costs_per_link(n, costs): if n.links.empty: return {} - Nyears = n.snapshot_weightings.objective.sum() / 8760 - costs = load_costs(Nyears, snakemake.input.tech_costs, - snakemake.config['costs'], snakemake.config['electricity']) - connection_costs_per_link = {} for tech in snakemake.config['renewable']: @@ -158,9 +154,9 @@ def _prepare_connection_costs_per_link(n): return connection_costs_per_link -def _compute_connection_costs_to_bus(n, busmap, connection_costs_per_link=None, buses=None): +def _compute_connection_costs_to_bus(n, busmap, costs, connection_costs_per_link=None, buses=None): if connection_costs_per_link is None: - connection_costs_per_link = _prepare_connection_costs_per_link(n) + connection_costs_per_link = _prepare_connection_costs_per_link(n, costs) if buses is None: buses = busmap.index[busmap.index != busmap.values] @@ -217,7 +213,7 @@ def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate n.mremove(c, df.index[df.bus0.isin(buses_to_del) | df.bus1.isin(buses_to_del)]) -def simplify_links(n): +def simplify_links(n, costs): ## Complex multi-node links are folded into end-points logger.info("Simplifying connected link components") @@ -264,7 +260,7 @@ def simplify_links(n): busmap = n.buses.index.to_series() - connection_costs_per_link = _prepare_connection_costs_per_link(n) + connection_costs_per_link = _prepare_connection_costs_per_link(n, costs) connection_costs_to_bus = pd.DataFrame(0., index=n.buses.index, columns=list(connection_costs_per_link)) for lbl in labels.value_counts().loc[lambda s: s > 2].index: @@ -278,7 +274,7 @@ def simplify_links(n): m = sp.spatial.distance_matrix(n.buses.loc[b, ['x', 'y']], n.buses.loc[buses[1:-1], ['x', 'y']]) busmap.loc[buses] = b[np.r_[0, m.argmin(axis=0), 1]] - connection_costs_to_bus.loc[buses] += _compute_connection_costs_to_bus(n, busmap, connection_costs_per_link, buses) + connection_costs_to_bus.loc[buses] += _compute_connection_costs_to_bus(n, busmap, costs, 
connection_costs_per_link, buses) all_links = [i for _, i in sum(links, [])] @@ -312,12 +308,12 @@ def simplify_links(n): _aggregate_and_move_components(n, busmap, connection_costs_to_bus) return n, busmap -def remove_stubs(n): +def remove_stubs(n, costs): logger.info("Removing stubs") busmap = busmap_by_stubs(n) # ['country']) - connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap) + connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap, costs) _aggregate_and_move_components(n, busmap, connection_costs_to_bus) @@ -394,9 +390,14 @@ if __name__ == "__main__": n, trafo_map = simplify_network_to_380(n) - n, simplify_links_map = simplify_links(n) + Nyears = n.snapshot_weightings.objective.sum() / 8760 + technology_costs = load_costs(tech_costs = snakemake.input.tech_costs, + config = snakemake.config['costs'], + elec_config = snakemake.config['electricity'], Nyears = Nyears) - n, stub_map = remove_stubs(n) + n, simplify_links_map = simplify_links(n, technology_costs) + + n, stub_map = remove_stubs(n, technology_costs) busmaps = [trafo_map, simplify_links_map, stub_map] From d551a3b5f1d2c748ffda74744495fb2cb1a0eae8 Mon Sep 17 00:00:00 2001 From: martacki Date: Tue, 14 Sep 2021 16:36:13 +0200 Subject: [PATCH 049/102] delete config.yaml --- config.yaml~ | 317 --------------------------------------------------- 1 file changed, 317 deletions(-) delete mode 100644 config.yaml~ diff --git a/config.yaml~ b/config.yaml~ deleted file mode 100644 index 91f645f8..00000000 --- a/config.yaml~ +++ /dev/null @@ -1,317 +0,0 @@ -# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors -# -# SPDX-License-Identifier: CC0-1.0 - -version: 0.3.0 -tutorial: false - -logging: - level: INFO - format: '%(levelname)s:%(name)s:%(message)s' - -summary_dir: results - -scenario: - simpl: [''] - ll: ['copt'] - clusters: [37, 128, 256, 512, 1024] - opts: [Co2L-3H] - -countries: ['AL', 'AT', 'BA', 'BE', 'BG', 'CH', 'CZ', 'DE', 'DK', 'EE', 'ES', 'FI', 'FR', 'GB', 'GR', 
'HR', 'HU', 'IE', 'IT', 'LT', 'LU', 'LV', 'ME', 'MK', 'NL', 'NO', 'PL', 'PT', 'RO', 'RS', 'SE', 'SI', 'SK'] - -snapshots: - start: "2013-01-01" - end: "2014-01-01" - closed: 'left' # end is not inclusive - -enable: - prepare_links_p_nom: false - retrieve_databundle: true - build_cutout: false - retrieve_cutout: true - build_natura_raster: false - retrieve_natura_raster: true - custom_busmap: false - -clustering: - algorithm: - name: kmeans #kmeans - feature: coordinates #feature not supported yet - -electricity: - voltages: [220., 300., 380.] - co2limit: 7.75e+7 # 0.05 * 3.1e9*0.5 - co2base: 1.487e+9 - agg_p_nom_limits: data/agg_p_nom_minmax.csv - - extendable_carriers: - Generator: [] - StorageUnit: [] # battery, H2 - Store: [battery, H2] - Link: [] - - max_hours: - battery: 6 - H2: 168 - - powerplants_filter: false # use pandas query strings here, e.g. Country not in ['Germany'] - custom_powerplants: false # use pandas query strings here, e.g. Country in ['Germany'] - conventional_carriers: [nuclear, oil, OCGT, CCGT, coal, lignite, geothermal, biomass] - renewable_capacities_from_OPSD: [] # onwind, offwind, solar - - # estimate_renewable_capacities_from_capacity_stats: - # # Wind is the Fueltype in ppm.data.Capacity_stats, onwind, offwind-{ac,dc} the carrier in PyPSA-Eur - # Wind: [onwind, offwind-ac, offwind-dc] - # Solar: [solar] - -atlite: - nprocesses: 4 - cutouts: - # use 'base' to determine geographical bounds and time span from config - # base: - # module: era5 - europe-2013-era5: - module: era5 # in priority order - x: [-12., 35.] - y: [33., 72] - dx: 0.3 - dy: 0.3 - time: ['2013', '2013'] - europe-2013-sarah: - module: [sarah, era5] # in priority order - x: [-12., 45.] 
- y: [33., 65] - dx: 0.2 - dy: 0.2 - time: ['2013', '2013'] - sarah_interpolate: false - sarah_dir: - features: [influx, temperature] - - -renewable: - onwind: - cutout: europe-2013-era5 - resource: - method: wind - turbine: Vestas_V112_3MW - capacity_per_sqkm: 3 # ScholzPhd Tab 4.3.1: 10MW/km^2 - # correction_factor: 0.93 - corine: - # Scholz, Y. (2012). Renewable energy based electricity supply at low costs: - # development of the REMix model and application for Europe. ( p.42 / p.28) - grid_codes: [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, - 24, 25, 26, 27, 28, 29, 31, 32] - distance: 1000 - distance_grid_codes: [1, 2, 3, 4, 5, 6] - natura: true - potential: simple # or conservative - clip_p_max_pu: 1.e-2 - offwind-ac: - cutout: europe-2013-era5 - resource: - method: wind - turbine: NREL_ReferenceTurbine_5MW_offshore - capacity_per_sqkm: 3 - # correction_factor: 0.93 - corine: [44, 255] - natura: true - max_depth: 50 - max_shore_distance: 30000 - potential: simple # or conservative - clip_p_max_pu: 1.e-2 - offwind-dc: - cutout: europe-2013-era5 - resource: - method: wind - turbine: NREL_ReferenceTurbine_5MW_offshore - # ScholzPhd Tab 4.3.1: 10MW/km^2 - capacity_per_sqkm: 3 - # correction_factor: 0.93 - corine: [44, 255] - natura: true - max_depth: 50 - min_shore_distance: 30000 - potential: simple # or conservative - clip_p_max_pu: 1.e-2 - solar: - cutout: europe-2013-sarah - resource: - method: pv - panel: CSi - orientation: - slope: 35. - azimuth: 180. - capacity_per_sqkm: 1.7 # ScholzPhd Tab 4.3.1: 170 MW/km^2 - # Determined by comparing uncorrected area-weighted full-load hours to those - # published in Supplementary Data to - # Pietzcker, Robert Carl, et al. "Using the sun to decarbonize the power - # sector: The economic potential of photovoltaics and concentrating solar - # power." Applied Energy 135 (2014): 704-720. 
- correction_factor: 0.854337 - corine: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, - 14, 15, 16, 17, 18, 19, 20, 26, 31, 32] - natura: true - potential: simple # or conservative - clip_p_max_pu: 1.e-2 - hydro: - cutout: europe-2013-era5 - carriers: [ror, PHS, hydro] - PHS_max_hours: 6 - hydro_max_hours: "energy_capacity_totals_by_country" # one of energy_capacity_totals_by_country, estimate_by_large_installations or a float - clip_min_inflow: 1.0 - -lines: - types: - 220.: "Al/St 240/40 2-bundle 220.0" - 300.: "Al/St 240/40 3-bundle 300.0" - 380.: "Al/St 240/40 4-bundle 380.0" - s_max_pu: 0.7 - s_nom_max: .inf - length_factor: 1.25 - under_construction: 'zero' # 'zero': set capacity to zero, 'remove': remove, 'keep': with full capacity - -links: - p_max_pu: 1.0 - p_nom_max: .inf - include_tyndp: true - under_construction: 'zero' # 'zero': set capacity to zero, 'remove': remove, 'keep': with full capacity - -transformers: - x: 0.1 - s_nom: 2000. - type: '' - -load: - url: https://data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv - power_statistics: True # only for files from <2019; set false in order to get ENTSOE transparency data - interpolate_limit: 3 # data gaps up until this size are interpolated linearly - time_shift_for_large_gaps: 1w # data gaps up until this size are copied by copying from - manual_adjustments: true # false - scaling_factor: 1.0 - -costs: - year: 2030 - discountrate: 0.07 # From a Lion Hirth paper, also reflects average of Noothout et al 2016 - USD2013_to_EUR2013: 0.7532 # [EUR/USD] ECB: https://www.ecb.europa.eu/stats/exchange/eurofxref/html/eurofxref-graph-usd.en.html - marginal_cost: # EUR/MWh - solar: 0.01 - onwind: 0.015 - offwind: 0.015 - hydro: 0. - H2: 0. - electrolysis: 0. - fuel cell: 0. - battery: 0. - battery inverter: 0. - emission_prices: # in currency per tonne emission, only used with the option Ep - co2: 0. 
- -solving: - options: - formulation: kirchhoff - load_shedding: true - noisy_costs: true - min_iterations: 4 - max_iterations: 6 - clip_p_max_pu: 0.01 - skip_iterations: false - track_iterations: false - #nhours: 10 - solver: - name: gurobi - threads: 4 - method: 2 # barrier - crossover: 0 - BarConvTol: 1.e-5 - FeasibilityTol: 1.e-6 - AggFill: 0 - PreDual: 0 - GURO_PAR_BARDENSETHRESH: 200 - # solver: - # name: cplex - # threads: 4 - # lpmethod: 4 # barrier - # solutiontype: 2 # non basic solution, ie no crossover - # barrier_convergetol: 1.e-5 - # feasopt_tolerance: 1.e-6 - -plotting: - map: - figsize: [7, 7] - boundaries: [-10.2, 29, 35, 72] - p_nom: - bus_size_factor: 5.e+4 - linewidth_factor: 3.e+3 - - costs_max: 80 - costs_threshold: 1 - - energy_max: 15000. - energy_min: -10000. - energy_threshold: 50. - - vre_techs: ["onwind", "offwind-ac", "offwind-dc", "solar", "ror"] - conv_techs: ["OCGT", "CCGT", "Nuclear", "Coal"] - storage_techs: ["hydro+PHS", "battery", "H2"] - load_carriers: ["AC load"] - AC_carriers: ["AC line", "AC transformer"] - link_carriers: ["DC line", "Converter AC-DC"] - tech_colors: - "onwind" : "#235ebc" - "onshore wind" : "#235ebc" - 'offwind' : "#6895dd" - 'offwind-ac' : "#6895dd" - 'offshore wind' : "#6895dd" - 'offshore wind ac' : "#6895dd" - 'offwind-dc' : "#74c6f2" - 'offshore wind dc' : "#74c6f2" - "hydro" : "#08ad97" - "hydro+PHS" : "#08ad97" - "PHS" : "#08ad97" - "hydro reservoir" : "#08ad97" - 'hydroelectricity' : '#08ad97' - "ror" : "#4adbc8" - "run of river" : "#4adbc8" - 'solar' : "#f9d002" - 'solar PV' : "#f9d002" - 'solar thermal' : '#ffef60' - 'biomass' : '#0c6013' - 'solid biomass' : '#06540d' - 'biogas' : '#23932d' - 'waste' : '#68896b' - 'geothermal' : '#ba91b1' - "OCGT" : "#d35050" - "gas" : "#d35050" - "natural gas" : "#d35050" - "CCGT" : "#b20101" - "nuclear" : "#ff9000" - "coal" : "#707070" - "lignite" : "#9e5a01" - "oil" : "#262626" - "H2" : "#ea048a" - "hydrogen storage" : "#ea048a" - "battery" : "#b8ea04" - 
"Electric load" : "#f9d002" - "electricity" : "#f9d002" - "lines" : "#70af1d" - "transmission lines" : "#70af1d" - "AC-AC" : "#70af1d" - "AC line" : "#70af1d" - "links" : "#8a1caf" - "HVDC links" : "#8a1caf" - "DC-DC" : "#8a1caf" - "DC link" : "#8a1caf" - nice_names: - OCGT: "Open-Cycle Gas" - CCGT: "Combined-Cycle Gas" - offwind-ac: "Offshore Wind (AC)" - offwind-dc: "Offshore Wind (DC)" - onwind: "Onshore Wind" - solar: "Solar" - PHS: "Pumped Hydro Storage" - hydro: "Reservoir & Dam" - battery: "Battery Storage" - H2: "Hydrogen Storage" - lines: "Transmission Lines" - ror: "Run of River" From 2e02af8b4bee569affd9e6180003dff6f40ac8dc Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 14 Sep 2021 16:37:41 +0200 Subject: [PATCH 050/102] change license --- CITATION.cff | 6 +- LICENSES/GPL-3.0-or-later.txt | 645 +--------------------------- Snakefile | 2 +- doc/Makefile | 2 +- doc/_static/theme_overrides.css | 2 +- doc/conf.py | 2 +- doc/index.rst | 2 +- doc/make.bat | 2 +- doc/release_notes.rst | 2 +- envs/environment.fixed.yaml | 6 + envs/environment.yaml | 2 +- scripts/_helpers.py | 2 +- scripts/add_electricity.py | 2 +- scripts/add_extra_components.py | 2 +- scripts/base_network.py | 2 +- scripts/build_bus_regions.py | 2 +- scripts/build_cutout.py | 2 +- scripts/build_hydro_profile.py | 2 +- scripts/build_load_data.py | 2 +- scripts/build_natura_raster.py | 2 +- scripts/build_powerplants.py | 2 +- scripts/build_renewable_profiles.py | 2 +- scripts/build_shapes.py | 2 +- scripts/cluster_network.py | 2 +- scripts/make_summary.py | 2 +- scripts/plot_network.py | 2 +- scripts/plot_p_nom_max.py | 2 +- scripts/plot_summary.py | 2 +- scripts/prepare_links_p_nom.py | 2 +- scripts/prepare_network.py | 2 +- scripts/retrieve_databundle.py | 2 +- scripts/simplify_network.py | 2 +- scripts/solve_network.py | 2 +- scripts/solve_operations_network.py | 2 +- 34 files changed, 62 insertions(+), 657 deletions(-) diff --git a/CITATION.cff b/CITATION.cff index 
b10377e7..a28562fd 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -1,9 +1,13 @@ +# SPDX-FileCopyrightText: : 2021 The PyPSA-Eur Authors +# +# SPDX-License-Identifier: CC0-1.0 + cff-version: 1.1.0 message: "If you use this package, please cite the corresponding manuscript in Energy Strategy Reviews." title: "PyPSA-Eur: An open optimisation model of the European transmission system" repository: https://github.com/pypsa/pypsa-eur version: 0.4.0 -license: GPLv3 +license: MIT journal: Energy Strategy Reviews doi: 10.1016/j.esr.2018.08.012 authors: diff --git a/LICENSES/GPL-3.0-or-later.txt b/LICENSES/GPL-3.0-or-later.txt index e142a525..dc10fd32 100644 --- a/LICENSES/GPL-3.0-or-later.txt +++ b/LICENSES/GPL-3.0-or-later.txt @@ -1,625 +1,20 @@ -GNU GENERAL PUBLIC LICENSE - -Version 3, 29 June 2007 - -Copyright © 2007 Free Software Foundation, Inc. - -Everyone is permitted to copy and distribute verbatim copies of this license -document, but changing it is not allowed. - -Preamble - -The GNU General Public License is a free, copyleft license for software and -other kinds of works. - -The licenses for most software and other practical works are designed to take -away your freedom to share and change the works. By contrast, the GNU General -Public License is intended to guarantee your freedom to share and change all -versions of a program--to make sure it remains free software for all its users. -We, the Free Software Foundation, use the GNU General Public License for most -of our software; it applies also to any other work released this way by its -authors. You can apply it to your programs, too. - -When we speak of free software, we are referring to freedom, not price. 
Our -General Public Licenses are designed to make sure that you have the freedom -to distribute copies of free software (and charge for them if you wish), that -you receive source code or can get it if you want it, that you can change -the software or use pieces of it in new free programs, and that you know you -can do these things. - -To protect your rights, we need to prevent others from denying you these rights -or asking you to surrender the rights. Therefore, you have certain responsibilities -if you distribute copies of the software, or if you modify it: responsibilities -to respect the freedom of others. - -For example, if you distribute copies of such a program, whether gratis or -for a fee, you must pass on to the recipients the same freedoms that you received. -You must make sure that they, too, receive or can get the source code. And -you must show them these terms so they know their rights. - -Developers that use the GNU GPL protect your rights with two steps: (1) assert -copyright on the software, and (2) offer you this License giving you legal -permission to copy, distribute and/or modify it. - -For the developers' and authors' protection, the GPL clearly explains that -there is no warranty for this free software. For both users' and authors' -sake, the GPL requires that modified versions be marked as changed, so that -their problems will not be attributed erroneously to authors of previous versions. - -Some devices are designed to deny users access to install or run modified -versions of the software inside them, although the manufacturer can do so. -This is fundamentally incompatible with the aim of protecting users' freedom -to change the software. The systematic pattern of such abuse occurs in the -area of products for individuals to use, which is precisely where it is most -unacceptable. Therefore, we have designed this version of the GPL to prohibit -the practice for those products. 
If such problems arise substantially in other -domains, we stand ready to extend this provision to those domains in future -versions of the GPL, as needed to protect the freedom of users. - -Finally, every program is threatened constantly by software patents. States -should not allow patents to restrict development and use of software on general-purpose -computers, but in those that do, we wish to avoid the special danger that -patents applied to a free program could make it effectively proprietary. To -prevent this, the GPL assures that patents cannot be used to render the program -non-free. - -The precise terms and conditions for copying, distribution and modification -follow. - -TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - -"Copyright" also means copyright-like laws that apply to other kinds of works, -such as semiconductor masks. - -"The Program" refers to any copyrightable work licensed under this License. -Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals -or organizations. - -To "modify" a work means to copy from or adapt all or part of the work in -a fashion requiring copyright permission, other than the making of an exact -copy. The resulting work is called a "modified version" of the earlier work -or a work "based on" the earlier work. - -A "covered work" means either the unmodified Program or a work based on the -Program. - -To "propagate" a work means to do anything with it that, without permission, -would make you directly or secondarily liable for infringement under applicable -copyright law, except executing it on a computer or modifying a private copy. -Propagation includes copying, distribution (with or without modification), -making available to the public, and in some countries other activities as -well. - -To "convey" a work means any kind of propagation that enables other parties -to make or receive copies. 
Mere interaction with a user through a computer -network, with no transfer of a copy, is not conveying. - -An interactive user interface displays "Appropriate Legal Notices" to the -extent that it includes a convenient and prominently visible feature that -(1) displays an appropriate copyright notice, and (2) tells the user that -there is no warranty for the work (except to the extent that warranties are -provided), that licensees may convey the work under this License, and how -to view a copy of this License. If the interface presents a list of user commands -or options, such as a menu, a prominent item in the list meets this criterion. - - 1. Source Code. - -The "source code" for a work means the preferred form of the work for making -modifications to it. "Object code" means any non-source form of a work. - -A "Standard Interface" means an interface that either is an official standard -defined by a recognized standards body, or, in the case of interfaces specified -for a particular programming language, one that is widely used among developers -working in that language. - -The "System Libraries" of an executable work include anything, other than -the work as a whole, that (a) is included in the normal form of packaging -a Major Component, but which is not part of that Major Component, and (b) -serves only to enable use of the work with that Major Component, or to implement -a Standard Interface for which an implementation is available to the public -in source code form. A "Major Component", in this context, means a major essential -component (kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to produce -the work, or an object code interpreter used to run it. 
- -The "Corresponding Source" for a work in object code form means all the source -code needed to generate, install, and (for an executable work) run the object -code and to modify the work, including scripts to control those activities. -However, it does not include the work's System Libraries, or general-purpose -tools or generally available free programs which are used unmodified in performing -those activities but which are not part of the work. For example, Corresponding -Source includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically linked -subprograms that the work is specifically designed to require, such as by -intimate data communication or control flow between those subprograms and -other parts of the work. - -The Corresponding Source need not include anything that users can regenerate -automatically from other parts of the Corresponding Source. - - The Corresponding Source for a work in source code form is that same work. - - 2. Basic Permissions. - -All rights granted under this License are granted for the term of copyright -on the Program, and are irrevocable provided the stated conditions are met. -This License explicitly affirms your unlimited permission to run the unmodified -Program. The output from running a covered work is covered by this License -only if the output, given its content, constitutes a covered work. This License -acknowledges your rights of fair use or other equivalent, as provided by copyright -law. - -You may make, run and propagate covered works that you do not convey, without -conditions so long as your license otherwise remains in force. You may convey -covered works to others for the sole purpose of having them make modifications -exclusively for you, or provide you with facilities for running those works, -provided that you comply with the terms of this License in conveying all material -for which you do not control copyright. 
Those thus making or running the covered -works for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of your copyrighted -material outside their relationship with you. - -Conveying under any other circumstances is permitted solely under the conditions -stated below. Sublicensing is not allowed; section 10 makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - -No covered work shall be deemed part of an effective technological measure -under any applicable law fulfilling obligations under article 11 of the WIPO -copyright treaty adopted on 20 December 1996, or similar laws prohibiting -or restricting circumvention of such measures. - -When you convey a covered work, you waive any legal power to forbid circumvention -of technological measures to the extent such circumvention is effected by -exercising rights under this License with respect to the covered work, and -you disclaim any intention to limit operation or modification of the work -as a means of enforcing, against the work's users, your or third parties' -legal rights to forbid circumvention of technological measures. - - 4. Conveying Verbatim Copies. - -You may convey verbatim copies of the Program's source code as you receive -it, in any medium, provided that you conspicuously and appropriately publish -on each copy an appropriate copyright notice; keep intact all notices stating -that this License and any non-permissive terms added in accord with section -7 apply to the code; keep intact all notices of the absence of any warranty; -and give all recipients a copy of this License along with the Program. - -You may charge any price or no price for each copy that you convey, and you -may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. 
- -You may convey a work based on the Program, or the modifications to produce -it from the Program, in the form of source code under the terms of section -4, provided that you also meet all of these conditions: - -a) The work must carry prominent notices stating that you modified it, and -giving a relevant date. - -b) The work must carry prominent notices stating that it is released under -this License and any conditions added under section 7. This requirement modifies -the requirement in section 4 to "keep intact all notices". - -c) You must license the entire work, as a whole, under this License to anyone -who comes into possession of a copy. This License will therefore apply, along -with any applicable section 7 additional terms, to the whole of the work, -and all its parts, regardless of how they are packaged. This License gives -no permission to license the work in any other way, but it does not invalidate -such permission if you have separately received it. - -d) If the work has interactive user interfaces, each must display Appropriate -Legal Notices; however, if the Program has interactive interfaces that do -not display Appropriate Legal Notices, your work need not make them do so. - -A compilation of a covered work with other separate and independent works, -which are not by their nature extensions of the covered work, and which are -not combined with it such as to form a larger program, in or on a volume of -a storage or distribution medium, is called an "aggregate" if the compilation -and its resulting copyright are not used to limit the access or legal rights -of the compilation's users beyond what the individual works permit. Inclusion -of a covered work in an aggregate does not cause this License to apply to -the other parts of the aggregate. - - 6. Conveying Non-Source Forms. 
- -You may convey a covered work in object code form under the terms of sections -4 and 5, provided that you also convey the machine-readable Corresponding -Source under the terms of this License, in one of these ways: - -a) Convey the object code in, or embodied in, a physical product (including -a physical distribution medium), accompanied by the Corresponding Source fixed -on a durable physical medium customarily used for software interchange. - -b) Convey the object code in, or embodied in, a physical product (including -a physical distribution medium), accompanied by a written offer, valid for -at least three years and valid for as long as you offer spare parts or customer -support for that product model, to give anyone who possesses the object code -either (1) a copy of the Corresponding Source for all the software in the -product that is covered by this License, on a durable physical medium customarily -used for software interchange, for a price no more than your reasonable cost -of physically performing this conveying of source, or (2) access to copy the -Corresponding Source from a network server at no charge. - -c) Convey individual copies of the object code with a copy of the written -offer to provide the Corresponding Source. This alternative is allowed only -occasionally and noncommercially, and only if you received the object code -with such an offer, in accord with subsection 6b. - -d) Convey the object code by offering access from a designated place (gratis -or for a charge), and offer equivalent access to the Corresponding Source -in the same way through the same place at no further charge. You need not -require recipients to copy the Corresponding Source along with the object -code. 
If the place to copy the object code is a network server, the Corresponding -Source may be on a different server (operated by you or a third party) that -supports equivalent copying facilities, provided you maintain clear directions -next to the object code saying where to find the Corresponding Source. Regardless -of what server hosts the Corresponding Source, you remain obligated to ensure -that it is available for as long as needed to satisfy these requirements. - -e) Convey the object code using peer-to-peer transmission, provided you inform -other peers where the object code and Corresponding Source of the work are -being offered to the general public at no charge under subsection 6d. - -A separable portion of the object code, whose source code is excluded from -the Corresponding Source as a System Library, need not be included in conveying -the object code work. - -A "User Product" is either (1) a "consumer product", which means any tangible -personal property which is normally used for personal, family, or household -purposes, or (2) anything designed or sold for incorporation into a dwelling. -In determining whether a product is a consumer product, doubtful cases shall -be resolved in favor of coverage. For a particular product received by a particular -user, "normally used" refers to a typical or common use of that class of product, -regardless of the status of the particular user or of the way in which the -particular user actually uses, or expects or is expected to use, the product. -A product is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent the -only significant mode of use of the product. - -"Installation Information" for a User Product means any methods, procedures, -authorization keys, or other information required to install and execute modified -versions of a covered work in that User Product from a modified version of -its Corresponding Source. 
The information must suffice to ensure that the -continued functioning of the modified object code is in no case prevented -or interfered with solely because modification has been made. - -If you convey an object code work under this section in, or with, or specifically -for use in, a User Product, and the conveying occurs as part of a transaction -in which the right of possession and use of the User Product is transferred -to the recipient in perpetuity or for a fixed term (regardless of how the -transaction is characterized), the Corresponding Source conveyed under this -section must be accompanied by the Installation Information. But this requirement -does not apply if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has been installed -in ROM). - -The requirement to provide Installation Information does not include a requirement -to continue to provide support service, warranty, or updates for a work that -has been modified or installed by the recipient, or for the User Product in -which it has been modified or installed. Access to a network may be denied -when the modification itself materially and adversely affects the operation -of the network or violates the rules and protocols for communication across -the network. - -Corresponding Source conveyed, and Installation Information provided, in accord -with this section must be in a format that is publicly documented (and with -an implementation available to the public in source code form), and must require -no special password or key for unpacking, reading or copying. - - 7. Additional Terms. - -"Additional permissions" are terms that supplement the terms of this License -by making exceptions from one or more of its conditions. Additional permissions -that are applicable to the entire Program shall be treated as though they -were included in this License, to the extent that they are valid under applicable -law. 
If additional permissions apply only to part of the Program, that part -may be used separately under those permissions, but the entire Program remains -governed by this License without regard to the additional permissions. - -When you convey a copy of a covered work, you may at your option remove any -additional permissions from that copy, or from any part of it. (Additional -permissions may be written to require their own removal in certain cases when -you modify the work.) You may place additional permissions on material, added -by you to a covered work, for which you have or can give appropriate copyright -permission. - -Notwithstanding any other provision of this License, for material you add -to a covered work, you may (if authorized by the copyright holders of that -material) supplement the terms of this License with terms: - -a) Disclaiming warranty or limiting liability differently from the terms of -sections 15 and 16 of this License; or - -b) Requiring preservation of specified reasonable legal notices or author -attributions in that material or in the Appropriate Legal Notices displayed -by works containing it; or - -c) Prohibiting misrepresentation of the origin of that material, or requiring -that modified versions of such material be marked in reasonable ways as different -from the original version; or - -d) Limiting the use for publicity purposes of names of licensors or authors -of the material; or - -e) Declining to grant rights under trademark law for use of some trade names, -trademarks, or service marks; or - -f) Requiring indemnification of licensors and authors of that material by -anyone who conveys the material (or modified versions of it) with contractual -assumptions of liability to the recipient, for any liability that these contractual -assumptions directly impose on those licensors and authors. - -All other non-permissive additional terms are considered "further restrictions" -within the meaning of section 10. 
If the Program as you received it, or any -part of it, contains a notice stating that it is governed by this License -along with a term that is a further restriction, you may remove that term. -If a license document contains a further restriction but permits relicensing -or conveying under this License, you may add to a covered work material governed -by the terms of that license document, provided that the further restriction -does not survive such relicensing or conveying. - -If you add terms to a covered work in accord with this section, you must place, -in the relevant source files, a statement of the additional terms that apply -to those files, or a notice indicating where to find the applicable terms. - -Additional terms, permissive or non-permissive, may be stated in the form -of a separately written license, or stated as exceptions; the above requirements -apply either way. - - 8. Termination. - -You may not propagate or modify a covered work except as expressly provided -under this License. Any attempt otherwise to propagate or modify it is void, -and will automatically terminate your rights under this License (including -any patent licenses granted under the third paragraph of section 11). - -However, if you cease all violation of this License, then your license from -a particular copyright holder is reinstated (a) provisionally, unless and -until the copyright holder explicitly and finally terminates your license, -and (b) permanently, if the copyright holder fails to notify you of the violation -by some reasonable means prior to 60 days after the cessation. - -Moreover, your license from a particular copyright holder is reinstated permanently -if the copyright holder notifies you of the violation by some reasonable means, -this is the first time you have received notice of violation of this License -(for any work) from that copyright holder, and you cure the violation prior -to 30 days after your receipt of the notice. 
- -Termination of your rights under this section does not terminate the licenses -of parties who have received copies or rights from you under this License. -If your rights have been terminated and not permanently reinstated, you do -not qualify to receive new licenses for the same material under section 10. - - 9. Acceptance Not Required for Having Copies. - -You are not required to accept this License in order to receive or run a copy -of the Program. Ancillary propagation of a covered work occurring solely as -a consequence of using peer-to-peer transmission to receive a copy likewise -does not require acceptance. However, nothing other than this License grants -you permission to propagate or modify any covered work. These actions infringe -copyright if you do not accept this License. Therefore, by modifying or propagating -a covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - -Each time you convey a covered work, the recipient automatically receives -a license from the original licensors, to run, modify and propagate that work, -subject to this License. You are not responsible for enforcing compliance -by third parties with this License. - -An "entity transaction" is a transaction transferring control of an organization, -or substantially all assets of one, or subdividing an organization, or merging -organizations. If propagation of a covered work results from an entity transaction, -each party to that transaction who receives a copy of the work also receives -whatever licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the Corresponding -Source of the work from the predecessor in interest, if the predecessor has -it or can get it with reasonable efforts. - -You may not impose any further restrictions on the exercise of the rights -granted or affirmed under this License. 
For example, you may not impose a -license fee, royalty, or other charge for exercise of rights granted under -this License, and you may not initiate litigation (including a cross-claim -or counterclaim in a lawsuit) alleging that any patent claim is infringed -by making, using, selling, offering for sale, or importing the Program or -any portion of it. - - 11. Patents. - -A "contributor" is a copyright holder who authorizes use under this License -of the Program or a work on which the Program is based. The work thus licensed -is called the contributor's "contributor version". - -A contributor's "essential patent claims" are all patent claims owned or controlled -by the contributor, whether already acquired or hereafter acquired, that would -be infringed by some manner, permitted by this License, of making, using, -or selling its contributor version, but do not include claims that would be -infringed only as a consequence of further modification of the contributor -version. For purposes of this definition, "control" includes the right to -grant patent sublicenses in a manner consistent with the requirements of this -License. - -Each contributor grants you a non-exclusive, worldwide, royalty-free patent -license under the contributor's essential patent claims, to make, use, sell, -offer for sale, import and otherwise run, modify and propagate the contents -of its contributor version. - -In the following three paragraphs, a "patent license" is any express agreement -or commitment, however denominated, not to enforce a patent (such as an express -permission to practice a patent or covenant not to sue for patent infringement). -To "grant" such a patent license to a party means to make such an agreement -or commitment not to enforce a patent against the party. 
- -If you convey a covered work, knowingly relying on a patent license, and the -Corresponding Source of the work is not available for anyone to copy, free -of charge and under the terms of this License, through a publicly available -network server or other readily accessible means, then you must either (1) -cause the Corresponding Source to be so available, or (2) arrange to deprive -yourself of the benefit of the patent license for this particular work, or -(3) arrange, in a manner consistent with the requirements of this License, -to extend the patent license to downstream recipients. "Knowingly relying" -means you have actual knowledge that, but for the patent license, your conveying -the covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that country -that you have reason to believe are valid. - -If, pursuant to or in connection with a single transaction or arrangement, -you convey, or propagate by procuring conveyance of, a covered work, and grant -a patent license to some of the parties receiving the covered work authorizing -them to use, propagate, modify or convey a specific copy of the covered work, -then the patent license you grant is automatically extended to all recipients -of the covered work and works based on it. - -A patent license is "discriminatory" if it does not include within the scope -of its coverage, prohibits the exercise of, or is conditioned on the non-exercise -of one or more of the rights that are specifically granted under this License. 
-You may not convey a covered work if you are a party to an arrangement with -a third party that is in the business of distributing software, under which -you make payment to the third party based on the extent of your activity of -conveying the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory patent -license (a) in connection with copies of the covered work conveyed by you -(or copies made from those copies), or (b) primarily for and in connection -with specific products or compilations that contain the covered work, unless -you entered into that arrangement, or that patent license was granted, prior -to 28 March 2007. - -Nothing in this License shall be construed as excluding or limiting any implied -license or other defenses to infringement that may otherwise be available -to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - -If conditions are imposed on you (whether by court order, agreement or otherwise) -that contradict the conditions of this License, they do not excuse you from -the conditions of this License. If you cannot convey a covered work so as -to satisfy simultaneously your obligations under this License and any other -pertinent obligations, then as a consequence you may not convey it at all. -For example, if you agree to terms that obligate you to collect a royalty -for further conveying from those to whom you convey the Program, the only -way you could satisfy both those terms and this License would be to refrain -entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. - -Notwithstanding any other provision of this License, you have permission to -link or combine any covered work with a work licensed under version 3 of the -GNU Affero General Public License into a single combined work, and to convey -the resulting work. 
The terms of this License will continue to apply to the -part which is the covered work, but the special requirements of the GNU Affero -General Public License, section 13, concerning interaction through a network -will apply to the combination as such. - - 14. Revised Versions of this License. - -The Free Software Foundation may publish revised and/or new versions of the -GNU General Public License from time to time. Such new versions will be similar -in spirit to the present version, but may differ in detail to address new -problems or concerns. - -Each version is given a distinguishing version number. If the Program specifies -that a certain numbered version of the GNU General Public License "or any -later version" applies to it, you have the option of following the terms and -conditions either of that numbered version or of any later version published -by the Free Software Foundation. If the Program does not specify a version -number of the GNU General Public License, you may choose any version ever -published by the Free Software Foundation. - -If the Program specifies that a proxy can decide which future versions of -the GNU General Public License can be used, that proxy's public statement -of acceptance of a version permanently authorizes you to choose that version -for the Program. - -Later license versions may give you additional or different permissions. However, -no additional obligations are imposed on any author or copyright holder as -a result of your choosing to follow a later version. - - 15. Disclaimer of Warranty. - -THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE -LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR -OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER -EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES -OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
THE ENTIRE RISK AS -TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM -PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR -CORRECTION. - - 16. Limitation of Liability. - -IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL -ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM -AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, -INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO -USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED -INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE -PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER -PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - -If the disclaimer of warranty and limitation of liability provided above cannot -be given local legal effect according to their terms, reviewing courts shall -apply local law that most closely approximates an absolute waiver of all civil -liability in connection with the Program, unless a warranty or assumption -of liability accompanies a copy of the Program in return for a fee. END OF -TERMS AND CONDITIONS - -How to Apply These Terms to Your New Programs - -If you develop a new program, and you want it to be of the greatest possible -use to the public, the best way to achieve this is to make it free software -which everyone can redistribute and change under these terms. - -To do so, attach the following notices to the program. It is safest to attach -them to the start of each source file to most effectively state the exclusion -of warranty; and each file should have at least the "copyright" line and a -pointer to where the full notice is found. 
- - - -Copyright (C) - -This program is free software: you can redistribute it and/or modify it under -the terms of the GNU General Public License as published by the Free Software -Foundation, either version 3 of the License, or (at your option) any later -version. - -This program is distributed in the hope that it will be useful, but WITHOUT -ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. - -You should have received a copy of the GNU General Public License along with -this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - -If the program does terminal interaction, make it output a short notice like -this when it starts in an interactive mode: - - Copyright (C) - -This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - -This is free software, and you are welcome to redistribute it under certain -conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands might -be different; for a GUI interface, you would use an "about box". - -You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. For -more information on this, and how to apply and follow the GNU GPL, see . - -The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you may -consider it more useful to permit linking proprietary applications with the -library. If this is what you want to do, use the GNU Lesser General Public -License instead of this License. But first, please read . 
+MIT License + +Copyright 2017-2021 The PyPSA-Eur Authors + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
\ No newline at end of file diff --git a/Snakefile b/Snakefile index 2f8eea3b..423f20eb 100644 --- a/Snakefile +++ b/Snakefile @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT from os.path import normpath, exists from shutil import copyfile diff --git a/doc/Makefile b/doc/Makefile index ce5c6e6a..75df2f48 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT # Makefile for Sphinx documentation # diff --git a/doc/_static/theme_overrides.css b/doc/_static/theme_overrides.css index d14367ee..a4c9818d 100644 --- a/doc/_static/theme_overrides.css +++ b/doc/_static/theme_overrides.css @@ -1,5 +1,5 @@ /* SPDX-FileCopyrightText: 2017-2020 The PyPSA-Eur Authors - SPDX-License-Identifier: GPL-3.0-or-later + SPDX-License-Identifier: MIT */ .wy-side-nav-search { diff --git a/doc/conf.py b/doc/conf.py index 00084a48..afb23271 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: 20017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT # -*- coding: utf-8 -*- # diff --git a/doc/index.rst b/doc/index.rst index aace99cb..9801ef9d 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -196,7 +196,7 @@ Licence PyPSA-Eur work is released under multiple licenses: -* All original source code is licensed as free software under `GPL-3.0-or-later `_. +* All original source code is licensed as free software under `MIT `_. * The documentation is licensed under `CC-BY-4.0 `_. * Configuration files are mostly licensed under `CC0-1.0 `_. * Data files are licensed under `CC-BY-4.0 `_. 
diff --git a/doc/make.bat b/doc/make.bat index e2403128..35dcecc2 100644 --- a/doc/make.bat +++ b/doc/make.bat @@ -1,5 +1,5 @@ REM SPDX-FileCopyrightText: 2019-2020 The PyPSA-Eur Authors -REM SPDX-License-Identifier: GPL-3.0-or-later +REM SPDX-License-Identifier: MIT @ECHO OFF diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 3c59f79f..392877ac 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -309,7 +309,7 @@ Release Process * Tag a release on Github via ``git tag v0.x.x``, ``git push``, ``git push --tags``. Include release notes in the tag message. -* Upload code to `zenodo code repository `_ with `GNU GPL 3.0 `_ license. +* Upload code to `zenodo code repository `_ with `MIT license `_. * Create pre-built networks for ``config.default.yaml`` by running ``snakemake -j 1 extra_components_all_networks``. diff --git a/envs/environment.fixed.yaml b/envs/environment.fixed.yaml index dc5ee621..dee915b6 100644 --- a/envs/environment.fixed.yaml +++ b/envs/environment.fixed.yaml @@ -1,3 +1,9 @@ +# SPDX-FileCopyrightText: : 2017-2021 The PyPSA-Eur Authors +# +# SPDX-License-Identifier: CC0-1.0 + + + name: pypsa-eur channels: - bioconda diff --git a/envs/environment.yaml b/envs/environment.yaml index d2d85e97..b29e9351 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT name: pypsa-eur channels: diff --git a/scripts/_helpers.py b/scripts/_helpers.py index ae28f808..30775ae5 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT import pandas as pd from pathlib import Path diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index 8f721652..08a32a26 100755 --- a/scripts/add_electricity.py +++ 
b/scripts/add_electricity.py @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT # coding: utf-8 """ diff --git a/scripts/add_extra_components.py b/scripts/add_extra_components.py index ae581382..88f7d35c 100644 --- a/scripts/add_extra_components.py +++ b/scripts/add_extra_components.py @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT # coding: utf-8 """ diff --git a/scripts/base_network.py b/scripts/base_network.py index 4c2ed2c5..4b85bb57 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT # coding: utf-8 """ diff --git a/scripts/build_bus_regions.py b/scripts/build_bus_regions.py index 87890d92..d91d0575 100644 --- a/scripts/build_bus_regions.py +++ b/scripts/build_bus_regions.py @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT """ Creates Voronoi shapes for each bus representing both onshore and offshore regions. diff --git a/scripts/build_cutout.py b/scripts/build_cutout.py index 79be84fc..78eafac6 100644 --- a/scripts/build_cutout.py +++ b/scripts/build_cutout.py @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: : 2017-2021 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT """ Create cutouts with `atlite `_. 
diff --git a/scripts/build_hydro_profile.py b/scripts/build_hydro_profile.py index 395753c0..6ac59262 100644 --- a/scripts/build_hydro_profile.py +++ b/scripts/build_hydro_profile.py @@ -2,7 +2,7 @@ # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT """ Build hydroelectric inflow time-series for each country. diff --git a/scripts/build_load_data.py b/scripts/build_load_data.py index e31fa3f2..b286df45 100755 --- a/scripts/build_load_data.py +++ b/scripts/build_load_data.py @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: : 2020 @JanFrederickUnnewehr, The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT """ diff --git a/scripts/build_natura_raster.py b/scripts/build_natura_raster.py index 63b311e9..f7a923d6 100644 --- a/scripts/build_natura_raster.py +++ b/scripts/build_natura_raster.py @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT """ Rasters the vector data of the `Natura 2000 `_ natural protection areas onto all cutout regions. 
diff --git a/scripts/build_powerplants.py b/scripts/build_powerplants.py index 8b329469..ab000631 100755 --- a/scripts/build_powerplants.py +++ b/scripts/build_powerplants.py @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT # coding: utf-8 """ diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index 111eb772..9ce83de3 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -2,7 +2,7 @@ # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT """Calculates for each network node the (i) installable capacity (based on land-use), (ii) the available generation time diff --git a/scripts/build_shapes.py b/scripts/build_shapes.py index 59603f96..5814085b 100644 --- a/scripts/build_shapes.py +++ b/scripts/build_shapes.py @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT """ Creates GIS shape files of the countries, exclusive economic zones and `NUTS3 `_ areas. diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index d74745d0..980b73b0 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT # coding: utf-8 """ diff --git a/scripts/make_summary.py b/scripts/make_summary.py index 53482c48..cff5318c 100644 --- a/scripts/make_summary.py +++ b/scripts/make_summary.py @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT """ Creates summaries of aggregated energy and costs as ``.csv`` files. 
diff --git a/scripts/plot_network.py b/scripts/plot_network.py index 571f5bad..456bf50f 100755 --- a/scripts/plot_network.py +++ b/scripts/plot_network.py @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT """ Plots map with pie charts and cost box bar charts. diff --git a/scripts/plot_p_nom_max.py b/scripts/plot_p_nom_max.py index bc346785..e79ad274 100644 --- a/scripts/plot_p_nom_max.py +++ b/scripts/plot_p_nom_max.py @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT """ Plots renewable installation potentials per capacity factor. diff --git a/scripts/plot_summary.py b/scripts/plot_summary.py index c8cc169c..a34611de 100644 --- a/scripts/plot_summary.py +++ b/scripts/plot_summary.py @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT """ Plots energy and cost summaries for solved networks. diff --git a/scripts/prepare_links_p_nom.py b/scripts/prepare_links_p_nom.py index 7623d1bf..b83089d6 100644 --- a/scripts/prepare_links_p_nom.py +++ b/scripts/prepare_links_p_nom.py @@ -2,7 +2,7 @@ # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT """ Extracts capacities of HVDC links from `Wikipedia `_. 
diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index 86afef2f..ed33abb7 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT # coding: utf-8 """ diff --git a/scripts/retrieve_databundle.py b/scripts/retrieve_databundle.py index 7ee6c2b1..86869879 100644 --- a/scripts/retrieve_databundle.py +++ b/scripts/retrieve_databundle.py @@ -1,7 +1,7 @@ # Copyright 2019-2020 Fabian Hofmann (FIAS) # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT """ .. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.3517935.svg diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index 48f0ebe6..85bc4d15 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT # coding: utf-8 """ diff --git a/scripts/solve_network.py b/scripts/solve_network.py index d874d335..6619f2d7 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT """ Solves linear optimal power flow for a network iteratively while updating reactances. 
diff --git a/scripts/solve_operations_network.py b/scripts/solve_operations_network.py index 9f97754a..74506e5a 100644 --- a/scripts/solve_operations_network.py +++ b/scripts/solve_operations_network.py @@ -1,6 +1,6 @@ # SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # -# SPDX-License-Identifier: GPL-3.0-or-later +# SPDX-License-Identifier: MIT """ Solves linear optimal dispatch in hourly resolution From 780b60cc46f9f964e649c3b177ac47ba53a6e975 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 14 Sep 2021 16:38:48 +0200 Subject: [PATCH 051/102] add release note --- doc/release_notes.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 392877ac..49ff620f 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -19,6 +19,8 @@ PyPSA-Eur 0.4.0 (15th September 2021) **New Features and Changes** +* With this release, we change the license from copyleft GPLv3 to the more + liberal MIT license with the consent of all contributors. * Switch to the new major ``atlite`` release v0.2. 
The version upgrade comes along with significant speed up for the rule ``build_renewable_profiles.py`` From 3eea2a8e84af10a2cd484958bd23cabf04263a0e Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 14 Sep 2021 16:40:10 +0200 Subject: [PATCH 052/102] add missing, remove unused license --- LICENSES/{GPL-3.0-or-later.txt => MIT.txt} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename LICENSES/{GPL-3.0-or-later.txt => MIT.txt} (100%) diff --git a/LICENSES/GPL-3.0-or-later.txt b/LICENSES/MIT.txt similarity index 100% rename from LICENSES/GPL-3.0-or-later.txt rename to LICENSES/MIT.txt From 6485b989734485a1a924e6e9930633b891e0b690 Mon Sep 17 00:00:00 2001 From: Koen van Greevenbroek Date: Wed, 15 Sep 2021 11:44:49 +0200 Subject: [PATCH 053/102] Mark datasets from Zenodo as static When retrieving a remote file over HTTP, Snakemake uses the "last-modified" property in HTTP header as a proxy for `mtime` of the remote file. If this time is more recent than the `mtime` of the output of the retrieve rule, the rule is triggered and the remote file is retrieved again (since it was apparently updated). However, Zenodo periodically updates the "last-modified" property of records retrieved over HTTP even if those records have not been updated. This causes Snakemake to false assume that the records have to downloaded again. By setting `static=True` for datasets we know don't actually change, we avoid this problem. 
--- Snakefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Snakefile b/Snakefile index 2f8eea3b..3c3f02f8 100644 --- a/Snakefile +++ b/Snakefile @@ -153,7 +153,7 @@ if config['enable'].get('build_cutout', False): if config['enable'].get('retrieve_cutout', True): rule retrieve_cutout: - input: HTTP.remote("zenodo.org/record/4709858/files/{cutout}.nc", keep_local=True) + input: HTTP.remote("zenodo.org/record/4709858/files/{cutout}.nc", keep_local=True, static=True) output: "cutouts/{cutout}.nc" shell: "mv {input} {output}" @@ -170,7 +170,7 @@ if config['enable'].get('build_natura_raster', False): if config['enable'].get('retrieve_natura_raster', True): rule retrieve_natura_raster: - input: HTTP.remote("zenodo.org/record/4706686/files/natura.tiff", keep_local=True) + input: HTTP.remote("zenodo.org/record/4706686/files/natura.tiff", keep_local=True, static=True) output: "resources/natura.tiff" shell: "mv {input} {output}" From ede9146735a5263dcc22a51f5c22dcfc201e92c9 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 20 Sep 2021 11:02:57 +0200 Subject: [PATCH 054/102] limit pyproj --- envs/environment.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/envs/environment.yaml b/envs/environment.yaml index b29e9351..21d5458d 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -38,6 +38,7 @@ dependencies: - progressbar2 - pyomo - matplotlib + - pyproj<=3.2.0 # Keep in conda environment when calling ipython - ipython From da883b1283f85f1a4c92f1fd5918d3ad193208ce Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 20 Sep 2021 11:12:26 +0200 Subject: [PATCH 055/102] limit pyproj to 3.1.0 --- envs/environment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/envs/environment.yaml b/envs/environment.yaml index 21d5458d..7273c370 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -38,7 +38,7 @@ dependencies: - progressbar2 - pyomo - matplotlib - - pyproj<=3.2.0 + - pyproj<=3.1.0 # Keep in 
conda environment when calling ipython - ipython From 785caecdb52bbc19d04cddc16406c23787b0bb1f Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 20 Sep 2021 11:18:47 +0200 Subject: [PATCH 056/102] no proj 8 --- envs/environment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/envs/environment.yaml b/envs/environment.yaml index 7273c370..29d743ac 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -38,7 +38,7 @@ dependencies: - progressbar2 - pyomo - matplotlib - - pyproj<=3.1.0 + - proj<8 # Keep in conda environment when calling ipython - ipython From 2f2ac89c87bcc6cda33af43370f080728e211974 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 20 Sep 2021 11:43:04 +0200 Subject: [PATCH 057/102] update main affiliation --- doc/index.rst | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/doc/index.rst b/doc/index.rst index 9801ef9d..4691ab58 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -49,7 +49,18 @@ The restriction to freely available and open data encourages the open exchange o PyPSA-Eur is designed to be imported into the open toolbox `PyPSA `_ for which `documentation `_ is available as well. -This project is maintained by the `Energy System Modelling group `_ at the `Institute for Automation and Applied Informatics `_ at the `Karlsruhe Institute of Technology `_. The group is funded by the `Helmholtz Association `_ until 2024. Previous versions were developed by the `Renewable Energy Group `_ at `FIAS `_ to carry out simulations for the `CoNDyNet project `_, financed by the `German Federal Ministry for Education and Research (BMBF) `_ as part of the `Stromnetze Research Initiative `_. +This project is currently maintained by the `Department of Digital +Transformation in Energy Systems` `_ at the +`Technische Universität Berlin `_. 
Previous versions were +developed within the `IAI `_ at the `Karlsruhe Institute of +Technology (KIT) `_ and by the `Renewable +Energy Group +`_ +at `FIAS `_ to carry out simulations for the +`CoNDyNet project `_, financed by the `German Federal +Ministry for Education and Research (BMBF) `_ +as part of the `Stromnetze Research Initiative +`_. A version of the model that adds building heating, transport and industry sectors to the model, as well as gas networks, is currently being developed in the `PyPSA-Eur-Sec repository `_. From 089fc5b747af7f6c2f88c366e297dbf3bec8f25c Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 22 Sep 2021 16:46:23 +0200 Subject: [PATCH 058/102] final tweaks of release notes --- doc/release_notes.rst | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 49ff620f..ec57778b 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -14,13 +14,14 @@ Upcoming Release * add new features and bugfixes here -PyPSA-Eur 0.4.0 (15th September 2021) +PyPSA-Eur 0.4.0 (22th September 2021) ===================================== **New Features and Changes** * With this release, we change the license from copyleft GPLv3 to the more - liberal MIT license with the consent of all contributors. + liberal MIT license with the consent of all contributors + [`#276 `_]. * Switch to the new major ``atlite`` release v0.2. The version upgrade comes along with significant speed up for the rule ``build_renewable_profiles.py`` @@ -83,48 +84,48 @@ PyPSA-Eur 0.4.0 (15th September 2021) * Update :mod:`plot_network` and :mod:`make_summary` rules to latest PyPSA versions [`#270 `_]. -* Bugfix: Keep converter links to store components when using the ``ATK`` +* Keep converter links to store components when using the ``ATK`` wildcard and only remove DC links [`#214 `_]. 
-* Bugfix: Value for ``co2base`` in ``config.yaml`` adjusted to 1.487e9 t CO2-eq +* Value for ``co2base`` in ``config.yaml`` adjusted to 1.487e9 t CO2-eq (from 3.1e9 t CO2-eq). The new value represents emissions related to the electricity sector for EU+UK+Balkan. The old value was too high and used when the emissions wildcard in ``{opts}`` was used [`#233 `_]. -* Bugfix: Add escape in :mod:`base_network` if all TYNDP links are already +* Add escape in :mod:`base_network` if all TYNDP links are already contained in the network [`#246 `_]. -* Bugfix: In :mod:`solve_operations_network` the optimised capacities are now +* In :mod:`solve_operations_network` the optimised capacities are now fixed for all extendable links, not only HVDC links [`#244 `_]. -* Bugfix: The ``focus_weights`` are now also considered when pre-clustering in +* The ``focus_weights`` are now also considered when pre-clustering in the :mod:`simplify_network` rule [`#241 `_]. -* Bugfix: in :mod:`build_renewable_profile` where offshore wind profiles could +* in :mod:`build_renewable_profile` where offshore wind profiles could no longer be created [`#249 `_]. -* Bugfix: Lower expansion limit of extendable carriers is now set to the +* Lower expansion limit of extendable carriers is now set to the existing capacity, i.e. ``p_nom_min = p_nom`` (0 before). Simultaneously, the upper limit (``p_nom_max``) is now the maximum of the installed capacity (``p_nom``) and the previous estimate based on land availability (``p_nom_max``) [`#260 `_]. -* Bugfix: Solving an operations network now includes optimized store capacities +* Solving an operations network now includes optimized store capacities as well. Before only lines, links, generators and storage units were considered [`#269 `_]. 
-* Bugfix: With ``load_shedding: true`` in the solving options of ``config.yaml`` +* With ``load_shedding: true`` in the solving options of ``config.yaml`` load shedding generators are only added at the AC buses, excluding buses for H2 and battery stores [`#269 `_]. -* Bugfix: Delete duplicated capital costs at battery discharge link +* Delete duplicated capital costs at battery discharge link [`#240 `_]. -* Bugfix: Propagate the solver log file name to the solver. Previously, the +* Propagate the solver log file name to the solver. Previously, the PyPSA network solving functions were not told about the solver logfile specified in the Snakemake file [`#247 `_] From 54e35a3c86ea12191162138e69cbf42ff9783553 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 22 Sep 2021 16:49:30 +0200 Subject: [PATCH 059/102] update environment.fixed.yaml --- envs/environment.fixed.yaml | 28 +++++++++++++--------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/envs/environment.fixed.yaml b/envs/environment.fixed.yaml index dee915b6..3fe3d51a 100644 --- a/envs/environment.fixed.yaml +++ b/envs/environment.fixed.yaml @@ -1,9 +1,7 @@ -# SPDX-FileCopyrightText: : 2017-2021 The PyPSA-Eur Authors +# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors # # SPDX-License-Identifier: CC0-1.0 - - name: pypsa-eur channels: - bioconda @@ -51,7 +49,7 @@ dependencies: - connection_pool=0.0.3 - country_converter=0.7.3 - cryptography=3.4.7 - - curl=7.78.0 + - curl=7.79.0 - cycler=0.10.0 - cytoolz=0.11.0 - dask=2021.3.1 @@ -106,7 +104,7 @@ dependencies: - jpeg=9d - json-c=0.15 - jsonschema=3.2.0 - - jupyter_core=4.7.1 + - jupyter_core=4.8.1 - kealib=1.4.14 - kiwisolver=1.3.2 - krb5=1.19.2 @@ -116,18 +114,18 @@ dependencies: - libblas=3.9.0 - libcblas=3.9.0 - libclang=11.1.0 - - libcurl=7.78.0 + - libcurl=7.79.0 - libdap4=3.20.6 - libedit=3.1.20191231 - libev=4.33 - libevent=2.1.10 - - libffi=3.3 - - libgcc-ng=11.1.0 + - libffi=3.4.2 + - libgcc-ng=11.2.0 - libgdal=3.2.1 
- - libgfortran-ng=11.1.0 - - libgfortran5=11.1.0 + - libgfortran-ng=11.2.0 + - libgfortran5=11.2.0 - libglib=2.68.4 - - libgomp=11.1.0 + - libgomp=11.2.0 - libiconv=1.16 - libkml=1.3.0 - liblapack=3.9.0 @@ -144,7 +142,7 @@ dependencies: - libspatialindex=1.9.3 - libspatialite=5.0.1 - libssh2=1.10.0 - - libstdcxx-ng=11.1.0 + - libstdcxx-ng=11.2.0 - libtiff=4.2.0 - libuuid=2.32.1 - libvorbis=1.3.7 @@ -165,7 +163,7 @@ dependencies: - matplotlib-inline=0.1.3 - memory_profiler=0.58.0 - mock=4.0.3 - - more-itertools=8.9.0 + - more-itertools=8.10.0 - msgpack-python=1.0.2 - munch=2.5.0 - mysql-common=8.0.25 @@ -271,9 +269,9 @@ dependencies: - toolz=0.11.1 - toposort=1.6 - tornado=6.1 - - tqdm=4.62.2 + - tqdm=4.62.3 - traitlets=5.1.0 - - typing_extensions=3.10.0.0 + - typing_extensions=3.10.0.2 - tzcode=2021a - tzdata=2021a - urllib3=1.26.6 From 04c8d1fd026ba857534d01f162e5364ac410df2c Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 23 Sep 2021 08:57:18 +0200 Subject: [PATCH 060/102] add correction factor to offshore wind for wake losses --- config.default.yaml | 10 ++++++++-- doc/release_notes.rst | 4 +++- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/config.default.yaml b/config.default.yaml index a20e4d04..d97f4712 100755 --- a/config.default.yaml +++ b/config.default.yaml @@ -112,7 +112,10 @@ renewable: method: wind turbine: NREL_ReferenceTurbine_5MW_offshore capacity_per_sqkm: 3 - # correction_factor: 0.93 + correction_factor: 0.8855 + # proxy for wake losses + # from 10.1016/j.energy.2018.08.153 + # until done more rigorously in #153 corine: [44, 255] natura: true max_depth: 50 @@ -126,7 +129,10 @@ renewable: turbine: NREL_ReferenceTurbine_5MW_offshore # ScholzPhd Tab 4.3.1: 10MW/km^2 capacity_per_sqkm: 3 - # correction_factor: 0.93 + correction_factor: 0.8855 + # proxy for wake losses + # from 10.1016/j.energy.2018.08.153 + # until done more rigorously in #153 corine: [44, 255] natura: true max_depth: 50 diff --git a/doc/release_notes.rst 
b/doc/release_notes.rst index ec57778b..22406954 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -11,7 +11,9 @@ Release Notes Upcoming Release ================ -* add new features and bugfixes here +* Add an efficiency factor of 88.55% to offshore wind capacity factors + as a proxy for wake losses. More rigorous modelling is `planned `_ + [`#277 `_]. PyPSA-Eur 0.4.0 (22th September 2021) From 5ee280b96fc51ea6745fa78363d4abbfdb93b8a8 Mon Sep 17 00:00:00 2001 From: Koen van Greevenbroek Date: Tue, 5 Oct 2021 11:00:30 +0200 Subject: [PATCH 061/102] Update default CPLEX options following PyPSA update For more recent versions of CPLEX and PyPSA, using dotted options works while the current default crashes a CPLEX solve with "AttributeError: 'RootParameterGroup' object has no attribute 'barrier_convergetol'". See https://github.com/PyPSA/PyPSA/pull/168. --- config.default.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/config.default.yaml b/config.default.yaml index d97f4712..74f03d69 100755 --- a/config.default.yaml +++ b/config.default.yaml @@ -238,8 +238,8 @@ solving: # threads: 4 # lpmethod: 4 # barrier # solutiontype: 2 # non basic solution, ie no crossover - # barrier_convergetol: 1.e-5 - # feasopt_tolerance: 1.e-6 + # barrier.convergetol: 1.e-5 + # feasopt.tolerance: 1.e-6 plotting: map: From 08349d78973c1235d59941fc7f9e5bedaae7182a Mon Sep 17 00:00:00 2001 From: Koen van Greevenbroek Date: Tue, 5 Oct 2021 11:08:32 +0200 Subject: [PATCH 062/102] Also update the default CPLEX configuration in test config --- test/config.test1.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/config.test1.yaml b/test/config.test1.yaml index aea3dbc2..6b3e667a 100755 --- a/test/config.test1.yaml +++ b/test/config.test1.yaml @@ -195,8 +195,8 @@ solving: # threads: 4 # lpmethod: 4 # barrier # solutiontype: 2 # non basic solution, ie no crossover - # barrier_convergetol: 1.e-5 - # feasopt_tolerance: 1.e-6 + # 
barrier.convergetol: 1.e-5 + # feasopt.tolerance: 1.e-6 plotting: map: From 66524bf7978365357114ff0394a27001ff7cf6db Mon Sep 17 00:00:00 2001 From: Koen van Greevenbroek Date: Tue, 5 Oct 2021 11:20:45 +0200 Subject: [PATCH 063/102] Update obsolete link to CPLEX documentation --- doc/configtables/solving-solver.csv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/configtables/solving-solver.csv b/doc/configtables/solving-solver.csv index 3eae6310..e9146dfc 100644 --- a/doc/configtables/solving-solver.csv +++ b/doc/configtables/solving-solver.csv @@ -1,3 +1,3 @@ ,Unit,Values,Description name,--,"One of {'gurobi', 'cplex', 'cbc', 'glpk', 'ipopt'}; potentially more possible","Solver to use for optimisation problems in the workflow; e.g. clustering and linear optimal power flow." -opts,--,"Parameter list for `Gurobi `_ and `CPLEX `_","Solver specific parameter settings." +opts,--,"Parameter list for `Gurobi `_ and `CPLEX `_","Solver specific parameter settings." From af625d73791dba3f2bf7ca2c488fd041cc8fc086 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 5 Oct 2021 12:18:10 +0200 Subject: [PATCH 064/102] config: reduce default deployment density for offshore wind --- config.default.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/config.default.yaml b/config.default.yaml index d97f4712..6bd06700 100755 --- a/config.default.yaml +++ b/config.default.yaml @@ -111,7 +111,7 @@ renewable: resource: method: wind turbine: NREL_ReferenceTurbine_5MW_offshore - capacity_per_sqkm: 3 + capacity_per_sqkm: 2 correction_factor: 0.8855 # proxy for wake losses # from 10.1016/j.energy.2018.08.153 @@ -128,7 +128,7 @@ renewable: method: wind turbine: NREL_ReferenceTurbine_5MW_offshore # ScholzPhd Tab 4.3.1: 10MW/km^2 - capacity_per_sqkm: 3 + capacity_per_sqkm: 2 correction_factor: 0.8855 # proxy for wake losses # from 10.1016/j.energy.2018.08.153 From 1b2c6e17c504e691e7ed331b4ce78d073a09333e Mon Sep 17 00:00:00 2001 From: Fabian 
Neumann Date: Tue, 5 Oct 2021 12:21:45 +0200 Subject: [PATCH 065/102] add release note for offshore density reduction --- doc/release_notes.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 22406954..0423a581 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -15,6 +15,9 @@ Upcoming Release as a proxy for wake losses. More rigorous modelling is `planned `_ [`#277 `_]. +* The default deployment density of AC- and DC-connected offshore wind capacity is reduced from 3 MW/sqkm + to a more conservative estimate of 2 MW/sqkm [`#280 `_]. + PyPSA-Eur 0.4.0 (22th September 2021) ===================================== From 23d3216d92d569d23dab7eb58864ac12b5d24154 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 5 Oct 2021 17:30:22 +0200 Subject: [PATCH 066/102] base_network.py: fix scipy spatial import --- scripts/base_network.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/base_network.py b/scripts/base_network.py index 4b85bb57..514e4dc3 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -70,9 +70,9 @@ import yaml import pandas as pd import geopandas as gpd import numpy as np -import scipy as sp import networkx as nx +from scipy import spatial from scipy.sparse import csgraph from itertools import product @@ -101,7 +101,7 @@ def _find_closest_links(links, new_links, distance_upper_bound=1.5): for s in links.geometry]) querycoords = np.vstack([new_links[['x1', 'y1', 'x2', 'y2']], new_links[['x2', 'y2', 'x1', 'y1']]]) - tree = sp.spatial.KDTree(treecoords) + tree = spatial.KDTree(treecoords) dist, ind = tree.query(querycoords, distance_upper_bound=distance_upper_bound) found_b = ind < len(links) found_i = np.arange(len(new_links)*2)[found_b] % len(new_links) @@ -214,7 +214,7 @@ def _add_links_from_tyndp(buses, links): links_tyndp = links_tyndp.loc[links_tyndp["j"].isnull()] if links_tyndp.empty: return buses, links - tree = 
sp.spatial.KDTree(buses[['x', 'y']]) + tree = spatial.KDTree(buses[['x', 'y']]) _, ind0 = tree.query(links_tyndp[["x1", "y1"]]) ind0_b = ind0 < len(buses) links_tyndp.loc[ind0_b, "bus0"] = buses.index[ind0[ind0_b]] From fccf5323d2b203d380d1e6ba0bb659ce5f9cd523 Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 6 Oct 2021 12:16:12 +0200 Subject: [PATCH 067/102] doc: fix css theme --- doc/conf.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index afb23271..01dd6bc8 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -157,16 +157,12 @@ html_theme_options = { # pixels large. #html_favicon = None -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +# These folders are copied to the documentation's HTML output +html_static_path = ["_static"] -html_context = { - 'css_files': [ - '_static/theme_overrides.css', # override wide tables in RTD theme - ], -} +# These paths are either relative to html_static_path +# or fully qualified paths (eg. https://...) +html_css_files = ["theme_overrides.css"] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. 
These files are copied From 50548b23f9aeeb57d9a9e5b11264b15483cfd81c Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 7 Oct 2021 12:26:48 +0200 Subject: [PATCH 068/102] add basic setup of rsync with files to ignore --- .syncignore-receive | 15 +++++++++++++++ .syncignore-send | 19 +++++++++++++++++++ 2 files changed, 34 insertions(+) create mode 100644 .syncignore-receive create mode 100644 .syncignore-send diff --git a/.syncignore-receive b/.syncignore-receive new file mode 100644 index 00000000..c24928f5 --- /dev/null +++ b/.syncignore-receive @@ -0,0 +1,15 @@ +.snakemake +.git +.pytest_cache +.ipynb_checkpoints +.vscode +.DS_Store +__pycache__ +*.pyc +*.pyo +*.ipynb +notebooks +doc +cutouts +data/bundle +*.nc \ No newline at end of file diff --git a/.syncignore-send b/.syncignore-send new file mode 100644 index 00000000..3839d915 --- /dev/null +++ b/.syncignore-send @@ -0,0 +1,19 @@ +.snakemake +.git +.pytest_cache +.ipynb_checkpoints +.vscode +.DS_Store +__pycache__ +*.pyc +*.pyo +*.ipynb +notebooks +benchmarks +logs +resources +results +networks +cutouts +data/bundle +doc \ No newline at end of file From 9379c135fd5ea2d0705accfccbebe41bf8cf3e94 Mon Sep 17 00:00:00 2001 From: Francesco Witte Date: Tue, 9 Nov 2021 15:56:59 +0100 Subject: [PATCH 069/102] Fix link appearence and missing parentheses --- doc/index.rst | 2 +- doc/wildcards.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/index.rst b/doc/index.rst index 4691ab58..5ee1db5b 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -50,7 +50,7 @@ The restriction to freely available and open data encourages the open exchange o PyPSA-Eur is designed to be imported into the open toolbox `PyPSA `_ for which `documentation `_ is available as well. This project is currently maintained by the `Department of Digital -Transformation in Energy Systems` `_ at the +Transformation in Energy Systems `_ at the `Technische Universität Berlin `_. 
Previous versions were developed within the `IAI `_ at the `Karlsruhe Institute of Technology (KIT) `_ and by the `Renewable diff --git a/doc/wildcards.rst b/doc/wildcards.rst index b3267c23..2290de67 100644 --- a/doc/wildcards.rst +++ b/doc/wildcards.rst @@ -126,7 +126,7 @@ The ``{technology}`` wildcard The ``{technology}`` wildcard specifies for which renewable energy technology to produce availablity time series and potentials using the rule :mod:`build_renewable_profiles`. It can take the values ``onwind``, ``offwind-ac``, ``offwind-dc``, and ``solar`` but **not** ``hydro`` -(since hydroelectric plant profiles are created by a different rule. +(since hydroelectric plant profiles are created by a different rule). The wildcard can moreover be used to create technology specific figures and summaries. For instance ``{technology}`` can be used to plot regionally disaggregated potentials From c0d1656091d50eaf347967bdf26803a0b8fc6904 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 10 Nov 2021 10:37:09 +0100 Subject: [PATCH 070/102] build_load_data: separate retrieving and processing load data --- Snakefile | 7 +++++++ config.default.yaml | 1 - config.tutorial.yaml | 1 - scripts/build_load_data.py | 4 +--- test/config.test1.yaml | 1 - 5 files changed, 8 insertions(+), 6 deletions(-) diff --git a/Snakefile b/Snakefile index 1196d47b..cb50e3bf 100644 --- a/Snakefile +++ b/Snakefile @@ -67,7 +67,14 @@ if config['enable'].get('retrieve_databundle', True): script: 'scripts/retrieve_databundle.py' +rule retrieve_load_data: + input: HTTP.remote("data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv", keep_local=True, static=True) + output: "data/load_raw.csv" + shell: "mv {input} {output}" + + rule build_load_data: + input: "data/load_raw.csv" output: "resources/load.csv" log: "logs/build_load_data.log" script: 'scripts/build_load_data.py' diff --git a/config.default.yaml b/config.default.yaml index a0ffbe47..f70e7c2c 100755 --- 
a/config.default.yaml +++ b/config.default.yaml @@ -188,7 +188,6 @@ transformers: type: '' load: - url: https://data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv power_statistics: True # only for files from <2019; set false in order to get ENTSOE transparency data interpolate_limit: 3 # data gaps up until this size are interpolated linearly time_shift_for_large_gaps: 1w # data gaps up until this size are copied by copying from diff --git a/config.tutorial.yaml b/config.tutorial.yaml index c199712a..26ead242 100755 --- a/config.tutorial.yaml +++ b/config.tutorial.yaml @@ -150,7 +150,6 @@ transformers: type: '' load: - url: https://data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv power_statistics: True # only for files from <2019; set false in order to get ENTSOE transparency data interpolate_limit: 3 # data gaps up until this size are interpolated linearly time_shift_for_large_gaps: 1w # data gaps up until this size are copied by copying from diff --git a/scripts/build_load_data.py b/scripts/build_load_data.py index b286df45..f71be6ea 100755 --- a/scripts/build_load_data.py +++ b/scripts/build_load_data.py @@ -14,7 +14,6 @@ Relevant Settings snapshots: load: - url: interpolate_limit: time_shift_for_large_gaps: manual_adjustments: @@ -199,14 +198,13 @@ if __name__ == "__main__": config = snakemake.config powerstatistics = config['load']['power_statistics'] - url = config['load']['url'] interpolate_limit = config['load']['interpolate_limit'] countries = config['countries'] snapshots = pd.date_range(freq='h', **config['snapshots']) years = slice(snapshots[0], snapshots[-1]) time_shift = config['load']['time_shift_for_large_gaps'] - load = load_timeseries(url, years, countries, powerstatistics) + load = load_timeseries(snakemake.input[0], years, countries, powerstatistics) if config['load']['manual_adjustments']: load = manual_adjustment(load, powerstatistics) diff --git 
a/test/config.test1.yaml b/test/config.test1.yaml index 6b3e667a..2986037b 100755 --- a/test/config.test1.yaml +++ b/test/config.test1.yaml @@ -149,7 +149,6 @@ transformers: type: '' load: - url: https://data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv power_statistics: True # only for files from <2019; set false in order to get ENTSOE transparency data interpolate_limit: 3 # data gaps up until this size are interpolated linearly time_shift_for_large_gaps: 1w # data gaps up until this size are copied by copying from From f1243c3e0cc5fc1f4d6b5de03f6489f6109c9769 Mon Sep 17 00:00:00 2001 From: Max Parzen Date: Wed, 24 Nov 2021 14:16:24 +0100 Subject: [PATCH 071/102] Fix distribute clustering with cbc/glpk/ipopt Assume you have 10 nodes that need to be distributed between 2 countries. What can happen with some of the open source solvers is that one country gets assigned to 9.01 (float) nodes, and the other one to 0.99. Now using .astype(int) would lead to a node distribution of 0 and 9, as the `astype(int)` function round down by default (0.99 -> 0). This assigned zero value breaks the code in case open source solvers are used. Gurobi somehow does deal with it. 
--- scripts/cluster_network.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 980b73b0..1a976cd1 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -218,7 +218,7 @@ def distribute_clusters(n, n_clusters, focus_weights=None, solver_name=None): results = opt.solve(m) assert results['Solver'][0]['Status'] == 'ok', f"Solver returned non-optimally: {results}" - return pd.Series(m.n.get_values(), index=L.index).astype(int) + return pd.Series(m.n.get_values(), index=L.index).round().astype(int) def busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights=None, algorithm="kmeans", **algorithm_kwds): From f06c7958bc074ed295fdc773b39278a79fe8b450 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sat, 4 Dec 2021 18:21:08 +0100 Subject: [PATCH 072/102] restore REUSE compliance [skip ci] --- .syncignore-receive | 4 ++++ .syncignore-send | 10 +++++++--- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/.syncignore-receive b/.syncignore-receive index c24928f5..717245c3 100644 --- a/.syncignore-receive +++ b/.syncignore-receive @@ -1,3 +1,7 @@ +# SPDX-FileCopyrightText: : 2021 The PyPSA-Eur Authors +# +# SPDX-License-Identifier: CC0-1.0 + .snakemake .git .pytest_cache diff --git a/.syncignore-send b/.syncignore-send index 3839d915..9390d0e5 100644 --- a/.syncignore-send +++ b/.syncignore-send @@ -1,3 +1,7 @@ +# SPDX-FileCopyrightText: : 2021 The PyPSA-Eur Authors +# +# SPDX-License-Identifier: CC0-1.0 + .snakemake .git .pytest_cache @@ -11,9 +15,9 @@ __pycache__ notebooks benchmarks logs -resources +resources* results -networks +networks* cutouts data/bundle -doc \ No newline at end of file +doc From ed83988ed1fbc2b064cf5d2868965be9921bd12f Mon Sep 17 00:00:00 2001 From: Philipp Glaum Date: Wed, 15 Dec 2021 11:02:03 +0100 Subject: [PATCH 073/102] hot fix for snakemake bug --- scripts/_helpers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff 
--git a/scripts/_helpers.py b/scripts/_helpers.py index 30775ae5..f1e5e887 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -240,7 +240,7 @@ def mock_snakemake(rulename, **wildcards): if os.path.exists(p): snakefile = p break - workflow = sm.Workflow(snakefile) + workflow = sm.Workflow(snakefile, overwrite_configfiles=[]) workflow.include(snakefile) workflow.global_resources = {} rule = workflow.get_rule(rulename) From 66f2d36f0df1545f97f67359493b9debcd9e9924 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 11 Jan 2022 09:38:34 +0100 Subject: [PATCH 074/102] add_electricity: revise code and make it leaner --- scripts/add_electricity.py | 95 +++++++++++++++++++++----------------- 1 file changed, 52 insertions(+), 43 deletions(-) diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index 2a2c26d9..e8498789 100755 --- a/scripts/add_electricity.py +++ b/scripts/add_electricity.py @@ -190,12 +190,13 @@ def load_powerplants(ppl_fn): .replace({'carrier': carrier_dict})) -def attach_load(n, regions, load, nuts3_shapes, cntries = [], scaling = 1.): +def attach_load(n, regions, load, nuts3_shapes, countries, scaling=1.): + substation_lv_i = n.buses.index[n.buses['substation_lv']] regions = (gpd.read_file(regions).set_index('name') .reindex(substation_lv_i)) opsd_load = (pd.read_csv(load, index_col=0, parse_dates=True) - .filter(items=cntries)) + .filter(items=countries)) logger.info(f"Load data scaled with scalling factor {scaling}.") opsd_load *= scaling @@ -229,6 +230,9 @@ def attach_load(n, regions, load, nuts3_shapes, cntries = [], scaling = 1.): def update_transmission_costs(n, costs, length_factor=1.0, simple_hvdc_costs=False): + # TODO: line length factor of lines is applied to lines and links. + # Separate the function to distinguish. 
+ n.lines['capital_cost'] = (n.lines['length'] * length_factor * costs.at['HVAC overhead', 'capital_cost']) @@ -253,9 +257,9 @@ def update_transmission_costs(n, costs, length_factor=1.0, simple_hvdc_costs=Fal n.links.loc[dc_b, 'capital_cost'] = costs -def attach_wind_and_solar(n, costs, input_profiles, - technologies = ['onwind', 'offwind-ac', 'offwind-dc', 'solar'], - line_length_factor = 1.): +def attach_wind_and_solar(n, costs, input_profiles, technologies, line_length_factor=1): + # TODO: rename tech -> carrier, technologies -> carriers + for tech in technologies: if tech == 'hydro': continue @@ -292,8 +296,7 @@ def attach_wind_and_solar(n, costs, input_profiles, p_max_pu=ds['profile'].transpose('time', 'bus').to_pandas()) -def attach_conventional_generators(n, costs, ppl, carriers=['nuclear', 'oil', 'OCGT', 'CCGT', - 'coal', 'lignite', 'geothermal', 'biomass']): +def attach_conventional_generators(n, costs, ppl, carriers): _add_missing_carriers_from_costs(n, costs, carriers) @@ -314,9 +317,7 @@ def attach_conventional_generators(n, costs, ppl, carriers=['nuclear', 'oil', 'O logger.warning(f'Capital costs for conventional generators put to 0 EUR/MW.') -def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, - config_hydro = {'carriers': {'ror', 'PHS', 'hydro'}}): - carriers = config_hydro.get('carriers', ['ror', 'PHS', 'hydro']) +def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **config): _add_missing_carriers_from_costs(n, costs, carriers) @@ -361,7 +362,8 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, if 'PHS' in carriers and not phs.empty: # fill missing max hours to config value and # assume no natural inflow due to lack of data - phs = phs.replace({'max_hours': {0: config_hydro['PHS_max_hours']}}) + max_hours = config.get('PHS_max_hours', 6) + phs = phs.replace({'max_hours': {0: max_hours}}) n.madd('StorageUnit', phs.index, carrier='PHS', bus=phs['bus'], @@ -373,7 +375,10 @@ def attach_hydro(n, costs, 
ppl, profile_hydro, hydro_capacities, cyclic_state_of_charge=True) if 'hydro' in carriers and not hydro.empty: - hydro_max_hours = config_hydro.get('hydro_max_hours') + hydro_max_hours = config.get('hydro_max_hours') + + assert hydro_max_hours is not None, "No path for hydro capacities given." + hydro_stats = pd.read_csv(hydro_capacities, comment="#", na_values='-', index_col=0) e_target = hydro_stats["E_store[TWh]"].clip(lower=0.2) * 1e6 @@ -402,8 +407,7 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, bus=hydro['bus'], p_nom=hydro['p_nom'], max_hours=hydro_max_hours, - capital_cost=(costs.at['hydro', 'capital_cost'] - if config_hydro.get('hydro_capital_cost') else 0.), + capital_cost=costs.at['hydro', 'capital_cost'], marginal_cost=costs.at['hydro', 'marginal_cost'], p_max_pu=1., # dispatch p_min_pu=0., # store @@ -413,8 +417,7 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, inflow=inflow_t.loc[:, hydro.index]) -def attach_extendable_generators(n, costs, ppl, elec_opts = {'extendable_carriers': {'Generator': []}}): - carriers = pd.Index(elec_opts['extendable_carriers']['Generator']) +def attach_extendable_generators(n, costs, ppl, carriers): _add_missing_carriers_from_costs(n, costs, carriers) @@ -462,7 +465,7 @@ def attach_extendable_generators(n, costs, ppl, elec_opts = {'extendable_carrier -def attach_OPSD_renewables(n, techs=[]): +def attach_OPSD_renewables(n, techs): available = ['DE', 'FR', 'PL', 'CH', 'DK', 'CZ', 'SE', 'GB'] tech_map = {'Onshore': 'onwind', 'Offshore': 'offwind', 'Solar': 'solar'} @@ -494,7 +497,7 @@ def attach_OPSD_renewables(n, techs=[]): -def estimate_renewable_capacities(n, tech_map={}): +def estimate_renewable_capacities(n, tech_map): if len(tech_map) == 0: return @@ -526,16 +529,15 @@ def estimate_renewable_capacities(n, tech_map={}): n.generators.loc[tech_i, 'p_nom_min'] = n.generators.loc[tech_i, 'p_nom'] -def add_nice_carrier_names(n, config): +def add_nice_carrier_names(n, nice_names, 
tech_colors): carrier_i = n.carriers.index - nice_names = (pd.Series(config['plotting']['nice_names']) + nice_names = (pd.Series(nice_names) .reindex(carrier_i).fillna(carrier_i.to_series().str.title())) n.carriers['nice_name'] = nice_names - colors = pd.Series(config['plotting']['tech_colors']).reindex(carrier_i) + colors = pd.Series(tech_colors).reindex(carrier_i) if colors.isna().any(): missing_i = list(colors.index[colors.isna()]) - logger.warning(f'tech_colors for carriers {missing_i} not defined ' - 'in config.') + logger.warning(f'tech_colors for carriers {missing_i} not defined.') n.carriers['color'] = colors @@ -545,35 +547,42 @@ if __name__ == "__main__": snakemake = mock_snakemake('add_electricity') configure_logging(snakemake) - n = pypsa.Network(snakemake.input.base_network) + config = snakemake.config + paths = snakemake.input + + n = pypsa.Network(paths.base_network) Nyears = n.snapshot_weightings.objective.sum() / 8760. - costs = load_costs(tech_costs = snakemake.input.tech_costs, config = snakemake.config['costs'], - elec_config = snakemake.config['electricity'], Nyears = Nyears) - ppl = load_powerplants(snakemake.input.powerplants) + costs = load_costs(paths.tech_costs, config['costs'], config['electricity'], Nyears=Nyears) + ppl = load_powerplants(paths.powerplants) - attach_load(n, regions = snakemake.input.regions, load = snakemake.input.load, - nuts3_shapes = snakemake.input.nuts3_shapes, - cntries = snakemake.config['countries'], - scaling = snakemake.config.get('load', {}).get('scaling_factor', 1.0)) + attach_load(n, paths.regions, paths.load, paths.nuts3_shapes, config['countries'], + scaling=config['load']['scaling_factor']) - update_transmission_costs(n, costs) + update_transmission_costs(n, costs, config['lines']['length_factor']) - attach_conventional_generators(n, costs, ppl, carriers = snakemake.config['electricity']['conventional_carriers']) - attach_wind_and_solar(n, costs, snakemake.input, technologies = 
snakemake.config['renewable'], - line_length_factor = snakemake.config['lines']['length_factor']) + carriers = config['electricity']['conventional_carriers'] + attach_conventional_generators(n, costs, ppl, carriers) - if 'hydro' in snakemake.config['renewable']: - attach_hydro(n, costs, ppl, snakemake.input.profile_hydro, snakemake.input.hydro_capacities, - config_hydro = snakemake.config['renewable']['hydro']) + carriers = config['renewable'] + attach_wind_and_solar(n, costs, paths, carriers, config['lines']['length_factor']) - attach_extendable_generators(n, costs, ppl, elec_opts = snakemake.config['electricity']) + if 'hydro' in config['renewable']: + carriers = config['renewable']['hydro'].pop('carriers', []) + attach_hydro(n, costs, ppl, paths.profile_hydro, paths.hydro_capacities, + carriers, **config['renewable']['hydro']) + + carriers = config['electricity']['extendable_carriers']['Generator'] + attach_extendable_generators(n, costs, ppl, carriers) + + tech_map = config['electricity'].get('estimate_renewable_capacities_from_capacity_stats', {}) + estimate_renewable_capacities(n, tech_map) + techs = config['electricity'].get('renewable_capacities_from_OPSD', []) + attach_OPSD_renewables(n, techs) - estimate_renewable_capacities(n, tech_map = (snakemake.config['electricity'] - .get('estimate_renewable_capacities_from_capacity_stats', {}))) - attach_OPSD_renewables(n, techs = snakemake.config['electricity'].get('renewable_capacities_from_OPSD', [])) update_p_nom_max(n) - add_nice_carrier_names(n, config = snakemake.config) + plot_config = config['plotting'] + add_nice_carrier_names(n, plot_config['nice_names'], plot_config['tech_colors']) n.export_to_netcdf(snakemake.output[0]) From 2c318a247ecbeaaf86993db7873819507c86add2 Mon Sep 17 00:00:00 2001 From: Fabian Hofmann Date: Tue, 11 Jan 2022 09:55:22 +0100 Subject: [PATCH 075/102] Update scripts/prepare_network.py Co-authored-by: Fabian Neumann --- scripts/prepare_network.py | 3 ++- 1 file changed, 2 
insertions(+), 1 deletion(-) diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index 9d53625c..03187284 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -225,7 +225,8 @@ if __name__ == "__main__": for o in opts: m = re.match(r'^\d+seg$', o, re.IGNORECASE) if m is not None: - n = apply_time_segmentation(n, m.group(0)[:-3], solver_name=snakemake.config["solving"]["solver"]["name"]) + solver_name = snakemake.config["solving"]["solver"]["name"] + n = apply_time_segmentation(n, m.group(0)[:-3], solver_name=solver_name) break for o in opts: From acd7122aad6b3a944f436931a98dbb0b2e2d3010 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 11 Jan 2022 09:58:59 +0100 Subject: [PATCH 076/102] add_electricity: revert changes in add_nice_carrier_names --- scripts/add_electricity.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index e8498789..42d4f5cc 100755 --- a/scripts/add_electricity.py +++ b/scripts/add_electricity.py @@ -529,18 +529,17 @@ def estimate_renewable_capacities(n, tech_map): n.generators.loc[tech_i, 'p_nom_min'] = n.generators.loc[tech_i, 'p_nom'] -def add_nice_carrier_names(n, nice_names, tech_colors): +def add_nice_carrier_names(n, config): carrier_i = n.carriers.index - nice_names = (pd.Series(nice_names) + nice_names = (pd.Series(config['plotting']['nice_names']) .reindex(carrier_i).fillna(carrier_i.to_series().str.title())) n.carriers['nice_name'] = nice_names - colors = pd.Series(tech_colors).reindex(carrier_i) + colors = pd.Series(config['plotting']['tech_colors']).reindex(carrier_i) if colors.isna().any(): missing_i = list(colors.index[colors.isna()]) - logger.warning(f'tech_colors for carriers {missing_i} not defined.') + logger.warning(f'tech_colors for carriers {missing_i} not defined in config.') n.carriers['color'] = colors - if __name__ == "__main__": if 'snakemake' not in globals(): from _helpers import 
mock_snakemake @@ -582,7 +581,6 @@ if __name__ == "__main__": update_p_nom_max(n) - plot_config = config['plotting'] - add_nice_carrier_names(n, plot_config['nice_names'], plot_config['tech_colors']) + add_nice_carrier_names(n, config) n.export_to_netcdf(snakemake.output[0]) From 01e93545f694d450839cc6ac0a423f6acb239de0 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 11 Jan 2022 09:59:34 +0100 Subject: [PATCH 077/102] add_extra_components: revise changes --- scripts/add_extra_components.py | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/scripts/add_extra_components.py b/scripts/add_extra_components.py index 9a2d6033..db764d4f 100644 --- a/scripts/add_extra_components.py +++ b/scripts/add_extra_components.py @@ -64,7 +64,7 @@ idx = pd.IndexSlice logger = logging.getLogger(__name__) -def attach_storageunits(n, costs, elec_opts = {'extendable_carriers': {'StorageUnit': []}, 'max_hours': {'battery': 6, 'H2': 168}}): +def attach_storageunits(n, costs, elec_opts): carriers = elec_opts['extendable_carriers']['StorageUnit'] max_hours = elec_opts['max_hours'] @@ -88,7 +88,7 @@ def attach_storageunits(n, costs, elec_opts = {'extendable_carriers': {'StorageU cyclic_state_of_charge=True) -def attach_stores(n, costs, elec_opts = {'extendable_carriers': {'Store': ['battery', 'H2']}}): +def attach_stores(n, costs, elec_opts): carriers = elec_opts['extendable_carriers']['Store'] _add_missing_carriers_from_costs(n, costs, carriers) @@ -154,7 +154,7 @@ def attach_stores(n, costs, elec_opts = {'extendable_carriers': {'Store': ['batt marginal_cost=costs.at["battery inverter", "marginal_cost"]) -def attach_hydrogen_pipelines(n, costs, elec_opts = {'extendable_carriers': {'Store': ['H2', 'battery']}}): +def attach_hydrogen_pipelines(n, costs, elec_opts): ext_carriers = elec_opts['extendable_carriers'] as_stores = ext_carriers.get('Store', []) @@ -192,17 +192,18 @@ if __name__ == "__main__": snakemake = mock_snakemake('add_extra_components', 
network='elec', simpl='', clusters=5) configure_logging(snakemake) + paths = snakemake.input - n = pypsa.Network(snakemake.input.network) + n = pypsa.Network(paths.network) + elec_config = snakemake.config['electricity'] + Nyears = n.snapshot_weightings.objective.sum() / 8760. - costs = load_costs(tech_costs = snakemake.input.tech_costs, - config = snakemake.config['costs'], - elec_config = snakemake.config['electricity'], Nyears = Nyears) + costs = load_costs(paths.tech_costs, snakemake.config['costs'], elec_config, Nyears=Nyears) - attach_storageunits(n, costs, elec_opts = snakemake.config['electricity']) - attach_stores(n, costs, elec_opts = snakemake.config['electricity']) - attach_hydrogen_pipelines(n, costs, elec_opts = snakemake.config['electricity']) + attach_storageunits(n, costs, elec_config) + attach_stores(n, costs, elec_config) + attach_hydrogen_pipelines(n, costs, elec_config) - add_nice_carrier_names(n, config=snakemake.config) + add_nice_carrier_names(n, snakemake.config) n.export_to_netcdf(snakemake.output[0]) From 39a6753ece2ff6e9e30f70edb8a6f8053a7721fb Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 11 Jan 2022 09:59:58 +0100 Subject: [PATCH 078/102] build_powerplants: revise changes --- scripts/build_powerplants.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/scripts/build_powerplants.py b/scripts/build_powerplants.py index 39af2385..be57baa8 100755 --- a/scripts/build_powerplants.py +++ b/scripts/build_powerplants.py @@ -99,8 +99,9 @@ if __name__ == "__main__": from _helpers import mock_snakemake snakemake = mock_snakemake('build_powerplants') configure_logging(snakemake) + paths = snakemake.input - n = pypsa.Network(snakemake.input.base_network) + n = pypsa.Network(paths.base_network) countries = n.buses.country.unique() ppl = (pm.powerplants(from_url=True) @@ -119,8 +120,8 @@ if __name__ == "__main__": ppl.query(ppl_query, inplace=True) # add carriers from own powerplant files: - ppl = add_custom_powerplants(ppl, 
custom_powerplants = snakemake.input.custom_powerplants, - custom_ppl_query = snakemake.config['electricity']['custom_powerplants']) + custom_ppl_query = snakemake.config['electricity']['custom_powerplants'] + ppl = add_custom_powerplants(ppl, paths.custom_powerplants, custom_ppl_query) cntries_without_ppl = [c for c in countries if c not in ppl.Country.unique()] From c7a443c9695c4c2ddb96f9cacf05fb62a3c46874 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 11 Jan 2022 10:23:22 +0100 Subject: [PATCH 079/102] build_shapes: revise changes --- scripts/build_shapes.py | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/scripts/build_shapes.py b/scripts/build_shapes.py index b7264470..12b1d015 100644 --- a/scripts/build_shapes.py +++ b/scripts/build_shapes.py @@ -107,8 +107,8 @@ def _simplify_polys(polys, minarea=0.1, tolerance=0.01, filterremote=True): return polys.simplify(tolerance=tolerance) -def countries(naturalearth, cntries=[]): - if 'RS' in cntries: cntries.append('KV') +def countries(naturalearth, country_list): + if 'RS' in country_list: country_list.append('KV') df = gpd.read_file(naturalearth) @@ -116,16 +116,16 @@ def countries(naturalearth, cntries=[]): fieldnames = (df[x].where(lambda s: s!='-99') for x in ('ISO_A2', 'WB_A2', 'ADM0_A3')) df['name'] = reduce(lambda x,y: x.fillna(y), fieldnames, next(fieldnames)).str[0:2] - df = df.loc[df.name.isin(cntries) & ((df['scalerank'] == 0) | (df['scalerank'] == 5))] + df = df.loc[df.name.isin(country_list) & ((df['scalerank'] == 0) | (df['scalerank'] == 5))] s = df.set_index('name')['geometry'].map(_simplify_polys) - if 'RS' in cntries: s['RS'] = s['RS'].union(s.pop('KV')) + if 'RS' in country_list: s['RS'] = s['RS'].union(s.pop('KV')) return s -def eez(country_shapes, eez, cntries=[]): +def eez(country_shapes, eez, countries): df = gpd.read_file(eez) - df = df.loc[df['ISO_3digit'].isin([_get_country('alpha_3', alpha_2=c) for c in cntries])] + df = 
df.loc[df['ISO_3digit'].isin([_get_country('alpha_3', alpha_2=c) for c in countries])] df['name'] = df['ISO_3digit'].map(lambda c: _get_country('alpha_2', alpha_3=c)) s = df.set_index('name').geometry.map(lambda s: _simplify_polys(s, filterremote=False)) s = gpd.GeoSeries({k:v for k,v in s.iteritems() if v.distance(country_shapes[k]) < 1e-3}) @@ -217,18 +217,19 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_shapes') configure_logging(snakemake) + paths = snakemake.input out = snakemake.output - country_shapes = countries(snakemake.input.naturalearth, snakemake.config['countries']) + country_shapes = countries(paths.naturalearth, snakemake.config['countries']) save_to_geojson(country_shapes, out.country_shapes) - offshore_shapes = eez(country_shapes, snakemake.input.eez, cntries=snakemake.config['countries']) + offshore_shapes = eez(country_shapes, paths.eez, snakemake.config['countries']) save_to_geojson(offshore_shapes, out.offshore_shapes) europe_shape = country_cover(country_shapes, offshore_shapes) save_to_geojson(gpd.GeoSeries(europe_shape), out.europe_shape) - nuts3_shapes = nuts3(country_shapes, snakemake.input.nuts3, snakemake.input.nuts3pop, - snakemake.input.nuts3gdp, snakemake.input.ch_cantons, snakemake.input.ch_popgdp) + nuts3_shapes = nuts3(country_shapes, paths.nuts3, paths.nuts3pop, + paths.nuts3gdp, paths.ch_cantons, paths.ch_popgdp) save_to_geojson(nuts3_shapes, out.nuts3_shapes) From ed7fd27adc82db6b5466b830a1e989cd15dc6413 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 11 Jan 2022 10:24:45 +0100 Subject: [PATCH 080/102] build_shapes: use country_list as argument --- scripts/build_shapes.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/build_shapes.py b/scripts/build_shapes.py index 12b1d015..cca941e6 100644 --- a/scripts/build_shapes.py +++ b/scripts/build_shapes.py @@ -123,9 +123,9 @@ def countries(naturalearth, country_list): return s -def eez(country_shapes, eez, countries): +def 
eez(country_shapes, eez, country_list): df = gpd.read_file(eez) - df = df.loc[df['ISO_3digit'].isin([_get_country('alpha_3', alpha_2=c) for c in countries])] + df = df.loc[df['ISO_3digit'].isin([_get_country('alpha_3', alpha_2=c) for c in country_list])] df['name'] = df['ISO_3digit'].map(lambda c: _get_country('alpha_2', alpha_3=c)) s = df.set_index('name').geometry.map(lambda s: _simplify_polys(s, filterremote=False)) s = gpd.GeoSeries({k:v for k,v in s.iteritems() if v.distance(country_shapes[k]) < 1e-3}) From 94364cbeebbf84b6407056cd00330f2e84e0989d Mon Sep 17 00:00:00 2001 From: Fabian Hofmann Date: Tue, 11 Jan 2022 10:31:49 +0100 Subject: [PATCH 081/102] Update scripts/cluster_network.py Co-authored-by: Fabian Neumann --- scripts/cluster_network.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 30a8770d..51556b27 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -265,7 +265,7 @@ def clustering_for_n_clusters(n, n_clusters, custom_busmap=False, aggregate_carr else: raise AttributeError(f"potential_mode should be one of 'simple' or 'conservative' but is '{potential_mode}'") - if custom_busmap is False: + if not custom_busmap: busmap = busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights, algorithm) else: busmap = custom_busmap From e1aae5a98ebbbe073c78ad801dd23e702dab56f6 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 13 Jan 2022 15:36:13 +0100 Subject: [PATCH 082/102] minor adjustment to memory requirements of add_electricity, cluster_network --- Snakefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Snakefile b/Snakefile index cb50e3bf..ce79a421 100644 --- a/Snakefile +++ b/Snakefile @@ -232,7 +232,7 @@ rule add_electricity: log: "logs/add_electricity.log" benchmark: "benchmarks/add_electricity" threads: 1 - resources: mem=3000 + resources: mem=5000 script: "scripts/add_electricity.py" @@ -273,7 +273,7 @@ rule 
cluster_network: log: "logs/cluster_network/elec_s{simpl}_{clusters}.log" benchmark: "benchmarks/cluster_network/elec_s{simpl}_{clusters}" threads: 1 - resources: mem=3000 + resources: mem=6000 script: "scripts/cluster_network.py" From a747c88d08a57bddd72a523bf91fb2bd6ec72b24 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 13 Jan 2022 15:42:48 +0100 Subject: [PATCH 083/102] disable solar PV CF correction factor for default satellite data (closes #285) --- config.default.yaml | 5 +++-- config.tutorial.yaml | 5 +++-- doc/release_notes.rst | 4 ++++ 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/config.default.yaml b/config.default.yaml index f70e7c2c..7a443a03 100755 --- a/config.default.yaml +++ b/config.default.yaml @@ -148,12 +148,13 @@ renewable: slope: 35. azimuth: 180. capacity_per_sqkm: 1.7 # ScholzPhd Tab 4.3.1: 170 MW/km^2 - # Determined by comparing uncorrected area-weighted full-load hours to those + # Correction factor determined by comparing uncorrected area-weighted full-load hours to those # published in Supplementary Data to # Pietzcker, Robert Carl, et al. "Using the sun to decarbonize the power # sector: The economic potential of photovoltaics and concentrating solar # power." Applied Energy 135 (2014): 704-720. - correction_factor: 0.854337 + # This correction factor of 0.854337 may be in order if using reanalysis data. + # correction_factor: 0.854337 corine: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 26, 31, 32] natura: true diff --git a/config.tutorial.yaml b/config.tutorial.yaml index 26ead242..ea624727 100755 --- a/config.tutorial.yaml +++ b/config.tutorial.yaml @@ -116,12 +116,13 @@ renewable: slope: 35. azimuth: 180. 
capacity_per_sqkm: 1.7 # ScholzPhd Tab 4.3.1: 170 MW/km^2 - # Determined by comparing uncorrected area-weighted full-load hours to those + # Correction factor determined by comparing uncorrected area-weighted full-load hours to those # published in Supplementary Data to # Pietzcker, Robert Carl, et al. "Using the sun to decarbonize the power # sector: The economic potential of photovoltaics and concentrating solar # power." Applied Energy 135 (2014): 704-720. - correction_factor: 0.854337 + # This correction factor of 0.854337 may be in order if using reanalysis data. + # correction_factor: 0.854337 corine: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 26, 31, 32] natura: true diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 0423a581..c379cf5c 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -18,6 +18,10 @@ Upcoming Release * The default deployment density of AC- and DC-connected offshore wind capacity is reduced from 3 MW/sqkm to a more conservative estimate of 2 MW/sqkm [`#280 `_]. +* Following discussion in `#285 `_ we have disabled the + correction factor for solar PV capacity factors by default while satellite data is used. + A correction factor of 0.854337 is recommended if reanalysis data like ERA5 is used. 
+ PyPSA-Eur 0.4.0 (22th September 2021) ===================================== From 505f093141063dbf30cb8ce850ad38214006ff57 Mon Sep 17 00:00:00 2001 From: martacki Date: Thu, 13 Jan 2022 18:00:22 +0100 Subject: [PATCH 084/102] fix ordering of kwargs in add_co2limit --- scripts/prepare_network.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index 03187284..3eb244cf 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -233,10 +233,10 @@ if __name__ == "__main__": if "Co2L" in o: m = re.findall("[0-9]*\.?[0-9]+$", o) if len(m) > 0: - co2limit=float(m[0])*snakemake.config['electricity']['co2base'] - add_co2limit(n, Nyears, co2limit) + co2limit = float(m[0]) * snakemake.config['electricity']['co2base'] + add_co2limit(n, co2limit, Nyears) else: - add_co2limit(n, Nyears, snakemake.config['electricity']['co2limit']) + add_co2limit(n, snakemake.config['electricity']['co2limit'], Nyears) break for o in opts: From b660277e37aa69e91187c12e7167cb306627bf48 Mon Sep 17 00:00:00 2001 From: euronion <42553970+euronion@users.noreply.github.com> Date: Thu, 13 Jan 2022 18:25:12 +0100 Subject: [PATCH 085/102] Merge pull request #303 from PyPSA/misc/improve-ci-speed [DNMY] Improve CI performance. 
--- .github/workflows/ci.yaml | 93 +++++++++++++++++++++++++++------------ envs/environment.yaml | 2 - 2 files changed, 64 insertions(+), 31 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index b0699d74..c753deab 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -4,6 +4,10 @@ name: CI +# Caching method based on and described by: +# epassaro (2021): https://dev.to/epassaro/caching-anaconda-environments-in-github-actions-5hde +# and code in GitHub repo: https://github.com/epassaro/cache-conda-envs + on: push: branches: @@ -14,42 +18,73 @@ on: schedule: - cron: "0 5 * * TUE" +env: + CACHE_NUMBER: 1 # Change this value to manually reset the environment cache + jobs: build: - runs-on: ${{ matrix.os }} strategy: - max-parallel: 5 matrix: - os: - - ubuntu-latest - - macos-latest - - windows-latest + include: + # Matrix required to handle caching with Mambaforge + - os: ubuntu-latest + label: ubuntu-latest + prefix: /usr/share/miniconda3/envs/pypsa-eur + - os: macos-latest + label: macos-latest + prefix: /Users/runner/miniconda3/envs/pypsa-eur + + - os: windows-latest + label: windows-latest + prefix: C:\Miniconda3\envs\pypsa-eur + + name: ${{ matrix.label }} + + runs-on: ${{ matrix.os }} + defaults: run: shell: bash -l {0} - - steps: - - - uses: actions/checkout@v2 - - - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v2.1.1 - with: # checks out environment 'test' by default - mamba-version: "*" - channels: conda-forge,defaults - channel-priority: true - - - name: Install dependencies - run: | - echo -ne "url: ${CDSAPI_URL}\nkey: ${CDSAPI_TOKEN}\n" > ~/.cdsapirc - echo -e " - glpk\n - ipopt<3.13.3" >> envs/environment.yaml - mamba env update -f envs/environment.yaml --name test - - name: Test snakemake workflow - run: | - conda list - cp test/config.test1.yaml config.yaml - snakemake --cores all solve_all_networks - rm -rf resources/*.nc resources/*.geojson resources/*.h5 networks results + steps: + - 
uses: actions/checkout@v2 + + - name: Setup secrets + run: | + echo -ne "url: ${CDSAPI_URL}\nkey: ${CDSAPI_TOKEN}\n" > ~/.cdsapirc + + - name: Add solver to environment + run: | + echo -e " - glpk\n - ipopt<3.13.3" >> envs/environment.yaml + + - name: Setup Mambaforge + uses: conda-incubator/setup-miniconda@v2 + with: + miniforge-variant: Mambaforge + miniforge-version: latest + activate-environment: pypsa-eur + use-mamba: true + + - name: Set cache date + run: echo "DATE=$(date +'%Y%m%d')" >> $GITHUB_ENV + + - name: Create environment cache + uses: actions/cache@v2 + id: cache + with: + path: ${{ matrix.prefix }} + key: ${{ matrix.label }}-conda-${{ hashFiles('envs/environment.yaml') }}-${{ env.DATE }}-${{ env.CACHE_NUMBER }} + + - name: Update environment due to outdated or unavailable cache + run: mamba env update -n pypsa-eur -f envs/environment.yaml + if: steps.cache.outputs.cache-hit != 'true' + + - name: Test snakemake workflow + run: | + conda activate pypsa-eur + conda list + cp test/config.test1.yaml config.yaml + snakemake --cores all solve_all_networks + rm -rf resources/*.nc resources/*.geojson resources/*.h5 networks results diff --git a/envs/environment.yaml b/envs/environment.yaml index 29d743ac..b6958d85 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -6,11 +6,9 @@ name: pypsa-eur channels: - conda-forge - bioconda - - http://conda.anaconda.org/gurobi dependencies: - python>=3.8 - pip - - mamba # esp for windows build - pypsa>=0.18 - atlite>=0.2.5 From 9b7bb27da1b13fc7c74707ba491675244d2807c6 Mon Sep 17 00:00:00 2001 From: martacki Date: Thu, 13 Jan 2022 18:54:27 +0100 Subject: [PATCH 086/102] snakemake dependencies in base_network --- scripts/base_network.py | 120 +++++++++++++++++++++------------------- 1 file changed, 64 insertions(+), 56 deletions(-) diff --git a/scripts/base_network.py b/scripts/base_network.py index 514e4dc3..baa12092 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -112,8 +112,8 @@ def 
_find_closest_links(links, new_links, distance_upper_bound=1.5): .sort_index()['i'] -def _load_buses_from_eg(): - buses = (pd.read_csv(snakemake.input.eg_buses, quotechar="'", +def _load_buses_from_eg(eg_buses, europe_shape, config_elec): + buses = (pd.read_csv(eg_buses, quotechar="'", true_values=['t'], false_values=['f'], dtype=dict(bus_id="str")) .set_index("bus_id") @@ -124,18 +124,18 @@ def _load_buses_from_eg(): buses['under_construction'] = buses['under_construction'].fillna(False).astype(bool) # remove all buses outside of all countries including exclusive economic zones (offshore) - europe_shape = gpd.read_file(snakemake.input.europe_shape).loc[0, 'geometry'] + europe_shape = gpd.read_file(europe_shape).loc[0, 'geometry'] europe_shape_prepped = shapely.prepared.prep(europe_shape) buses_in_europe_b = buses[['x', 'y']].apply(lambda p: europe_shape_prepped.contains(Point(p)), axis=1) - buses_with_v_nom_to_keep_b = buses.v_nom.isin(snakemake.config['electricity']['voltages']) | buses.v_nom.isnull() - logger.info("Removing buses with voltages {}".format(pd.Index(buses.v_nom.unique()).dropna().difference(snakemake.config['electricity']['voltages']))) + buses_with_v_nom_to_keep_b = buses.v_nom.isin(config_elec['voltages']) | buses.v_nom.isnull() + logger.info("Removing buses with voltages {}".format(pd.Index(buses.v_nom.unique()).dropna().difference(config_elec['voltages']))) return pd.DataFrame(buses.loc[buses_in_europe_b & buses_with_v_nom_to_keep_b]) -def _load_transformers_from_eg(buses): - transformers = (pd.read_csv(snakemake.input.eg_transformers, quotechar="'", +def _load_transformers_from_eg(buses, eg_transformers): + transformers = (pd.read_csv(eg_transformers, quotechar="'", true_values=['t'], false_values=['f'], dtype=dict(transformer_id='str', bus0='str', bus1='str')) .set_index('transformer_id')) @@ -145,8 +145,8 @@ def _load_transformers_from_eg(buses): return transformers -def _load_converters_from_eg(buses): - converters = 
(pd.read_csv(snakemake.input.eg_converters, quotechar="'", +def _load_converters_from_eg(buses, eg_converters): + converters = (pd.read_csv(eg_converters, quotechar="'", true_values=['t'], false_values=['f'], dtype=dict(converter_id='str', bus0='str', bus1='str')) .set_index('converter_id')) @@ -158,8 +158,8 @@ def _load_converters_from_eg(buses): return converters -def _load_links_from_eg(buses): - links = (pd.read_csv(snakemake.input.eg_links, quotechar="'", true_values=['t'], false_values=['f'], +def _load_links_from_eg(buses, eg_links): + links = (pd.read_csv(eg_links, quotechar="'", true_values=['t'], false_values=['f'], dtype=dict(link_id='str', bus0='str', bus1='str', under_construction="bool")) .set_index('link_id')) @@ -176,11 +176,11 @@ def _load_links_from_eg(buses): return links -def _add_links_from_tyndp(buses, links): - links_tyndp = pd.read_csv(snakemake.input.links_tyndp) +def _add_links_from_tyndp(buses, links, links_tyndp, europe_shape): + links_tyndp = pd.read_csv(links_tyndp) # remove all links from list which lie outside all of the desired countries - europe_shape = gpd.read_file(snakemake.input.europe_shape).loc[0, 'geometry'] + europe_shape = gpd.read_file(europe_shape).loc[0, 'geometry'] europe_shape_prepped = shapely.prepared.prep(europe_shape) x1y1_in_europe_b = links_tyndp[['x1', 'y1']].apply(lambda p: europe_shape_prepped.contains(Point(p)), axis=1) x2y2_in_europe_b = links_tyndp[['x2', 'y2']].apply(lambda p: europe_shape_prepped.contains(Point(p)), axis=1) @@ -248,8 +248,8 @@ def _add_links_from_tyndp(buses, links): return buses, links.append(links_tyndp, sort=True) -def _load_lines_from_eg(buses): - lines = (pd.read_csv(snakemake.input.eg_lines, quotechar="'", true_values=['t'], false_values=['f'], +def _load_lines_from_eg(buses, eg_lines): + lines = (pd.read_csv(eg_lines, quotechar="'", true_values=['t'], false_values=['f'], dtype=dict(line_id='str', bus0='str', bus1='str', underground="bool", under_construction="bool")) 
.set_index('line_id') @@ -262,8 +262,8 @@ def _load_lines_from_eg(buses): return lines -def _apply_parameter_corrections(n): - with open(snakemake.input.parameter_corrections) as f: +def _apply_parameter_corrections(n, parameter_corrections): + with open(parameter_corrections) as f: corrections = yaml.safe_load(f) if corrections is None: return @@ -285,14 +285,14 @@ def _apply_parameter_corrections(n): df.loc[inds, attr] = r[inds].astype(df[attr].dtype) -def _set_electrical_parameters_lines(lines): - v_noms = snakemake.config['electricity']['voltages'] - linetypes = snakemake.config['lines']['types'] +def _set_electrical_parameters_lines(lines, config): + v_noms = config['electricity']['voltages'] + linetypes = config['lines']['types'] for v_nom in v_noms: lines.loc[lines["v_nom"] == v_nom, 'type'] = linetypes[v_nom] - lines['s_max_pu'] = snakemake.config['lines']['s_max_pu'] + lines['s_max_pu'] = config['lines']['s_max_pu'] return lines @@ -304,14 +304,14 @@ def _set_lines_s_nom_from_linetypes(n): ) -def _set_electrical_parameters_links(links): +def _set_electrical_parameters_links(links, config, links_p_nom): if links.empty: return links - p_max_pu = snakemake.config['links'].get('p_max_pu', 1.) + p_max_pu = config['links'].get('p_max_pu', 1.) links['p_max_pu'] = p_max_pu links['p_min_pu'] = -p_max_pu - links_p_nom = pd.read_csv(snakemake.input.links_p_nom) + links_p_nom = pd.read_csv(links_p_nom) # filter links that are not in operation anymore removed_b = links_p_nom.Remarks.str.contains('Shut down|Replaced', na=False) @@ -331,8 +331,8 @@ def _set_electrical_parameters_links(links): return links -def _set_electrical_parameters_converters(converters): - p_max_pu = snakemake.config['links'].get('p_max_pu', 1.) +def _set_electrical_parameters_converters(converters, config): + p_max_pu = config['links'].get('p_max_pu', 1.) 
converters['p_max_pu'] = p_max_pu converters['p_min_pu'] = -p_max_pu @@ -345,8 +345,8 @@ def _set_electrical_parameters_converters(converters): return converters -def _set_electrical_parameters_transformers(transformers): - config = snakemake.config['transformers'] +def _set_electrical_parameters_transformers(transformers, config): + config = config['transformers'] ## Add transformer parameters transformers["x"] = config.get('x', 0.1) @@ -373,7 +373,7 @@ def _remove_unconnected_components(network): return network[component == component_sizes.index[0]] -def _set_countries_and_substations(n): +def _set_countries_and_substations(n, config, country_shapes, offshore_shapes): buses = n.buses @@ -386,9 +386,9 @@ def _set_countries_and_substations(n): index=buses.index ) - countries = snakemake.config['countries'] - country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index('name')['geometry'] - offshore_shapes = gpd.read_file(snakemake.input.offshore_shapes).set_index('name')['geometry'] + countries = config['countries'] + country_shapes = gpd.read_file(country_shapes).set_index('name')['geometry'] + offshore_shapes = gpd.read_file(offshore_shapes).set_index('name')['geometry'] substation_b = buses['symbol'].str.contains('substation|converter station', case=False) def prefer_voltage(x, which): @@ -498,19 +498,19 @@ def _replace_b2b_converter_at_country_border_by_link(n): .format(i, b0, line, linkcntry.at[i], buscntry.at[b1])) -def _set_links_underwater_fraction(n): +def _set_links_underwater_fraction(n, offshore_shapes): if n.links.empty: return if not hasattr(n.links, 'geometry'): n.links['underwater_fraction'] = 0. 
else: - offshore_shape = gpd.read_file(snakemake.input.offshore_shapes).unary_union + offshore_shape = gpd.read_file(offshore_shapes).unary_union links = gpd.GeoSeries(n.links.geometry.dropna().map(shapely.wkt.loads)) n.links['underwater_fraction'] = links.intersection(offshore_shape).length / links.length -def _adjust_capacities_of_under_construction_branches(n): - lines_mode = snakemake.config['lines'].get('under_construction', 'undef') +def _adjust_capacities_of_under_construction_branches(n, config): + lines_mode = config['lines'].get('under_construction', 'undef') if lines_mode == 'zero': n.lines.loc[n.lines.under_construction, 'num_parallel'] = 0. n.lines.loc[n.lines.under_construction, 's_nom'] = 0. @@ -519,7 +519,7 @@ def _adjust_capacities_of_under_construction_branches(n): elif lines_mode != 'keep': logger.warning("Unrecognized configuration for `lines: under_construction` = `{}`. Keeping under construction lines.") - links_mode = snakemake.config['links'].get('under_construction', 'undef') + links_mode = config['links'].get('under_construction', 'undef') if links_mode == 'zero': n.links.loc[n.links.under_construction, "p_nom"] = 0. 
elif links_mode == 'remove': @@ -534,27 +534,30 @@ def _adjust_capacities_of_under_construction_branches(n): return n -def base_network(): - buses = _load_buses_from_eg() +def base_network(eg_buses, eg_converters, eg_transformers, eg_lines, eg_links, + links_p_nom, links_tyndp, europe_shape, country_shapes, offshore_shapes, + parameter_corrections, config): - links = _load_links_from_eg(buses) - if snakemake.config['links'].get('include_tyndp'): - buses, links = _add_links_from_tyndp(buses, links) + buses = _load_buses_from_eg(eg_buses, europe_shape, config['electricity']) - converters = _load_converters_from_eg(buses) + links = _load_links_from_eg(buses, eg_links) + if config['links'].get('include_tyndp'): + buses, links = _add_links_from_tyndp(buses, links, links_tyndp, europe_shape) - lines = _load_lines_from_eg(buses) - transformers = _load_transformers_from_eg(buses) + converters = _load_converters_from_eg(buses, eg_converters) - lines = _set_electrical_parameters_lines(lines) - transformers = _set_electrical_parameters_transformers(transformers) - links = _set_electrical_parameters_links(links) - converters = _set_electrical_parameters_converters(converters) + lines = _load_lines_from_eg(buses, eg_lines) + transformers = _load_transformers_from_eg(buses, eg_transformers) + + lines = _set_electrical_parameters_lines(lines, config) + transformers = _set_electrical_parameters_transformers(transformers, config) + links = _set_electrical_parameters_links(links, config, links_p_nom) + converters = _set_electrical_parameters_converters(converters, config) n = pypsa.Network() n.name = 'PyPSA-Eur' - n.set_snapshots(pd.date_range(freq='h', **snakemake.config['snapshots'])) + n.set_snapshots(pd.date_range(freq='h', **config['snapshots'])) n.snapshot_weightings[:] *= 8760. 
/ n.snapshot_weightings.sum() n.import_components_from_dataframe(buses, "Bus") @@ -565,17 +568,17 @@ def base_network(): _set_lines_s_nom_from_linetypes(n) - _apply_parameter_corrections(n) + _apply_parameter_corrections(n, parameter_corrections) n = _remove_unconnected_components(n) - _set_countries_and_substations(n) + _set_countries_and_substations(n, config, country_shapes, offshore_shapes) - _set_links_underwater_fraction(n) + _set_links_underwater_fraction(n, offshore_shapes) _replace_b2b_converter_at_country_border_by_link(n) - n = _adjust_capacities_of_under_construction_branches(n) + n = _adjust_capacities_of_under_construction_branches(n, config) return n @@ -585,6 +588,11 @@ if __name__ == "__main__": snakemake = mock_snakemake('base_network') configure_logging(snakemake) - n = base_network() + paths = snakemake.input + config = snakemake.config + + n = base_network(paths.eg_buses, paths.eg_converters, paths.eg_transformers, paths.eg_lines, paths.eg_links, + paths.links_p_nom, paths.links_tyndp, paths.europe_shape, paths.country_shapes, paths.offshore_shapes, + paths.parameter_corrections, config) n.export_to_netcdf(snakemake.output[0]) From 72e277a007c9421a5b48078942867e73b48ed481 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Fri, 14 Jan 2022 08:43:21 +0100 Subject: [PATCH 087/102] update environment and address deprecations (#291) * update environment and address deprecations * check pandas<1.3 * limit snakemake due to ascii encoding error, address review comments * remove version restriction on snakemake --- README.md | 2 +- envs/environment.yaml | 8 ++++---- scripts/add_electricity.py | 2 -- scripts/base_network.py | 2 +- scripts/build_shapes.py | 6 +++--- scripts/cluster_network.py | 5 ++++- 6 files changed, 13 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index 15f979a7..8f569f2e 100644 --- a/README.md +++ b/README.md @@ -58,7 +58,7 @@ The dataset consists of: - Electrical demand time series from the [OPSD 
project](https://open-power-system-data.org/). - Renewable time series based on ERA5 and SARAH, assembled using the [atlite tool](https://github.com/FRESNA/atlite). -- Geographical potentials for wind and solar generators based on land use (CORINE) and excluding nature reserves (Natura2000) are computed with the [vresutils library](https://github.com/FRESNA/vresutils) and the [glaes library](https://github.com/FZJ-IEK3-VSA/glaes). +- Geographical potentials for wind and solar generators based on land use (CORINE) and excluding nature reserves (Natura2000) are computed with the [atlite library](https://github.com/PyPSA/atlite). Already-built versions of the model can be found in the accompanying [Zenodo repository](https://doi.org/10.5281/zenodo.3601881). diff --git a/envs/environment.yaml b/envs/environment.yaml index b6958d85..4b7b0ec5 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -10,9 +10,9 @@ dependencies: - python>=3.8 - pip - - pypsa>=0.18 + - pypsa>=0.18.1 - atlite>=0.2.5 - - dask<=2021.3.1 # until https://github.com/dask/dask/issues/7583 is solved + - dask # Dependencies of the workflow itself - xlrd @@ -36,7 +36,7 @@ dependencies: - progressbar2 - pyomo - matplotlib - - proj<8 + - proj # Keep in conda environment when calling ipython - ipython @@ -54,5 +54,5 @@ dependencies: - tabula-py - pip: - - vresutils==0.3.1 + - vresutils>=0.3.1 - tsam>=1.1.0 diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index 08a32a26..9e64ad29 100755 --- a/scripts/add_electricity.py +++ b/scripts/add_electricity.py @@ -95,7 +95,6 @@ import powerplantmatching as pm from powerplantmatching.export import map_country_bus from vresutils.costdata import annuity -from vresutils.load import timeseries_opsd from vresutils import transfer as vtransfer idx = pd.IndexSlice @@ -227,7 +226,6 @@ def attach_load(n): # relative factors 0.6 and 0.4 have been determined from a linear # regression on the country to continent load data - # (refer to 
vresutils.load._upsampling_weights) factors = normed(0.6 * normed(gdp_n) + 0.4 * normed(pop_n)) return pd.DataFrame(factors.values * l.values[:,np.newaxis], index=l.index, columns=factors.index) diff --git a/scripts/base_network.py b/scripts/base_network.py index 514e4dc3..1f2b9241 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -97,7 +97,7 @@ def _get_country(df): def _find_closest_links(links, new_links, distance_upper_bound=1.5): - treecoords = np.asarray([np.asarray(shapely.wkt.loads(s))[[0, -1]].flatten() + treecoords = np.asarray([np.asarray(shapely.wkt.loads(s).coords)[[0, -1]].flatten() for s in links.geometry]) querycoords = np.vstack([new_links[['x1', 'y1', 'x2', 'y2']], new_links[['x2', 'y2', 'x1', 'y1']]]) diff --git a/scripts/build_shapes.py b/scripts/build_shapes.py index 5814085b..366cb820 100644 --- a/scripts/build_shapes.py +++ b/scripts/build_shapes.py @@ -79,7 +79,7 @@ from itertools import takewhile import pandas as pd import geopandas as gpd from shapely.geometry import MultiPolygon, Polygon -from shapely.ops import cascaded_union +from shapely.ops import unary_union import pycountry as pyc logger = logging.getLogger(__name__) @@ -95,7 +95,7 @@ def _get_country(target, **keys): def _simplify_polys(polys, minarea=0.1, tolerance=0.01, filterremote=True): if isinstance(polys, MultiPolygon): - polys = sorted(polys, key=attrgetter('area'), reverse=True) + polys = sorted(polys.geoms, key=attrgetter('area'), reverse=True) mainpoly = polys[0] mainlength = np.sqrt(mainpoly.area/(2.*np.pi)) if mainpoly.area > minarea: @@ -139,7 +139,7 @@ def country_cover(country_shapes, eez_shapes=None): if eez_shapes is not None: shapes += list(eez_shapes) - europe_shape = cascaded_union(shapes) + europe_shape = unary_union(shapes) if isinstance(europe_shape, MultiPolygon): europe_shape = max(europe_shape, key=attrgetter('area')) return Polygon(shell=europe_shape.exterior) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 
1a976cd1..4b9db466 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -140,6 +140,9 @@ from functools import reduce from pypsa.networkclustering import (busmap_by_kmeans, busmap_by_spectral_clustering, _make_consense, get_clustering_from_busmap) +import warnings +warnings.filterwarnings(action='ignore', category=UserWarning) + from add_electricity import load_costs idx = pd.IndexSlice @@ -313,7 +316,7 @@ def cluster_regions(busmaps, input=None, output=None): for which in ('regions_onshore', 'regions_offshore'): regions = gpd.read_file(getattr(input, which)).set_index('name') - geom_c = regions.geometry.groupby(busmap).apply(shapely.ops.cascaded_union) + geom_c = regions.geometry.groupby(busmap).apply(shapely.ops.unary_union) regions_c = gpd.GeoDataFrame(dict(geometry=geom_c)) regions_c.index.name = 'name' save_to_geojson(regions_c, getattr(output, which)) From deac9f32e7280ab4f30ea80322796e9fd8861c1b Mon Sep 17 00:00:00 2001 From: martacki Date: Fri, 14 Jan 2022 11:05:15 +0100 Subject: [PATCH 088/102] move snakemake keys (input, output, config, ...) 
to own variables --- scripts/_helpers.py | 4 +++ scripts/add_electricity.py | 7 +++-- scripts/add_extra_components.py | 13 ++++----- scripts/base_network.py | 7 +++-- scripts/build_bus_regions.py | 16 ++++++----- scripts/build_cutout.py | 14 +++++----- scripts/build_hydro_profile.py | 20 +++++++------- scripts/build_load_data.py | 9 ++++--- scripts/build_natura_raster.py | 9 ++++--- scripts/build_powerplants.py | 11 ++++---- scripts/build_renewable_profiles.py | 16 ++++++----- scripts/build_shapes.py | 9 +++---- scripts/cluster_network.py | 42 +++++++++++++++-------------- scripts/plot_network.py | 25 ++++++++--------- scripts/plot_p_nom_max.py | 12 +++++---- scripts/prepare_links_p_nom.py | 6 +++-- scripts/prepare_network.py | 32 +++++++++++----------- scripts/retrieve_databundle.py | 6 +++-- scripts/simplify_network.py | 24 +++++++++-------- scripts/solve_network.py | 20 +++++++------- scripts/solve_operations_network.py | 21 ++++++++------- 21 files changed, 176 insertions(+), 147 deletions(-) diff --git a/scripts/_helpers.py b/scripts/_helpers.py index f1e5e887..a44a8133 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -95,6 +95,10 @@ def pdbcast(v, h): return pd.DataFrame(v.values.reshape((-1, 1)) * h.values, index=v.index, columns=h.index) +def retrieve_snakemake_keys(snakemake): + return (snakemake.input, snakemake.config, snakemake.wildcards, + snakemake.log, snakemake.output) + def load_network_for_plots(fn, tech_costs, config, combine_hydro_ps=True): import pypsa diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index 42d4f5cc..c4a883f5 100755 --- a/scripts/add_electricity.py +++ b/scripts/add_electricity.py @@ -84,7 +84,7 @@ It further adds extendable ``generators`` with **zero** capacity for """ import logging -from _helpers import configure_logging, update_p_nom_max +from _helpers import configure_logging, retrieve_snakemake_keys, update_p_nom_max import pypsa import pandas as pd @@ -546,8 +546,7 @@ if __name__ == 
"__main__": snakemake = mock_snakemake('add_electricity') configure_logging(snakemake) - config = snakemake.config - paths = snakemake.input + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) n = pypsa.Network(paths.base_network) Nyears = n.snapshot_weightings.objective.sum() / 8760. @@ -583,4 +582,4 @@ if __name__ == "__main__": add_nice_carrier_names(n, config) - n.export_to_netcdf(snakemake.output[0]) + n.export_to_netcdf(out[0]) diff --git a/scripts/add_extra_components.py b/scripts/add_extra_components.py index db764d4f..35947aee 100644 --- a/scripts/add_extra_components.py +++ b/scripts/add_extra_components.py @@ -50,7 +50,7 @@ The rule :mod:`add_extra_components` attaches additional extendable components t - ``Stores`` of carrier 'H2' and/or 'battery' in combination with ``Links``. If this option is chosen, the script adds extra buses with corresponding carrier where energy ``Stores`` are attached and which are connected to the corresponding power buses via two links, one each for charging and discharging. This leads to three investment variables for the energy capacity, charging and discharging capacity of the storage unit. """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import pypsa import pandas as pd @@ -192,18 +192,19 @@ if __name__ == "__main__": snakemake = mock_snakemake('add_extra_components', network='elec', simpl='', clusters=5) configure_logging(snakemake) - paths = snakemake.input + + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) n = pypsa.Network(paths.network) - elec_config = snakemake.config['electricity'] + elec_config = config['electricity'] Nyears = n.snapshot_weightings.objective.sum() / 8760. 
- costs = load_costs(paths.tech_costs, snakemake.config['costs'], elec_config, Nyears=Nyears) + costs = load_costs(paths.tech_costs, config['costs'], elec_config, Nyears=Nyears) attach_storageunits(n, costs, elec_config) attach_stores(n, costs, elec_config) attach_hydrogen_pipelines(n, costs, elec_config) - add_nice_carrier_names(n, snakemake.config) + add_nice_carrier_names(n, config) - n.export_to_netcdf(snakemake.output[0]) + n.export_to_netcdf(out[0]) diff --git a/scripts/base_network.py b/scripts/base_network.py index baa12092..41699f04 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -63,7 +63,7 @@ Description """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import pypsa import yaml @@ -588,11 +588,10 @@ if __name__ == "__main__": snakemake = mock_snakemake('base_network') configure_logging(snakemake) - paths = snakemake.input - config = snakemake.config + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) n = base_network(paths.eg_buses, paths.eg_converters, paths.eg_transformers, paths.eg_lines, paths.eg_links, paths.links_p_nom, paths.links_tyndp, paths.europe_shape, paths.country_shapes, paths.offshore_shapes, paths.parameter_corrections, config) - n.export_to_netcdf(snakemake.output[0]) + n.export_to_netcdf(out[0]) diff --git a/scripts/build_bus_regions.py b/scripts/build_bus_regions.py index d91d0575..78e2070d 100644 --- a/scripts/build_bus_regions.py +++ b/scripts/build_bus_regions.py @@ -42,7 +42,7 @@ Description """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import pypsa import os @@ -67,12 +67,14 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_bus_regions') configure_logging(snakemake) - countries = snakemake.config['countries'] + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - n = 
pypsa.Network(snakemake.input.base_network) + countries = config['countries'] - country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index('name')['geometry'] - offshore_shapes = gpd.read_file(snakemake.input.offshore_shapes).set_index('name')['geometry'] + n = pypsa.Network(paths.base_network) + + country_shapes = gpd.read_file(paths.country_shapes).set_index('name')['geometry'] + offshore_shapes = gpd.read_file(paths.offshore_shapes).set_index('name')['geometry'] onshore_regions = [] offshore_regions = [] @@ -103,6 +105,6 @@ if __name__ == "__main__": offshore_regions_c = offshore_regions_c.loc[offshore_regions_c.area > 1e-2] offshore_regions.append(offshore_regions_c) - save_to_geojson(pd.concat(onshore_regions, ignore_index=True), snakemake.output.regions_onshore) + save_to_geojson(pd.concat(onshore_regions, ignore_index=True), out.regions_onshore) - save_to_geojson(pd.concat(offshore_regions, ignore_index=True), snakemake.output.regions_offshore) + save_to_geojson(pd.concat(offshore_regions, ignore_index=True), out.regions_offshore) diff --git a/scripts/build_cutout.py b/scripts/build_cutout.py index 78eafac6..4b3e2bdc 100644 --- a/scripts/build_cutout.py +++ b/scripts/build_cutout.py @@ -95,7 +95,7 @@ import logging import atlite import geopandas as gpd import pandas as pd -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys logger = logging.getLogger(__name__) @@ -106,16 +106,18 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_cutout', cutout='europe-2013-era5') configure_logging(snakemake) - cutout_params = snakemake.config['atlite']['cutouts'][snakemake.wildcards.cutout] + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - snapshots = pd.date_range(freq='h', **snakemake.config['snapshots']) + cutout_params = config['atlite']['cutouts'][wildcards.cutout] + + snapshots = pd.date_range(freq='h', **config['snapshots']) time = [snapshots[0], snapshots[-1]] 
cutout_params['time'] = slice(*cutout_params.get('time', time)) if {'x', 'y', 'bounds'}.isdisjoint(cutout_params): # Determine the bounds from bus regions with a buffer of two grid cells - onshore = gpd.read_file(snakemake.input.regions_onshore) - offshore = gpd.read_file(snakemake.input.regions_offshore) + onshore = gpd.read_file(paths.regions_onshore) + offshore = gpd.read_file(paths.regions_offshore) regions = onshore.append(offshore) d = max(cutout_params.get('dx', 0.25), cutout_params.get('dy', 0.25))*2 cutout_params['bounds'] = regions.total_bounds + [-d, -d, d, d] @@ -126,5 +128,5 @@ if __name__ == "__main__": logging.info(f"Preparing cutout with parameters {cutout_params}.") features = cutout_params.pop('features', None) - cutout = atlite.Cutout(snakemake.output[0], **cutout_params) + cutout = atlite.Cutout(out[0], **cutout_params) cutout.prepare(features=features) diff --git a/scripts/build_hydro_profile.py b/scripts/build_hydro_profile.py index 6ac59262..563c8ecb 100644 --- a/scripts/build_hydro_profile.py +++ b/scripts/build_hydro_profile.py @@ -60,7 +60,7 @@ Description """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import atlite import geopandas as gpd @@ -74,22 +74,24 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_hydro_profile') configure_logging(snakemake) - config = snakemake.config['renewable']['hydro'] - cutout = atlite.Cutout(snakemake.input.cutout) + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - countries = snakemake.config['countries'] - country_shapes = (gpd.read_file(snakemake.input.country_shapes) + config_hydro = config['renewable']['hydro'] + cutout = atlite.Cutout(paths.cutout) + + countries = config['countries'] + country_shapes = (gpd.read_file(paths.country_shapes) .set_index('name')['geometry'].reindex(countries)) country_shapes.index.name = 'countries' eia_stats = vhydro.get_eia_annual_hydro_generation( - 
snakemake.input.eia_hydro_generation).reindex(columns=countries) + paths.eia_hydro_generation).reindex(columns=countries) inflow = cutout.runoff(shapes=country_shapes, smooth=True, lower_threshold_quantile=True, normalize_using_yearly=eia_stats) - if 'clip_min_inflow' in config: - inflow = inflow.where(inflow > config['clip_min_inflow'], 0) + if 'clip_min_inflow' in config_hydro: + inflow = inflow.where(inflow > config_hydro['clip_min_inflow'], 0) - inflow.to_netcdf(snakemake.output[0]) + inflow.to_netcdf(out[0]) diff --git a/scripts/build_load_data.py b/scripts/build_load_data.py index f71be6ea..0f9124ea 100755 --- a/scripts/build_load_data.py +++ b/scripts/build_load_data.py @@ -37,7 +37,7 @@ Outputs import logging logger = logging.getLogger(__name__) -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import pandas as pd import numpy as np @@ -196,7 +196,8 @@ if __name__ == "__main__": configure_logging(snakemake) - config = snakemake.config + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + powerstatistics = config['load']['power_statistics'] interpolate_limit = config['load']['interpolate_limit'] countries = config['countries'] @@ -204,7 +205,7 @@ if __name__ == "__main__": years = slice(snapshots[0], snapshots[-1]) time_shift = config['load']['time_shift_for_large_gaps'] - load = load_timeseries(snakemake.input[0], years, countries, powerstatistics) + load = load_timeseries(paths[0], years, countries, powerstatistics) if config['load']['manual_adjustments']: load = manual_adjustment(load, powerstatistics) @@ -221,5 +222,5 @@ if __name__ == "__main__": '`time_shift_for_large_gaps` or modify the `manual_adjustment` function ' 'for implementing the needed load data modifications.') - load.to_csv(snakemake.output[0]) + load.to_csv(out[0]) diff --git a/scripts/build_natura_raster.py b/scripts/build_natura_raster.py index f7a923d6..71d2c45e 100644 --- a/scripts/build_natura_raster.py +++ 
b/scripts/build_natura_raster.py @@ -40,7 +40,7 @@ Description """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import atlite import geopandas as gpd @@ -73,18 +73,19 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_natura_raster') configure_logging(snakemake) + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - cutouts = snakemake.input.cutouts + cutouts = paths.cutouts xs, Xs, ys, Ys = zip(*(determine_cutout_xXyY(cutout) for cutout in cutouts)) bounds = transform_bounds(4326, 3035, min(xs), min(ys), max(Xs), max(Ys)) transform, out_shape = get_transform_and_shape(bounds, res=100) # adjusted boundaries - shapes = gpd.read_file(snakemake.input.natura).to_crs(3035) + shapes = gpd.read_file(paths.natura).to_crs(3035) raster = ~geometry_mask(shapes.geometry, out_shape[::-1], transform) raster = raster.astype(rio.uint8) - with rio.open(snakemake.output[0], 'w', driver='GTiff', dtype=rio.uint8, + with rio.open(out[0], 'w', driver='GTiff', dtype=rio.uint8, count=1, transform=transform, crs=3035, compress='lzw', width=raster.shape[1], height=raster.shape[0]) as dst: dst.write(raster, indexes=1) diff --git a/scripts/build_powerplants.py b/scripts/build_powerplants.py index be57baa8..4b9d13a1 100755 --- a/scripts/build_powerplants.py +++ b/scripts/build_powerplants.py @@ -72,7 +72,7 @@ The configuration options ``electricity: powerplants_filter`` and ``electricity: """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import pypsa import powerplantmatching as pm @@ -99,7 +99,8 @@ if __name__ == "__main__": from _helpers import mock_snakemake snakemake = mock_snakemake('build_powerplants') configure_logging(snakemake) - paths = snakemake.input + + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) n = pypsa.Network(paths.base_network) countries = n.buses.country.unique() 
@@ -115,12 +116,12 @@ if __name__ == "__main__": df.Technology.replace('Steam Turbine', 'OCGT').fillna('OCGT'))))) - ppl_query = snakemake.config['electricity']['powerplants_filter'] + ppl_query = config['electricity']['powerplants_filter'] if isinstance(ppl_query, str): ppl.query(ppl_query, inplace=True) # add carriers from own powerplant files: - custom_ppl_query = snakemake.config['electricity']['custom_powerplants'] + custom_ppl_query = config['electricity']['custom_powerplants'] ppl = add_custom_powerplants(ppl, paths.custom_powerplants, custom_ppl_query) cntries_without_ppl = [c for c in countries if c not in ppl.Country.unique()] @@ -140,4 +141,4 @@ if __name__ == "__main__": if bus_null_b.any(): logging.warning(f"Couldn't find close bus for {bus_null_b.sum()} powerplants") - ppl.to_csv(snakemake.output[0]) + ppl.to_csv(out[0]) diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index 9ce83de3..944d6f39 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -190,7 +190,7 @@ from pypsa.geo import haversine from shapely.geometry import LineString import time -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys logger = logging.getLogger(__name__) @@ -201,10 +201,12 @@ if __name__ == '__main__': snakemake = mock_snakemake('build_renewable_profiles', technology='solar') configure_logging(snakemake) pgb.streams.wrap_stderr() - paths = snakemake.input - nprocesses = snakemake.config['atlite'].get('nprocesses') - noprogress = not snakemake.config['atlite'].get('show_progress', True) - config = snakemake.config['renewable'][snakemake.wildcards.technology] + + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + + nprocesses = config['atlite'].get('nprocesses') + noprogress = not config['atlite'].get('show_progress', True) + config = config['renewable'][wildcards.technology] resource = config['resource'] # pv panel config 
/ wind turbine config correction_factor = config.get('correction_factor', 1.) capacity_per_sqkm = config['capacity_per_sqkm'] @@ -313,7 +315,7 @@ if __name__ == '__main__': average_distance.rename('average_distance')]) - if snakemake.wildcards.technology.startswith("offwind"): + if wildcards.technology.startswith("offwind"): logger.info('Calculate underwater fraction of connections.') offshore_shape = gpd.read_file(paths['offshore_shapes']).unary_union underwater_fraction = [] @@ -333,4 +335,4 @@ if __name__ == '__main__': min_p_max_pu = config['clip_p_max_pu'] ds['profile'] = ds['profile'].where(ds['profile'] >= min_p_max_pu, 0) - ds.to_netcdf(snakemake.output.profile) + ds.to_netcdf(out.profile) diff --git a/scripts/build_shapes.py b/scripts/build_shapes.py index cca941e6..b4686ac3 100644 --- a/scripts/build_shapes.py +++ b/scripts/build_shapes.py @@ -68,7 +68,7 @@ Description """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import os import numpy as np @@ -217,13 +217,12 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_shapes') configure_logging(snakemake) - paths = snakemake.input - out = snakemake.output + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - country_shapes = countries(paths.naturalearth, snakemake.config['countries']) + country_shapes = countries(paths.naturalearth, config['countries']) save_to_geojson(country_shapes, out.country_shapes) - offshore_shapes = eez(country_shapes, paths.eez, snakemake.config['countries']) + offshore_shapes = eez(country_shapes, paths.eez, config['countries']) save_to_geojson(offshore_shapes, out.offshore_shapes) europe_shape = country_cover(country_shapes, offshore_shapes) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 51556b27..554109e3 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -122,7 +122,7 @@ Exemplary unsolved network clustered to 37 
nodes: """ import logging -from _helpers import configure_logging, update_p_nom_max +from _helpers import configure_logging, retrieve_snakemake_keys, update_p_nom_max import pypsa import os @@ -306,7 +306,7 @@ def cluster_regions(busmaps, input=None, output=None): for which in ('regions_onshore', 'regions_offshore'): regions = gpd.read_file(getattr(input, which)).set_index('name') - geom_c = regions.geometry.groupby(busmap).apply(shapely.ops.cascaded_union) + geom_c = regions.geometry.groupby(busmap).apply(shapely.ops.unary_union) regions_c = gpd.GeoDataFrame(dict(geometry=geom_c)) regions_c.index.name = 'name' save_to_geojson(regions_c, getattr(output, which)) @@ -328,19 +328,21 @@ if __name__ == "__main__": snakemake = mock_snakemake('cluster_network', network='elec', simpl='', clusters='5') configure_logging(snakemake) - n = pypsa.Network(snakemake.input.network) + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - focus_weights = snakemake.config.get('focus_weights', None) + n = pypsa.Network(paths.network) + + focus_weights = config.get('focus_weights', None) renewable_carriers = pd.Index([tech for tech in n.generators.carrier.unique() - if tech in snakemake.config['renewable']]) + if tech in config['renewable']]) - if snakemake.wildcards.clusters.endswith('m'): - n_clusters = int(snakemake.wildcards.clusters[:-1]) + if wildcards.clusters.endswith('m'): + n_clusters = int(wildcards.clusters[:-1]) aggregate_carriers = pd.Index(n.generators.carrier.unique()).difference(renewable_carriers) else: - n_clusters = int(snakemake.wildcards.clusters) + n_clusters = int(wildcards.clusters) aggregate_carriers = None # All if n_clusters == len(n.buses): @@ -349,11 +351,11 @@ if __name__ == "__main__": linemap = n.lines.index.to_series() clustering = pypsa.networkclustering.Clustering(n, busmap, linemap, linemap, pd.Series(dtype='O')) else: - line_length_factor = snakemake.config['lines']['length_factor'] + line_length_factor = 
config['lines']['length_factor'] Nyears = n.snapshot_weightings.objective.sum()/8760 - hvac_overhead_cost = (load_costs(tech_costs = snakemake.input.tech_costs, - config = snakemake.config['costs'], - elec_config=snakemake.config['electricity'], Nyears = Nyears) + hvac_overhead_cost = (load_costs(tech_costs = paths.tech_costs, + config = config['costs'], + elec_config=config['electricity'], Nyears = Nyears) .at['HVAC overhead', 'capital_cost']) def consense(x): @@ -362,24 +364,24 @@ if __name__ == "__main__": "The `potential` configuration option must agree for all renewable carriers, for now!" ) return v - potential_mode = consense(pd.Series([snakemake.config['renewable'][tech]['potential'] + potential_mode = consense(pd.Series([config['renewable'][tech]['potential'] for tech in renewable_carriers])) - custom_busmap = snakemake.config["enable"].get("custom_busmap", False) + custom_busmap = config["enable"].get("custom_busmap", False) if custom_busmap: - custom_busmap = pd.read_csv(snakemake.input.custom_busmap, index_col=0, squeeze=True) + custom_busmap = pd.read_csv(paths.custom_busmap, index_col=0, squeeze=True) custom_busmap.index = custom_busmap.index.astype(str) - logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}") + logger.info(f"Imported custom busmap from {paths.custom_busmap}") clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap, aggregate_carriers, line_length_factor=line_length_factor, potential_mode=potential_mode, - solver_name=snakemake.config['solving']['solver']['name'], + solver_name=config['solving']['solver']['name'], extended_link_costs=hvac_overhead_cost, focus_weights=focus_weights) update_p_nom_max(n) - clustering.network.export_to_netcdf(snakemake.output.network) + clustering.network.export_to_netcdf(out.network) for attr in ('busmap', 'linemap'): #also available: linemap_positive, linemap_negative - getattr(clustering, attr).to_csv(snakemake.output[attr]) + getattr(clustering, attr).to_csv(out[attr]) 
- cluster_regions((clustering.busmap,), snakemake.input, snakemake.output) + cluster_regions((clustering.busmap,), paths, out) diff --git a/scripts/plot_network.py b/scripts/plot_network.py index 456bf50f..645c8c39 100755 --- a/scripts/plot_network.py +++ b/scripts/plot_network.py @@ -20,8 +20,8 @@ Description """ import logging -from _helpers import (load_network_for_plots, aggregate_p, aggregate_costs, - configure_logging) +from _helpers import (retrieve_snakemake_keys, load_network_for_plots, + aggregate_p, aggregate_costs, configure_logging) import pandas as pd import numpy as np @@ -259,18 +259,19 @@ if __name__ == "__main__": set_plot_style() - opts = snakemake.config['plotting'] - map_figsize = opts['map']['figsize'] - map_boundaries = opts['map']['boundaries'] + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - n = load_network_for_plots(snakemake.input.network, snakemake.input.tech_costs, snakemake.config) + map_figsize = config['map']['figsize'] + map_boundaries = config['map']['boundaries'] - scenario_opts = snakemake.wildcards.opts.split('-') + n = load_network_for_plots(paths.network, paths.tech_costs, config) + + scenario_opts = wildcards.opts.split('-') fig, ax = plt.subplots(figsize=map_figsize, subplot_kw={"projection": ccrs.PlateCarree()}) - plot_map(n, ax, snakemake.wildcards.attr, opts) + plot_map(n, ax, wildcards.attr, config) - fig.savefig(snakemake.output.only_map, dpi=150, bbox_inches='tight') + fig.savefig(out.only_map, dpi=150, bbox_inches='tight') ax1 = fig.add_axes([-0.115, 0.625, 0.2, 0.2]) plot_total_energy_pie(n, ax1) @@ -278,12 +279,12 @@ if __name__ == "__main__": ax2 = fig.add_axes([-0.075, 0.1, 0.1, 0.45]) plot_total_cost_bar(n, ax2) - ll = snakemake.wildcards.ll + ll = wildcards.ll ll_type = ll[0] ll_factor = ll[1:] lbl = dict(c='line cost', v='line volume')[ll_type] amnt = '{ll} x today\'s'.format(ll=ll_factor) if ll_factor != 'opt' else 'optimal' fig.suptitle('Expansion to {amount} {label} at {clusters} 
clusters' - .format(amount=amnt, label=lbl, clusters=snakemake.wildcards.clusters)) + .format(amount=amnt, label=lbl, clusters=wildcards.clusters)) - fig.savefig(snakemake.output.ext, transparent=True, bbox_inches='tight') + fig.savefig(out.ext, transparent=True, bbox_inches='tight') diff --git a/scripts/plot_p_nom_max.py b/scripts/plot_p_nom_max.py index e79ad274..540608f9 100644 --- a/scripts/plot_p_nom_max.py +++ b/scripts/plot_p_nom_max.py @@ -53,11 +53,13 @@ if __name__ == "__main__": clusts= '5,full', country= 'all') configure_logging(snakemake) + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + plot_kwds = dict(drawstyle="steps-post") - clusters = snakemake.wildcards.clusts.split(',') - techs = snakemake.wildcards.techs.split(',') - country = snakemake.wildcards.country + clusters = wildcards.clusts.split(',') + techs = wildcards.techs.split(',') + country = wildcards.country if country == 'all': country = None else: @@ -66,7 +68,7 @@ if __name__ == "__main__": fig, axes = plt.subplots(1, len(techs)) for j, cluster in enumerate(clusters): - net = pypsa.Network(snakemake.input[j]) + net = pypsa.Network(paths[j]) for i, tech in enumerate(techs): cum_p_nom_max(net, tech, country).plot(x="p_max_pu", y="cum_p_nom_max", @@ -79,4 +81,4 @@ if __name__ == "__main__": plt.legend(title="Cluster level") - fig.savefig(snakemake.output[0], transparent=True, bbox_inches='tight') + fig.savefig(out[0], transparent=True, bbox_inches='tight') diff --git a/scripts/prepare_links_p_nom.py b/scripts/prepare_links_p_nom.py index b83089d6..6bd4bca4 100644 --- a/scripts/prepare_links_p_nom.py +++ b/scripts/prepare_links_p_nom.py @@ -37,7 +37,7 @@ Description """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import pandas as pd @@ -63,6 +63,8 @@ if __name__ == "__main__": snakemake = mock_snakemake('prepare_links_p_nom', simpl='', network='elec') configure_logging(snakemake) + paths, 
config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + links_p_nom = pd.read_html('https://en.wikipedia.org/wiki/List_of_HVDC_projects', header=0, match="SwePol")[0] mw = "Power (MW)" @@ -74,4 +76,4 @@ if __name__ == "__main__": links_p_nom['x1'], links_p_nom['y1'] = extract_coordinates(links_p_nom['Converterstation 1']) links_p_nom['x2'], links_p_nom['y2'] = extract_coordinates(links_p_nom['Converterstation 2']) - links_p_nom.dropna(subset=['x1', 'y1', 'x2', 'y2']).to_csv(snakemake.output[0], index=False) + links_p_nom.dropna(subset=['x1', 'y1', 'x2', 'y2']).to_csv(out[0], index=False) diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index 3eb244cf..19a395ea 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -56,7 +56,7 @@ Description """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import re import pypsa @@ -206,15 +206,17 @@ if __name__ == "__main__": clusters='40', ll='v0.3', opts='Co2L-24H') configure_logging(snakemake) - opts = snakemake.wildcards.opts.split('-') + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - n = pypsa.Network(snakemake.input[0]) + opts = wildcards.opts.split('-') + + n = pypsa.Network(paths[0]) Nyears = n.snapshot_weightings.objective.sum() / 8760. 
- costs = load_costs(tech_costs = snakemake.input.tech_costs, - config = snakemake.config['costs'], - elec_config = snakemake.config['electricity'], Nyears = Nyears) + costs = load_costs(tech_costs = paths.tech_costs, + config = config['costs'], + elec_config = config['electricity'], Nyears = Nyears) - set_line_s_max_pu(n, s_max_pu=snakemake.config['lines']['s_max_pu']) + set_line_s_max_pu(n, s_max_pu=config['lines']['s_max_pu']) for o in opts: m = re.match(r'^\d+h$', o, re.IGNORECASE) @@ -225,7 +227,7 @@ if __name__ == "__main__": for o in opts: m = re.match(r'^\d+seg$', o, re.IGNORECASE) if m is not None: - solver_name = snakemake.config["solving"]["solver"]["name"] + solver_name = config["solving"]["solver"]["name"] n = apply_time_segmentation(n, m.group(0)[:-3], solver_name=solver_name) break @@ -233,10 +235,10 @@ if __name__ == "__main__": if "Co2L" in o: m = re.findall("[0-9]*\.?[0-9]+$", o) if len(m) > 0: - co2limit = float(m[0]) * snakemake.config['electricity']['co2base'] + co2limit = float(m[0]) * config['electricity']['co2base'] add_co2limit(n, co2limit, Nyears) else: - add_co2limit(n, snakemake.config['electricity']['co2limit'], Nyears) + add_co2limit(n, config['electricity']['co2limit'], Nyears) break for o in opts: @@ -257,17 +259,17 @@ if __name__ == "__main__": c.df.loc[sel,attr] *= factor if 'Ep' in opts: - add_emission_prices(n, emission_prices=snakemake.config['costs']['emission_prices']) + add_emission_prices(n, config['costs']['emission_prices']) - ll_type, factor = snakemake.wildcards.ll[0], snakemake.wildcards.ll[1:] + ll_type, factor = wildcards.ll[0], wildcards.ll[1:] set_transmission_limit(n, ll_type, factor, costs, Nyears) - set_line_nom_max(n, s_nom_max_set=snakemake.config["lines"].get("s_nom_max,", np.inf), - p_nom_max_set=snakemake.config["links"].get("p_nom_max,", np.inf)) + set_line_nom_max(n, s_nom_max_set=config["lines"].get("s_nom_max,", np.inf), + p_nom_max_set=config["links"].get("p_nom_max,", np.inf)) if "ATK" in opts: 
enforce_autarky(n) elif "ATKc" in opts: enforce_autarky(n, only_crossborder=True) - n.export_to_netcdf(snakemake.output[0]) + n.export_to_netcdf(out[0]) diff --git a/scripts/retrieve_databundle.py b/scripts/retrieve_databundle.py index 86869879..c5a31f81 100644 --- a/scripts/retrieve_databundle.py +++ b/scripts/retrieve_databundle.py @@ -33,7 +33,7 @@ The :ref:`tutorial` uses a smaller `data bundle Date: Fri, 14 Jan 2022 11:29:01 +0100 Subject: [PATCH 089/102] make_summary: remove snakemake dependencies --- scripts/make_summary.py | 32 +++++++++++++++++--------------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/scripts/make_summary.py b/scripts/make_summary.py index 24c5e87c..a283fd20 100644 --- a/scripts/make_summary.py +++ b/scripts/make_summary.py @@ -54,7 +54,7 @@ Replacing '/summaries/' with '/plots/' creates nice colored maps of the results. """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import os import pypsa @@ -378,7 +378,7 @@ outputs = ["costs", ] -def make_summaries(networks_dict, country='all'): +def make_summaries(networks_dict, paths, config, country='all'): columns = pd.MultiIndex.from_tuples(networks_dict.keys(),names=["simpl","clusters","ll","opts"]) @@ -403,8 +403,8 @@ def make_summaries(networks_dict, country='all'): n = n[n.buses.country == country] Nyears = n.snapshot_weightings.objective.sum() / 8760. 
- costs = load_costs(tech_costs = snakemake.input[0], config = snakemake.config['costs'], - elec_config = snakemake.config['electricity'], Nyears = Nyears) + costs = load_costs(tech_costs = paths[0], config = config['costs'], + elec_config = config['electricity'], Nyears) update_transmission_costs(n, costs, simple_hvdc_costs=False) assign_carriers(n) @@ -431,25 +431,27 @@ if __name__ == "__main__": network_dir = os.path.join('results', 'networks') configure_logging(snakemake) - def expand_from_wildcard(key): - w = getattr(snakemake.wildcards, key) - return snakemake.config["scenario"][key] if w == "all" else [w] + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - if snakemake.wildcards.ll.endswith("all"): - ll = snakemake.config["scenario"]["ll"] - if len(snakemake.wildcards.ll) == 4: - ll = [l for l in ll if l[0] == snakemake.wildcards.ll[0]] + def expand_from_wildcard(key, config): + w = getattr(wildcards, key) + return config["scenario"][key] if w == "all" else [w] + + if wildcards.ll.endswith("all"): + ll = config["scenario"]["ll"] + if len(wildcards.ll) == 4: + ll = [l for l in ll if l[0] == wildcards.ll[0]] else: - ll = [snakemake.wildcards.ll] + ll = [wildcards.ll] networks_dict = {(simpl,clusters,l,opts) : os.path.join(network_dir, f'elec_s{simpl}_' f'{clusters}_ec_l{l}_{opts}.nc') - for simpl in expand_from_wildcard("simpl") + for simpl in expand_from_wildcard("simpl", config) for clusters in expand_from_wildcard("clusters") for l in ll for opts in expand_from_wildcard("opts")} - dfs = make_summaries(networks_dict, country=snakemake.wildcards.country) + dfs = make_summaries(networks_dict, paths, config, country=wildcards.country) - to_csv(dfs, snakemake.output[0]) + to_csv(dfs, out[0]) From f28a088ea3b1f49214e82dde5ccddcfb93c2d0db Mon Sep 17 00:00:00 2001 From: martacki Date: Fri, 14 Jan 2022 11:30:15 +0100 Subject: [PATCH 090/102] arguments in function calls instead of kwarg-style --- scripts/add_electricity.py | 4 ++-- 
scripts/add_extra_components.py | 2 +- scripts/cluster_network.py | 8 +++----- scripts/prepare_network.py | 10 ++++------ scripts/solve_network.py | 3 +-- scripts/solve_operations_network.py | 5 ++--- 6 files changed, 13 insertions(+), 19 deletions(-) diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index cbefba2f..fcddea8c 100755 --- a/scripts/add_electricity.py +++ b/scripts/add_electricity.py @@ -549,11 +549,11 @@ if __name__ == "__main__": n = pypsa.Network(paths.base_network) Nyears = n.snapshot_weightings.objective.sum() / 8760. - costs = load_costs(paths.tech_costs, config['costs'], config['electricity'], Nyears=Nyears) + costs = load_costs(paths.tech_costs, config['costs'], config['electricity'], Nyears) ppl = load_powerplants(paths.powerplants) attach_load(n, paths.regions, paths.load, paths.nuts3_shapes, config['countries'], - scaling=config['load']['scaling_factor']) + config['load']['scaling_factor']) update_transmission_costs(n, costs, config['lines']['length_factor']) diff --git a/scripts/add_extra_components.py b/scripts/add_extra_components.py index 35947aee..0531c9fa 100644 --- a/scripts/add_extra_components.py +++ b/scripts/add_extra_components.py @@ -199,7 +199,7 @@ if __name__ == "__main__": elec_config = config['electricity'] Nyears = n.snapshot_weightings.objective.sum() / 8760. 
- costs = load_costs(paths.tech_costs, config['costs'], elec_config, Nyears=Nyears) + costs = load_costs(paths.tech_costs, config['costs'], elec_config, Nyears) attach_storageunits(n, costs, elec_config) attach_stores(n, costs, elec_config) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 71dd1746..041fb259 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -374,12 +374,10 @@ if __name__ == "__main__": custom_busmap = pd.read_csv(paths.custom_busmap, index_col=0, squeeze=True) custom_busmap.index = custom_busmap.index.astype(str) logger.info(f"Imported custom busmap from {paths.custom_busmap}") + clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap, aggregate_carriers, - line_length_factor=line_length_factor, - potential_mode=potential_mode, - solver_name=config['solving']['solver']['name'], - extended_link_costs=hvac_overhead_cost, - focus_weights=focus_weights) + line_length_factor, potential_mode, config['solving']['solver']['name'], + "kmeans", hvac_overhead_cost, focus_weights) update_p_nom_max(n) diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index 19a395ea..e0b488f5 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -140,7 +140,7 @@ def average_every_nhours(n, offset): return m -def apply_time_segmentation(n, segments, solver_name="cplex"): +def apply_time_segmentation(n, segments, solver_name="cbc"): logger.info(f"Aggregating time series to {segments} segments.") try: import tsam.timeseriesaggregation as tsam @@ -212,11 +212,9 @@ if __name__ == "__main__": n = pypsa.Network(paths[0]) Nyears = n.snapshot_weightings.objective.sum() / 8760. 
- costs = load_costs(tech_costs = paths.tech_costs, - config = config['costs'], - elec_config = config['electricity'], Nyears = Nyears) + costs = load_costs(paths.tech_costs, config['costs'], config['electricity'], Nyears) - set_line_s_max_pu(n, s_max_pu=config['lines']['s_max_pu']) + set_line_s_max_pu(n, config['lines']['s_max_pu']) for o in opts: m = re.match(r'^\d+h$', o, re.IGNORECASE) @@ -228,7 +226,7 @@ if __name__ == "__main__": m = re.match(r'^\d+seg$', o, re.IGNORECASE) if m is not None: solver_name = config["solving"]["solver"]["name"] - n = apply_time_segmentation(n, m.group(0)[:-3], solver_name=solver_name) + n = apply_time_segmentation(n, m.group(0)[:-3], solver_name) break for o in opts: diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 1aaf4970..6f2124da 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -285,8 +285,7 @@ if __name__ == "__main__": with memory_logger(filename=fn, interval=30.) as mem: n = pypsa.Network(paths[0]) n = prepare_network(n, solve_opts) - n = solve_network(n, config=config, opts=opts, - solver_dir=tmpdir, + n = solve_network(n, config, opts, solver_dir=tmpdir, solver_logfile=logs.solver) n.export_to_netcdf(out[0]) diff --git a/scripts/solve_operations_network.py b/scripts/solve_operations_network.py index baea5a5c..6490ce51 100644 --- a/scripts/solve_operations_network.py +++ b/scripts/solve_operations_network.py @@ -116,9 +116,8 @@ if __name__ == "__main__": fn = getattr(logs, 'memory', None) with memory_logger(filename=fn, interval=30.) 
as mem: - n = prepare_network(n, solve_opts=config['solving']['options']) - n = solve_network(n, config=config, opts=opts, - solver_dir=tmpdir, + n = prepare_network(n, config['solving']['options']) + n = solve_network(n, config, opts, solver_dir=tmpdir, solver_logfile=logs.solver) n.export_to_netcdf(out[0]) From 0da77a7600ef53ef2b519ddafa41904505e5f56f Mon Sep 17 00:00:00 2001 From: martacki Date: Fri, 14 Jan 2022 13:44:33 +0100 Subject: [PATCH 091/102] remove snakemake dependencies in plot_summary --- scripts/plot_summary.py | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/scripts/plot_summary.py b/scripts/plot_summary.py index a34611de..48f064b0 100644 --- a/scripts/plot_summary.py +++ b/scripts/plot_summary.py @@ -21,7 +21,7 @@ Description import os import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import pandas as pd import matplotlib.pyplot as plt @@ -55,7 +55,7 @@ def rename_techs(label): preferred_order = pd.Index(["transmission lines","hydroelectricity","hydro reservoir","run of river","pumped hydro storage","onshore wind","offshore wind ac", "offshore wind dc","solar PV","solar thermal","OCGT","hydrogen storage","battery storage"]) -def plot_costs(infn, fn=None): +def plot_costs(infn, config, fn=None): ## For now ignore the simpl header cost_df = pd.read_csv(infn,index_col=list(range(3)),header=[1,2,3]) @@ -67,7 +67,7 @@ def plot_costs(infn, fn=None): df = df.groupby(df.index.map(rename_techs)).sum() - to_drop = df.index[df.max(axis=1) < snakemake.config['plotting']['costs_threshold']] + to_drop = df.index[df.max(axis=1) < config['plotting']['costs_threshold']] print("dropping") @@ -84,7 +84,7 @@ def plot_costs(infn, fn=None): fig, ax = plt.subplots() fig.set_size_inches((12,8)) - df.loc[new_index,new_columns].T.plot(kind="bar",ax=ax,stacked=True,color=[snakemake.config['plotting']['tech_colors'][i] for i in new_index]) + 
df.loc[new_index,new_columns].T.plot(kind="bar",ax=ax,stacked=True,color=[config['plotting']['tech_colors'][i] for i in new_index]) handles,labels = ax.get_legend_handles_labels() @@ -92,7 +92,7 @@ def plot_costs(infn, fn=None): handles.reverse() labels.reverse() - ax.set_ylim([0,snakemake.config['plotting']['costs_max']]) + ax.set_ylim([0,config['plotting']['costs_max']]) ax.set_ylabel("System Cost [EUR billion per year]") @@ -109,7 +109,7 @@ def plot_costs(infn, fn=None): fig.savefig(fn, transparent=True) -def plot_energy(infn, fn=None): +def plot_energy(infn, config, fn=None): energy_df = pd.read_csv(infn, index_col=list(range(2)),header=[1,2,3]) @@ -120,7 +120,7 @@ def plot_energy(infn, fn=None): df = df.groupby(df.index.map(rename_techs)).sum() - to_drop = df.index[df.abs().max(axis=1) < snakemake.config['plotting']['energy_threshold']] + to_drop = df.index[df.abs().max(axis=1) < config['plotting']['energy_threshold']] print("dropping") @@ -137,7 +137,7 @@ def plot_energy(infn, fn=None): fig, ax = plt.subplots() fig.set_size_inches((12,8)) - df.loc[new_index,new_columns].T.plot(kind="bar",ax=ax,stacked=True,color=[snakemake.config['plotting']['tech_colors'][i] for i in new_index]) + df.loc[new_index,new_columns].T.plot(kind="bar",ax=ax,stacked=True,color=[config['plotting']['tech_colors'][i] for i in new_index]) handles,labels = ax.get_legend_handles_labels() @@ -145,7 +145,7 @@ def plot_energy(infn, fn=None): handles.reverse() labels.reverse() - ax.set_ylim([snakemake.config['plotting']['energy_min'],snakemake.config['plotting']['energy_max']]) + ax.set_ylim([config['plotting']['energy_min'], config['plotting']['energy_max']]) ax.set_ylabel("Energy [TWh/a]") @@ -170,10 +170,12 @@ if __name__ == "__main__": attr='', ext='png', country='all') configure_logging(snakemake) - summary = snakemake.wildcards.summary + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + + summary = wildcards.summary try: func = globals()[f"plot_{summary}"] 
except KeyError: raise RuntimeError(f"plotting function for {summary} has not been defined") - func(os.path.join(snakemake.input[0], f"{summary}.csv"), snakemake.output[0]) + func(os.path.join(paths[0], f"{summary}.csv"), config, out[0]) From dc83fd8e0912f6a27e67d64ea8d1cce5e339344f Mon Sep 17 00:00:00 2001 From: martacki Date: Fri, 14 Jan 2022 15:13:44 +0100 Subject: [PATCH 092/102] fix small bugs --- scripts/make_summary.py | 7 +++---- scripts/plot_p_nom_max.py | 2 +- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/scripts/make_summary.py b/scripts/make_summary.py index a283fd20..3f8ee728 100644 --- a/scripts/make_summary.py +++ b/scripts/make_summary.py @@ -403,8 +403,7 @@ def make_summaries(networks_dict, paths, config, country='all'): n = n[n.buses.country == country] Nyears = n.snapshot_weightings.objective.sum() / 8760. - costs = load_costs(tech_costs = paths[0], config = config['costs'], - elec_config = config['electricity'], Nyears) + costs = load_costs(paths[0], config['costs'], config['electricity'], Nyears) update_transmission_costs(n, costs, simple_hvdc_costs=False) assign_carriers(n) @@ -448,9 +447,9 @@ if __name__ == "__main__": os.path.join(network_dir, f'elec_s{simpl}_' f'{clusters}_ec_l{l}_{opts}.nc') for simpl in expand_from_wildcard("simpl", config) - for clusters in expand_from_wildcard("clusters") + for clusters in expand_from_wildcard("clusters", config) for l in ll - for opts in expand_from_wildcard("opts")} + for opts in expand_from_wildcard("opts", config)} dfs = make_summaries(networks_dict, paths, config, country=wildcards.country) diff --git a/scripts/plot_p_nom_max.py b/scripts/plot_p_nom_max.py index 540608f9..ea66d612 100644 --- a/scripts/plot_p_nom_max.py +++ b/scripts/plot_p_nom_max.py @@ -19,7 +19,7 @@ Description """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import pypsa import pandas as pd From 9f0515105b41414df7feb8052077f3cc18a3031d Mon 
Sep 17 00:00:00 2001 From: Qui-Rin <94053589+Qui-Rin@users.noreply.github.com> Date: Fri, 14 Jan 2022 19:02:44 +0100 Subject: [PATCH 093/102] build_load_data: Removed underscore in pattern When using the transparency option the pattern used as a filter is created with a double underscore -> removed underscore in '_transparency' --- scripts/build_load_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/build_load_data.py b/scripts/build_load_data.py index f71be6ea..840cb6c7 100755 --- a/scripts/build_load_data.py +++ b/scripts/build_load_data.py @@ -70,7 +70,7 @@ def load_timeseries(fn, years, countries, powerstatistics=True): """ logger.info(f"Retrieving load data from '{fn}'.") - pattern = 'power_statistics' if powerstatistics else '_transparency' + pattern = 'power_statistics' if powerstatistics else 'transparency' pattern = f'_load_actual_entsoe_{pattern}' rename = lambda s: s[:-len(pattern)] date_parser = lambda x: dateutil.parser.parse(x, ignoretz=True) From dbf0f65ab53f2e3b100336ea2c273915ee9361dd Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 24 Jan 2022 11:16:23 +0100 Subject: [PATCH 094/102] Update config.default.yaml --- config.default.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/config.default.yaml b/config.default.yaml index 7a443a03..d2bf6159 100755 --- a/config.default.yaml +++ b/config.default.yaml @@ -154,6 +154,7 @@ renewable: # sector: The economic potential of photovoltaics and concentrating solar # power." Applied Energy 135 (2014): 704-720. # This correction factor of 0.854337 may be in order if using reanalysis data. 
+ # for discussion refer to https://github.com/PyPSA/pypsa-eur/pull/304 # correction_factor: 0.854337 corine: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 26, 31, 32] From c4be81eb5ecbd3421504f4281f629fddc7f83cc4 Mon Sep 17 00:00:00 2001 From: martacki Date: Mon, 24 Jan 2022 19:13:48 +0100 Subject: [PATCH 095/102] simplify_network: remove snakemake dependencies --- scripts/simplify_network.py | 71 ++++++++++++++++++------------------- 1 file changed, 34 insertions(+), 37 deletions(-) diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index 1de180d0..70f27bf2 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -83,7 +83,7 @@ The rule :mod:`simplify_network` does up to four things: """ import logging -from _helpers import configure_logging, retrieve_snakemake_keys, update_p_nom_max +from _helpers import configure_logging, update_p_nom_max from cluster_network import clustering_for_n_clusters, cluster_regions from add_electricity import load_costs @@ -138,15 +138,15 @@ def simplify_network_to_380(n): return n, trafo_map -def _prepare_connection_costs_per_link(n, costs): +def _prepare_connection_costs_per_link(n, costs, config): if n.links.empty: return {} connection_costs_per_link = {} - for tech in snakemake.config['renewable']: + for tech in config['renewable']: if tech.startswith('offwind'): connection_costs_per_link[tech] = ( - n.links.length * snakemake.config['lines']['length_factor'] * + n.links.length * config['lines']['length_factor'] * (n.links.underwater_fraction * costs.at[tech + '-connection-submarine', 'capital_cost'] + (1. 
- n.links.underwater_fraction) * costs.at[tech + '-connection-underground', 'capital_cost']) ) @@ -154,9 +154,9 @@ def _prepare_connection_costs_per_link(n, costs): return connection_costs_per_link -def _compute_connection_costs_to_bus(n, busmap, costs, connection_costs_per_link=None, buses=None): +def _compute_connection_costs_to_bus(n, busmap, costs, config, connection_costs_per_link=None, buses=None): if connection_costs_per_link is None: - connection_costs_per_link = _prepare_connection_costs_per_link(n, costs) + connection_costs_per_link = _prepare_connection_costs_per_link(n, costs, config) if buses is None: buses = busmap.index[busmap.index != busmap.values] @@ -174,7 +174,7 @@ def _compute_connection_costs_to_bus(n, busmap, costs, connection_costs_per_link return connection_costs_to_bus -def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus): +def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus, output): connection_costs = {} for tech in connection_costs_to_bus: tech_b = n.generators.carrier == tech @@ -184,11 +184,11 @@ def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus): logger.info("Displacing {} generator(s) and adding connection costs to capital_costs: {} " .format(tech, ", ".join("{:.0f} Eur/MW/a for `{}`".format(d, b) for b, d in costs.iteritems()))) connection_costs[tech] = costs - pd.DataFrame(connection_costs).to_csv(snakemake.output.connection_costs) + pd.DataFrame(connection_costs).to_csv(output.connection_costs) -def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate_one_ports={"Load", "StorageUnit"}): +def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, output, aggregate_one_ports={"Load", "StorageUnit"}): def replace_components(n, c, df, pnl): n.mremove(c, n.df(c).index) @@ -197,7 +197,7 @@ def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate if not df.empty: import_series_from_dataframe(n, df, c, 
attr) - _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus) + _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus, output) generators, generators_pnl = aggregategenerators(n, busmap, custom_strategies={'p_nom_min': np.sum}) replace_components(n, "Generator", generators, generators_pnl) @@ -213,7 +213,7 @@ def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate n.mremove(c, df.index[df.bus0.isin(buses_to_del) | df.bus1.isin(buses_to_del)]) -def simplify_links(n, costs): +def simplify_links(n, costs, config, output): ## Complex multi-node links are folded into end-points logger.info("Simplifying connected link components") @@ -260,7 +260,7 @@ def simplify_links(n, costs): busmap = n.buses.index.to_series() - connection_costs_per_link = _prepare_connection_costs_per_link(n, costs) + connection_costs_per_link = _prepare_connection_costs_per_link(n, costs, config) connection_costs_to_bus = pd.DataFrame(0., index=n.buses.index, columns=list(connection_costs_per_link)) for lbl in labels.value_counts().loc[lambda s: s > 2].index: @@ -274,11 +274,11 @@ def simplify_links(n, costs): m = sp.spatial.distance_matrix(n.buses.loc[b, ['x', 'y']], n.buses.loc[buses[1:-1], ['x', 'y']]) busmap.loc[buses] = b[np.r_[0, m.argmin(axis=0), 1]] - connection_costs_to_bus.loc[buses] += _compute_connection_costs_to_bus(n, busmap, costs, connection_costs_per_link, buses) + connection_costs_to_bus.loc[buses] += _compute_connection_costs_to_bus(n, busmap, costs, config, connection_costs_per_link, buses) all_links = [i for _, i in sum(links, [])] - p_max_pu = snakemake.config['links'].get('p_max_pu', 1.) + p_max_pu = config['links'].get('p_max_pu', 1.) 
lengths = n.links.loc[all_links, 'length'] name = lengths.idxmax() + '+{}'.format(len(links) - 1) params = dict( @@ -305,17 +305,17 @@ def simplify_links(n, costs): logger.debug("Collecting all components using the busmap") - _aggregate_and_move_components(n, busmap, connection_costs_to_bus) + _aggregate_and_move_components(n, busmap, connection_costs_to_bus, output) return n, busmap -def remove_stubs(n, costs): +def remove_stubs(n, costs, config, output): logger.info("Removing stubs") busmap = busmap_by_stubs(n) # ['country']) - connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap, costs) + connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap, costs, config) - _aggregate_and_move_components(n, busmap, connection_costs_to_bus) + _aggregate_and_move_components(n, busmap, connection_costs_to_bus, output) return n, busmap @@ -356,25 +356,25 @@ def aggregate_to_substations(n, buses_i=None): return clustering.network, busmap -def cluster(n, n_clusters): +def cluster(n, n_clusters, config): logger.info(f"Clustering to {n_clusters} buses") - focus_weights = snakemake.config.get('focus_weights', None) + focus_weights = config.get('focus_weights', None) renewable_carriers = pd.Index([tech for tech in n.generators.carrier.unique() - if tech.split('-', 2)[0] in snakemake.config['renewable']]) + if tech.split('-', 2)[0] in config['renewable']]) def consense(x): v = x.iat[0] assert ((x == v).all() or x.isnull().all()), ( "The `potential` configuration option must agree for all renewable carriers, for now!" 
) return v - potential_mode = (consense(pd.Series([snakemake.config['renewable'][tech]['potential'] + potential_mode = (consense(pd.Series([config['renewable'][tech]['potential'] for tech in renewable_carriers])) if len(renewable_carriers) > 0 else 'conservative') clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap=False, potential_mode=potential_mode, - solver_name=snakemake.config['solving']['solver']['name'], + solver_name=config['solving']['solver']['name'], focus_weights=focus_weights) return clustering.network, clustering.busmap @@ -386,29 +386,26 @@ if __name__ == "__main__": snakemake = mock_snakemake('simplify_network', simpl='', network='elec') configure_logging(snakemake) - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - - n = pypsa.Network(paths.network) + n = pypsa.Network(snakemake.input.network) n, trafo_map = simplify_network_to_380(n) Nyears = n.snapshot_weightings.objective.sum() / 8760 - technology_costs = load_costs(tech_costs = paths.tech_costs, - config = config['costs'], - elec_config = config['electricity'], Nyears = Nyears) - n, simplify_links_map = simplify_links(n, technology_costs) + technology_costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears) - n, stub_map = remove_stubs(n, technology_costs) + n, simplify_links_map = simplify_links(n, technology_costs, snakemake.config, snakemake.output) + + n, stub_map = remove_stubs(n, technology_costs, snakemake.config, snakemake.output) busmaps = [trafo_map, simplify_links_map, stub_map] - if config.get('clustering', {}).get('simplify', {}).get('to_substations', False): + if snakemake.config.get('clustering', {}).get('simplify', {}).get('to_substations', False): n, substation_map = aggregate_to_substations(n) busmaps.append(substation_map) - if wildcards.simpl: - n, cluster_map = cluster(n, int(wildcards.simpl)) + if snakemake.wildcards.simpl: + n, cluster_map = cluster(n, 
int(snakemake.wildcards.simpl), snakemake.config) busmaps.append(cluster_map) # some entries in n.buses are not updated in previous functions, therefore can be wrong. as they are not needed @@ -418,9 +415,9 @@ if __name__ == "__main__": update_p_nom_max(n) - n.export_to_netcdf(out.network) + n.export_to_netcdf(snakemake.output.network) busmap_s = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0]) - busmap_s.to_csv(out.busmap) + busmap_s.to_csv(snakemake.output.busmap) - cluster_regions(busmaps, paths, out) + cluster_regions(busmaps, snakemake.input, snakemake.output) From 6cdf3a287994201371fd9f7115ca28af587add2b Mon Sep 17 00:00:00 2001 From: martacki Date: Mon, 24 Jan 2022 19:48:26 +0100 Subject: [PATCH 096/102] use snakemake keywords directly without extracting them beforehand --- scripts/_helpers.py | 4 --- scripts/add_electricity.py | 40 ++++++++++++------------ scripts/add_extra_components.py | 14 ++++----- scripts/base_network.py | 12 +++----- scripts/build_bus_regions.py | 16 +++++----- scripts/build_cutout.py | 14 ++++----- scripts/build_hydro_profile.py | 16 +++++----- scripts/build_load_data.py | 20 ++++++------ scripts/build_powerplants.py | 14 ++++----- scripts/build_renewable_profiles.py | 48 ++++++++++++++--------------- scripts/build_shapes.py | 20 ++++++------ scripts/cluster_network.py | 40 ++++++++++++------------ scripts/prepare_network.py | 30 +++++++++--------- scripts/retrieve_databundle.py | 6 ++-- scripts/solve_network.py | 20 ++++++------ scripts/solve_operations_network.py | 24 +++++++-------- 16 files changed, 152 insertions(+), 186 deletions(-) diff --git a/scripts/_helpers.py b/scripts/_helpers.py index a44a8133..f1e5e887 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -95,10 +95,6 @@ def pdbcast(v, h): return pd.DataFrame(v.values.reshape((-1, 1)) * h.values, index=v.index, columns=h.index) -def retrieve_snakemake_keys(snakemake): - return (snakemake.input, snakemake.config, snakemake.wildcards, - snakemake.log, 
snakemake.output) - def load_network_for_plots(fn, tech_costs, config, combine_hydro_ps=True): import pypsa diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index fcddea8c..7dffe60f 100755 --- a/scripts/add_electricity.py +++ b/scripts/add_electricity.py @@ -84,7 +84,7 @@ It further adds extendable ``generators`` with **zero** capacity for """ import logging -from _helpers import configure_logging, retrieve_snakemake_keys, update_p_nom_max +from _helpers import configure_logging, update_p_nom_max import pypsa import pandas as pd @@ -544,40 +544,38 @@ if __name__ == "__main__": snakemake = mock_snakemake('add_electricity') configure_logging(snakemake) - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - - n = pypsa.Network(paths.base_network) + n = pypsa.Network(snakemake.input.base_network) Nyears = n.snapshot_weightings.objective.sum() / 8760. - costs = load_costs(paths.tech_costs, config['costs'], config['electricity'], Nyears) - ppl = load_powerplants(paths.powerplants) + costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears) + ppl = load_powerplants(snakemake.input.powerplants) - attach_load(n, paths.regions, paths.load, paths.nuts3_shapes, config['countries'], - config['load']['scaling_factor']) + attach_load(n, snakemake.input.regions, snakemake.input.load, snakemake.input.nuts3_shapes, + snakemake.config['countries'], snakemake.config['load']['scaling_factor']) - update_transmission_costs(n, costs, config['lines']['length_factor']) + update_transmission_costs(n, costs, snakemake.config['lines']['length_factor']) - carriers = config['electricity']['conventional_carriers'] + carriers = snakemake.config['electricity']['conventional_carriers'] attach_conventional_generators(n, costs, ppl, carriers) - carriers = config['renewable'] - attach_wind_and_solar(n, costs, paths, carriers, config['lines']['length_factor']) + carriers = snakemake.config['renewable'] + 
attach_wind_and_solar(n, costs, snakemake.input, carriers, snakemake.config['lines']['length_factor']) - if 'hydro' in config['renewable']: - carriers = config['renewable']['hydro'].pop('carriers', []) - attach_hydro(n, costs, ppl, paths.profile_hydro, paths.hydro_capacities, - carriers, **config['renewable']['hydro']) + if 'hydro' in snakemake.config['renewable']: + carriers = snakemake.config['renewable']['hydro'].pop('carriers', []) + attach_hydro(n, costs, ppl, snakemake.input.profile_hydro, snakemake.input.hydro_capacities, + carriers, **snakemake.config['renewable']['hydro']) - carriers = config['electricity']['extendable_carriers']['Generator'] + carriers = snakemake.config['electricity']['extendable_carriers']['Generator'] attach_extendable_generators(n, costs, ppl, carriers) - tech_map = config['electricity'].get('estimate_renewable_capacities_from_capacity_stats', {}) + tech_map = snakemake.config['electricity'].get('estimate_renewable_capacities_from_capacity_stats', {}) estimate_renewable_capacities(n, tech_map) - techs = config['electricity'].get('renewable_capacities_from_OPSD', []) + techs = snakemake.config['electricity'].get('renewable_capacities_from_OPSD', []) attach_OPSD_renewables(n, techs) update_p_nom_max(n) - add_nice_carrier_names(n, config) + add_nice_carrier_names(n, snakemake.config) - n.export_to_netcdf(out[0]) + n.export_to_netcdf(snakemake.output[0]) diff --git a/scripts/add_extra_components.py b/scripts/add_extra_components.py index 0531c9fa..287dd66e 100644 --- a/scripts/add_extra_components.py +++ b/scripts/add_extra_components.py @@ -50,7 +50,7 @@ The rule :mod:`add_extra_components` attaches additional extendable components t - ``Stores`` of carrier 'H2' and/or 'battery' in combination with ``Links``. If this option is chosen, the script adds extra buses with corresponding carrier where energy ``Stores`` are attached and which are connected to the corresponding power buses via two links, one each for charging and discharging. 
This leads to three investment variables for the energy capacity, charging and discharging capacity of the storage unit. """ import logging -from _helpers import configure_logging, retrieve_snakemake_keys +from _helpers import configure_logging import pypsa import pandas as pd @@ -193,18 +193,16 @@ if __name__ == "__main__": simpl='', clusters=5) configure_logging(snakemake) - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - - n = pypsa.Network(paths.network) - elec_config = config['electricity'] + n = pypsa.Network(snakemake.input.network) + elec_config = snakemake.config['electricity'] Nyears = n.snapshot_weightings.objective.sum() / 8760. - costs = load_costs(paths.tech_costs, config['costs'], elec_config, Nyears) + costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], elec_config, Nyears) attach_storageunits(n, costs, elec_config) attach_stores(n, costs, elec_config) attach_hydrogen_pipelines(n, costs, elec_config) - add_nice_carrier_names(n, config) + add_nice_carrier_names(n, snakemake.config) - n.export_to_netcdf(out[0]) + n.export_to_netcdf(snakemake.output[0]) diff --git a/scripts/base_network.py b/scripts/base_network.py index b9c9f37f..28d804cd 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -63,7 +63,7 @@ Description """ import logging -from _helpers import configure_logging, retrieve_snakemake_keys +from _helpers import configure_logging import pypsa import yaml @@ -588,10 +588,8 @@ if __name__ == "__main__": snakemake = mock_snakemake('base_network') configure_logging(snakemake) - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + n = base_network(snakemake.input.eg_buses, snakemake.input.eg_converters, snakemake.input.eg_transformers, snakemake.input.eg_lines, snakemake.input.eg_links, + snakemake.input.links_p_nom, snakemake.input.links_tyndp, snakemake.input.europe_shape, snakemake.input.country_shapes, snakemake.input.offshore_shapes, + 
snakemake.input.parameter_corrections, snakemake.config) - n = base_network(paths.eg_buses, paths.eg_converters, paths.eg_transformers, paths.eg_lines, paths.eg_links, - paths.links_p_nom, paths.links_tyndp, paths.europe_shape, paths.country_shapes, paths.offshore_shapes, - paths.parameter_corrections, config) - - n.export_to_netcdf(out[0]) + n.export_to_netcdf(snakemake.output[0]) diff --git a/scripts/build_bus_regions.py b/scripts/build_bus_regions.py index 78e2070d..d91d0575 100644 --- a/scripts/build_bus_regions.py +++ b/scripts/build_bus_regions.py @@ -42,7 +42,7 @@ Description """ import logging -from _helpers import configure_logging, retrieve_snakemake_keys +from _helpers import configure_logging import pypsa import os @@ -67,14 +67,12 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_bus_regions') configure_logging(snakemake) - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + countries = snakemake.config['countries'] - countries = config['countries'] + n = pypsa.Network(snakemake.input.base_network) - n = pypsa.Network(paths.base_network) - - country_shapes = gpd.read_file(paths.country_shapes).set_index('name')['geometry'] - offshore_shapes = gpd.read_file(paths.offshore_shapes).set_index('name')['geometry'] + country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index('name')['geometry'] + offshore_shapes = gpd.read_file(snakemake.input.offshore_shapes).set_index('name')['geometry'] onshore_regions = [] offshore_regions = [] @@ -105,6 +103,6 @@ if __name__ == "__main__": offshore_regions_c = offshore_regions_c.loc[offshore_regions_c.area > 1e-2] offshore_regions.append(offshore_regions_c) - save_to_geojson(pd.concat(onshore_regions, ignore_index=True), out.regions_onshore) + save_to_geojson(pd.concat(onshore_regions, ignore_index=True), snakemake.output.regions_onshore) - save_to_geojson(pd.concat(offshore_regions, ignore_index=True), out.regions_offshore) + save_to_geojson(pd.concat(offshore_regions, 
ignore_index=True), snakemake.output.regions_offshore) diff --git a/scripts/build_cutout.py b/scripts/build_cutout.py index 4b3e2bdc..78eafac6 100644 --- a/scripts/build_cutout.py +++ b/scripts/build_cutout.py @@ -95,7 +95,7 @@ import logging import atlite import geopandas as gpd import pandas as pd -from _helpers import configure_logging, retrieve_snakemake_keys +from _helpers import configure_logging logger = logging.getLogger(__name__) @@ -106,18 +106,16 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_cutout', cutout='europe-2013-era5') configure_logging(snakemake) - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + cutout_params = snakemake.config['atlite']['cutouts'][snakemake.wildcards.cutout] - cutout_params = config['atlite']['cutouts'][wildcards.cutout] - - snapshots = pd.date_range(freq='h', **config['snapshots']) + snapshots = pd.date_range(freq='h', **snakemake.config['snapshots']) time = [snapshots[0], snapshots[-1]] cutout_params['time'] = slice(*cutout_params.get('time', time)) if {'x', 'y', 'bounds'}.isdisjoint(cutout_params): # Determine the bounds from bus regions with a buffer of two grid cells - onshore = gpd.read_file(paths.regions_onshore) - offshore = gpd.read_file(paths.regions_offshore) + onshore = gpd.read_file(snakemake.input.regions_onshore) + offshore = gpd.read_file(snakemake.input.regions_offshore) regions = onshore.append(offshore) d = max(cutout_params.get('dx', 0.25), cutout_params.get('dy', 0.25))*2 cutout_params['bounds'] = regions.total_bounds + [-d, -d, d, d] @@ -128,5 +126,5 @@ if __name__ == "__main__": logging.info(f"Preparing cutout with parameters {cutout_params}.") features = cutout_params.pop('features', None) - cutout = atlite.Cutout(out[0], **cutout_params) + cutout = atlite.Cutout(snakemake.output[0], **cutout_params) cutout.prepare(features=features) diff --git a/scripts/build_hydro_profile.py b/scripts/build_hydro_profile.py index 563c8ecb..74efc2ef 100644 --- 
a/scripts/build_hydro_profile.py +++ b/scripts/build_hydro_profile.py @@ -60,7 +60,7 @@ Description """ import logging -from _helpers import configure_logging, retrieve_snakemake_keys +from _helpers import configure_logging import atlite import geopandas as gpd @@ -74,18 +74,16 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_hydro_profile') configure_logging(snakemake) - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + config_hydro = snakemake.config['renewable']['hydro'] + cutout = atlite.Cutout(snakemake.input.cutout) - config_hydro = config['renewable']['hydro'] - cutout = atlite.Cutout(paths.cutout) - - countries = config['countries'] - country_shapes = (gpd.read_file(paths.country_shapes) + countries = snakemake.config['countries'] + country_shapes = (gpd.read_file(snakemake.input.country_shapes) .set_index('name')['geometry'].reindex(countries)) country_shapes.index.name = 'countries' eia_stats = vhydro.get_eia_annual_hydro_generation( - paths.eia_hydro_generation).reindex(columns=countries) + snakemake.input.eia_hydro_generation).reindex(columns=countries) inflow = cutout.runoff(shapes=country_shapes, smooth=True, lower_threshold_quantile=True, @@ -94,4 +92,4 @@ if __name__ == "__main__": if 'clip_min_inflow' in config_hydro: inflow = inflow.where(inflow > config_hydro['clip_min_inflow'], 0) - inflow.to_netcdf(out[0]) + inflow.to_netcdf(snakemake.output[0]) diff --git a/scripts/build_load_data.py b/scripts/build_load_data.py index 144037a9..10921782 100755 --- a/scripts/build_load_data.py +++ b/scripts/build_load_data.py @@ -37,7 +37,7 @@ Outputs import logging logger = logging.getLogger(__name__) -from _helpers import configure_logging, retrieve_snakemake_keys +from _helpers import configure_logging import pandas as pd import numpy as np @@ -196,18 +196,16 @@ if __name__ == "__main__": configure_logging(snakemake) - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - - powerstatistics = 
config['load']['power_statistics'] - interpolate_limit = config['load']['interpolate_limit'] - countries = config['countries'] - snapshots = pd.date_range(freq='h', **config['snapshots']) + powerstatistics = snakemake.config['load']['power_statistics'] + interpolate_limit = snakemake.config['load']['interpolate_limit'] + countries = snakemake.config['countries'] + snapshots = pd.date_range(freq='h', **snakemake.config['snapshots']) years = slice(snapshots[0], snapshots[-1]) - time_shift = config['load']['time_shift_for_large_gaps'] + time_shift = snakemake.config['load']['time_shift_for_large_gaps'] - load = load_timeseries(paths[0], years, countries, powerstatistics) + load = load_timeseries(snakemake.input[0], years, countries, powerstatistics) - if config['load']['manual_adjustments']: + if snakemake.config['load']['manual_adjustments']: load = manual_adjustment(load, powerstatistics) logger.info(f"Linearly interpolate gaps of size {interpolate_limit} and less.") @@ -222,5 +220,5 @@ if __name__ == "__main__": '`time_shift_for_large_gaps` or modify the `manual_adjustment` function ' 'for implementing the needed load data modifications.') - load.to_csv(out[0]) + load.to_csv(snakemake.output[0]) diff --git a/scripts/build_powerplants.py b/scripts/build_powerplants.py index 4b9d13a1..d4ad4989 100755 --- a/scripts/build_powerplants.py +++ b/scripts/build_powerplants.py @@ -72,7 +72,7 @@ The configuration options ``electricity: powerplants_filter`` and ``electricity: """ import logging -from _helpers import configure_logging, retrieve_snakemake_keys +from _helpers import configure_logging import pypsa import powerplantmatching as pm @@ -100,9 +100,7 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_powerplants') configure_logging(snakemake) - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - - n = pypsa.Network(paths.base_network) + n = pypsa.Network(snakemake.input.base_network) countries = n.buses.country.unique() ppl = 
(pm.powerplants(from_url=True) @@ -116,13 +114,13 @@ if __name__ == "__main__": df.Technology.replace('Steam Turbine', 'OCGT').fillna('OCGT'))))) - ppl_query = config['electricity']['powerplants_filter'] + ppl_query = snakemake.config['electricity']['powerplants_filter'] if isinstance(ppl_query, str): ppl.query(ppl_query, inplace=True) # add carriers from own powerplant files: - custom_ppl_query = config['electricity']['custom_powerplants'] - ppl = add_custom_powerplants(ppl, paths.custom_powerplants, custom_ppl_query) + custom_ppl_query = snakemake.config['electricity']['custom_powerplants'] + ppl = add_custom_powerplants(ppl, snakemake.input.custom_powerplants, custom_ppl_query) cntries_without_ppl = [c for c in countries if c not in ppl.Country.unique()] @@ -141,4 +139,4 @@ if __name__ == "__main__": if bus_null_b.any(): logging.warning(f"Couldn't find close bus for {bus_null_b.sum()} powerplants") - ppl.to_csv(out[0]) + ppl.to_csv(snakemake.output[0]) diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index 944d6f39..b37e6825 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -190,7 +190,7 @@ from pypsa.geo import haversine from shapely.geometry import LineString import time -from _helpers import configure_logging, retrieve_snakemake_keys +from _helpers import configure_logging logger = logging.getLogger(__name__) @@ -202,55 +202,53 @@ if __name__ == '__main__': configure_logging(snakemake) pgb.streams.wrap_stderr() - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - - nprocesses = config['atlite'].get('nprocesses') - noprogress = not config['atlite'].get('show_progress', True) - config = config['renewable'][wildcards.technology] + nprocesses = snakemake.config['atlite'].get('nprocesses') + noprogress = not snakemake.config['atlite'].get('show_progress', True) + config = snakemake.config['renewable'][snakemake.wildcards.technology] resource = 
config['resource'] # pv panel config / wind turbine config - correction_factor = config.get('correction_factor', 1.) + correction_factor = snakemake.config.get('correction_factor', 1.) capacity_per_sqkm = config['capacity_per_sqkm'] - p_nom_max_meth = config.get('potential', 'conservative') + p_nom_max_meth = snakemake.config.get('potential', 'conservative') if isinstance(config.get("corine", {}), list): - config['corine'] = {'grid_codes': config['corine']} + snakemake.config['corine'] = {'grid_codes': config['corine']} if correction_factor != 1.: logger.info(f'correction_factor is set as {correction_factor}') - cutout = atlite.Cutout(paths['cutout']) - regions = gpd.read_file(paths.regions).set_index('name').rename_axis('bus') + cutout = atlite.Cutout(snakemake.input['cutout']) + regions = gpd.read_file(snakemake.input.regions).set_index('name').rename_axis('bus') buses = regions.index excluder = atlite.ExclusionContainer(crs=3035, res=100) if config['natura']: - excluder.add_raster(paths.natura, nodata=0, allow_no_overlap=True) + excluder.add_raster(snakemake.input.natura, nodata=0, allow_no_overlap=True) - corine = config.get("corine", {}) + corine = snakemake.config.get("corine", {}) if "grid_codes" in corine: codes = corine["grid_codes"] - excluder.add_raster(paths.corine, codes=codes, invert=True, crs=3035) + excluder.add_raster(snakemake.input.corine, codes=codes, invert=True, crs=3035) if corine.get("distance", 0.) 
> 0.: codes = corine["distance_grid_codes"] buffer = corine["distance"] - excluder.add_raster(paths.corine, codes=codes, buffer=buffer, crs=3035) + excluder.add_raster(snakemake.input.corine, codes=codes, buffer=buffer, crs=3035) if "max_depth" in config: # lambda not supported for atlite + multiprocessing # use named function np.greater with partially frozen argument instead # and exclude areas where: -max_depth > grid cell depth func = functools.partial(np.greater,-config['max_depth']) - excluder.add_raster(paths.gebco, codes=func, crs=4236, nodata=-1000) + excluder.add_raster(snakemake.input.gebco, codes=func, crs=4236, nodata=-1000) if 'min_shore_distance' in config: buffer = config['min_shore_distance'] - excluder.add_geometry(paths.country_shapes, buffer=buffer) + excluder.add_geometry(snakemake.input.country_shapes, buffer=buffer) if 'max_shore_distance' in config: buffer = config['max_shore_distance'] - excluder.add_geometry(paths.country_shapes, buffer=buffer, invert=True) + excluder.add_geometry(snakemake.input.country_shapes, buffer=buffer, invert=True) kwargs = dict(nprocesses=nprocesses, disable_progressbar=noprogress) if noprogress: @@ -315,9 +313,9 @@ if __name__ == '__main__': average_distance.rename('average_distance')]) - if wildcards.technology.startswith("offwind"): + if snakemake.wildcards.technology.startswith("offwind"): logger.info('Calculate underwater fraction of connections.') - offshore_shape = gpd.read_file(paths['offshore_shapes']).unary_union + offshore_shape = gpd.read_file(snakemake.input['offshore_shapes']).unary_union underwater_fraction = [] for bus in buses: p = centre_of_mass.sel(bus=bus).data @@ -328,11 +326,11 @@ if __name__ == '__main__': ds['underwater_fraction'] = xr.DataArray(underwater_fraction, [buses]) # select only buses with some capacity and minimal capacity factor - ds = ds.sel(bus=((ds['profile'].mean('time') > config.get('min_p_max_pu', 0.)) & - (ds['p_nom_max'] > config.get('min_p_nom_max', 0.)))) + ds = 
ds.sel(bus=((ds['profile'].mean('time') > snakemake.config.get('min_p_max_pu', 0.)) & + (ds['p_nom_max'] > snakemake.config.get('min_p_nom_max', 0.)))) - if 'clip_p_max_pu' in config: - min_p_max_pu = config['clip_p_max_pu'] + if 'clip_p_max_pu' in snakemake.config: + min_p_max_pu = snakemake.config['clip_p_max_pu'] ds['profile'] = ds['profile'].where(ds['profile'] >= min_p_max_pu, 0) - ds.to_netcdf(out.profile) + ds.to_netcdf(snakemake.output.profile) diff --git a/scripts/build_shapes.py b/scripts/build_shapes.py index 515cbc13..95867d89 100644 --- a/scripts/build_shapes.py +++ b/scripts/build_shapes.py @@ -68,7 +68,7 @@ Description """ import logging -from _helpers import configure_logging, retrieve_snakemake_keys +from _helpers import configure_logging import os import numpy as np @@ -217,18 +217,16 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_shapes') configure_logging(snakemake) - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + country_shapes = countries(snakemake.input.naturalearth, snakemake.config['countries']) + save_to_geojson(country_shapes, snakemake.output.country_shapes) - country_shapes = countries(paths.naturalearth, config['countries']) - save_to_geojson(country_shapes, out.country_shapes) - - offshore_shapes = eez(country_shapes, paths.eez, config['countries']) - save_to_geojson(offshore_shapes, out.offshore_shapes) + offshore_shapes = eez(country_shapes, snakemake.input.eez, snakemake.config['countries']) + save_to_geojson(offshore_shapes, snakemake.output.offshore_shapes) europe_shape = country_cover(country_shapes, offshore_shapes) - save_to_geojson(gpd.GeoSeries(europe_shape), out.europe_shape) + save_to_geojson(gpd.GeoSeries(europe_shape), snakemake.output.europe_shape) - nuts3_shapes = nuts3(country_shapes, paths.nuts3, paths.nuts3pop, - paths.nuts3gdp, paths.ch_cantons, paths.ch_popgdp) + nuts3_shapes = nuts3(country_shapes, snakemake.input.nuts3, snakemake.input.nuts3pop, + 
snakemake.input.nuts3gdp, snakemake.input.ch_cantons, snakemake.input.ch_popgdp) - save_to_geojson(nuts3_shapes, out.nuts3_shapes) + save_to_geojson(nuts3_shapes, snakemake.output.nuts3_shapes) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 041fb259..525196fc 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -122,7 +122,7 @@ Exemplary unsolved network clustered to 37 nodes: """ import logging -from _helpers import configure_logging, retrieve_snakemake_keys, update_p_nom_max +from _helpers import configure_logging, update_p_nom_max import pypsa import os @@ -331,21 +331,19 @@ if __name__ == "__main__": snakemake = mock_snakemake('cluster_network', network='elec', simpl='', clusters='5') configure_logging(snakemake) - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + n = pypsa.Network(snakemake.input.network) - n = pypsa.Network(paths.network) - - focus_weights = config.get('focus_weights', None) + focus_weights = snakemake.config.get('focus_weights', None) renewable_carriers = pd.Index([tech for tech in n.generators.carrier.unique() - if tech in config['renewable']]) + if tech in snakemake.config['renewable']]) - if wildcards.clusters.endswith('m'): - n_clusters = int(wildcards.clusters[:-1]) + if snakemake.wildcards.clusters.endswith('m'): + n_clusters = int(snakemake.wildcards.clusters[:-1]) aggregate_carriers = pd.Index(n.generators.carrier.unique()).difference(renewable_carriers) else: - n_clusters = int(wildcards.clusters) + n_clusters = int(snakemake.wildcards.clusters) aggregate_carriers = None # All if n_clusters == len(n.buses): @@ -354,11 +352,10 @@ if __name__ == "__main__": linemap = n.lines.index.to_series() clustering = pypsa.networkclustering.Clustering(n, busmap, linemap, linemap, pd.Series(dtype='O')) else: - line_length_factor = config['lines']['length_factor'] + line_length_factor = snakemake.config['lines']['length_factor'] Nyears = 
n.snapshot_weightings.objective.sum()/8760 - hvac_overhead_cost = (load_costs(tech_costs = paths.tech_costs, - config = config['costs'], - elec_config=config['electricity'], Nyears = Nyears) + + hvac_overhead_cost = (load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears) .at['HVAC overhead', 'capital_cost']) def consense(x): @@ -367,22 +364,23 @@ if __name__ == "__main__": "The `potential` configuration option must agree for all renewable carriers, for now!" ) return v - potential_mode = consense(pd.Series([config['renewable'][tech]['potential'] + potential_mode = consense(pd.Series([snakemake.config['renewable'][tech]['potential'] for tech in renewable_carriers])) - custom_busmap = config["enable"].get("custom_busmap", False) + custom_busmap = snakemake.config["enable"].get("custom_busmap", False) if custom_busmap: - custom_busmap = pd.read_csv(paths.custom_busmap, index_col=0, squeeze=True) + custom_busmap = pd.read_csv(snakemake.input.custom_busmap, index_col=0, squeeze=True) custom_busmap.index = custom_busmap.index.astype(str) - logger.info(f"Imported custom busmap from {paths.custom_busmap}") + logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}") clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap, aggregate_carriers, - line_length_factor, potential_mode, config['solving']['solver']['name'], + line_length_factor, potential_mode, + snakemake.config['solving']['solver']['name'], "kmeans", hvac_overhead_cost, focus_weights) update_p_nom_max(n) - clustering.network.export_to_netcdf(out.network) + clustering.network.export_to_netcdf(snakemake.output.network) for attr in ('busmap', 'linemap'): #also available: linemap_positive, linemap_negative - getattr(clustering, attr).to_csv(out[attr]) + getattr(clustering, attr).to_csv(snakemake.output[attr]) - cluster_regions((clustering.busmap,), paths, out) + cluster_regions((clustering.busmap,), snakemake.input, snakemake.output) 
diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index e0b488f5..f984ace6 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -56,7 +56,7 @@ Description """ import logging -from _helpers import configure_logging, retrieve_snakemake_keys +from _helpers import configure_logging import re import pypsa @@ -70,7 +70,7 @@ idx = pd.IndexSlice logger = logging.getLogger(__name__) -def add_co2limit(n, co2limit=1.487e+9, Nyears=1.): +def add_co2limit(n, co2limit, Nyears=1.): n.add("GlobalConstraint", "CO2Limit", carrier_attribute="co2_emissions", sense="<=", @@ -206,15 +206,13 @@ if __name__ == "__main__": clusters='40', ll='v0.3', opts='Co2L-24H') configure_logging(snakemake) - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + opts = snakemake.wildcards.opts.split('-') - opts = wildcards.opts.split('-') - - n = pypsa.Network(paths[0]) + n = pypsa.Network(snakemake.input[0]) Nyears = n.snapshot_weightings.objective.sum() / 8760. 
- costs = load_costs(paths.tech_costs, config['costs'], config['electricity'], Nyears) + costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears) - set_line_s_max_pu(n, config['lines']['s_max_pu']) + set_line_s_max_pu(n, snakemake.config['lines']['s_max_pu']) for o in opts: m = re.match(r'^\d+h$', o, re.IGNORECASE) @@ -225,7 +223,7 @@ if __name__ == "__main__": for o in opts: m = re.match(r'^\d+seg$', o, re.IGNORECASE) if m is not None: - solver_name = config["solving"]["solver"]["name"] + solver_name = snakemake.config["solving"]["solver"]["name"] n = apply_time_segmentation(n, m.group(0)[:-3], solver_name) break @@ -233,10 +231,10 @@ if __name__ == "__main__": if "Co2L" in o: m = re.findall("[0-9]*\.?[0-9]+$", o) if len(m) > 0: - co2limit = float(m[0]) * config['electricity']['co2base'] + co2limit = float(m[0]) * snakemake.config['electricity']['co2base'] add_co2limit(n, co2limit, Nyears) else: - add_co2limit(n, config['electricity']['co2limit'], Nyears) + add_co2limit(n, snakemake.config['electricity']['co2limit'], Nyears) break for o in opts: @@ -257,17 +255,17 @@ if __name__ == "__main__": c.df.loc[sel,attr] *= factor if 'Ep' in opts: - add_emission_prices(n, config['costs']['emission_prices']) + add_emission_prices(n, snakemake.config['costs']['emission_prices']) - ll_type, factor = wildcards.ll[0], wildcards.ll[1:] + ll_type, factor = snakemake.wildcards.ll[0], snakemake.wildcards.ll[1:] set_transmission_limit(n, ll_type, factor, costs, Nyears) - set_line_nom_max(n, s_nom_max_set=config["lines"].get("s_nom_max,", np.inf), - p_nom_max_set=config["links"].get("p_nom_max,", np.inf)) + set_line_nom_max(n, s_nom_max_set=snakemake.config["lines"].get("s_nom_max,", np.inf), + p_nom_max_set=snakemake.config["links"].get("p_nom_max,", np.inf)) if "ATK" in opts: enforce_autarky(n) elif "ATKc" in opts: enforce_autarky(n, only_crossborder=True) - n.export_to_netcdf(out[0]) + 
n.export_to_netcdf(snakemake.output[0]) diff --git a/scripts/retrieve_databundle.py b/scripts/retrieve_databundle.py index c5a31f81..86869879 100644 --- a/scripts/retrieve_databundle.py +++ b/scripts/retrieve_databundle.py @@ -33,7 +33,7 @@ The :ref:`tutorial` uses a smaller `data bundle Date: Thu, 27 Jan 2022 21:04:50 +0100 Subject: [PATCH 097/102] Snakefile: use standard resource mem_mb rather than mem --- Snakefile | 34 +++++++++++++++++----------------- doc/release_notes.rst | 2 ++ 2 files changed, 19 insertions(+), 17 deletions(-) diff --git a/Snakefile b/Snakefile index ce79a421..c5d6a3cd 100644 --- a/Snakefile +++ b/Snakefile @@ -45,7 +45,7 @@ if config['enable'].get('prepare_links_p_nom', False): output: 'data/links_p_nom.csv' log: 'logs/prepare_links_p_nom.log' threads: 1 - resources: mem=500 + resources: mem_mb=500 script: 'scripts/prepare_links_p_nom.py' @@ -87,7 +87,7 @@ rule build_powerplants: output: "resources/powerplants.csv" log: "logs/build_powerplants.log" threads: 1 - resources: mem=500 + resources: mem_mb=500 script: "scripts/build_powerplants.py" @@ -108,7 +108,7 @@ rule base_network: log: "logs/base_network.log" benchmark: "benchmarks/base_network" threads: 1 - resources: mem=500 + resources: mem_mb=500 script: "scripts/base_network.py" @@ -128,7 +128,7 @@ rule build_shapes: nuts3_shapes='resources/nuts3_shapes.geojson' log: "logs/build_shapes.log" threads: 1 - resources: mem=500 + resources: mem_mb=500 script: "scripts/build_shapes.py" @@ -142,7 +142,7 @@ rule build_bus_regions: regions_offshore="resources/regions_offshore.geojson" log: "logs/build_bus_regions.log" threads: 1 - resources: mem=1000 + resources: mem_mb=1000 script: "scripts/build_bus_regions.py" if config['enable'].get('build_cutout', False): @@ -154,7 +154,7 @@ if config['enable'].get('build_cutout', False): log: "logs/build_cutout/{cutout}.log" benchmark: "benchmarks/build_cutout_{cutout}" threads: ATLITE_NPROCESSES - resources: mem=ATLITE_NPROCESSES * 1000 + resources: 
mem_mb=ATLITE_NPROCESSES * 1000 script: "scripts/build_cutout.py" @@ -200,7 +200,7 @@ rule build_renewable_profiles: log: "logs/build_renewable_profile_{technology}.log" benchmark: "benchmarks/build_renewable_profiles_{technology}" threads: ATLITE_NPROCESSES - resources: mem=ATLITE_NPROCESSES * 5000 + resources: mem_mb=ATLITE_NPROCESSES * 5000 script: "scripts/build_renewable_profiles.py" @@ -212,7 +212,7 @@ if 'hydro' in config['renewable'].keys(): cutout="cutouts/" + config["renewable"]['hydro']['cutout'] + ".nc" output: 'resources/profile_hydro.nc' log: "logs/build_hydro_profile.log" - resources: mem=5000 + resources: mem_mb=5000 script: 'scripts/build_hydro_profile.py' @@ -232,7 +232,7 @@ rule add_electricity: log: "logs/add_electricity.log" benchmark: "benchmarks/add_electricity" threads: 1 - resources: mem=5000 + resources: mem_mb=5000 script: "scripts/add_electricity.py" @@ -251,7 +251,7 @@ rule simplify_network: log: "logs/simplify_network/elec_s{simpl}.log" benchmark: "benchmarks/simplify_network/elec_s{simpl}" threads: 1 - resources: mem=4000 + resources: mem_mb=4000 script: "scripts/simplify_network.py" @@ -273,7 +273,7 @@ rule cluster_network: log: "logs/cluster_network/elec_s{simpl}_{clusters}.log" benchmark: "benchmarks/cluster_network/elec_s{simpl}_{clusters}" threads: 1 - resources: mem=6000 + resources: mem_mb=6000 script: "scripts/cluster_network.py" @@ -285,7 +285,7 @@ rule add_extra_components: log: "logs/add_extra_components/elec_s{simpl}_{clusters}.log" benchmark: "benchmarks/add_extra_components/elec_s{simpl}_{clusters}_ec" threads: 1 - resources: mem=3000 + resources: mem_mb=3000 script: "scripts/add_extra_components.py" @@ -295,7 +295,7 @@ rule prepare_network: log: "logs/prepare_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.log" benchmark: "benchmarks/prepare_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}" threads: 1 - resources: mem=4000 + resources: mem_mb=4000 script: "scripts/prepare_network.py" @@ -326,8 +326,8 @@ rule 
solve_network: memory="logs/solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_memory.log" benchmark: "benchmarks/solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}" threads: 4 - resources: mem=memory - shadow: "shallow" + resources: mem_mb=memory + shadow: "minimal" script: "scripts/solve_network.py" @@ -342,8 +342,8 @@ rule solve_operations_network: memory="logs/solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_memory.log" benchmark: "benchmarks/solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}" threads: 4 - resources: mem=(lambda w: 5000 + 372 * int(w.clusters)) - shadow: "shallow" + resources: mem_mb=(lambda w: 5000 + 372 * int(w.clusters)) + shadow: "minimal" script: "scripts/solve_operations_network.py" diff --git a/doc/release_notes.rst b/doc/release_notes.rst index c379cf5c..d1b7b356 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -22,6 +22,8 @@ Upcoming Release correction factor for solar PV capacity factors by default while satellite data is used. A correction factor of 0.854337 is recommended if reanalysis data like ERA5 is used. +* Resource definitions for memory usage now follow [Snakemake standard resource definition](https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#standard-resources) ```mem_mb`` rather than ``mem``. 
+ PyPSA-Eur 0.4.0 (22th September 2021) ===================================== From c8c1c3c46e28d239fe5067211c24f150a4b14a91 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sat, 29 Jan 2022 15:02:49 +0100 Subject: [PATCH 098/102] revert ea96d499e74e9b20dddd8e8e35ac7d502120ba70 --- scripts/base_network.py | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/base_network.py b/scripts/base_network.py index 28d804cd..6cdf8afd 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -558,7 +558,6 @@ def base_network(eg_buses, eg_converters, eg_transformers, eg_lines, eg_links, n.name = 'PyPSA-Eur' n.set_snapshots(pd.date_range(freq='h', **config['snapshots'])) - n.snapshot_weightings[:] *= 8760. / n.snapshot_weightings.sum() n.import_components_from_dataframe(buses, "Bus") n.import_components_from_dataframe(lines, "Line") From 09f1135b512896e5e0915824a7436e93e5820194 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sat, 29 Jan 2022 16:17:46 +0100 Subject: [PATCH 099/102] address pandas 1.4 deprectations --- scripts/base_network.py | 4 +++- scripts/build_powerplants.py | 2 +- scripts/build_shapes.py | 8 +++++--- scripts/simplify_network.py | 3 ++- 4 files changed, 11 insertions(+), 6 deletions(-) diff --git a/scripts/base_network.py b/scripts/base_network.py index 6cdf8afd..9f7f1e7f 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -245,7 +245,9 @@ def _add_links_from_tyndp(buses, links, links_tyndp, europe_shape): links_tyndp.index = "T" + links_tyndp.index.astype(str) - return buses, links.append(links_tyndp, sort=True) + links = pd.concat([links, links_tyndp], sort=True) + + return buses, links def _load_lines_from_eg(buses, eg_lines): diff --git a/scripts/build_powerplants.py b/scripts/build_powerplants.py index d4ad4989..764028d1 100755 --- a/scripts/build_powerplants.py +++ b/scripts/build_powerplants.py @@ -91,7 +91,7 @@ def add_custom_powerplants(ppl, custom_powerplants, custom_ppl_query=False): dtype={'bus': 'str'}) if 
isinstance(custom_ppl_query, str): add_ppls.query(custom_ppl_query, inplace=True) - return ppl.append(add_ppls, sort=False, ignore_index=True, verify_integrity=True) + return pd.concat([ppl, add_ppls], sort=False, ignore_index=True, verify_integrity=True) if __name__ == "__main__": diff --git a/scripts/build_shapes.py b/scripts/build_shapes.py index 95867d89..22aed1fe 100644 --- a/scripts/build_shapes.py +++ b/scripts/build_shapes.py @@ -169,8 +169,10 @@ def nuts3(country_shapes, nuts3, nuts3pop, nuts3gdp, ch_cantons, ch_popgdp): swiss = pd.read_excel(ch_popgdp, skiprows=3, index_col=0) swiss.columns = swiss.columns.to_series().map(cantons) - pop = pop.append(pd.to_numeric(swiss.loc['Residents in 1000', 'CH040':])) - gdp = gdp.append(pd.to_numeric(swiss.loc['Gross domestic product per capita in Swiss francs', 'CH040':])) + swiss_pop = pd.to_numeric(swiss.loc['Residents in 1000', 'CH040':]) + pop = pd.concat([pop, swiss_pop]) + swiss_gdp = pd.to_numeric(swiss.loc['Gross domestic product per capita in Swiss francs', 'CH040':]) + gdp = pd.concat([gdp, swiss_gdp]) df = df.join(pd.DataFrame(dict(pop=pop, gdp=gdp))) @@ -194,7 +196,7 @@ def nuts3(country_shapes, nuts3, nuts3pop, nuts3gdp, ch_cantons, ch_popgdp): manual['geometry'] = manual['country'].map(country_shapes) manual = manual.dropna() - df = df.append(manual, sort=False) + df = pd.concat([df, manual], sort=False) df.loc['ME000', 'pop'] = 650. 
diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index 70f27bf2..287dfe32 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -124,7 +124,8 @@ def simplify_network_to_380(n): several_trafo_b = trafo_map.isin(trafo_map.index) trafo_map.loc[several_trafo_b] = trafo_map.loc[several_trafo_b].map(trafo_map) missing_buses_i = n.buses.index.difference(trafo_map.index) - trafo_map = trafo_map.append(pd.Series(missing_buses_i, missing_buses_i)) + missing = pd.Series(missing_buses_i, missing_buses_i) + trafo_map = pd.concat([trafo_map, missing]) for c in n.one_port_components|n.branch_components: df = n.df(c) From 835bfc0f6a46d483c95ad03bb9dfbe67190625b3 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 1 Feb 2022 08:51:47 +0100 Subject: [PATCH 100/102] bugfix: access right config variables in build renewable profiles --- scripts/build_renewable_profiles.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index b37e6825..36845da5 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -206,12 +206,12 @@ if __name__ == '__main__': noprogress = not snakemake.config['atlite'].get('show_progress', True) config = snakemake.config['renewable'][snakemake.wildcards.technology] resource = config['resource'] # pv panel config / wind turbine config - correction_factor = snakemake.config.get('correction_factor', 1.) + correction_factor = config.get('correction_factor', 1.) 
capacity_per_sqkm = config['capacity_per_sqkm'] - p_nom_max_meth = snakemake.config.get('potential', 'conservative') + p_nom_max_meth = config.get('potential', 'conservative') if isinstance(config.get("corine", {}), list): - snakemake.config['corine'] = {'grid_codes': config['corine']} + config['corine'] = {'grid_codes': config['corine']} if correction_factor != 1.: logger.info(f'correction_factor is set as {correction_factor}') @@ -226,7 +226,7 @@ if __name__ == '__main__': if config['natura']: excluder.add_raster(snakemake.input.natura, nodata=0, allow_no_overlap=True) - corine = snakemake.config.get("corine", {}) + corine = config.get("corine", {}) if "grid_codes" in corine: codes = corine["grid_codes"] excluder.add_raster(snakemake.input.corine, codes=codes, invert=True, crs=3035) @@ -326,11 +326,11 @@ if __name__ == '__main__': ds['underwater_fraction'] = xr.DataArray(underwater_fraction, [buses]) # select only buses with some capacity and minimal capacity factor - ds = ds.sel(bus=((ds['profile'].mean('time') > snakemake.config.get('min_p_max_pu', 0.)) & - (ds['p_nom_max'] > snakemake.config.get('min_p_nom_max', 0.)))) + ds = ds.sel(bus=((ds['profile'].mean('time') > config.get('min_p_max_pu', 0.)) & - (ds['p_nom_max'] > snakemake.config.get('min_p_nom_max', 0.)))) + (ds['p_nom_max'] > config.get('min_p_nom_max', 0.)))) - if 'clip_p_max_pu' in snakemake.config: - min_p_max_pu = snakemake.config['clip_p_max_pu'] + if 'clip_p_max_pu' in config: + min_p_max_pu = config['clip_p_max_pu'] ds['profile'] = ds['profile'].where(ds['profile'] >= min_p_max_pu, 0) ds.to_netcdf(snakemake.output.profile) From f7ee47238a87e400655a5bd17090c73a4679caea Mon Sep 17 00:00:00 2001 From: Koen van Greevenbroek Date: Thu, 10 Feb 2022 15:57:16 +0100 Subject: [PATCH 101/102] Fix random state for k-means clustering When the k-means algorithm is used to cluster networks, this is not deterministic by default. 
The result is that repeated runs of the `simplify_network` and `cluster_network` rules can and usually do produce different results that vary somewhat randomly. This makes results less reproducible when given only a pypsa-eur configuration file. The fix is to supply a fixed random state to the k-means algorithm. --- scripts/cluster_network.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 525196fc..99d428d4 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -226,6 +226,7 @@ def busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights=None, algori algorithm_kwds.setdefault('n_init', 1000) algorithm_kwds.setdefault('max_iter', 30000) algorithm_kwds.setdefault('tol', 1e-6) + algorithm_kwds.setdefault('random_state', 0) n.determine_network_topology() From a2d3edd82b7bfdafa3a389994d6cd8ec5f19d924 Mon Sep 17 00:00:00 2001 From: Koen van Greevenbroek Date: Thu, 10 Feb 2022 16:07:31 +0100 Subject: [PATCH 102/102] Document the k-means random state fix --- doc/release_notes.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index d1b7b356..c7d89c04 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -24,6 +24,8 @@ Upcoming Release * Resource definitions for memory usage now follow [Snakemake standard resource definition](https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#standard-resources) ```mem_mb`` rather than ``mem``. +* Network building is made deterministic by supplying a fixed random state to network clustering routines. + PyPSA-Eur 0.4.0 (22th September 2021) =====================================