Merge branch 'master' into simplify_to_substations

commit c5e59dab2f
Fabian Neumann, 2021-06-30 10:34:44 +02:00, committed via GitHub
19 changed files with 111 additions and 78 deletions

.github/workflows/ci.yaml vendored Normal file

@@ -0,0 +1,47 @@
# SPDX-FileCopyrightText: : 2021 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: CC0-1.0

name: CI

on: [push]

jobs:
  build:
    runs-on: ${{ matrix.os }}
    strategy:
      max-parallel: 5
      matrix:
        os:
        - ubuntu-latest
        - macos-latest
        - windows-latest
    defaults:
      run:
        shell: bash -l {0}
    steps:
    - uses: actions/checkout@v2

    - name: Setup Miniconda
      uses: conda-incubator/setup-miniconda@v2.1.1
      with:  # checks out environment 'test' by default
        mamba-version: "*"
        channels: conda-forge,defaults
        channel-priority: true

    - name: Install dependencies
      run: |
        echo -ne "url: ${CDSAPI_URL}\nkey: ${CDSAPI_TOKEN}\n" > ~/.cdsapirc
        echo -e "  - glpk\n  - ipopt<3.13.3" >> envs/environment.yaml
        mamba env update -f envs/environment.yaml --name test

    - name: Test snakemake workflow
      run: |
        conda list
        cp test/config.test1.yaml config.yaml
        snakemake --cores all solve_all_networks
        rm -rf resources/*.nc resources/*.geojson resources/*.h5 networks results

.travis.yml Deleted file

@@ -1,39 +0,0 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later

branches:
  only:
  - master

os:
- windows
- linux
- osx

language: bash

before_install:
  # install conda
  - wget https://raw.githubusercontent.com/trichter/conda4travis/latest/conda4travis.sh -O conda4travis.sh
  - source conda4travis.sh

  # install conda environment
  - conda install -c conda-forge mamba
  - mamba env create -f ./envs/environment.yaml
  - conda activate pypsa-eur

  # install open-source solver
  - mamba install -c conda-forge glpk ipopt'<3.13.3'

  # list packages for easier debugging
  - conda list

before_script:
  - 'echo -ne "url: ${CDSAPI_URL}\nkey: ${CDSAPI_TOKEN}\n" > ~/.cdsapirc'

script:
  - cp ./test/config.test1.yaml ./config.yaml
  - snakemake -j all solve_all_networks
  - rm -rf resources/*.nc resources/*.geojson resources/*.h5 networks results
  # could repeat for more configurations in future

README.md

@@ -4,7 +4,7 @@ SPDX-License-Identifier: CC-BY-4.0
 -->
 ![GitHub release (latest by date including pre-releases)](https://img.shields.io/github/v/release/pypsa/pypsa-eur?include_prereleases)
-[![Build Status](https://travis-ci.org/PyPSA/pypsa-eur.svg?branch=master)](https://travis-ci.org/PyPSA/pypsa-eur)
+[![Build Status](https://github.com/pypsa/pypsa-eur/actions/workflows/ci.yaml/badge.svg)](https://github.com/PyPSA/pypsa-eur/actions)
 [![Documentation](https://readthedocs.org/projects/pypsa-eur/badge/?version=latest)](https://pypsa-eur.readthedocs.io/en/latest/?badge=latest)
 ![Size](https://img.shields.io/github/repo-size/pypsa/pypsa-eur)
 [![Zenodo](https://zenodo.org/badge/DOI/10.5281/zenodo.3520874.svg)](https://doi.org/10.5281/zenodo.3520874)

config.default.yaml

@@ -40,7 +40,7 @@ enable:
 electricity:
   voltages: [220., 300., 380.]
   co2limit: 7.75e+7 # 0.05 * 3.1e9*0.5
-  co2base: 1.487e9
+  co2base: 1.487e+9
   agg_p_nom_limits: data/agg_p_nom_minmax.csv

   extendable_carriers:

doc/index.rst

@@ -9,8 +9,8 @@ PyPSA-Eur: An Open Optimisation Model of the European Transmission System
 .. image:: https://img.shields.io/github/v/release/pypsa/pypsa-eur?include_prereleases
     :alt: GitHub release (latest by date including pre-releases)

-.. image:: https://travis-ci.org/PyPSA/pypsa-eur.svg?branch=master
-    :target: https://travis-ci.org/PyPSA/pypsa-eur
+.. image:: https://github.com/pypsa/pypsa-eur/actions/workflows/ci.yaml/badge.svg
+    :target: https://github.com/PyPSA/pypsa-eur/actions

 .. image:: https://readthedocs.org/projects/pypsa-eur/badge/?version=latest
     :target: https://pypsa-eur.readthedocs.io/en/latest/?badge=latest

doc/release_notes.rst

@@ -1,5 +1,5 @@
 ..
-  SPDX-FileCopyrightText: 2019-2020 The PyPSA-Eur Authors
+  SPDX-FileCopyrightText: 2019-2021 The PyPSA-Eur Authors

   SPDX-License-Identifier: CC-BY-4.0

@@ -20,6 +20,11 @@ Upcoming Release
 * Add option to include marginal costs of links representing fuel cells, electrolysis, and battery inverters
   [`#232 <https://github.com/PyPSA/pypsa-eur/pull/232>`_].

 * Add option to pre-aggregate nodes without power injections (positive or negative, i.e. generation or demand) to electrically closest nodes or neighbors in ``simplify_network``. Defaults to ``False``. This affects nodes that are not substations or have no offshore connection.

+* Fix: Add escape in :mod:`base_network` if all TYNDP links are already contained in the network [`#246 <https://github.com/PyPSA/pypsa-eur/pull/246>`_].
+* Bugfix in :mod:`solve_operations_network`: optimised capacities are now fixed for all extendable links, not only HVDC links [`#244 <https://github.com/PyPSA/pypsa-eur/pull/244>`_].
+* The ``focus_weights`` are now also considered when pre-clustering in the :mod:`simplify_network` rule [`#241 <https://github.com/PyPSA/pypsa-eur/pull/241>`_].
+* Continuous integration testing switches to GitHub Actions from Travis CI [`#252 <https://github.com/PyPSA/pypsa-eur/pull/252>`_].
+* Bugfix in :mod:`build_renewable_profile` where offshore wind profiles could no longer be created [`#249 <https://github.com/PyPSA/pypsa-eur/pull/249>`_].

 PyPSA-Eur 0.3.0 (7th December 2020)
 ===================================

@@ -45,6 +50,7 @@ Using the ``{opts}`` wildcard for scenarios:
   uses the `tsam <https://tsam.readthedocs.io/en/latest/index.html>`_ package
   [`#186 <https://github.com/PyPSA/pypsa-eur/pull/186>`_].

 More OPSD integration:

 * Add renewable power plants from `OPSD <https://data.open-power-system-data.org/renewable_power_plants/2020-08-25>`_ to the network for specified technologies.

envs/environment.yaml

@@ -13,13 +13,12 @@ dependencies:
   - mamba # esp for windows build

   - pypsa>=0.17.1
-  - atlite>=0.2.2
+  - atlite>=0.2.4
   - dask<=2021.3.1 # until https://github.com/dask/dask/issues/7583 is solved

   # Dependencies of the workflow itself
   - xlrd
   - openpyxl
-  - scikit-learn
   - pycountry
   - seaborn
   - snakemake-minimal

@@ -28,8 +27,17 @@ dependencies:
   - pytables
   - lxml
   - powerplantmatching>=0.4.8
-  - numpy<=1.19.0 # otherwise macos fails
+  - numpy<=1.19 # until new PyPSA after 27-06-21
+  - pandas
+  - geopandas
+  - xarray
+  - netcdf4
+  - networkx
+  - scipy
+  - shapely
+  - progressbar2
+  - pyomo
+  - matplotlib

   # Keep in conda environment when calling ipython
   - ipython

@@ -37,6 +45,13 @@ dependencies:
   # GIS dependencies:
   - cartopy
   - descartes
+  - rasterio

+  # PyPSA-Eur-Sec Dependencies
+  - geopy
+  - tqdm
+  - pytz
+  - country_converter

   - pip:
     - vresutils==0.3.1

scripts/_helpers.py

@@ -156,7 +156,6 @@ def aggregate_p_curtailed(n):
     ])

 def aggregate_costs(n, flatten=False, opts=None, existing_only=False):
-    from six import iterkeys, itervalues

     components = dict(Link=("p_nom", "p0"),
                       Generator=("p_nom", "p"),

@@ -167,8 +166,8 @@ def aggregate_costs(n, flatten=False, opts=None, existing_only=False):
     costs = {}
     for c, (p_nom, p_attr) in zip(
-            n.iterate_components(iterkeys(components), skip_empty=False),
-            itervalues(components)
+            n.iterate_components(components.keys(), skip_empty=False),
+            components.values()
     ):
         if c.df.empty: continue
         if not existing_only: p_nom += "_opt"
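
Note: this pattern recurs throughout the commit; the `six` shims map one-to-one onto the native Python 3 dict protocol. A standalone sketch (the `components` mapping here is made up):

```python
components = dict(Link=("p_nom", "p0"), Generator=("p_nom", "p"))

# Before: from six import iterkeys, itervalues, iteritems
#   zip(iterkeys(components), itervalues(components))
# After: native dict views iterate in the same (insertion) order.
for name, (p_nom, p_attr) in zip(components.keys(), components.values()):
    print(name, p_nom, p_attr)

# iteritems(d) simply becomes d.items():
for name, attrs in components.items():
    print(name, attrs)
```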

scripts/base_network.py

@@ -74,7 +74,6 @@ import scipy as sp
 import networkx as nx

 from scipy.sparse import csgraph
-from six import iteritems
 from itertools import product

 from shapely.geometry import Point, LineString

@@ -213,6 +212,7 @@ def _add_links_from_tyndp(buses, links):
     if links_tyndp["j"].notnull().any():
         logger.info("TYNDP links already in the dataset (skipping): " + ", ".join(links_tyndp.loc[links_tyndp["j"].notnull(), "Name"]))
         links_tyndp = links_tyndp.loc[links_tyndp["j"].isnull()]
+        if links_tyndp.empty: return buses, links

     tree = sp.spatial.KDTree(buses[['x', 'y']])
     _, ind0 = tree.query(links_tyndp[["x1", "y1"]])

@@ -268,13 +268,13 @@ def _apply_parameter_corrections(n):
     if corrections is None: return

-    for component, attrs in iteritems(corrections):
+    for component, attrs in corrections.items():
         df = n.df(component)
         oid = _get_oid(df)
         if attrs is None: continue

-        for attr, repls in iteritems(attrs):
-            for i, r in iteritems(repls):
+        for attr, repls in attrs.items():
+            for i, r in repls.items():
                 if i == 'oid':
                     r = oid.map(repls["oid"]).dropna()
                 elif i == 'index':
scripts/build_renewable_profiles.py

@@ -183,6 +183,7 @@ import progressbar as pgb
 import geopandas as gpd
 import xarray as xr
 import numpy as np
+import functools
 import atlite
 import logging
 from pypsa.geo import haversine

@@ -235,7 +236,10 @@ if __name__ == '__main__':
         excluder.add_raster(paths.corine, codes=codes, buffer=buffer, crs=3035)

     if "max_depth" in config:
-        func = lambda v: v <= -config['max_depth']
+        # lambda not supported for atlite + multiprocessing
+        # use named function np.greater with partially frozen argument instead
+        # and exclude areas where: -max_depth > grid cell depth
+        func = functools.partial(np.greater,-config['max_depth'])
         excluder.add_raster(paths.gebco, codes=func, crs=4236, nodata=-1000)

     if 'min_shore_distance' in config:
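
Note: the swap works because multiprocessing pickles the exclusion function before shipping it to worker processes; lambdas cannot be pickled, while a `functools.partial` over a module-level function can. A standalone sketch (the depth threshold is made up):

```python
import functools
import pickle

import numpy as np

max_depth = 50.0  # illustrative value

exclude = lambda v: v <= -max_depth
# pickle.dumps(exclude)  # raises PicklingError: lambdas are not picklable

exclude2 = functools.partial(np.greater, -max_depth)  # exclude2(v) == (-max_depth > v)
pickle.dumps(exclude2)  # fine: ufuncs pickle by name

depths = np.array([-10.0, -50.0, -60.0])
print(exclude(depths))   # [False  True  True]  (inclusive <=)
print(exclude2(depths))  # [False False  True]  (strict >; differs only at exactly -max_depth)
```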

scripts/build_shapes.py

@@ -73,7 +73,7 @@ from _helpers import configure_logging

 import os
 import numpy as np
 from operator import attrgetter
-from six.moves import reduce
+from functools import reduce
 from itertools import takewhile

 import pandas as pd

scripts/cluster_network.py

@@ -135,7 +135,7 @@ import pyomo.environ as po
 import matplotlib.pyplot as plt
 import seaborn as sns

-from six.moves import reduce
+from functools import reduce

 from pypsa.networkclustering import (busmap_by_kmeans, busmap_by_spectral_clustering,
                                      _make_consense, get_clustering_from_busmap)

scripts/make_summary.py

@@ -60,7 +60,6 @@ import os
 import pypsa
 import pandas as pd

-from six import iteritems
 from add_electricity import load_costs, update_transmission_costs

 idx = pd.IndexSlice

@@ -386,7 +385,7 @@ def make_summaries(networks_dict, country='all'):
     for output in outputs:
         dfs[output] = pd.DataFrame(columns=columns,dtype=float)

-    for label, filename in iteritems(networks_dict):
+    for label, filename in networks_dict.items():
         print(label, filename)
         if not os.path.exists(filename):
             print("does not exist!!")

@@ -417,7 +416,7 @@ def make_summaries(networks_dict, country='all'):
 def to_csv(dfs):
     dir = snakemake.output[0]
     os.makedirs(dir, exist_ok=True)
-    for key, df in iteritems(dfs):
+    for key, df in dfs.items():
         df.to_csv(os.path.join(dir, f"{key}.csv"))

scripts/plot_network.py

@@ -25,7 +25,6 @@ from _helpers import (load_network_for_plots, aggregate_p, aggregate_costs,

 import pandas as pd
 import numpy as np

-from six.moves import zip

 import cartopy.crs as ccrs
 import matplotlib.pyplot as plt

scripts/prepare_network.py

@@ -62,7 +62,6 @@ import re
 import pypsa
 import numpy as np
 import pandas as pd

-from six import iteritems
 from add_electricity import load_costs, update_transmission_costs

@@ -145,7 +144,7 @@ def average_every_nhours(n, offset):
     for c in n.iterate_components():
         pnl = getattr(m, c.list_name+"_t")
-        for k, df in iteritems(c.pnl):
+        for k, df in c.pnl.items():
             if not df.empty:
                 pnl[k] = df.resample(offset).mean()

scripts/simplify_network.py

@@ -93,8 +93,7 @@ import numpy as np
 import scipy as sp
 from scipy.sparse.csgraph import connected_components, dijkstra

-from six import iteritems
-from six.moves import reduce
+from functools import reduce

 import pypsa
 from pypsa.io import import_components_from_dataframe, import_series_from_dataframe

@@ -193,7 +192,7 @@ def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate
         n.mremove(c, n.df(c).index)
         import_components_from_dataframe(n, df, c)

-    for attr, df in iteritems(pnl):
+    for attr, df in pnl.items():
         if not df.empty:
             import_series_from_dataframe(n, df, c, attr)

@@ -237,7 +236,7 @@ def simplify_links(n):
                       if len(G.adj[m]) > 2 or (set(G.adj[m]) - nodes)}

     for u in supernodes:
-        for m, ls in iteritems(G.adj[u]):
+        for m, ls in G.adj[u].items():
             if m not in nodes or m in seen: continue

             buses = [u, m]

@@ -245,7 +244,7 @@ def simplify_links(n):
             while m not in (supernodes | seen):
                 seen.add(m)
-                for m2, ls in iteritems(G.adj[m]):
+                for m2, ls in G.adj[m].items():
                     if m2 in seen or m2 == u: continue
                     buses.append(m2)
                     links.append(list(ls)) # [name for name in ls])

@@ -365,6 +364,8 @@ def aggregate_to_substations(n, buses_i=None):
 def cluster(n, n_clusters):
     logger.info(f"Clustering to {n_clusters} buses")

+    focus_weights = snakemake.config.get('focus_weights', None)
+
     renewable_carriers = pd.Index([tech
                                    for tech in n.generators.carrier.unique()
                                    if tech.split('-', 2)[0] in snakemake.config['renewable']])

@@ -378,7 +379,8 @@ def cluster(n, n_clusters):
                            for tech in renewable_carriers]))
                       if len(renewable_carriers) > 0 else 'conservative')
     clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap=False, potential_mode=potential_mode,
-                                           solver_name=snakemake.config['solving']['solver']['name'])
+                                           solver_name=snakemake.config['solving']['solver']['name'],
+                                           focus_weights=focus_weights)

     return clustering.network, clustering.busmap
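
Note: ``focus_weights`` pins a share of the total cluster budget to chosen countries, and this change makes the pre-clustering step in `simplify_network` honour it too. A hypothetical illustration of the semantics (country code and numbers invented; real values come from ``config.yaml``):

```python
n_clusters = 100
focus_weights = {"DE": 0.6}  # reserve 60% of all clusters for Germany

pinned = {ct: round(share * n_clusters) for ct, share in focus_weights.items()}
remaining = n_clusters - sum(pinned.values())  # left for the other countries
print(pinned, remaining)  # {'DE': 60} 40
```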

scripts/solve_network.py

@@ -241,7 +241,7 @@ def extra_functionality(n, snapshots):
     add_battery_constraints(n)

-def solve_network(n, config, solver_log=None, opts='', **kwargs):
+def solve_network(n, config, opts='', **kwargs):
     solver_options = config['solving']['solver'].copy()
     solver_name = solver_options.pop('name')
     cf_solving = config['solving']['options']

@@ -282,8 +282,9 @@ if __name__ == "__main__":
         with memory_logger(filename=fn, interval=30.) as mem:
             n = pypsa.Network(snakemake.input[0])
             n = prepare_network(n, solve_opts)
-            n = solve_network(n, config=snakemake.config, solver_dir=tmpdir,
-                              solver_log=snakemake.log.solver, opts=opts)
+            n = solve_network(n, config=snakemake.config, opts=opts,
+                              solver_dir=tmpdir,
+                              solver_logfile=snakemake.log.solver)
             n.export_to_netcdf(snakemake.output[0])

         logger.info("Maximum memory usage: {}".format(mem.mem_usage))

scripts/solve_operations_network.py

@@ -71,7 +71,7 @@ def set_parameters_from_optimized(n, n_optim):
             n_optim.lines[attr].reindex(lines_untyped_i, fill_value=0.)
     n.lines['s_nom_extendable'] = False

-    links_dc_i = n.links.index[n.links.carrier == 'DC']
+    links_dc_i = n.links.index[n.links.p_nom_extendable]
     n.links.loc[links_dc_i, 'p_nom'] = \
         n_optim.links['p_nom_opt'].reindex(links_dc_i, fill_value=0.)
     n.links.loc[links_dc_i, 'p_nom_extendable'] = False

@@ -111,8 +111,9 @@ if __name__ == "__main__":
     fn = getattr(snakemake.log, 'memory', None)
     with memory_logger(filename=fn, interval=30.) as mem:
         n = prepare_network(n, solve_opts=snakemake.config['solving']['options'])
-        n = solve_network(n, config, solver_dir=tmpdir,
-                          solver_log=snakemake.log.solver, opts=opts)
+        n = solve_network(n, config=config, opts=opts,
+                          solver_dir=tmpdir,
+                          solver_logfile=snakemake.log.solver)
         n.export_to_netcdf(snakemake.output[0])

     logger.info("Maximum memory usage: {}".format(mem.mem_usage))

test/config.test1.yaml

@@ -24,7 +24,7 @@ clustering:

 snapshots:
   start: "2013-03-01"
-  end: "2014-04-01"
+  end: "2013-03-08"
   closed: 'left' # end is not inclusive

 enable:

@@ -61,7 +61,7 @@ atlite:
       module: era5
       x: [4., 15.]
       y: [46., 56.]
-      time: ["2013-03", "2013-03"]
+      time: ["2013-03-01", "2013-03-08"]

 renewable:
   onwind:
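
Note: the old test window ran from 2013-03-01 to 2014-04-01, i.e. thirteen months of hourly snapshots; the new one covers a single week, which keeps the CI run cheap. With ``closed: 'left'`` the end date is excluded, so exactly 7 * 24 = 168 snapshots remain:

```python
import pandas as pd

# 'inclusive' is the modern spelling of the config's 'closed' (pandas >= 1.4).
snapshots = pd.date_range("2013-03-01", "2013-03-08", freq="H", inclusive="left")
print(len(snapshots))  # 168
```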