Merge branch 'master' into simplify_to_substations
commit c5e59dab2f
.github/workflows/ci.yaml (vendored, new file, 47 lines)
@@ -0,0 +1,47 @@
+# SPDX-FileCopyrightText: : 2021 The PyPSA-Eur Authors
+#
+# SPDX-License-Identifier: CC0-1.0
+
+name: CI
+
+on: [push]
+
+jobs:
+  build:
+
+    runs-on: ${{ matrix.os }}
+    strategy:
+      max-parallel: 5
+      matrix:
+        os:
+        - ubuntu-latest
+        - macos-latest
+        - windows-latest
+
+    defaults:
+      run:
+        shell: bash -l {0}
+
+    steps:
+
+    - uses: actions/checkout@v2
+
+    - name: Setup Miniconda
+      uses: conda-incubator/setup-miniconda@v2.1.1
+      with: # checks out environment 'test' by default
+        mamba-version: "*"
+        channels: conda-forge,defaults
+        channel-priority: true
+
+    - name: Install dependencies
+      run: |
+        echo -ne "url: ${CDSAPI_URL}\nkey: ${CDSAPI_TOKEN}\n" > ~/.cdsapirc
+        echo -e "  - glpk\n  - ipopt<3.13.3" >> envs/environment.yaml
+        mamba env update -f envs/environment.yaml --name test
+
+    - name: Test snakemake workflow
+      run: |
+        conda list
+        cp test/config.test1.yaml config.yaml
+        snakemake --cores all solve_all_networks
+        rm -rf resources/*.nc resources/*.geojson resources/*.h5 networks results
.travis.yml (deleted, 39 lines)
@@ -1,39 +0,0 @@
-# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
-#
-# SPDX-License-Identifier: GPL-3.0-or-later
-
-branches:
-  only:
-  - master
-
-os:
-  - windows
-  - linux
-  - osx
-
-language: bash
-
-before_install:
-  # install conda
-  - wget https://raw.githubusercontent.com/trichter/conda4travis/latest/conda4travis.sh -O conda4travis.sh
-  - source conda4travis.sh
-
-  # install conda environment
-  - conda install -c conda-forge mamba
-  - mamba env create -f ./envs/environment.yaml
-  - conda activate pypsa-eur
-
-  # install open-source solver
-  - mamba install -c conda-forge glpk ipopt'<3.13.3'
-
-  # list packages for easier debugging
-  - conda list
-
-before_script:
-  - 'echo -ne "url: ${CDSAPI_URL}\nkey: ${CDSAPI_TOKEN}\n" > ~/.cdsapirc'
-
-script:
-  - cp ./test/config.test1.yaml ./config.yaml
-  - snakemake -j all solve_all_networks
-  - rm -rf resources/*.nc resources/*.geojson resources/*.h5 networks results
-# could repeat for more configurations in future
README.md
@@ -4,7 +4,7 @@ SPDX-License-Identifier: CC-BY-4.0
 -->
 
 ![GitHub release (latest by date including pre-releases)](https://img.shields.io/github/v/release/pypsa/pypsa-eur?include_prereleases)
-[![Build Status](https://travis-ci.org/PyPSA/pypsa-eur.svg?branch=master)](https://travis-ci.org/PyPSA/pypsa-eur)
+[![Build Status](https://github.com/pypsa/pypsa-eur/actions/workflows/ci.yaml/badge.svg)](https://github.com/PyPSA/pypsa-eur/actions)
 [![Documentation](https://readthedocs.org/projects/pypsa-eur/badge/?version=latest)](https://pypsa-eur.readthedocs.io/en/latest/?badge=latest)
 ![Size](https://img.shields.io/github/repo-size/pypsa/pypsa-eur)
 [![Zenodo](https://zenodo.org/badge/DOI/10.5281/zenodo.3520874.svg)](https://doi.org/10.5281/zenodo.3520874)
config.default.yaml
@@ -40,7 +40,7 @@ enable:
 electricity:
   voltages: [220., 300., 380.]
   co2limit: 7.75e+7 # 0.05 * 3.1e9*0.5
-  co2base: 1.487e9
+  co2base: 1.487e+9
   agg_p_nom_limits: data/agg_p_nom_minmax.csv
 
   extendable_carriers:
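Note on the one-character change above: PyYAML implements the YAML 1.1 float resolver, whose regular expression only accepts exponents with an explicit sign, so 1.487e9 silently loads as a string while 1.487e+9 loads as a float. A minimal sketch:

import yaml

# PyYAML's YAML 1.1 float regex requires a signed exponent
print(type(yaml.safe_load("co2base: 1.487e9")["co2base"]))   # <class 'str'>
print(type(yaml.safe_load("co2base: 1.487e+9")["co2base"]))  # <class 'float'>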
doc/index.rst
@@ -9,8 +9,8 @@ PyPSA-Eur: An Open Optimisation Model of the European Transmission System
 .. image:: https://img.shields.io/github/v/release/pypsa/pypsa-eur?include_prereleases
     :alt: GitHub release (latest by date including pre-releases)
 
-.. image:: https://travis-ci.org/PyPSA/pypsa-eur.svg?branch=master
-    :target: https://travis-ci.org/PyPSA/pypsa-eur
+.. image:: https://github.com/pypsa/pypsa-eur/actions/workflows/ci.yaml/badge.svg
+    :target: https://github.com/PyPSA/pypsa-eur/actions
 
 .. image:: https://readthedocs.org/projects/pypsa-eur/badge/?version=latest
     :target: https://pypsa-eur.readthedocs.io/en/latest/?badge=latest
doc/release_notes.rst
@@ -1,5 +1,5 @@
 ..
-  SPDX-FileCopyrightText: 2019-2020 The PyPSA-Eur Authors
+  SPDX-FileCopyrightText: 2019-2021 The PyPSA-Eur Authors
 
   SPDX-License-Identifier: CC-BY-4.0
 
@@ -20,6 +20,11 @@ Upcoming Release
 * Add option to include marginal costs of links representing fuel cells, electrolysis, and battery inverters
   [`#232 <https://github.com/PyPSA/pypsa-eur/pull/232>`_].
 * Add option to pre-aggregate nodes without power injections (positive or negative, i.e. generation or demand) to electrically closest nodes or neighbors in ``simplify_network``. Defaults to ``False``. This affects nodes that are not substations or have no offshore connection.
+* Fix: Add escape in :mod:`base_network` if all TYNDP links are already contained in the network [`#246 <https://github.com/PyPSA/pypsa-eur/pull/246>`_].
+* Bugfix in :mod:`solve_operations_network`: optimised capacities are now fixed for all extendable links, not only HVDC links [`#244 <https://github.com/PyPSA/pypsa-eur/pull/244>`_].
+* The ``focus_weights`` are now also considered when pre-clustering in the :mod:`simplify_network` rule [`#241 <https://github.com/PyPSA/pypsa-eur/pull/241>`_].
+* Continuous integration testing switches to GitHub Actions from Travis CI [`#252 <https://github.com/PyPSA/pypsa-eur/pull/252>`_].
+* Bugfix in :mod:`build_renewable_profile` where offshore wind profiles could no longer be created [`#249 <https://github.com/PyPSA/pypsa-eur/pull/249>`_].
 
 PyPSA-Eur 0.3.0 (7th December 2020)
 ===================================
@@ -45,6 +50,7 @@ Using the ``{opts}`` wildcard for scenarios:
   uses the `tsam <https://tsam.readthedocs.io/en/latest/index.html>`_ package
   [`#186 <https://github.com/PyPSA/pypsa-eur/pull/186>`_].
 
+
 More OPSD integration:
 
 * Add renewable power plants from `OPSD <https://data.open-power-system-data.org/renewable_power_plants/2020-08-25>`_ to the network for specified technologies.
envs/environment.yaml
@@ -13,13 +13,12 @@ dependencies:
   - mamba # esp for windows build
 
   - pypsa>=0.17.1
-  - atlite>=0.2.2
-  - dask<=2021.3.1 # until https://github.com/dask/dask/issues/7583 is solved
+  - atlite>=0.2.4
 
   # Dependencies of the workflow itself
   - xlrd
   - openpyxl
   - scikit-learn
   - pycountry
   - seaborn
   - snakemake-minimal
@@ -28,8 +27,17 @@
   - pytables
   - lxml
   - powerplantmatching>=0.4.8
-  - numpy<=1.19.0 # otherwise macos fails
-
+  - numpy<=1.19 # until new PyPSA after 27-06-21
+  - pandas
+  - geopandas
+  - xarray
+  - netcdf4
+  - networkx
+  - scipy
+  - shapely
+  - progressbar2
+  - pyomo
+  - matplotlib
 
   # Keep in conda environment when calling ipython
   - ipython
@@ -37,6 +45,13 @@ dependencies:
   # GIS dependencies:
   - cartopy
   - descartes
   - rasterio
+
+  # PyPSA-Eur-Sec Dependencies
+  - geopy
+  - tqdm
+  - pytz
+  - country_converter
+
   - pip:
     - vresutils==0.3.1
scripts/_helpers.py
@@ -156,7 +156,6 @@ def aggregate_p_curtailed(n):
     ])
 
 def aggregate_costs(n, flatten=False, opts=None, existing_only=False):
-    from six import iterkeys, itervalues
 
     components = dict(Link=("p_nom", "p0"),
                       Generator=("p_nom", "p"),
@@ -167,8 +166,8 @@ def aggregate_costs(n, flatten=False, opts=None, existing_only=False):
 
     costs = {}
     for c, (p_nom, p_attr) in zip(
-            n.iterate_components(iterkeys(components), skip_empty=False),
-            itervalues(components)
+            n.iterate_components(components.keys(), skip_empty=False),
+            components.values()
     ):
         if c.df.empty: continue
         if not existing_only: p_nom += "_opt"
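This hunk and most of the script hunks below drop the six compatibility shims: on Python 3, dict views replace six's iterator helpers one-for-one. A minimal sketch of the equivalence:

# six.iterkeys(d)   -> d.keys()
# six.iteritems(d)  -> d.items()
# six.itervalues(d) -> d.values()
components = dict(Link=("p_nom", "p0"), Generator=("p_nom", "p"))
for name, (p_nom, p_attr) in components.items():
    print(name, p_nom, p_attr)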
scripts/base_network.py
@@ -74,7 +74,6 @@ import scipy as sp
 import networkx as nx
 
 from scipy.sparse import csgraph
-from six import iteritems
 from itertools import product
 
 from shapely.geometry import Point, LineString
@@ -213,6 +212,7 @@ def _add_links_from_tyndp(buses, links):
     if links_tyndp["j"].notnull().any():
         logger.info("TYNDP links already in the dataset (skipping): " + ", ".join(links_tyndp.loc[links_tyndp["j"].notnull(), "Name"]))
         links_tyndp = links_tyndp.loc[links_tyndp["j"].isnull()]
+        if links_tyndp.empty: return buses, links
 
     tree = sp.spatial.KDTree(buses[['x', 'y']])
     _, ind0 = tree.query(links_tyndp[["x1", "y1"]])
@@ -268,13 +268,13 @@ def _apply_parameter_corrections(n):
 
     if corrections is None: return
 
-    for component, attrs in iteritems(corrections):
+    for component, attrs in corrections.items():
         df = n.df(component)
         oid = _get_oid(df)
         if attrs is None: continue
 
-        for attr, repls in iteritems(attrs):
-            for i, r in iteritems(repls):
+        for attr, repls in attrs.items():
+            for i, r in repls.items():
                 if i == 'oid':
                     r = oid.map(repls["oid"]).dropna()
                 elif i == 'index':
scripts/build_renewable_profiles.py
@@ -183,6 +183,7 @@ import progressbar as pgb
 import geopandas as gpd
 import xarray as xr
 import numpy as np
+import functools
 import atlite
 import logging
 from pypsa.geo import haversine
@@ -235,7 +236,10 @@ if __name__ == '__main__':
         excluder.add_raster(paths.corine, codes=codes, buffer=buffer, crs=3035)
 
     if "max_depth" in config:
-        func = lambda v: v <= -config['max_depth']
+        # lambda not supported for atlite + multiprocessing
+        # use named function np.greater with partially frozen argument instead
+        # and exclude areas where: -max_depth > grid cell depth
+        func = functools.partial(np.greater,-config['max_depth'])
         excluder.add_raster(paths.gebco, codes=func, crs=4236, nodata=-1000)
 
     if 'min_shore_distance' in config:
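The comment in the hunk above is the whole story: multiprocessing pickles the exclusion function, lambdas cannot be pickled, and a functools.partial of the top-level ufunc np.greater can. A minimal sketch (max_depth is an illustrative value; PyPSA-Eur reads it from the config):

import pickle
import functools
import numpy as np

max_depth = 50.0  # illustrative; taken from config['max_depth'] in the script

# behaves like `lambda v: -max_depth > v`, i.e. exclude cells deeper than max_depth
func = functools.partial(np.greater, -max_depth)
print(func(np.array([-100.0, -10.0])))  # [ True False]

# partials of named top-level callables survive pickling for worker processes;
# pickle.dumps(lambda v: v <= -max_depth) would raise a PicklingError instead
func_restored = pickle.loads(pickle.dumps(func))
print(func_restored(np.array([-100.0, -10.0])))  # [ True False]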
scripts/build_shapes.py
@@ -73,7 +73,7 @@ from _helpers import configure_logging
 import os
 import numpy as np
 from operator import attrgetter
-from six.moves import reduce
+from functools import reduce
 from itertools import takewhile
 
 import pandas as pd
scripts/cluster_network.py
@@ -135,7 +135,7 @@ import pyomo.environ as po
 import matplotlib.pyplot as plt
 import seaborn as sns
 
-from six.moves import reduce
+from functools import reduce
 
 from pypsa.networkclustering import (busmap_by_kmeans, busmap_by_spectral_clustering,
                                      _make_consense, get_clustering_from_busmap)
scripts/make_summary.py
@@ -60,7 +60,6 @@ import os
 import pypsa
 import pandas as pd
 
-from six import iteritems
 from add_electricity import load_costs, update_transmission_costs
 
 idx = pd.IndexSlice
@@ -386,7 +385,7 @@ def make_summaries(networks_dict, country='all'):
     for output in outputs:
         dfs[output] = pd.DataFrame(columns=columns,dtype=float)
 
-    for label, filename in iteritems(networks_dict):
+    for label, filename in networks_dict.items():
         print(label, filename)
         if not os.path.exists(filename):
             print("does not exist!!")
@@ -417,7 +416,7 @@ def make_summaries(networks_dict, country='all'):
 def to_csv(dfs):
     dir = snakemake.output[0]
     os.makedirs(dir, exist_ok=True)
-    for key, df in iteritems(dfs):
+    for key, df in dfs.items():
         df.to_csv(os.path.join(dir, f"{key}.csv"))
 
 
scripts/plot_network.py
@@ -25,7 +25,6 @@ from _helpers import (load_network_for_plots, aggregate_p, aggregate_costs,
 
 import pandas as pd
 import numpy as np
-from six.moves import zip
 
 import cartopy.crs as ccrs
 import matplotlib.pyplot as plt
scripts/prepare_network.py
@@ -62,7 +62,6 @@ import re
 import pypsa
 import numpy as np
 import pandas as pd
-from six import iteritems
 
 from add_electricity import load_costs, update_transmission_costs
 
@@ -145,7 +144,7 @@ def average_every_nhours(n, offset):
 
     for c in n.iterate_components():
         pnl = getattr(m, c.list_name+"_t")
-        for k, df in iteritems(c.pnl):
+        for k, df in c.pnl.items():
            if not df.empty:
                pnl[k] = df.resample(offset).mean()
 
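For context, average_every_nhours is what the nH {opts} wildcard (e.g. 3H) triggers: every time series of the network is block-averaged with pandas resampling. A minimal sketch of that averaging step:

import pandas as pd

# six hourly values averaged down to two 3-hourly values
idx = pd.date_range("2013-03-01", periods=6, freq="H")
s = pd.Series([1., 2., 3., 4., 5., 6.], index=idx)
print(s.resample("3H").mean())  # 2.0 for 00:00-02:00, 5.0 for 03:00-05:00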
scripts/simplify_network.py
@@ -93,8 +93,7 @@ import numpy as np
 import scipy as sp
 from scipy.sparse.csgraph import connected_components, dijkstra
 
-from six import iteritems
-from six.moves import reduce
+from functools import reduce
 
 import pypsa
 from pypsa.io import import_components_from_dataframe, import_series_from_dataframe
@@ -193,7 +192,7 @@ def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate
         n.mremove(c, n.df(c).index)
 
         import_components_from_dataframe(n, df, c)
-        for attr, df in iteritems(pnl):
+        for attr, df in pnl.items():
             if not df.empty:
                 import_series_from_dataframe(n, df, c, attr)
 
@@ -237,7 +236,7 @@ def simplify_links(n):
                       if len(G.adj[m]) > 2 or (set(G.adj[m]) - nodes)}
 
     for u in supernodes:
-        for m, ls in iteritems(G.adj[u]):
+        for m, ls in G.adj[u].items():
            if m not in nodes or m in seen: continue
 
            buses = [u, m]
@@ -245,7 +244,7 @@ def simplify_links(n):
 
             while m not in (supernodes | seen):
                 seen.add(m)
-                for m2, ls in iteritems(G.adj[m]):
+                for m2, ls in G.adj[m].items():
                     if m2 in seen or m2 == u: continue
                     buses.append(m2)
                     links.append(list(ls)) # [name for name in ls])
@@ -365,6 +364,8 @@ def aggregate_to_substations(n, buses_i=None):
 def cluster(n, n_clusters):
     logger.info(f"Clustering to {n_clusters} buses")
 
+    focus_weights = snakemake.config.get('focus_weights', None)
+
     renewable_carriers = pd.Index([tech
                                    for tech in n.generators.carrier.unique()
                                    if tech.split('-', 2)[0] in snakemake.config['renewable']])
@@ -378,7 +379,8 @@ def cluster(n, n_clusters):
                                for tech in renewable_carriers]))
             if len(renewable_carriers) > 0 else 'conservative')
     clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap=False, potential_mode=potential_mode,
-                                           solver_name=snakemake.config['solving']['solver']['name'])
+                                           solver_name=snakemake.config['solving']['solver']['name'],
+                                           focus_weights=focus_weights)
 
     return clustering.network, clustering.busmap
 
scripts/solve_network.py
@@ -241,7 +241,7 @@ def extra_functionality(n, snapshots):
         add_battery_constraints(n)
 
 
-def solve_network(n, config, solver_log=None, opts='', **kwargs):
+def solve_network(n, config, opts='', **kwargs):
     solver_options = config['solving']['solver'].copy()
     solver_name = solver_options.pop('name')
     cf_solving = config['solving']['options']
@@ -282,8 +282,9 @@ if __name__ == "__main__":
     with memory_logger(filename=fn, interval=30.) as mem:
         n = pypsa.Network(snakemake.input[0])
         n = prepare_network(n, solve_opts)
-        n = solve_network(n, config=snakemake.config, solver_dir=tmpdir,
-                          solver_log=snakemake.log.solver, opts=opts)
+        n = solve_network(n, config=snakemake.config, opts=opts,
+                          solver_dir=tmpdir,
+                          solver_logfile=snakemake.log.solver)
         n.export_to_netcdf(snakemake.output[0])
 
     logger.info("Maximum memory usage: {}".format(mem.mem_usage))
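The keyword rename is not cosmetic: solver_logfile is the argument name PyPSA's own lopf API expects, so the remaining keyword arguments can be passed straight through. A minimal sketch, assuming solve_network forwards its **kwargs to n.lopf as in this script (network path hypothetical):

import pypsa

n = pypsa.Network("networks/elec_s_6.nc")  # hypothetical input file
n.lopf(solver_name="glpk", pyomo=False,
       solver_logfile="solver.log")  # previously passed as solver_log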
scripts/solve_operations_network.py
@@ -71,7 +71,7 @@ def set_parameters_from_optimized(n, n_optim):
         n_optim.lines[attr].reindex(lines_untyped_i, fill_value=0.)
     n.lines['s_nom_extendable'] = False
 
-    links_dc_i = n.links.index[n.links.carrier == 'DC']
+    links_dc_i = n.links.index[n.links.p_nom_extendable]
     n.links.loc[links_dc_i, 'p_nom'] = \
         n_optim.links['p_nom_opt'].reindex(links_dc_i, fill_value=0.)
     n.links.loc[links_dc_i, 'p_nom_extendable'] = False
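The #244 bugfix in miniature: optimised capacities must be fixed for every extendable link (e.g. battery inverters), not only those with carrier 'DC'. A small pandas sketch with hypothetical link names:

import pandas as pd

links = pd.DataFrame({"carrier": ["DC", "battery charger"],
                      "p_nom_extendable": [True, True]},
                     index=["link1", "link2"])
print(links.index[links.carrier == 'DC'])   # old selection: only 'link1'
print(links.index[links.p_nom_extendable])  # new selection: 'link1' and 'link2'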
@@ -111,8 +111,9 @@ if __name__ == "__main__":
     fn = getattr(snakemake.log, 'memory', None)
     with memory_logger(filename=fn, interval=30.) as mem:
         n = prepare_network(n, solve_opts=snakemake.config['solving']['options'])
-        n = solve_network(n, config, solver_dir=tmpdir,
-                          solver_log=snakemake.log.solver, opts=opts)
+        n = solve_network(n, config=config, opts=opts,
+                          solver_dir=tmpdir,
+                          solver_logfile=snakemake.log.solver)
         n.export_to_netcdf(snakemake.output[0])
 
     logger.info("Maximum memory usage: {}".format(mem.mem_usage))
test/config.test1.yaml
@@ -24,7 +24,7 @@ clustering:
 
 snapshots:
   start: "2013-03-01"
-  end: "2014-04-01"
+  end: "2013-03-08"
   closed: 'left' # end is not inclusive
 
 enable:
@@ -61,7 +61,7 @@ atlite:
     module: era5
     x: [4., 15.]
     y: [46., 56.]
-    time: ["2013-03", "2013-03"]
+    time: ["2013-03-01", "2013-03-08"]
 
 renewable:
   onwind: