update environment and address deprecations (#291)

* update environment and address deprecations

* check pandas<1.3

* limit snakemake due to ascii encoding error, address review comments

* remove version restriction on snakemake
Fabian Neumann 2022-01-14 08:43:21 +01:00 committed by GitHub
parent 5dd24ca87f
commit 72e277a007
6 changed files with 13 additions and 12 deletions

README.md

@@ -58,7 +58,7 @@ The dataset consists of:
- Electrical demand time series from the
[OPSD project](https://open-power-system-data.org/).
- Renewable time series based on ERA5 and SARAH, assembled using the [atlite tool](https://github.com/FRESNA/atlite).
-- Geographical potentials for wind and solar generators based on land use (CORINE) and excluding nature reserves (Natura2000) are computed with the [vresutils library](https://github.com/FRESNA/vresutils) and the [glaes library](https://github.com/FZJ-IEK3-VSA/glaes).
+- Geographical potentials for wind and solar generators based on land use (CORINE) and excluding nature reserves (Natura2000) are computed with the [atlite library](https://github.com/PyPSA/atlite).
Already-built versions of the model can be found in the accompanying [Zenodo
repository](https://doi.org/10.5281/zenodo.3601881).

envs/environment.yaml

@@ -10,9 +10,9 @@ dependencies:
- python>=3.8
- pip
-- pypsa>=0.18
+- pypsa>=0.18.1
- atlite>=0.2.5
-- dask<=2021.3.1 # until https://github.com/dask/dask/issues/7583 is solved
+- dask
# Dependencies of the workflow itself
- xlrd
@@ -36,7 +36,7 @@ dependencies:
- progressbar2
- pyomo
- matplotlib
-- proj<8
+- proj
# Keep in conda environment when calling ipython
- ipython
@@ -54,5 +54,5 @@ dependencies:
- tabula-py
- pip:
-- vresutils==0.3.1
+- vresutils>=0.3.1
- tsam>=1.1.0
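
The version floors above can be sanity-checked at runtime. A minimal sketch (not part of the repository) using the standard importlib.metadata API, assuming the packaging library is available:

    from importlib.metadata import version
    from packaging.version import Version

    # floors from envs/environment.yaml that this commit relies on
    assert Version(version("pypsa")) >= Version("0.18.1")
    assert Version(version("atlite")) >= Version("0.2.5")
    assert Version(version("vresutils")) >= Version("0.3.1")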

scripts/add_electricity.py

@@ -95,7 +95,6 @@ import powerplantmatching as pm
from powerplantmatching.export import map_country_bus
from vresutils.costdata import annuity
-from vresutils.load import timeseries_opsd
from vresutils import transfer as vtransfer
idx = pd.IndexSlice
@@ -227,7 +226,6 @@ def attach_load(n):
# relative factors 0.6 and 0.4 have been determined from a linear
# regression on the country to continent load data
-# (refer to vresutils.load._upsampling_weights)
factors = normed(0.6 * normed(gdp_n) + 0.4 * normed(pop_n))
return pd.DataFrame(factors.values * l.values[:,np.newaxis],
index=l.index, columns=factors.index)
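
The surviving lines implement the GDP/population-weighted disaggregation of a country load series onto regions. A toy sketch with made-up numbers, assuming normed(s) = s / s.sum() as defined in add_electricity.py (gdp_n and pop_n stand in for the regional GDP and population weights computed earlier in the function):

    import numpy as np
    import pandas as pd

    def normed(s):
        return s / s.sum()

    # hypothetical GDP and population of two regions in one country
    gdp_n = pd.Series([30.0, 10.0], index=["region_a", "region_b"])
    pop_n = pd.Series([2.0, 6.0], index=["region_a", "region_b"])

    # hypothetical country-level hourly load
    l = pd.Series([100.0, 120.0],
                  index=pd.date_range("2013-01-01", periods=2, freq="H"))

    # blend GDP and population shares 60/40, re-normalise to sum to one
    factors = normed(0.6 * normed(gdp_n) + 0.4 * normed(pop_n))

    # one load column per region; each row still sums to the country total
    regional = pd.DataFrame(factors.values * l.values[:, np.newaxis],
                            index=l.index, columns=factors.index)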

scripts/base_network.py

@@ -97,7 +97,7 @@ def _get_country(df):
def _find_closest_links(links, new_links, distance_upper_bound=1.5):
-treecoords = np.asarray([np.asarray(shapely.wkt.loads(s))[[0, -1]].flatten()
+treecoords = np.asarray([np.asarray(shapely.wkt.loads(s).coords)[[0, -1]].flatten()
for s in links.geometry])
querycoords = np.vstack([new_links[['x1', 'y1', 'x2', 'y2']],
new_links[['x2', 'y2', 'x1', 'y1']]])
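
The .coords change addresses a Shapely deprecation: since Shapely 1.8 the implicit NumPy array interface on geometries is deprecated (and removed in 2.0), so coordinates must be taken from the .coords attribute explicitly. A standalone sketch of the new idiom:

    import numpy as np
    import shapely.wkt

    line = shapely.wkt.loads("LINESTRING (0 0, 1 1, 2 0)")

    # old, deprecated: np.asarray(line)
    coords = np.asarray(line.coords)        # shape (3, 2)
    endpoints = coords[[0, -1]].flatten()   # (x0, y0, x2, y2), the per-link
                                            # endpoint row used for the lookup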

scripts/build_shapes.py

@@ -79,7 +79,7 @@ from itertools import takewhile
import pandas as pd
import geopandas as gpd
from shapely.geometry import MultiPolygon, Polygon
-from shapely.ops import cascaded_union
+from shapely.ops import unary_union
import pycountry as pyc
logger = logging.getLogger(__name__)
@@ -95,7 +95,7 @@ def _get_country(target, **keys):
def _simplify_polys(polys, minarea=0.1, tolerance=0.01, filterremote=True):
if isinstance(polys, MultiPolygon):
-polys = sorted(polys, key=attrgetter('area'), reverse=True)
+polys = sorted(polys.geoms, key=attrgetter('area'), reverse=True)
mainpoly = polys[0]
mainlength = np.sqrt(mainpoly.area/(2.*np.pi))
if mainpoly.area > minarea:
@@ -139,7 +139,7 @@ def country_cover(country_shapes, eez_shapes=None):
if eez_shapes is not None:
shapes += list(eez_shapes)
-europe_shape = cascaded_union(shapes)
+europe_shape = unary_union(shapes)
if isinstance(europe_shape, MultiPolygon):
europe_shape = max(europe_shape, key=attrgetter('area'))
return Polygon(shell=europe_shape.exterior)
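
Both substitutions in this file follow the same Shapely 1.8 migration: shapely.ops.unary_union supersedes the deprecated cascaded_union, and multi-part geometries expose their members via .geoms instead of being directly iterable. A self-contained sketch of the new idioms on toy geometries:

    from operator import attrgetter
    from shapely.geometry import MultiPolygon, Point
    from shapely.ops import unary_union

    # two disjoint circular polygons bundled into a MultiPolygon
    parts = MultiPolygon([Point(0, 0).buffer(1.0), Point(5, 0).buffer(2.0)])

    merged = unary_union(list(parts.geoms))              # was: cascaded_union(...)
    largest = max(parts.geoms, key=attrgetter("area"))   # was: max(parts, ...)
    ordered = sorted(parts.geoms, key=attrgetter("area"), reverse=True)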

scripts/cluster_network.py

@@ -140,6 +140,9 @@ from functools import reduce
from pypsa.networkclustering import (busmap_by_kmeans, busmap_by_spectral_clustering,
_make_consense, get_clustering_from_busmap)
+import warnings
+warnings.filterwarnings(action='ignore', category=UserWarning)
from add_electricity import load_costs
idx = pd.IndexSlice
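
The added filter silences all UserWarning chatter from the clustering dependencies. If only specific messages should be suppressed, warnings.filterwarnings also accepts a message regex; a hedged sketch with an illustrative pattern (not what the commit does):

    import warnings

    # ignore only UserWarnings whose message matches; keep the rest visible
    warnings.filterwarnings(action="ignore", category=UserWarning,
                            message=".*deprecated.*")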
@@ -313,7 +316,7 @@ def cluster_regions(busmaps, input=None, output=None):
for which in ('regions_onshore', 'regions_offshore'):
regions = gpd.read_file(getattr(input, which)).set_index('name')
-geom_c = regions.geometry.groupby(busmap).apply(shapely.ops.cascaded_union)
+geom_c = regions.geometry.groupby(busmap).apply(shapely.ops.unary_union)
regions_c = gpd.GeoDataFrame(dict(geometry=geom_c))
regions_c.index.name = 'name'
save_to_geojson(regions_c, getattr(output, which))
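
The groupby/apply pattern above dissolves the per-bus regions of each cluster into a single shape. A toy sketch with stand-in data (the real regions and busmap come from the workflow's inputs):

    import geopandas as gpd
    import pandas as pd
    import shapely.ops
    from shapely.geometry import box

    # three unit-square bus regions, two of which belong to cluster c1
    regions = gpd.GeoDataFrame(
        geometry=[box(0, 0, 1, 1), box(1, 0, 2, 1), box(0, 1, 1, 2)],
        index=pd.Index(["bus1", "bus2", "bus3"], name="name"))

    busmap = pd.Series({"bus1": "c1", "bus2": "c1", "bus3": "c2"})

    # merge all regions mapped to the same cluster into one geometry
    geom_c = regions.geometry.groupby(busmap).apply(shapely.ops.unary_union)
    regions_c = gpd.GeoDataFrame(dict(geometry=geom_c))
    regions_c.index.name = "name"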