Refactor the save-to-GeoJSON functionality to allow import/export of empty GeoDataFrames
This commit is contained in:
parent
467a1eb5e7
commit
c42d2bd97d
@ -45,6 +45,8 @@ This release is not on the ``master`` branch. It can be used with
|
||||
Upcoming Release
|
||||
================
|
||||
|
||||
* The workflow now supports running a selection of countries that do not have any offshore regions assigned. In that case the offshore technologies need to be disabled, otherwise the workflow will raise an error.
|
||||
|
||||
* Add an efficiency factor of 88.55% to offshore wind capacity factors
|
||||
as a proxy for wake losses. More rigorous modelling is `planned <https://github.com/PyPSA/pypsa-eur/issues/153>`_
|
||||
[`#277 <https://github.com/PyPSA/pypsa-eur/pull/277>`_].
|
||||
|
@ -27,7 +27,7 @@ dependencies:
|
||||
- powerplantmatching>=0.5.3
|
||||
- numpy
|
||||
- pandas
|
||||
- geopandas
|
||||
- geopandas>=0.11.0
|
||||
- xarray
|
||||
- netcdf4
|
||||
- networkx
|
||||
|
@ -4,7 +4,9 @@
|
||||
|
||||
import pandas as pd
|
||||
from pathlib import Path
|
||||
from collections import OrderedDict
|
||||
|
||||
REGION_COLS = ['geometry', 'name', 'x', 'y', 'country']
|
||||
|
||||
def configure_logging(snakemake, skip_handlers=False):
|
||||
"""
|
||||
|
@ -391,7 +391,9 @@ def _set_countries_and_substations(n, config, country_shapes, offshore_shapes):
|
||||
|
||||
countries = config['countries']
|
||||
country_shapes = gpd.read_file(country_shapes).set_index('name')['geometry']
|
||||
offshore_shapes = gpd.read_file(offshore_shapes).set_index('name')['geometry']
|
||||
# reindexing necessary for supporting empty geo-dataframes
|
||||
offshore_shapes = gpd.read_file(offshore_shapes)
|
||||
offshore_shapes = offshore_shapes.reindex(columns=['name', 'geometry']).set_index('name')['geometry']
|
||||
substation_b = buses['symbol'].str.contains('substation|converter station', case=False)
|
||||
|
||||
def prefer_voltage(x, which):
|
||||
|
@ -42,7 +42,7 @@ Description
|
||||
"""
|
||||
|
||||
import logging
|
||||
from _helpers import configure_logging
|
||||
from _helpers import configure_logging, REGION_COLS
|
||||
|
||||
import pypsa
|
||||
import os
|
||||
@ -55,13 +55,6 @@ from scipy.spatial import Voronoi
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def save_to_geojson(s, fn):
    """Overwrite ``fn`` with a GeoJSON serialisation of ``s``.

    Parameters
    ----------
    s : geopandas.GeoDataFrame
        Regions to export; may be empty.
    fn : str
        Target file path. An existing file is removed first because the
        GeoJSON driver cannot overwrite in place.
    """
    if os.path.exists(fn):
        os.unlink(fn)
    if s.empty:
        # infer_schema cannot derive a schema from an empty frame (and was
        # removed in geopandas >= 0.11); let to_file use its defaults instead.
        s.to_file(fn, driver='GeoJSON')
        return
    # force a generic geometry type so mixed Polygon/MultiPolygon rows pass
    schema = {**gpd.io.file.infer_schema(s), 'geometry': 'Unknown'}
    s.to_file(fn, driver='GeoJSON', schema=schema)
|
||||
|
||||
|
||||
def voronoi_partition_pts(points, outline):
|
||||
"""
|
||||
Compute the polygons of a voronoi partition of `points` within the
|
||||
@ -120,7 +113,8 @@ if __name__ == "__main__":
|
||||
n = pypsa.Network(snakemake.input.base_network)
|
||||
|
||||
country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index('name')['geometry']
|
||||
offshore_shapes = gpd.read_file(snakemake.input.offshore_shapes).set_index('name')['geometry']
|
||||
offshore_shapes = gpd.read_file(snakemake.input.offshore_shapes)
|
||||
offshore_shapes = offshore_shapes.reindex(columns=REGION_COLS).set_index('name')['geometry']
|
||||
|
||||
onshore_regions = []
|
||||
offshore_regions = []
|
||||
@ -151,6 +145,8 @@ if __name__ == "__main__":
|
||||
offshore_regions_c = offshore_regions_c.loc[offshore_regions_c.area > 1e-2]
|
||||
offshore_regions.append(offshore_regions_c)
|
||||
|
||||
save_to_geojson(pd.concat(onshore_regions, ignore_index=True), snakemake.output.regions_onshore)
|
||||
|
||||
save_to_geojson(pd.concat(offshore_regions, ignore_index=True), snakemake.output.regions_offshore)
|
||||
pd.concat(onshore_regions, ignore_index=True).to_file(snakemake.output.regions_onshore)
|
||||
if offshore_regions:
|
||||
pd.concat(offshore_regions, ignore_index=True).to_file(snakemake.output.regions_offshore)
|
||||
else:
|
||||
offshore_shapes.to_frame().to_file(snakemake.output.regions_offshore)
|
@ -116,7 +116,7 @@ if __name__ == "__main__":
|
||||
# Determine the bounds from bus regions with a buffer of two grid cells
|
||||
onshore = gpd.read_file(snakemake.input.regions_onshore)
|
||||
offshore = gpd.read_file(snakemake.input.regions_offshore)
|
||||
regions = onshore.append(offshore)
|
||||
regions = pd.concat([onshore, offshore])
|
||||
d = max(cutout_params.get('dx', 0.25), cutout_params.get('dy', 0.25))*2
|
||||
cutout_params['bounds'] = regions.total_bounds + [-d, -d, d, d]
|
||||
elif {'x', 'y'}.issubset(cutout_params):
|
||||
|
@ -221,7 +221,11 @@ if __name__ == '__main__':
|
||||
client = Client(cluster, asynchronous=True)
|
||||
|
||||
cutout = atlite.Cutout(snakemake.input['cutout'])
|
||||
regions = gpd.read_file(snakemake.input.regions).set_index('name').rename_axis('bus')
|
||||
regions = gpd.read_file(snakemake.input.regions)
|
||||
assert not regions.empty, (f"List of regions in {snakemake.input.regions} is empty, please "
|
||||
"disable the corresponding renewable technology")
|
||||
# do not pull up, set_index does not work if geo dataframe is empty
|
||||
regions = regions.set_index('name').rename_axis('bus')
|
||||
buses = regions.index
|
||||
|
||||
excluder = atlite.ExclusionContainer(crs=3035, res=100)
|
||||
|
@ -129,14 +129,15 @@ def eez(country_shapes, eez, country_list):
|
||||
df['name'] = df['ISO_3digit'].map(lambda c: _get_country('alpha_2', alpha_3=c))
|
||||
s = df.set_index('name').geometry.map(lambda s: _simplify_polys(s, filterremote=False))
|
||||
s = gpd.GeoSeries({k:v for k,v in s.iteritems() if v.distance(country_shapes[k]) < 1e-3})
|
||||
s = s.to_frame("geometry")
|
||||
s.index.name = "name"
|
||||
return s
|
||||
|
||||
|
||||
def country_cover(country_shapes, eez_shapes=None):
|
||||
shapes = list(country_shapes)
|
||||
shapes = country_shapes
|
||||
if eez_shapes is not None:
|
||||
shapes += list(eez_shapes)
|
||||
shapes = pd.concat([shapes, eez_shapes])
|
||||
|
||||
europe_shape = unary_union(shapes)
|
||||
if isinstance(europe_shape, MultiPolygon):
|
||||
@ -203,16 +204,6 @@ def nuts3(country_shapes, nuts3, nuts3pop, nuts3gdp, ch_cantons, ch_popgdp):
|
||||
return df
|
||||
|
||||
|
||||
def save_to_geojson(df, fn):
    """Overwrite ``fn`` with a GeoJSON serialisation of ``df``.

    Parameters
    ----------
    df : geopandas.GeoSeries or geopandas.GeoDataFrame
        Shapes to export; a bare GeoSeries is wrapped into a GeoDataFrame.
        May be empty.
    fn : str
        Target file path. An existing file is removed first because the
        GeoJSON driver cannot overwrite in place.
    """
    if os.path.exists(fn):
        os.unlink(fn)
    if not isinstance(df, gpd.GeoDataFrame):
        # wrap a bare GeoSeries so it carries a proper 'geometry' column
        df = gpd.GeoDataFrame(dict(geometry=df))
    df = df.reset_index()  # keep the index (e.g. country names) as a column
    if df.empty:
        # infer_schema cannot derive a schema from an empty frame (and was
        # removed in geopandas >= 0.11); let to_file use its defaults instead.
        df.to_file(fn, driver='GeoJSON')
        return
    # force a generic geometry type so mixed Polygon/MultiPolygon rows pass
    schema = {**gpd.io.file.infer_schema(df), 'geometry': 'Unknown'}
    df.to_file(fn, driver='GeoJSON', schema=schema)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if 'snakemake' not in globals():
|
||||
from _helpers import mock_snakemake
|
||||
@ -220,15 +211,14 @@ if __name__ == "__main__":
|
||||
configure_logging(snakemake)
|
||||
|
||||
country_shapes = countries(snakemake.input.naturalearth, snakemake.config['countries'])
|
||||
save_to_geojson(country_shapes, snakemake.output.country_shapes)
|
||||
country_shapes.reset_index().to_file(snakemake.output.country_shapes)
|
||||
|
||||
offshore_shapes = eez(country_shapes, snakemake.input.eez, snakemake.config['countries'])
|
||||
save_to_geojson(offshore_shapes, snakemake.output.offshore_shapes)
|
||||
offshore_shapes.reset_index().to_file(snakemake.output.offshore_shapes)
|
||||
|
||||
europe_shape = country_cover(country_shapes, offshore_shapes)
|
||||
save_to_geojson(gpd.GeoSeries(europe_shape), snakemake.output.europe_shape)
|
||||
europe_shape = gpd.GeoDataFrame(geometry=[country_cover(country_shapes, offshore_shapes.geometry)])
|
||||
europe_shape.reset_index().to_file(snakemake.output.europe_shape)
|
||||
|
||||
nuts3_shapes = nuts3(country_shapes, snakemake.input.nuts3, snakemake.input.nuts3pop,
|
||||
snakemake.input.nuts3gdp, snakemake.input.ch_cantons, snakemake.input.ch_popgdp)
|
||||
|
||||
save_to_geojson(nuts3_shapes, snakemake.output.nuts3_shapes)
|
||||
nuts3_shapes.reset_index().to_file(snakemake.output.nuts3_shapes)
|
||||
|
@ -122,7 +122,7 @@ Exemplary unsolved network clustered to 37 nodes:
|
||||
"""
|
||||
|
||||
import logging
|
||||
from _helpers import configure_logging, update_p_nom_max
|
||||
from _helpers import configure_logging, update_p_nom_max, REGION_COLS
|
||||
|
||||
import pypsa
|
||||
import os
|
||||
@ -303,24 +303,18 @@ def clustering_for_n_clusters(n, n_clusters, custom_busmap=False, aggregate_carr
|
||||
return clustering
|
||||
|
||||
|
||||
def save_to_geojson(s, fn):
    """Overwrite ``fn`` with a GeoJSON serialisation of ``s``.

    Parameters
    ----------
    s : geopandas.GeoDataFrame
        Clustered regions to export; may be empty. The index is kept as a
        column via ``reset_index``.
    fn : str
        Target file path. An existing file is removed first because the
        GeoJSON driver cannot overwrite in place.
    """
    if os.path.exists(fn):
        os.unlink(fn)
    df = s.reset_index()
    if df.empty:
        # infer_schema cannot derive a schema from an empty frame (and was
        # removed in geopandas >= 0.11); let to_file use its defaults instead.
        df.to_file(fn, driver='GeoJSON')
        return
    # force a generic geometry type so mixed Polygon/MultiPolygon rows pass
    schema = {**gpd.io.file.infer_schema(df), 'geometry': 'Unknown'}
    df.to_file(fn, driver='GeoJSON', schema=schema)
|
||||
|
||||
|
||||
def cluster_regions(busmaps, input=None, output=None):
    """Aggregate onshore/offshore bus regions along a chain of busmaps.

    NOTE(review): this span contained interleaved pre-/post-change lines from
    a markerless diff (duplicate, contradictory statements); this is the
    reconstructed post-change version.

    Parameters
    ----------
    busmaps : sequence of pandas.Series
        Successive mappings from original buses to cluster labels; they are
        composed left to right into one original-bus -> final-cluster map.
    input, output : snakemake io objects
        Provide ``regions_onshore`` / ``regions_offshore`` GeoJSON paths.
    """
    # compose the chain of busmaps into a single bus -> cluster mapping
    busmap = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0])

    for which in ('regions_onshore', 'regions_offshore'):
        regions = gpd.read_file(getattr(input, which))
        # reindexing keeps the expected columns even for empty geo-dataframes
        regions = regions.reindex(columns=REGION_COLS).set_index('name')
        aggfunc = dict(x="mean", y="mean", country="first")
        regions_c = regions.dissolve(busmap, aggfunc=aggfunc)
        regions_c.index.name = 'name'
        regions_c = regions_c.reset_index()
        regions_c.to_file(getattr(output, which))
|
||||
|
||||
|
||||
def plot_busmap_for_n_clusters(n, n_clusters, fn=None):
|
||||
|
Loading…
Reference in New Issue
Block a user