Merge branch 'master' into bugfixes_manual_load

Fabian Neumann 2022-06-24 19:57:16 +02:00 committed by GitHub
commit ac966c0a99
12 changed files with 99 additions and 38 deletions

.gitignore
View File

@@ -19,6 +19,7 @@ gurobi.log
/data
/data/links_p_nom.csv
/cutouts
/dask-worker-space
doc/_build

View File

@@ -70,6 +70,10 @@ Upcoming Release
* Use updated SARAH-2 and ERA5 cutouts with slightly wider scope to east and additional variables.
* Fix CRS bug: change EPSG code 4236 to 4326.
* Pin rasterio version (<=1.2.9) to correctly calculate the exclusion raster.
PyPSA-Eur 0.4.0 (22nd September 2021)
=====================================

View File

@@ -24,7 +24,7 @@ dependencies:
- yaml
- pytables
- lxml
- powerplantmatching>=0.4.8
- powerplantmatching>=0.5.3
- numpy
- pandas
- geopandas
@@ -45,7 +45,7 @@ dependencies:
# GIS dependencies:
- cartopy
- descartes
- rasterio
- rasterio<=1.2.9 # 1.2.10 creates error https://github.com/PyPSA/atlite/issues/238
# PyPSA-Eur-Sec Dependencies
- geopy

View File

@@ -231,6 +231,7 @@ def mock_snakemake(rulename, **wildcards):
import os
from pypsa.descriptors import Dict
from snakemake.script import Snakemake
from packaging.version import Version, parse
script_dir = Path(__file__).parent.resolve()
assert Path.cwd().resolve() == script_dir, \
@@ -240,7 +241,8 @@ def mock_snakemake(rulename, **wildcards):
if os.path.exists(p):
snakefile = p
break
workflow = sm.Workflow(snakefile, overwrite_configfiles=[])
kwargs = dict(rerun_triggers=[]) if parse(sm.__version__) > Version("7.7.0") else {}
workflow = sm.Workflow(snakefile, overwrite_configfiles=[], **kwargs)
workflow.include(snakefile)
workflow.global_resources = {}
rule = workflow.get_rule(rulename)
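
The two added lines make mock_snakemake tolerant of newer Snakemake releases: rerun_triggers=[] is only passed on versions above 7.7.0, presumably because older releases do not accept the keyword. A minimal sketch of that version-gating idiom, with only the 7.7.0 threshold taken from the diff (everything else is illustrative):

import snakemake as sm
from packaging.version import Version, parse

# Build the extra keyword dict conditionally: on Snakemake > 7.7.0 disable
# rerun triggers, on older versions pass nothing extra (the argument is
# assumed not to exist there).
kwargs = dict(rerun_triggers=[]) if parse(sm.__version__) > Version("7.7.0") else {}
print(kwargs)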

View File

@@ -47,9 +47,10 @@ from _helpers import configure_logging
import pypsa
import os
import pandas as pd
import numpy as np
import geopandas as gpd
from vresutils.graph import voronoi_partition_pts
from shapely.geometry import Polygon
from scipy.spatial import Voronoi
logger = logging.getLogger(__name__)
@@ -61,6 +62,53 @@ def save_to_geojson(s, fn):
s.to_file(fn, driver='GeoJSON', schema=schema)
def voronoi_partition_pts(points, outline):
"""
Compute the polygons of a voronoi partition of `points` within the
polygon `outline`. Taken from
https://github.com/FRESNA/vresutils/blob/master/vresutils/graph.py
Attributes
----------
points : Nx2 - ndarray[dtype=float]
outline : Polygon
Returns
-------
polygons : N - ndarray[dtype=Polygon|MultiPolygon]
"""
points = np.asarray(points)
if len(points) == 1:
polygons = [outline]
else:
xmin, ymin = np.amin(points, axis=0)
xmax, ymax = np.amax(points, axis=0)
xspan = xmax - xmin
yspan = ymax - ymin
# to avoid any network positions outside all Voronoi cells, append
# the corners of a rectangle framing these points
vor = Voronoi(np.vstack((points,
[[xmin-3.*xspan, ymin-3.*yspan],
[xmin-3.*xspan, ymax+3.*yspan],
[xmax+3.*xspan, ymin-3.*yspan],
[xmax+3.*xspan, ymax+3.*yspan]])))
polygons = []
for i in range(len(points)):
poly = Polygon(vor.vertices[vor.regions[vor.point_region[i]]])
if not poly.is_valid:
poly = poly.buffer(0)
poly = poly.intersection(outline)
polygons.append(poly)
return np.array(polygons, dtype=object)
if __name__ == "__main__":
if 'snakemake' not in globals():
from _helpers import mock_snakemake
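
The voronoi_partition_pts helper inlined in this hunk replaces the former vresutils import: given bus coordinates and an enclosing shape, it returns one Voronoi cell per point, clipped to the outline. A small self-contained usage sketch (the coordinates and the square outline are made up for illustration):

import numpy as np
from shapely.geometry import box

# Three example points inside a 10 x 10 square; each gets one Voronoi cell
# clipped to the square, so the cell areas add up to the outline area.
points = np.array([[2.0, 2.0], [8.0, 3.0], [5.0, 8.0]])
outline = box(0.0, 0.0, 10.0, 10.0)
cells = voronoi_partition_pts(points, outline)
print([round(cell.area, 2) for cell in cells])  # areas roughly sum to 100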

View File

@@ -40,7 +40,7 @@ Description
"""
import logging
from _helpers import configure_logging, retrieve_snakemake_keys
from _helpers import configure_logging
import atlite
import geopandas as gpd
@@ -73,20 +73,17 @@ if __name__ == "__main__":
snakemake = mock_snakemake('build_natura_raster')
configure_logging(snakemake)
paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
cutouts = paths.cutouts
cutouts = snakemake.input.cutouts
xs, Xs, ys, Ys = zip(*(determine_cutout_xXyY(cutout) for cutout in cutouts))
bounds = transform_bounds(4326, 3035, min(xs), min(ys), max(Xs), max(Ys))
transform, out_shape = get_transform_and_shape(bounds, res=100)
# adjusted boundaries
shapes = gpd.read_file(paths.natura).to_crs(3035)
shapes = gpd.read_file(snakemake.input.natura).to_crs(3035)
raster = ~geometry_mask(shapes.geometry, out_shape[::-1], transform)
raster = raster.astype(rio.uint8)
with rio.open(out[0], 'w', driver='GTiff', dtype=rio.uint8,
with rio.open(snakemake.output[0], 'w', driver='GTiff', dtype=rio.uint8,
count=1, transform=transform, crs=3035, compress='lzw',
width=raster.shape[1], height=raster.shape[0]) as dst:
dst.write(raster, indexes=1)
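
As in the other scripts touched by this merge, the retrieve_snakemake_keys helper is dropped in favour of reading rule inputs, outputs and wildcards directly from the snakemake object. A rough sketch of that access pattern, assuming the script runs under Snakemake or via the mock_snakemake helper shown earlier (the rule name is the one used above):

if 'snakemake' not in globals():
    from _helpers import mock_snakemake
    snakemake = mock_snakemake('build_natura_raster')

# Named inputs, positional outputs and wildcards are plain attributes of the
# snakemake object, so no extra unpacking helper is needed.
cutouts = snakemake.input.cutouts
natura = snakemake.input.natura
outfile = snakemake.output[0]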

View File

@@ -189,6 +189,7 @@ import logging
from pypsa.geo import haversine
from shapely.geometry import LineString
import time
from dask.distributed import Client, LocalCluster
from _helpers import configure_logging
@@ -203,7 +204,7 @@ if __name__ == '__main__':
pgb.streams.wrap_stderr()
nprocesses = int(snakemake.threads)
noprogress = not snakemake.config['atlite'].get('show_progress', True)
noprogress = not snakemake.config['atlite'].get('show_progress', False)
config = snakemake.config['renewable'][snakemake.wildcards.technology]
resource = config['resource'] # pv panel config / wind turbine config
correction_factor = config.get('correction_factor', 1.)
@@ -216,6 +217,8 @@ if __name__ == '__main__':
if correction_factor != 1.:
logger.info(f'correction_factor is set as {correction_factor}')
cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
client = Client(cluster, asynchronous=True)
cutout = atlite.Cutout(snakemake.input['cutout'])
regions = gpd.read_file(snakemake.input.regions).set_index('name').rename_axis('bus')
@@ -240,7 +243,7 @@ if __name__ == '__main__':
# use named function np.greater with partially frozen argument instead
# and exclude areas where: -max_depth > grid cell depth
func = functools.partial(np.greater,-config['max_depth'])
excluder.add_raster(snakemake.input.gebco, codes=func, crs=4236, nodata=-1000)
excluder.add_raster(snakemake.input.gebco, codes=func, crs=4326, nodata=-1000)
if 'min_shore_distance' in config:
buffer = config['min_shore_distance']
@@ -266,7 +269,7 @@ if __name__ == '__main__':
potential = capacity_per_sqkm * availability.sum('bus') * area
func = getattr(cutout, resource.pop('method'))
resource['dask_kwargs'] = {'num_workers': nprocesses}
resource['dask_kwargs'] = {"scheduler": client}
capacity_factor = correction_factor * func(capacity_factor=True, **resource)
layout = capacity_factor * area * capacity_per_sqkm
profile, capacities = func(matrix=availability.stack(spatial=['y','x']),
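
With these changes the profile computations are handed to an explicit dask.distributed scheduler instead of the default one: a LocalCluster with one thread per worker is started and its Client is passed through dask_kwargs. A minimal sketch of that pattern on a toy dask computation (worker count and array are illustrative, not taken from the workflow):

import dask.array as da
from dask.distributed import Client, LocalCluster

# Start a small local cluster and route a dask computation through its
# client, mirroring the {"scheduler": client} keyword used above.
cluster = LocalCluster(n_workers=2, threads_per_worker=1)
client = Client(cluster)

x = da.ones((1000, 1000), chunks=(250, 250))
print(x.sum().compute(scheduler=client))  # 1000000.0

client.close()
cluster.close()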

View File

@@ -281,7 +281,14 @@ def clustering_for_n_clusters(n, n_clusters, custom_busmap=False, aggregate_carr
aggregate_generators_carriers=aggregate_carriers,
aggregate_one_ports=["Load", "StorageUnit"],
line_length_factor=line_length_factor,
generator_strategies={'p_nom_max': p_nom_max_strategy, 'p_nom_min': pd.Series.sum},
generator_strategies={'p_nom_max': p_nom_max_strategy,
'p_nom_min': pd.Series.sum,
'p_min_pu': pd.Series.mean,
'marginal_cost': pd.Series.mean,
'committable': np.any,
'ramp_limit_up': pd.Series.max,
'ramp_limit_down': pd.Series.max,
},
scale_link_capital_costs=False)
if not n.links.empty:
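
The extended generator_strategies dict tells the clustering step how each generator column is reduced when several generators are merged into one clustered generator: p_nom_min is summed, p_min_pu and marginal_cost are averaged, ramp limits take the maximum, and committable is true if any member is. Conceptually this is a per-column groupby aggregation, roughly as in this pandas sketch (the toy data are made up):

import numpy as np
import pandas as pd

# Two generators that fall into the same cluster; each column is reduced
# with the strategy listed in the diff above.
gens = pd.DataFrame({
    "cluster": ["c1", "c1"],
    "p_nom_min": [10.0, 20.0],
    "p_min_pu": [0.2, 0.4],
    "marginal_cost": [30.0, 50.0],
    "committable": [True, False],
    "ramp_limit_up": [0.5, 0.8],
})

aggregated = gens.groupby("cluster").agg({
    "p_nom_min": pd.Series.sum,
    "p_min_pu": pd.Series.mean,
    "marginal_cost": pd.Series.mean,
    "committable": np.any,
    "ramp_limit_up": pd.Series.max,
})
print(aggregated)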

View File

@@ -171,6 +171,9 @@ def calculate_capacity(n,label,capacity):
if 'p_nom_opt' in c.df.columns:
c_capacities = abs(c.df.p_nom_opt.multiply(c.df.sign)).groupby(c.df.carrier).sum()
capacity = include_in_summary(capacity, [c.list_name], label, c_capacities)
elif 'e_nom_opt' in c.df.columns:
c_capacities = abs(c.df.e_nom_opt.multiply(c.df.sign)).groupby(c.df.carrier).sum()
capacity = include_in_summary(capacity, [c.list_name], label, c_capacities)
for c in n.iterate_components(n.passive_branch_components):
c_capacities = c.df['s_nom_opt'].groupby(c.df.carrier).sum()
@@ -185,11 +188,11 @@ def calculate_capacity(n,label,capacity):
def calculate_supply(n, label, supply):
"""calculate the max dispatch of each component at the buses where the loads are attached"""
load_types = n.loads.carrier.value_counts().index
load_types = n.buses.carrier.unique()
for i in load_types:
buses = n.loads.bus[n.loads.carrier == i].values
buses = n.buses.query("carrier == @i").index
bus_map = pd.Series(False,index=n.buses.index)
@@ -232,11 +235,11 @@ def calculate_supply(n, label, supply):
def calculate_supply_energy(n, label, supply_energy):
"""calculate the total dispatch of each component at the buses where the loads are attached"""
load_types = n.loads.carrier.value_counts().index
load_types = n.buses.carrier.unique()
for i in load_types:
buses = n.loads.bus[n.loads.carrier == i].values
buses = n.buses.query("carrier == @i").index
bus_map = pd.Series(False,index=n.buses.index)
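
Both supply calculations now derive the carrier list and the relevant buses from the bus table itself rather than from the attached loads. The query call uses pandas' @-syntax to reference the loop variable, as in this small sketch (toy data):

import pandas as pd

# '@i' inside DataFrame.query refers to the local Python variable i, so this
# picks the buses of the carrier currently being processed.
buses = pd.DataFrame({"carrier": ["AC", "AC", "H2"]}, index=["b1", "b2", "b3"])
for i in buses.carrier.unique():
    print(i, list(buses.query("carrier == @i").index))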

View File

@@ -19,7 +19,7 @@ Description
"""
import logging
from _helpers import configure_logging, retrieve_snakemake_keys
from _helpers import configure_logging
import pypsa
import pandas as pd
@@ -53,13 +53,11 @@ if __name__ == "__main__":
clusts= '5,full', country= 'all')
configure_logging(snakemake)
paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
plot_kwds = dict(drawstyle="steps-post")
clusters = wildcards.clusts.split(',')
techs = wildcards.techs.split(',')
country = wildcards.country
clusters = snakemake.wildcards.clusts.split(',')
techs = snakemake.wildcards.techs.split(',')
country = snakemake.wildcards.country
if country == 'all':
country = None
else:
@@ -68,7 +66,7 @@ if __name__ == "__main__":
fig, axes = plt.subplots(1, len(techs))
for j, cluster in enumerate(clusters):
net = pypsa.Network(paths[j])
net = pypsa.Network(snakemake.input[j])
for i, tech in enumerate(techs):
cum_p_nom_max(net, tech, country).plot(x="p_max_pu", y="cum_p_nom_max",
@@ -81,4 +79,4 @@ if __name__ == "__main__":
plt.legend(title="Cluster level")
fig.savefig(out[0], transparent=True, bbox_inches='tight')
fig.savefig(snakemake.output[0], transparent=True, bbox_inches='tight')

View File

@@ -21,7 +21,7 @@ Description
import os
import logging
from _helpers import configure_logging, retrieve_snakemake_keys
from _helpers import configure_logging
import pandas as pd
import matplotlib.pyplot as plt
@@ -170,12 +170,12 @@ if __name__ == "__main__":
attr='', ext='png', country='all')
configure_logging(snakemake)
paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
config = snakemake.config
summary = wildcards.summary
summary = snakemake.wildcards.summary
try:
func = globals()[f"plot_{summary}"]
except KeyError:
raise RuntimeError(f"plotting function for {summary} has not been defined")
func(os.path.join(paths[0], f"{summary}.csv"), config, out[0])
func(os.path.join(snakemake.input[0], f"{summary}.csv"), config, snakemake.output[0])

View File

@@ -37,7 +37,7 @@ Description
"""
import logging
from _helpers import configure_logging, retrieve_snakemake_keys
from _helpers import configure_logging
import pandas as pd
@@ -63,8 +63,6 @@ if __name__ == "__main__":
snakemake = mock_snakemake('prepare_links_p_nom', simpl='', network='elec')
configure_logging(snakemake)
paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
links_p_nom = pd.read_html('https://en.wikipedia.org/wiki/List_of_HVDC_projects', header=0, match="SwePol")[0]
mw = "Power (MW)"
@@ -76,4 +74,4 @@ if __name__ == "__main__":
links_p_nom['x1'], links_p_nom['y1'] = extract_coordinates(links_p_nom['Converterstation 1'])
links_p_nom['x2'], links_p_nom['y2'] = extract_coordinates(links_p_nom['Converterstation 2'])
links_p_nom.dropna(subset=['x1', 'y1', 'x2', 'y2']).to_csv(out[0], index=False)
links_p_nom.dropna(subset=['x1', 'y1', 'x2', 'y2']).to_csv(snakemake.output[0], index=False)