From 0ae016a7d0a236e6c25ddd0044d4b02e623af7b7 Mon Sep 17 00:00:00 2001 From: cpschau Date: Wed, 10 Apr 2024 17:10:56 +0200 Subject: [PATCH 01/15] write shapes to base network --- scripts/base_network.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/scripts/base_network.py b/scripts/base_network.py index 346f99a5..f78f4b04 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -697,6 +697,14 @@ def _adjust_capacities_of_under_construction_branches(n, config): return n +def _set_shapes(n, country_shapes, offshore_shapes): + # Write the geodataframes country_shapes and offshore_shapes to the network.shapes component + country_shapes = gpd.read_file(country_shapes).rename(columns={"name": "idx"}) + country_shapes["type"] = "country" + offshore_shapes = gpd.read_file(offshore_shapes).rename(columns={"name": "idx"}) + offshore_shapes["type"] = "offshore" + all_shapes = pd.concat([country_shapes, offshore_shapes]) + n.shapes = pd.concat([n.shapes, all_shapes]) def base_network( eg_buses, @@ -758,12 +766,16 @@ def base_network( n = _adjust_capacities_of_under_construction_branches(n, config) + _set_shapes(n, country_shapes, offshore_shapes) + return n if __name__ == "__main__": if "snakemake" not in globals(): from _helpers import mock_snakemake + import os + os.chdir(os.path.dirname(os.path.abspath(__file__))) snakemake = mock_snakemake("base_network") configure_logging(snakemake) From c19b8d760ec19830e75dd2e3999acdbe17ca2866 Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 10 Apr 2024 17:47:29 +0200 Subject: [PATCH 02/15] build_bus_regions: add shapes to network --- scripts/build_bus_regions.py | 27 +++++++++++++++++++++++---- 1 file changed, 23 insertions(+), 4 deletions(-) diff --git a/scripts/build_bus_regions.py b/scripts/build_bus_regions.py index 9d993c17..816c7fad 100644 --- a/scripts/build_bus_regions.py +++ b/scripts/build_bus_regions.py @@ -173,12 +173,31 @@ if __name__ == "__main__": offshore_regions_c = offshore_regions_c.loc[offshore_regions_c.area > 1e-2] offshore_regions.append(offshore_regions_c) - pd.concat(onshore_regions, ignore_index=True).to_file( - snakemake.output.regions_onshore + gdf = pd.concat(onshore_regions, ignore_index=True) + gdf.to_file(snakemake.output.regions_onshore) + + index = gdf.index.astype(int) + n.shapes.index.astype(int).max() + 1 + n.madd( + "Shape", + index, + geometry=gdf.geometry, + idx=index, + component="Bus", + type="onshore", ) if offshore_regions: - pd.concat(offshore_regions, ignore_index=True).to_file( - snakemake.output.regions_offshore + gdf = pd.concat(offshore_regions, ignore_index=True) + gdf.to_file(snakemake.output.regions_offshore) + + index = gdf.index.astype(int) + n.shapes.index.astype(int).max() + 1 + n.madd( + "Shape", + index, + geometry=gdf.geometry, + idx=index, + component="Bus", + type="offshore", ) + else: offshore_shapes.to_frame().to_file(snakemake.output.regions_offshore) From 7d0b775ca9c84ead87cafc547dca5b2d28f6c824 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 10 Apr 2024 15:48:44 +0000 Subject: [PATCH 03/15] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/base_network.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/scripts/base_network.py b/scripts/base_network.py index f78f4b04..77d3d250 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -697,6 +697,7 @@ def 
_adjust_capacities_of_under_construction_branches(n, config): return n + def _set_shapes(n, country_shapes, offshore_shapes): # Write the geodataframes country_shapes and offshore_shapes to the network.shapes component country_shapes = gpd.read_file(country_shapes).rename(columns={"name": "idx"}) @@ -704,7 +705,8 @@ def _set_shapes(n, country_shapes, offshore_shapes): offshore_shapes = gpd.read_file(offshore_shapes).rename(columns={"name": "idx"}) offshore_shapes["type"] = "offshore" all_shapes = pd.concat([country_shapes, offshore_shapes]) - n.shapes = pd.concat([n.shapes, all_shapes]) + n.shapes = pd.concat([n.shapes, all_shapes]) + def base_network( eg_buses, @@ -773,8 +775,10 @@ def base_network( if __name__ == "__main__": if "snakemake" not in globals(): - from _helpers import mock_snakemake import os + + from _helpers import mock_snakemake + os.chdir(os.path.dirname(os.path.abspath(__file__))) snakemake = mock_snakemake("base_network") From 9686407756684b064f27f77b75d06e4d3149142e Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 10 Apr 2024 17:58:08 +0200 Subject: [PATCH 04/15] cluster_network: add regions to n.shapes --- scripts/cluster_network.py | 34 +++++++++++++++++++++++++--------- 1 file changed, 25 insertions(+), 9 deletions(-) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index b0b73ade..a2473beb 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -428,16 +428,31 @@ def clustering_for_n_clusters( return clustering -def cluster_regions(busmaps, input=None, output=None): +def cluster_regions(busmaps, which, input=None, output=None): busmap = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0]) - for which in ("regions_onshore", "regions_offshore"): - regions = gpd.read_file(getattr(input, which)) - regions = regions.reindex(columns=["name", "geometry"]).set_index("name") - regions_c = regions.dissolve(busmap) - regions_c.index.name = "name" - regions_c = regions_c.reset_index() - regions_c.to_file(getattr(output, which)) + regions = gpd.read_file(getattr(input, which)) + regions = regions.reindex(columns=["name", "geometry"]).set_index("name") + regions_c = regions.dissolve(busmap) + regions_c.index.name = "name" + regions_c = regions_c.reset_index() + regions_c.to_file(getattr(output, which)) + + # remove old regions + remove = n.shapes.query("component == 'Bus' and type == @which").index + n.mremove("Shape", remove) + + # add new clustered regions + index = regions_c.index.astype(int) + n.shapes.index.astype(int).max() + 1 + type = which.split("_")[1] + n.madd( + "Shape", + index, + geometry=regions_c.geometry, + idx=index, + component="Bus", + type="which", + ) def plot_busmap_for_n_clusters(n, n_clusters, fn=None): @@ -555,4 +570,5 @@ if __name__ == "__main__": ): # also available: linemap_positive, linemap_negative getattr(clustering, attr).to_csv(snakemake.output[attr]) - cluster_regions((clustering.busmap,), snakemake.input, snakemake.output) + for which in ["regions_onshore", "regions_offshore"]: + cluster_regions((clustering.busmap,), which, snakemake.input, snakemake.output) From 47134a88d833e2db1c1a6d95fd070ab208d5d5d6 Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 10 Apr 2024 18:13:14 +0200 Subject: [PATCH 05/15] regions to n.shapes: smooth out remaining issues --- scripts/base_network.py | 2 +- scripts/build_bus_regions.py | 7 +++++-- scripts/cluster_network.py | 3 ++- scripts/simplify_network.py | 3 ++- 4 files changed, 10 insertions(+), 5 deletions(-) diff --git a/scripts/base_network.py 
b/scripts/base_network.py index 77d3d250..528f04bf 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -705,7 +705,7 @@ def _set_shapes(n, country_shapes, offshore_shapes): offshore_shapes = gpd.read_file(offshore_shapes).rename(columns={"name": "idx"}) offshore_shapes["type"] = "offshore" all_shapes = pd.concat([country_shapes, offshore_shapes]) - n.shapes = pd.concat([n.shapes, all_shapes]) + n.shapes = pd.concat([n.shapes, all_shapes], ignore_index=True) def base_network( diff --git a/scripts/build_bus_regions.py b/scripts/build_bus_regions.py index 816c7fad..d26e6ba6 100644 --- a/scripts/build_bus_regions.py +++ b/scripts/build_bus_regions.py @@ -176,7 +176,8 @@ if __name__ == "__main__": gdf = pd.concat(onshore_regions, ignore_index=True) gdf.to_file(snakemake.output.regions_onshore) - index = gdf.index.astype(int) + n.shapes.index.astype(int).max() + 1 + offset = n.shapes.index.astype(int).max() + 1 if not n.shapes.empty else 0 + index = gdf.index.astype(int) + offset n.madd( "Shape", index, @@ -185,11 +186,13 @@ if __name__ == "__main__": component="Bus", type="onshore", ) + if offshore_regions: gdf = pd.concat(offshore_regions, ignore_index=True) gdf.to_file(snakemake.output.regions_offshore) - index = gdf.index.astype(int) + n.shapes.index.astype(int).max() + 1 + offset = n.shapes.index.astype(int).max() + 1 if not n.shapes.empty else 0 + index = gdf.index.astype(int) + offset n.madd( "Shape", index, diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index a2473beb..7a497626 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -443,7 +443,8 @@ def cluster_regions(busmaps, which, input=None, output=None): n.mremove("Shape", remove) # add new clustered regions - index = regions_c.index.astype(int) + n.shapes.index.astype(int).max() + 1 + offset = n.shapes.index.astype(int).max() + 1 if not n.shapes.empty else 0 + index = regions_c.index.astype(int) + offset type = which.split("_")[1] n.madd( "Shape", diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index 24df7312..6e404192 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -629,4 +629,5 @@ if __name__ == "__main__": busmap_s = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0]) busmap_s.to_csv(snakemake.output.busmap) - cluster_regions(busmaps, snakemake.input, snakemake.output) + for which in ["regions_onshore", "regions_offshore"]: + cluster_regions(busmaps, which, snakemake.input, snakemake.output) From 590d3635790222ddddefc388a59547b4db64d02c Mon Sep 17 00:00:00 2001 From: Fabian Date: Thu, 11 Apr 2024 12:13:07 +0200 Subject: [PATCH 06/15] clustering: add docstring to cluster_regions function, fix network reference --- scripts/cluster_network.py | 35 ++++++++++++++++++++++++----------- scripts/simplify_network.py | 2 +- 2 files changed, 25 insertions(+), 12 deletions(-) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 7a497626..a18a2079 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -428,7 +428,20 @@ def clustering_for_n_clusters( return clustering -def cluster_regions(busmaps, which, input=None, output=None): +def cluster_regions(n, busmaps, which, input=None, output=None): + """ + Cluster regions based on busmaps and save the results to a file and to the + network. + + Parameters: + - busmaps (list): A list of busmaps used for clustering. + - which (str): The type of regions to cluster. + - input (str, optional): The input file path. Defaults to None. 
+ - output (str, optional): The output file path. Defaults to None. + + Returns: + None + """ busmap = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0]) regions = gpd.read_file(getattr(input, which)) @@ -438,7 +451,7 @@ def cluster_regions(busmaps, which, input=None, output=None): regions_c = regions_c.reset_index() regions_c.to_file(getattr(output, which)) - # remove old regions + # remove original regions remove = n.shapes.query("component == 'Bus' and type == @which").index n.mremove("Shape", remove) @@ -456,8 +469,8 @@ def cluster_regions(busmaps, which, input=None, output=None): ) -def plot_busmap_for_n_clusters(n, n_clusters, fn=None): - busmap = busmap_for_n_clusters(n, n_clusters) +def plot_busmap_for_n_clusters(n, n_clusters, solver_name="scip", fn=None): + busmap = busmap_for_n_clusters(n, n_clusters, solver_name) cs = busmap.unique() cr = sns.color_palette("hls", len(cs)) n.plot(bus_colors=busmap.map(dict(zip(cs, cr)))) @@ -554,17 +567,15 @@ if __name__ == "__main__": params.focus_weights, ) - update_p_nom_max(clustering.network) + nc = clustering.network + update_p_nom_max(nc) if params.cluster_network.get("consider_efficiency_classes"): labels = [f" {label} efficiency" for label in ["low", "medium", "high"]] - nc = clustering.network nc.generators["carrier"] = nc.generators.carrier.replace(labels, "", regex=True) - clustering.network.meta = dict( - snakemake.config, **dict(wildcards=dict(snakemake.wildcards)) - ) - clustering.network.export_to_netcdf(snakemake.output.network) + nc.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) + nc.export_to_netcdf(snakemake.output.network) for attr in ( "busmap", "linemap", @@ -572,4 +583,6 @@ if __name__ == "__main__": getattr(clustering, attr).to_csv(snakemake.output[attr]) for which in ["regions_onshore", "regions_offshore"]: - cluster_regions((clustering.busmap,), which, snakemake.input, snakemake.output) + cluster_regions( + nc, (clustering.busmap,), which, snakemake.input, snakemake.output + ) diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index 6e404192..7b8710a0 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -630,4 +630,4 @@ if __name__ == "__main__": busmap_s.to_csv(snakemake.output.busmap) for which in ["regions_onshore", "regions_offshore"]: - cluster_regions(busmaps, which, snakemake.input, snakemake.output) + cluster_regions(n, busmaps, which, snakemake.input, snakemake.output) From 219847012d381c73c1479bf021cb36d05f797cc0 Mon Sep 17 00:00:00 2001 From: Fabian Date: Thu, 11 Apr 2024 12:56:49 +0200 Subject: [PATCH 07/15] build_bus_regions: fix shapes index for correct alignment in madd cluster_regions: further modularize functions, fix index alignment --- scripts/build_bus_regions.py | 24 +++++++++--------- scripts/cluster_network.py | 48 +++++++++++++++++++++--------------- 2 files changed, 40 insertions(+), 32 deletions(-) diff --git a/scripts/build_bus_regions.py b/scripts/build_bus_regions.py index d26e6ba6..d0225797 100644 --- a/scripts/build_bus_regions.py +++ b/scripts/build_bus_regions.py @@ -173,31 +173,31 @@ if __name__ == "__main__": offshore_regions_c = offshore_regions_c.loc[offshore_regions_c.area > 1e-2] offshore_regions.append(offshore_regions_c) - gdf = pd.concat(onshore_regions, ignore_index=True) - gdf.to_file(snakemake.output.regions_onshore) + shapes = pd.concat(onshore_regions, ignore_index=True) + shapes.to_file(snakemake.output.regions_onshore) offset = n.shapes.index.astype(int).max() + 1 if not n.shapes.empty else 0 - 
index = gdf.index.astype(int) + offset + shapes.index = shapes.index.astype(int) + offset n.madd( "Shape", - index, - geometry=gdf.geometry, - idx=index, + shapes.index, + geometry=shapes.geometry, + idx=shapes.name, component="Bus", type="onshore", ) if offshore_regions: - gdf = pd.concat(offshore_regions, ignore_index=True) - gdf.to_file(snakemake.output.regions_offshore) + shapes = pd.concat(offshore_regions, ignore_index=True) + shapes.to_file(snakemake.output.regions_offshore) offset = n.shapes.index.astype(int).max() + 1 if not n.shapes.empty else 0 - index = gdf.index.astype(int) + offset + shapes.index = shapes.index.astype(int) + offset n.madd( "Shape", - index, - geometry=gdf.geometry, - idx=index, + shapes.index, + geometry=shapes.geometry, + idx=shapes.name, component="Bus", type="offshore", ) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index a18a2079..9dd0226d 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -428,7 +428,7 @@ def clustering_for_n_clusters( return clustering -def cluster_regions(n, busmaps, which, input=None, output=None): +def cluster_regions(busmaps, regions): """ Cluster regions based on busmaps and save the results to a file and to the network. @@ -436,36 +436,41 @@ def cluster_regions(n, busmaps, which, input=None, output=None): Parameters: - busmaps (list): A list of busmaps used for clustering. - which (str): The type of regions to cluster. - - input (str, optional): The input file path. Defaults to None. - - output (str, optional): The output file path. Defaults to None. Returns: None """ busmap = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0]) - - regions = gpd.read_file(getattr(input, which)) regions = regions.reindex(columns=["name", "geometry"]).set_index("name") regions_c = regions.dissolve(busmap) regions_c.index.name = "name" - regions_c = regions_c.reset_index() - regions_c.to_file(getattr(output, which)) + return regions_c.reset_index() - # remove original regions + +def append_bus_shapes(n, shapes, type): + """ + Append shapes to the network. + + Parameters: + n (pypsa.Network): The network to which the shapes will be appended. + shapes (geopandas.GeoDataFrame): The shapes to be appended. + **kwargs: Additional keyword arguments used in `n.madd`. 
+ + Returns: + None + """ remove = n.shapes.query("component == 'Bus' and type == @which").index n.mremove("Shape", remove) - # add new clustered regions offset = n.shapes.index.astype(int).max() + 1 if not n.shapes.empty else 0 - index = regions_c.index.astype(int) + offset - type = which.split("_")[1] + shapes.index = shapes.index.astype(int) + offset n.madd( "Shape", - index, - geometry=regions_c.geometry, - idx=index, + shapes.index, + geometry=shapes.geometry, + idx=shapes.name, component="Bus", - type="which", + type=type, ) @@ -574,15 +579,18 @@ if __name__ == "__main__": labels = [f" {label} efficiency" for label in ["low", "medium", "high"]] nc.generators["carrier"] = nc.generators.carrier.replace(labels, "", regex=True) - nc.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) - nc.export_to_netcdf(snakemake.output.network) for attr in ( "busmap", "linemap", ): # also available: linemap_positive, linemap_negative getattr(clustering, attr).to_csv(snakemake.output[attr]) + nc.shapes = n.shapes.copy() for which in ["regions_onshore", "regions_offshore"]: - cluster_regions( - nc, (clustering.busmap,), which, snakemake.input, snakemake.output - ) + regions = gpd.read_file(snakemake.input[which]) + clustered_regions = cluster_regions((clustering.busmap,), regions) + append_bus_shapes(nc, clustered_regions, type=which.split("_")[1]) + clustered_regions.to_file(snakemake.output[which]) + + nc.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) + nc.export_to_netcdf(snakemake.output.network) From f1b33992f7adc8941e668d61eb30dbf4e46d233d Mon Sep 17 00:00:00 2001 From: Fabian Date: Thu, 11 Apr 2024 13:27:52 +0200 Subject: [PATCH 08/15] move append_bus_shapes to build_bus_regions, apply it where possible --- scripts/build_bus_regions.py | 58 ++++++++++++++++++++++-------------- scripts/cluster_network.py | 30 ++----------------- scripts/simplify_network.py | 15 +++++++--- 3 files changed, 48 insertions(+), 55 deletions(-) diff --git a/scripts/build_bus_regions.py b/scripts/build_bus_regions.py index d0225797..05a7729e 100644 --- a/scripts/build_bus_regions.py +++ b/scripts/build_bus_regions.py @@ -109,6 +109,34 @@ def voronoi_partition_pts(points, outline): return polygons +def append_bus_shapes(n, shapes, type): + """ + Append shapes to the network. If shapes with the same component and type + already exist, they will be removed. + + Parameters: + n (pypsa.Network): The network to which the shapes will be appended. + shapes (geopandas.GeoDataFrame): The shapes to be appended. + **kwargs: Additional keyword arguments used in `n.madd`. 
+ + Returns: + None + """ + remove = n.shapes.query("component == 'Bus' and type == @type").index + n.mremove("Shape", remove) + + offset = n.shapes.index.astype(int).max() + 1 if not n.shapes.empty else 0 + shapes = shapes.rename(lambda x: int(x) + offset) + n.madd( + "Shape", + shapes.index, + geometry=shapes.geometry, + idx=shapes.name, + component="Bus", + type=type, + ) + + if __name__ == "__main__": if "snakemake" not in globals(): from _helpers import mock_snakemake @@ -119,7 +147,8 @@ if __name__ == "__main__": countries = snakemake.params.countries - n = pypsa.Network(snakemake.input.base_network) + base_network = snakemake.input.base_network + n = pypsa.Network(base_network) country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index("name")[ "geometry" @@ -175,32 +204,15 @@ if __name__ == "__main__": shapes = pd.concat(onshore_regions, ignore_index=True) shapes.to_file(snakemake.output.regions_onshore) - - offset = n.shapes.index.astype(int).max() + 1 if not n.shapes.empty else 0 - shapes.index = shapes.index.astype(int) + offset - n.madd( - "Shape", - shapes.index, - geometry=shapes.geometry, - idx=shapes.name, - component="Bus", - type="onshore", - ) + append_bus_shapes(n, shapes, "onshore") if offshore_regions: shapes = pd.concat(offshore_regions, ignore_index=True) shapes.to_file(snakemake.output.regions_offshore) - - offset = n.shapes.index.astype(int).max() + 1 if not n.shapes.empty else 0 - shapes.index = shapes.index.astype(int) + offset - n.madd( - "Shape", - shapes.index, - geometry=shapes.geometry, - idx=shapes.name, - component="Bus", - type="offshore", - ) + append_bus_shapes(n, shapes, "offshore") else: offshore_shapes.to_frame().to_file(snakemake.output.regions_offshore) + + # save network with shapes + n.export_to_netcdf(base_network) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 9dd0226d..f58e5f8b 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -135,6 +135,7 @@ import pypsa import seaborn as sns from _helpers import configure_logging, set_scenario_config, update_p_nom_max from add_electricity import load_costs +from build_bus_regions import append_bus_shapes from packaging.version import Version, parse from pypsa.clustering.spatial import ( busmap_by_greedy_modularity, @@ -447,33 +448,6 @@ def cluster_regions(busmaps, regions): return regions_c.reset_index() -def append_bus_shapes(n, shapes, type): - """ - Append shapes to the network. - - Parameters: - n (pypsa.Network): The network to which the shapes will be appended. - shapes (geopandas.GeoDataFrame): The shapes to be appended. - **kwargs: Additional keyword arguments used in `n.madd`. 
- - Returns: - None - """ - remove = n.shapes.query("component == 'Bus' and type == @which").index - n.mremove("Shape", remove) - - offset = n.shapes.index.astype(int).max() + 1 if not n.shapes.empty else 0 - shapes.index = shapes.index.astype(int) + offset - n.madd( - "Shape", - shapes.index, - geometry=shapes.geometry, - idx=shapes.name, - component="Bus", - type=type, - ) - - def plot_busmap_for_n_clusters(n, n_clusters, solver_name="scip", fn=None): busmap = busmap_for_n_clusters(n, n_clusters, solver_name) cs = busmap.unique() @@ -589,8 +563,8 @@ if __name__ == "__main__": for which in ["regions_onshore", "regions_offshore"]: regions = gpd.read_file(snakemake.input[which]) clustered_regions = cluster_regions((clustering.busmap,), regions) - append_bus_shapes(nc, clustered_regions, type=which.split("_")[1]) clustered_regions.to_file(snakemake.output[which]) + append_bus_shapes(nc, clustered_regions, type=which.split("_")[1]) nc.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) nc.export_to_netcdf(snakemake.output.network) diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index 7b8710a0..f129945c 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -88,12 +88,14 @@ The rule :mod:`simplify_network` does up to four things: import logging from functools import reduce +import geopandas as gpd import numpy as np import pandas as pd import pypsa import scipy as sp from _helpers import configure_logging, set_scenario_config, update_p_nom_max from add_electricity import load_costs +from build_bus_regions import append_bus_shapes from cluster_network import cluster_regions, clustering_for_n_clusters from pypsa.clustering.spatial import ( aggregateoneport, @@ -610,6 +612,7 @@ if __name__ == "__main__": n.lines.drop(remove, axis=1, errors="ignore", inplace=True) if snakemake.wildcards.simpl: + shapes = n.shapes n, cluster_map = cluster( n, int(snakemake.wildcards.simpl), @@ -619,15 +622,19 @@ if __name__ == "__main__": params.simplify_network["feature"], params.aggregation_strategies, ) + n.shapes = shapes busmaps.append(cluster_map) update_p_nom_max(n) - n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) - n.export_to_netcdf(snakemake.output.network) - busmap_s = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0]) busmap_s.to_csv(snakemake.output.busmap) for which in ["regions_onshore", "regions_offshore"]: - cluster_regions(n, busmaps, which, snakemake.input, snakemake.output) + regions = gpd.read_file(snakemake.input[which]) + clustered_regions = cluster_regions(busmaps, regions) + clustered_regions.to_file(snakemake.output[which]) + append_bus_shapes(n, clustered_regions, type=which.split("_")[1]) + + n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) + n.export_to_netcdf(snakemake.output.network) From 4d4b8ea2666e141977f7b168452561e37a79f16a Mon Sep 17 00:00:00 2001 From: cpschau Date: Thu, 11 Apr 2024 13:45:02 +0200 Subject: [PATCH 09/15] n.madd instead of pd.concat --- scripts/base_network.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/scripts/base_network.py b/scripts/base_network.py index 528f04bf..706c112e 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -704,8 +704,14 @@ def _set_shapes(n, country_shapes, offshore_shapes): country_shapes["type"] = "country" offshore_shapes = gpd.read_file(offshore_shapes).rename(columns={"name": "idx"}) offshore_shapes["type"] = "offshore" - all_shapes = 
pd.concat([country_shapes, offshore_shapes]) - n.shapes = pd.concat([n.shapes, all_shapes], ignore_index=True) + all_shapes = pd.concat([country_shapes, offshore_shapes], ignore_index=True) + n.madd( + "Shape", + all_shapes.index, + geometry=all_shapes.geometry, + idx=all_shapes.idx, + type=all_shapes.type, + ) def base_network( From f2db3c63270b94aabd1cf2d65f16ede0e446b5a2 Mon Sep 17 00:00:00 2001 From: cpschau Date: Thu, 11 Apr 2024 16:26:16 +0200 Subject: [PATCH 10/15] add AC & DC lines --- scripts/base_network.py | 67 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 67 insertions(+) diff --git a/scripts/base_network.py b/scripts/base_network.py index 706c112e..00750502 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -705,6 +705,10 @@ def _set_shapes(n, country_shapes, offshore_shapes): offshore_shapes = gpd.read_file(offshore_shapes).rename(columns={"name": "idx"}) offshore_shapes["type"] = "offshore" all_shapes = pd.concat([country_shapes, offshore_shapes], ignore_index=True) +<<<<<<< HEAD +======= + +>>>>>>> dce7d57a (add AC & DC lines) n.madd( "Shape", all_shapes.index, @@ -712,6 +716,69 @@ def _set_shapes(n, country_shapes, offshore_shapes): idx=all_shapes.idx, type=all_shapes.type, ) +<<<<<<< HEAD +======= + + # Write the AC and DC line shapes to the network.shapes component + start_index = n.shapes.index.astype(int).max() + 1 + index_AC = pd.RangeIndex(start=start_index, stop=start_index + len(n.lines)) + geo_AC = gpd.GeoSeries( + n.lines.geometry.apply(shapely.wkt.loads).fillna( + n.lines[["bus0", "bus1"]].apply( + lambda x: LineString( + [n.buses.loc[x[0], ["x", "y"]], n.buses.loc[x[1], ["x", "y"]]] + ), + axis=1, + ) + ) + ) + geo_AC.index = index_AC + + n.madd( + "Shape", + index_AC, + geometry=geo_AC.values, + idx=n.lines.index, + component="Line", + type=n.lines.carrier.values, + ) + + if n.links.empty: + return + start_index = n.shapes.index.astype(int).max() + 1 + index_DC = pd.RangeIndex(start=start_index, stop=start_index + len(n.links)) + if "geometry" in n.links.columns: + geo_DC = gpd.GeoSeries( + n.links.geometry.apply(shapely.wkt.loads).fillna( + n.links[["bus0", "bus1"]].apply( + lambda x: LineString( + [n.buses.loc[x[0], ["x", "y"]], n.buses.loc[x[1], ["x", "y"]]] + ), + axis=1, + ) + ) + ) + else: + geo_DC = gpd.GeoSeries( + n.links[["bus0", "bus1"]].apply( + lambda x: LineString( + [n.buses.loc[x[0], ["x", "y"]], n.buses.loc[x[1], ["x", "y"]]] + ), + axis=1, + ) + ) + + geo_DC = gpd.GeoSeries(geo_DC) + + n.madd( + "Shape", + index_DC, + geometry=geo_DC.values, + idx=n.links.index, + component="Link", + type=n.links.carrier.values, + ) +>>>>>>> dce7d57a (add AC & DC lines) def base_network( From c68e3420c0e400398e1cb1fe5cc5295148d157e1 Mon Sep 17 00:00:00 2001 From: cpschau Date: Thu, 11 Apr 2024 16:42:19 +0200 Subject: [PATCH 11/15] clean-up after rebase --- scripts/base_network.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/scripts/base_network.py b/scripts/base_network.py index 00750502..916241a9 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -705,19 +705,13 @@ def _set_shapes(n, country_shapes, offshore_shapes): offshore_shapes = gpd.read_file(offshore_shapes).rename(columns={"name": "idx"}) offshore_shapes["type"] = "offshore" all_shapes = pd.concat([country_shapes, offshore_shapes], ignore_index=True) -<<<<<<< HEAD -======= - ->>>>>>> dce7d57a (add AC & DC lines) n.madd( "Shape", all_shapes.index, geometry=all_shapes.geometry, idx=all_shapes.idx, - type=all_shapes.type, + 
type=all_shapes["type"], ) -<<<<<<< HEAD -======= # Write the AC and DC line shapes to the network.shapes component start_index = n.shapes.index.astype(int).max() + 1 @@ -778,7 +772,6 @@ def _set_shapes(n, country_shapes, offshore_shapes): component="Link", type=n.links.carrier.values, ) ->>>>>>> dce7d57a (add AC & DC lines) def base_network( From 1867e510e26d22790d1efd1c50078c213082a71a Mon Sep 17 00:00:00 2001 From: cpschau Date: Thu, 11 Apr 2024 17:14:17 +0200 Subject: [PATCH 12/15] remove redundant line --- scripts/base_network.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/scripts/base_network.py b/scripts/base_network.py index 916241a9..663e995f 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -762,8 +762,6 @@ def _set_shapes(n, country_shapes, offshore_shapes): ) ) - geo_DC = gpd.GeoSeries(geo_DC) - n.madd( "Shape", index_DC, From 3cf2d0c05983bdf9c082cd2a9aed242ab307bc37 Mon Sep 17 00:00:00 2001 From: cpschau Date: Tue, 16 Apr 2024 10:24:16 +0200 Subject: [PATCH 13/15] no addition of straight lines --- scripts/base_network.py | 58 ----------------------------------------- 1 file changed, 58 deletions(-) diff --git a/scripts/base_network.py b/scripts/base_network.py index 663e995f..6727a724 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -713,64 +713,6 @@ def _set_shapes(n, country_shapes, offshore_shapes): type=all_shapes["type"], ) - # Write the AC and DC line shapes to the network.shapes component - start_index = n.shapes.index.astype(int).max() + 1 - index_AC = pd.RangeIndex(start=start_index, stop=start_index + len(n.lines)) - geo_AC = gpd.GeoSeries( - n.lines.geometry.apply(shapely.wkt.loads).fillna( - n.lines[["bus0", "bus1"]].apply( - lambda x: LineString( - [n.buses.loc[x[0], ["x", "y"]], n.buses.loc[x[1], ["x", "y"]]] - ), - axis=1, - ) - ) - ) - geo_AC.index = index_AC - - n.madd( - "Shape", - index_AC, - geometry=geo_AC.values, - idx=n.lines.index, - component="Line", - type=n.lines.carrier.values, - ) - - if n.links.empty: - return - start_index = n.shapes.index.astype(int).max() + 1 - index_DC = pd.RangeIndex(start=start_index, stop=start_index + len(n.links)) - if "geometry" in n.links.columns: - geo_DC = gpd.GeoSeries( - n.links.geometry.apply(shapely.wkt.loads).fillna( - n.links[["bus0", "bus1"]].apply( - lambda x: LineString( - [n.buses.loc[x[0], ["x", "y"]], n.buses.loc[x[1], ["x", "y"]]] - ), - axis=1, - ) - ) - ) - else: - geo_DC = gpd.GeoSeries( - n.links[["bus0", "bus1"]].apply( - lambda x: LineString( - [n.buses.loc[x[0], ["x", "y"]], n.buses.loc[x[1], ["x", "y"]]] - ), - axis=1, - ) - ) - - n.madd( - "Shape", - index_DC, - geometry=geo_DC.values, - idx=n.links.index, - component="Link", - type=n.links.carrier.values, - ) - def base_network( eg_buses, From 6aac8c90b4c221e11a9d44df7ddcf91d7765a3f6 Mon Sep 17 00:00:00 2001 From: cpschau Date: Tue, 16 Apr 2024 10:30:39 +0200 Subject: [PATCH 14/15] added release note --- doc/release_notes.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index d42b149f..4441f439 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -181,6 +181,8 @@ Upcoming Release * Fix custom busmap read in `cluster_network`. +* Added shapes to .nc file for different stages of the network object in `base_network`, `build_bus_regions`, and `cluster_network`. 
+ PyPSA-Eur 0.10.0 (19th February 2024) ===================================== From 6ea8d52a81085d3396d831289bd5c3b0f50c602c Mon Sep 17 00:00:00 2001 From: cpschau Date: Tue, 16 Apr 2024 10:43:07 +0200 Subject: [PATCH 15/15] no directory change before mock_snakemake --- scripts/base_network.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/scripts/base_network.py b/scripts/base_network.py index 6727a724..d96a7e54 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -781,12 +781,9 @@ def base_network( if __name__ == "__main__": if "snakemake" not in globals(): - import os from _helpers import mock_snakemake - os.chdir(os.path.dirname(os.path.abspath(__file__))) - snakemake = mock_snakemake("base_network") configure_logging(snakemake) set_scenario_config(snakemake)
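
Note on the net effect of this series: the networks written by base_network, build_bus_regions, simplify_network and cluster_network now carry their country shapes, offshore shapes and (clustered) bus regions in the PyPSA "Shape" component, with the columns geometry, idx, component and type set by the patches above. Below is a minimal, illustrative sketch of reading those shapes back from a resulting file; the network path is a placeholder, and the snippet assumes a PyPSA version that provides the n.shapes GeoDataFrame used throughout this series.

    import pypsa

    # Placeholder path; any network exported after this series carries the shapes.
    n = pypsa.Network("resources/networks/elec_s_37.nc")

    # n.shapes is a geopandas.GeoDataFrame with the columns written by the series:
    # geometry, idx (original region/country name), component ("Bus" for regions) and type.
    onshore_regions = n.shapes.query("component == 'Bus' and type == 'onshore'")
    country_shapes = n.shapes.query("type == 'country'")

    print(onshore_regions.head())
    print(country_shapes.idx.unique())

    # The geometries can be plotted directly, e.g. to inspect the clustered onshore regions.
    ax = onshore_regions.plot(edgecolor="black", alpha=0.4)
    ax.figure.savefig("onshore_regions.png")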