Merge pull request #1013 from PyPSA/store-network-shapes

Store network shapes
This commit is contained in:
Fabian Hofmann 2024-04-17 12:59:54 +02:00 committed by GitHub
commit 0a19d90b60
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 104 additions and 29 deletions

View File

@ -184,6 +184,8 @@ Upcoming Release
* Fix custom busmap read in `cluster_network`.
* Added shapes to .nc file for different stages of the network object in `base_network`, `build_bus_regions`, and `cluster_network`.
* Fix p_nom_min of renewables generators for myopic approach and add check of existing capacities in `add_land_use_constraint_m`.
* Add documentation section for how to contribute documentation

View File

@ -698,6 +698,22 @@ def _adjust_capacities_of_under_construction_branches(n, config):
return n return n
def _set_shapes(n, country_shapes, offshore_shapes):
    """
    Load country and offshore shape files and store them in the network's
    ``Shape`` component.

    Parameters
    ----------
    n : pypsa.Network
        Network that receives the shapes.
    country_shapes : str
        Path to a vector file of country geometries with a ``name`` column.
    offshore_shapes : str
        Path to a vector file of offshore geometries with a ``name`` column.
    """
    frames = []
    # Read both files the same way, tagging each with its shape type.
    for path, shape_type in ((country_shapes, "country"), (offshore_shapes, "offshore")):
        gdf = gpd.read_file(path).rename(columns={"name": "idx"})
        gdf["type"] = shape_type
        frames.append(gdf)
    all_shapes = pd.concat(frames, ignore_index=True)
    n.madd(
        "Shape",
        all_shapes.index,
        geometry=all_shapes.geometry,
        idx=all_shapes.idx,
        type=all_shapes["type"],
    )
def base_network( def base_network(
eg_buses, eg_buses,
eg_converters, eg_converters,
@ -758,11 +774,14 @@ def base_network(
n = _adjust_capacities_of_under_construction_branches(n, config) n = _adjust_capacities_of_under_construction_branches(n, config)
_set_shapes(n, country_shapes, offshore_shapes)
return n return n
if __name__ == "__main__": if __name__ == "__main__":
if "snakemake" not in globals(): if "snakemake" not in globals():
from _helpers import mock_snakemake from _helpers import mock_snakemake
snakemake = mock_snakemake("base_network") snakemake = mock_snakemake("base_network")

View File

@ -109,6 +109,34 @@ def voronoi_partition_pts(points, outline):
return polygons return polygons
def append_bus_shapes(n, shapes, type):
    """
    Append bus region shapes to the network's ``Shape`` component.

    Existing shapes with ``component == 'Bus'`` and the same ``type`` are
    removed first, so repeated calls replace rather than duplicate entries.

    Parameters:
        n (pypsa.Network): The network to which the shapes will be appended.
        shapes (geopandas.GeoDataFrame): The shapes to be appended; assumes an
            integer-like index and a ``name`` column — TODO confirm against callers.
        type (str): Shape type tag, e.g. "onshore" or "offshore". Shadows the
            builtin ``type`` but is kept for keyword-argument compatibility.

    Returns:
        None
    """
    # Drop any previously stored bus shapes of this type (idempotent re-run).
    remove = n.shapes.query("component == 'Bus' and type == @type").index
    n.mremove("Shape", remove)
    # Shift the new shapes' indices past the largest existing index so that
    # the appended entries do not collide with shapes already in the network.
    offset = n.shapes.index.astype(int).max() + 1 if not n.shapes.empty else 0
    shapes = shapes.rename(lambda x: int(x) + offset)
    n.madd(
        "Shape",
        shapes.index,
        geometry=shapes.geometry,
        idx=shapes.name,  # attribute access resolves to the "name" column
        component="Bus",
        type=type,
    )
if __name__ == "__main__": if __name__ == "__main__":
if "snakemake" not in globals(): if "snakemake" not in globals():
from _helpers import mock_snakemake from _helpers import mock_snakemake
@ -119,7 +147,8 @@ if __name__ == "__main__":
countries = snakemake.params.countries countries = snakemake.params.countries
n = pypsa.Network(snakemake.input.base_network) base_network = snakemake.input.base_network
n = pypsa.Network(base_network)
country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index("name")[ country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index("name")[
"geometry" "geometry"
@ -173,12 +202,17 @@ if __name__ == "__main__":
offshore_regions_c = offshore_regions_c.loc[offshore_regions_c.area > 1e-2] offshore_regions_c = offshore_regions_c.loc[offshore_regions_c.area > 1e-2]
offshore_regions.append(offshore_regions_c) offshore_regions.append(offshore_regions_c)
pd.concat(onshore_regions, ignore_index=True).to_file( shapes = pd.concat(onshore_regions, ignore_index=True)
snakemake.output.regions_onshore shapes.to_file(snakemake.output.regions_onshore)
) append_bus_shapes(n, shapes, "onshore")
if offshore_regions: if offshore_regions:
pd.concat(offshore_regions, ignore_index=True).to_file( shapes = pd.concat(offshore_regions, ignore_index=True)
snakemake.output.regions_offshore shapes.to_file(snakemake.output.regions_offshore)
) append_bus_shapes(n, shapes, "offshore")
else: else:
offshore_shapes.to_frame().to_file(snakemake.output.regions_offshore) offshore_shapes.to_frame().to_file(snakemake.output.regions_offshore)
# save network with shapes
n.export_to_netcdf(base_network)

View File

@ -135,6 +135,7 @@ import pypsa
import seaborn as sns import seaborn as sns
from _helpers import configure_logging, set_scenario_config, update_p_nom_max from _helpers import configure_logging, set_scenario_config, update_p_nom_max
from add_electricity import load_costs from add_electricity import load_costs
from build_bus_regions import append_bus_shapes
from packaging.version import Version, parse from packaging.version import Version, parse
from pypsa.clustering.spatial import ( from pypsa.clustering.spatial import (
busmap_by_greedy_modularity, busmap_by_greedy_modularity,
@ -428,20 +429,27 @@ def clustering_for_n_clusters(
return clustering return clustering
def cluster_regions(busmaps, input=None, output=None): def cluster_regions(busmaps, regions):
"""
Cluster regions based on busmaps and save the results to a file and to the
network.
Parameters:
- busmaps (list): A list of busmaps used for clustering.
- which (str): The type of regions to cluster.
Returns:
None
"""
busmap = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0]) busmap = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0])
regions = regions.reindex(columns=["name", "geometry"]).set_index("name")
for which in ("regions_onshore", "regions_offshore"): regions_c = regions.dissolve(busmap)
regions = gpd.read_file(getattr(input, which)) regions_c.index.name = "name"
regions = regions.reindex(columns=["name", "geometry"]).set_index("name") return regions_c.reset_index()
regions_c = regions.dissolve(busmap)
regions_c.index.name = "name"
regions_c = regions_c.reset_index()
regions_c.to_file(getattr(output, which))
def plot_busmap_for_n_clusters(n, n_clusters, fn=None): def plot_busmap_for_n_clusters(n, n_clusters, solver_name="scip", fn=None):
busmap = busmap_for_n_clusters(n, n_clusters) busmap = busmap_for_n_clusters(n, n_clusters, solver_name)
cs = busmap.unique() cs = busmap.unique()
cr = sns.color_palette("hls", len(cs)) cr = sns.color_palette("hls", len(cs))
n.plot(bus_colors=busmap.map(dict(zip(cs, cr)))) n.plot(bus_colors=busmap.map(dict(zip(cs, cr))))
@ -538,21 +546,25 @@ if __name__ == "__main__":
params.focus_weights, params.focus_weights,
) )
update_p_nom_max(clustering.network) nc = clustering.network
update_p_nom_max(nc)
if params.cluster_network.get("consider_efficiency_classes"): if params.cluster_network.get("consider_efficiency_classes"):
labels = [f" {label} efficiency" for label in ["low", "medium", "high"]] labels = [f" {label} efficiency" for label in ["low", "medium", "high"]]
nc = clustering.network
nc.generators["carrier"] = nc.generators.carrier.replace(labels, "", regex=True) nc.generators["carrier"] = nc.generators.carrier.replace(labels, "", regex=True)
clustering.network.meta = dict(
snakemake.config, **dict(wildcards=dict(snakemake.wildcards))
)
clustering.network.export_to_netcdf(snakemake.output.network)
for attr in ( for attr in (
"busmap", "busmap",
"linemap", "linemap",
): # also available: linemap_positive, linemap_negative ): # also available: linemap_positive, linemap_negative
getattr(clustering, attr).to_csv(snakemake.output[attr]) getattr(clustering, attr).to_csv(snakemake.output[attr])
cluster_regions((clustering.busmap,), snakemake.input, snakemake.output) nc.shapes = n.shapes.copy()
for which in ["regions_onshore", "regions_offshore"]:
regions = gpd.read_file(snakemake.input[which])
clustered_regions = cluster_regions((clustering.busmap,), regions)
clustered_regions.to_file(snakemake.output[which])
append_bus_shapes(nc, clustered_regions, type=which.split("_")[1])
nc.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
nc.export_to_netcdf(snakemake.output.network)

View File

@ -88,12 +88,14 @@ The rule :mod:`simplify_network` does up to four things:
import logging import logging
from functools import reduce from functools import reduce
import geopandas as gpd
import numpy as np import numpy as np
import pandas as pd import pandas as pd
import pypsa import pypsa
import scipy as sp import scipy as sp
from _helpers import configure_logging, set_scenario_config, update_p_nom_max from _helpers import configure_logging, set_scenario_config, update_p_nom_max
from add_electricity import load_costs from add_electricity import load_costs
from build_bus_regions import append_bus_shapes
from cluster_network import cluster_regions, clustering_for_n_clusters from cluster_network import cluster_regions, clustering_for_n_clusters
from pypsa.clustering.spatial import ( from pypsa.clustering.spatial import (
aggregateoneport, aggregateoneport,
@ -610,6 +612,7 @@ if __name__ == "__main__":
n.lines.drop(remove, axis=1, errors="ignore", inplace=True) n.lines.drop(remove, axis=1, errors="ignore", inplace=True)
if snakemake.wildcards.simpl: if snakemake.wildcards.simpl:
shapes = n.shapes
n, cluster_map = cluster( n, cluster_map = cluster(
n, n,
int(snakemake.wildcards.simpl), int(snakemake.wildcards.simpl),
@ -619,14 +622,19 @@ if __name__ == "__main__":
params.simplify_network["feature"], params.simplify_network["feature"],
params.aggregation_strategies, params.aggregation_strategies,
) )
n.shapes = shapes
busmaps.append(cluster_map) busmaps.append(cluster_map)
update_p_nom_max(n) update_p_nom_max(n)
n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
n.export_to_netcdf(snakemake.output.network)
busmap_s = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0]) busmap_s = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0])
busmap_s.to_csv(snakemake.output.busmap) busmap_s.to_csv(snakemake.output.busmap)
cluster_regions(busmaps, snakemake.input, snakemake.output) for which in ["regions_onshore", "regions_offshore"]:
regions = gpd.read_file(snakemake.input[which])
clustered_regions = cluster_regions(busmaps, regions)
clustered_regions.to_file(snakemake.output[which])
append_bus_shapes(n, clustered_regions, type=which.split("_")[1])
n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
n.export_to_netcdf(snakemake.output.network)