cluster network: update to new clustering module (pypsa v0.25)

This commit is contained in:
Fabian 2023-07-14 15:47:41 +02:00
parent 3fc84a51e5
commit f566142d38
3 changed files with 32 additions and 69 deletions

View File

@ -277,23 +277,6 @@ def progress_retrieve(url, file, disable=False):
urllib.request.urlretrieve(url, file, reporthook=update_to)
def get_aggregation_strategies(aggregation_strategies):
    """
    Build the bus and generator aggregation strategies for spatial clustering.

    Defaults that cannot be expressed in .yaml (callables such as
    ``_make_consense`` and the build_year/lifetime lambdas) are defined
    here and then overlaid with any user-supplied entries from the
    config, so custom values never erase the required defaults.

    Parameters
    ----------
    aggregation_strategies : dict
        Mapping with optional "buses" and "generators" sub-dicts of
        column -> aggregation function overrides.

    Returns
    -------
    tuple(dict, dict)
        ``(bus_strategies, generator_strategies)`` ready to pass to the
        pypsa clustering routines.
    """
    # Imported lazily so the module can be used without pypsa installed.
    import numpy as np
    from pypsa.clustering.spatial import _make_consense

    # Start from the non-serialisable defaults, then overlay user config.
    bus_strategies = {
        "country": _make_consense("Bus", "country"),
        **aggregation_strategies.get("buses", {}),
    }
    generator_strategies = {
        "build_year": lambda x: 0,
        "lifetime": lambda x: np.inf,
        **aggregation_strategies.get("generators", {}),
    }
    return bus_strategies, generator_strategies
def mock_snakemake(rulename, configfiles=[], **wildcards):
"""
This function is expected to be executed from the 'scripts'-directory of '

View File

@ -133,12 +133,13 @@ import pandas as pd
import pyomo.environ as po
import pypsa
import seaborn as sns
from _helpers import configure_logging, get_aggregation_strategies, update_p_nom_max
from _helpers import configure_logging, update_p_nom_max
from pypsa.clustering.spatial import (
busmap_by_greedy_modularity,
busmap_by_hac,
busmap_by_kmeans,
get_clustering_from_busmap,
make_consense,
)
warnings.filterwarnings(action="ignore", category=UserWarning)
@ -395,10 +396,6 @@ def clustering_for_n_clusters(
extended_link_costs=0,
focus_weights=None,
):
bus_strategies, generator_strategies = get_aggregation_strategies(
aggregation_strategies
)
if not isinstance(custom_busmap, pd.Series):
busmap = busmap_for_n_clusters(
n, n_clusters, solver_name, focus_weights, algorithm, feature
@ -409,12 +406,12 @@ def clustering_for_n_clusters(
clustering = get_clustering_from_busmap(
n,
busmap,
bus_strategies=bus_strategies,
bus_strategies={"country": make_consense},
aggregate_generators_weighted=True,
aggregate_generators_carriers=aggregate_carriers,
aggregate_one_ports=["Load", "StorageUnit"],
line_length_factor=line_length_factor,
generator_strategies=generator_strategies,
generator_strategies=aggregation_strategies["generators"],
scale_link_capital_costs=False,
)
@ -460,7 +457,7 @@ if __name__ == "__main__":
if "snakemake" not in globals():
from _helpers import mock_snakemake
snakemake = mock_snakemake("cluster_network", simpl="", clusters="5")
snakemake = mock_snakemake("cluster_network", simpl="", clusters="37c")
configure_logging(snakemake)
params = snakemake.params
@ -470,11 +467,13 @@ if __name__ == "__main__":
exclude_carriers = params.cluster_network["exclude_carriers"]
aggregate_carriers = set(n.generators.carrier) - set(exclude_carriers)
conventional_carriers = set(params.conventional_carriers)
if snakemake.wildcards.clusters.endswith("m"):
n_clusters = int(snakemake.wildcards.clusters[:-1])
aggregate_carriers = set(params.conventional_carriers).intersection(
aggregate_carriers
)
aggregate_carriers = params.conventional_carriers & aggregate_carriers
elif snakemake.wildcards.clusters.endswith("c"):
n_clusters = int(snakemake.wildcards.clusters[:-1])
aggregate_carriers = aggregate_carriers - conventional_carriers
elif snakemake.wildcards.clusters == "all":
n_clusters = len(n.buses)
else:
@ -497,22 +496,6 @@ if __name__ == "__main__":
Nyears,
).at["HVAC overhead", "capital_cost"]
def consense(x):
    """
    Return the single value shared by every entry of the Series *x*.

    Fails with an AssertionError unless all entries equal the first one
    (or all entries are null), i.e. the column must be uniform.
    """
    first = x.iat[0]
    # Uniformity holds if every entry matches the first, or all are null.
    uniform = (x == first).all() or x.isnull().all()
    assert uniform, "The `potential` configuration option must agree for all renewable carriers, for now!"
    return first
# translate str entries of aggregation_strategies to pd.Series functions:
aggregation_strategies = {
p: {
k: getattr(pd.Series, v)
for k, v in params.aggregation_strategies[p].items()
}
for p in params.aggregation_strategies.keys()
}
custom_busmap = params.custom_busmap
if custom_busmap:
custom_busmap = pd.read_csv(

View File

@ -92,14 +92,14 @@ import numpy as np
import pandas as pd
import pypsa
import scipy as sp
from _helpers import configure_logging, get_aggregation_strategies, update_p_nom_max
from _helpers import configure_logging, update_p_nom_max
from add_electricity import load_costs
from cluster_network import cluster_regions, clustering_for_n_clusters
from pypsa.clustering.spatial import (
aggregategenerators,
aggregateoneport,
busmap_by_stubs,
get_clustering_from_busmap,
make_consense,
)
from pypsa.io import import_components_from_dataframe, import_series_from_dataframe
from scipy.sparse.csgraph import connected_components, dijkstra
@ -253,11 +253,15 @@ def _aggregate_and_move_components(
_adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus, output)
_, generator_strategies = get_aggregation_strategies(aggregation_strategies)
generator_strategies = aggregation_strategies["generators"]
carriers = set(n.generators.carrier) - set(exclude_carriers)
generators, generators_pnl = aggregategenerators(
n, busmap, carriers=carriers, custom_strategies=generator_strategies
generators, generators_pnl = aggregateoneport(
n,
busmap,
"Generator",
carriers=carriers,
custom_strategies=generator_strategies,
)
replace_components(n, "Generator", generators, generators_pnl)
@ -478,9 +482,7 @@ def aggregate_to_substations(n, aggregation_strategies=dict(), buses_i=None):
busmap = n.buses.index.to_series()
busmap.loc[buses_i] = dist.idxmin(1)
bus_strategies, generator_strategies = get_aggregation_strategies(
aggregation_strategies
)
bus_strategies = {"country": make_consense}
clustering = get_clustering_from_busmap(
n,
@ -534,15 +536,6 @@ if __name__ == "__main__":
n = pypsa.Network(snakemake.input.network)
Nyears = n.snapshot_weightings.objective.sum() / 8760
# translate str entries of aggregation_strategies to pd.Series functions:
aggregation_strategies = {
p: {
k: getattr(pd.Series, v)
for k, v in params.aggregation_strategies[p].items()
}
for p in params.aggregation_strategies.keys()
}
n, trafo_map = simplify_network_to_380(n)
technology_costs = load_costs(
@ -560,7 +553,7 @@ if __name__ == "__main__":
params.p_max_pu,
params.simplify_network["exclude_carriers"],
snakemake.output,
aggregation_strategies,
params.aggregation_strategies,
)
busmaps = [trafo_map, simplify_links_map]
@ -573,12 +566,12 @@ if __name__ == "__main__":
params.length_factor,
params.simplify_network,
snakemake.output,
aggregation_strategies=aggregation_strategies,
aggregation_strategies=params.aggregation_strategies,
)
busmaps.append(stub_map)
if params.simplify_network["to_substations"]:
n, substation_map = aggregate_to_substations(n, aggregation_strategies)
n, substation_map = aggregate_to_substations(n, params.aggregation_strategies)
busmaps.append(substation_map)
# treatment of outliers (nodes without a profile for considered carrier):
@ -592,7 +585,9 @@ if __name__ == "__main__":
logger.info(
f"clustering preparation (hac): aggregating {len(buses_i)} buses of type {carrier}."
)
n, busmap_hac = aggregate_to_substations(n, aggregation_strategies, buses_i)
n, busmap_hac = aggregate_to_substations(
n, params.aggregation_strategies, buses_i
)
busmaps.append(busmap_hac)
if snakemake.wildcards.simpl:
@ -603,20 +598,22 @@ if __name__ == "__main__":
solver_name,
params.simplify_network["algorithm"],
params.simplify_network["feature"],
aggregation_strategies,
params.aggregation_strategies,
)
busmaps.append(cluster_map)
# some entries in n.buses are not updated in previous functions, therefore can be wrong. as they are not needed
# and are lost when clustering (for example with the simpl wildcard), we remove them for consistency:
buses_c = {
remove = [
"symbol",
"tags",
"under_construction",
"substation_lv",
"substation_off",
}.intersection(n.buses.columns)
n.buses = n.buses.drop(buses_c, axis=1)
"geometry",
]
n.buses.drop(remove, axis=1, inplace=True, errors="ignore")
n.lines.drop(remove, axis=1, errors="ignore", inplace=True)
update_p_nom_max(n)