Merge pull request #696 from PyPSA/cluster-network-update

cluster network: update to new clustering module (pypsa v0.25)
This commit is contained in:
Fabian Hofmann 2023-07-17 16:17:57 +02:00 committed by GitHub
commit 7ff7a7ee34
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 41 additions and 71 deletions

View File

@@ -10,7 +10,7 @@ dependencies:
- python>=3.8 - python>=3.8
- pip - pip
- pypsa>=0.23 # - pypsa>=0.23
- atlite>=0.2.9 - atlite>=0.2.9
- dask - dask
@@ -56,3 +56,4 @@ dependencies:
- pip: - pip:
- tsam>=1.1.0 - tsam>=1.1.0
- git+https://github.com/PyPSA/PyPSA.git@master

View File

@@ -277,23 +277,6 @@ def progress_retrieve(url, file, disable=False):
urllib.request.urlretrieve(url, file, reporthook=update_to) urllib.request.urlretrieve(url, file, reporthook=update_to)
def get_aggregation_strategies(aggregation_strategies):
# default aggregation strategies that cannot be defined in .yaml format must be specified within
# the function, otherwise (when defaults are passed in the function's definition) they get lost
# when custom values are specified in the config.
import numpy as np
from pypsa.clustering.spatial import _make_consense
bus_strategies = dict(country=_make_consense("Bus", "country"))
bus_strategies.update(aggregation_strategies.get("buses", {}))
generator_strategies = {"build_year": lambda x: 0, "lifetime": lambda x: np.inf}
generator_strategies.update(aggregation_strategies.get("generators", {}))
return bus_strategies, generator_strategies
def mock_snakemake(rulename, configfiles=[], **wildcards): def mock_snakemake(rulename, configfiles=[], **wildcards):
""" """
This function is expected to be executed from the 'scripts'-directory of ' This function is expected to be executed from the 'scripts'-directory of '

View File

@@ -133,7 +133,7 @@ import pandas as pd
import pyomo.environ as po import pyomo.environ as po
import pypsa import pypsa
import seaborn as sns import seaborn as sns
from _helpers import configure_logging, get_aggregation_strategies, update_p_nom_max from _helpers import configure_logging, update_p_nom_max
from pypsa.clustering.spatial import ( from pypsa.clustering.spatial import (
busmap_by_greedy_modularity, busmap_by_greedy_modularity,
busmap_by_hac, busmap_by_hac,
@@ -395,10 +395,6 @@ def clustering_for_n_clusters(
extended_link_costs=0, extended_link_costs=0,
focus_weights=None, focus_weights=None,
): ):
bus_strategies, generator_strategies = get_aggregation_strategies(
aggregation_strategies
)
if not isinstance(custom_busmap, pd.Series): if not isinstance(custom_busmap, pd.Series):
busmap = busmap_for_n_clusters( busmap = busmap_for_n_clusters(
n, n_clusters, solver_name, focus_weights, algorithm, feature n, n_clusters, solver_name, focus_weights, algorithm, feature
@@ -406,15 +402,20 @@
else: else:
busmap = custom_busmap busmap = custom_busmap
line_strategies = aggregation_strategies.get("lines", dict())
generator_strategies = aggregation_strategies.get("generators", dict())
one_port_strategies = aggregation_strategies.get("one_ports", dict())
clustering = get_clustering_from_busmap( clustering = get_clustering_from_busmap(
n, n,
busmap, busmap,
bus_strategies=bus_strategies,
aggregate_generators_weighted=True, aggregate_generators_weighted=True,
aggregate_generators_carriers=aggregate_carriers, aggregate_generators_carriers=aggregate_carriers,
aggregate_one_ports=["Load", "StorageUnit"], aggregate_one_ports=["Load", "StorageUnit"],
line_length_factor=line_length_factor, line_length_factor=line_length_factor,
line_strategies=line_strategies,
generator_strategies=generator_strategies, generator_strategies=generator_strategies,
one_port_strategies=one_port_strategies,
scale_link_capital_costs=False, scale_link_capital_costs=False,
) )
@@ -460,7 +461,7 @@ if __name__ == "__main__":
if "snakemake" not in globals(): if "snakemake" not in globals():
from _helpers import mock_snakemake from _helpers import mock_snakemake
snakemake = mock_snakemake("cluster_network", simpl="", clusters="5") snakemake = mock_snakemake("cluster_network", simpl="", clusters="37c")
configure_logging(snakemake) configure_logging(snakemake)
params = snakemake.params params = snakemake.params
@@ -470,11 +471,13 @@ if __name__ == "__main__":
exclude_carriers = params.cluster_network["exclude_carriers"] exclude_carriers = params.cluster_network["exclude_carriers"]
aggregate_carriers = set(n.generators.carrier) - set(exclude_carriers) aggregate_carriers = set(n.generators.carrier) - set(exclude_carriers)
conventional_carriers = set(params.conventional_carriers)
if snakemake.wildcards.clusters.endswith("m"): if snakemake.wildcards.clusters.endswith("m"):
n_clusters = int(snakemake.wildcards.clusters[:-1]) n_clusters = int(snakemake.wildcards.clusters[:-1])
aggregate_carriers = set(params.conventional_carriers).intersection( aggregate_carriers = params.conventional_carriers & aggregate_carriers
aggregate_carriers elif snakemake.wildcards.clusters.endswith("c"):
) n_clusters = int(snakemake.wildcards.clusters[:-1])
aggregate_carriers = aggregate_carriers - conventional_carriers
elif snakemake.wildcards.clusters == "all": elif snakemake.wildcards.clusters == "all":
n_clusters = len(n.buses) n_clusters = len(n.buses)
else: else:
@@ -497,22 +500,6 @@ if __name__ == "__main__":
Nyears, Nyears,
).at["HVAC overhead", "capital_cost"] ).at["HVAC overhead", "capital_cost"]
def consense(x):
v = x.iat[0]
assert (
x == v
).all() or x.isnull().all(), "The `potential` configuration option must agree for all renewable carriers, for now!"
return v
# translate str entries of aggregation_strategies to pd.Series functions:
aggregation_strategies = {
p: {
k: getattr(pd.Series, v)
for k, v in params.aggregation_strategies[p].items()
}
for p in params.aggregation_strategies.keys()
}
custom_busmap = params.custom_busmap custom_busmap = params.custom_busmap
if custom_busmap: if custom_busmap:
custom_busmap = pd.read_csv( custom_busmap = pd.read_csv(

View File

@@ -86,17 +86,16 @@ The rule :mod:`simplify_network` does up to four things:
""" """
import logging import logging
from functools import reduce from functools import partial, reduce
import numpy as np import numpy as np
import pandas as pd import pandas as pd
import pypsa import pypsa
import scipy as sp import scipy as sp
from _helpers import configure_logging, get_aggregation_strategies, update_p_nom_max from _helpers import configure_logging, update_p_nom_max
from add_electricity import load_costs from add_electricity import load_costs
from cluster_network import cluster_regions, clustering_for_n_clusters from cluster_network import cluster_regions, clustering_for_n_clusters
from pypsa.clustering.spatial import ( from pypsa.clustering.spatial import (
aggregategenerators,
aggregateoneport, aggregateoneport,
busmap_by_stubs, busmap_by_stubs,
get_clustering_from_busmap, get_clustering_from_busmap,
@@ -253,11 +252,15 @@ def _aggregate_and_move_components(
_adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus, output) _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus, output)
_, generator_strategies = get_aggregation_strategies(aggregation_strategies) generator_strategies = aggregation_strategies["generators"]
carriers = set(n.generators.carrier) - set(exclude_carriers) carriers = set(n.generators.carrier) - set(exclude_carriers)
generators, generators_pnl = aggregategenerators( generators, generators_pnl = aggregateoneport(
n, busmap, carriers=carriers, custom_strategies=generator_strategies n,
busmap,
"Generator",
carriers=carriers,
custom_strategies=generator_strategies,
) )
replace_components(n, "Generator", generators, generators_pnl) replace_components(n, "Generator", generators, generators_pnl)
@@ -478,19 +481,20 @@ def aggregate_to_substations(n, aggregation_strategies=dict(), buses_i=None):
busmap = n.buses.index.to_series() busmap = n.buses.index.to_series()
busmap.loc[buses_i] = dist.idxmin(1) busmap.loc[buses_i] = dist.idxmin(1)
bus_strategies, generator_strategies = get_aggregation_strategies( line_strategies = aggregation_strategies.get("lines", dict())
aggregation_strategies generator_strategies = aggregation_strategies.get("generators", dict())
) one_port_strategies = aggregation_strategies.get("one_ports", dict())
clustering = get_clustering_from_busmap( clustering = get_clustering_from_busmap(
n, n,
busmap, busmap,
bus_strategies=bus_strategies,
aggregate_generators_weighted=True, aggregate_generators_weighted=True,
aggregate_generators_carriers=None, aggregate_generators_carriers=None,
aggregate_one_ports=["Load", "StorageUnit"], aggregate_one_ports=["Load", "StorageUnit"],
line_length_factor=1.0, line_length_factor=1.0,
line_strategies=line_strategies,
generator_strategies=generator_strategies, generator_strategies=generator_strategies,
one_port_strategies=one_port_strategies,
scale_link_capital_costs=False, scale_link_capital_costs=False,
) )
return clustering.network, busmap return clustering.network, busmap
@@ -534,15 +538,6 @@ if __name__ == "__main__":
n = pypsa.Network(snakemake.input.network) n = pypsa.Network(snakemake.input.network)
Nyears = n.snapshot_weightings.objective.sum() / 8760 Nyears = n.snapshot_weightings.objective.sum() / 8760
# translate str entries of aggregation_strategies to pd.Series functions:
aggregation_strategies = {
p: {
k: getattr(pd.Series, v)
for k, v in params.aggregation_strategies[p].items()
}
for p in params.aggregation_strategies.keys()
}
n, trafo_map = simplify_network_to_380(n) n, trafo_map = simplify_network_to_380(n)
technology_costs = load_costs( technology_costs = load_costs(
@@ -560,7 +555,7 @@ if __name__ == "__main__":
params.p_max_pu, params.p_max_pu,
params.simplify_network["exclude_carriers"], params.simplify_network["exclude_carriers"],
snakemake.output, snakemake.output,
aggregation_strategies, params.aggregation_strategies,
) )
busmaps = [trafo_map, simplify_links_map] busmaps = [trafo_map, simplify_links_map]
@@ -573,12 +568,12 @@ if __name__ == "__main__":
params.length_factor, params.length_factor,
params.simplify_network, params.simplify_network,
snakemake.output, snakemake.output,
aggregation_strategies=aggregation_strategies, aggregation_strategies=params.aggregation_strategies,
) )
busmaps.append(stub_map) busmaps.append(stub_map)
if params.simplify_network["to_substations"]: if params.simplify_network["to_substations"]:
n, substation_map = aggregate_to_substations(n, aggregation_strategies) n, substation_map = aggregate_to_substations(n, params.aggregation_strategies)
busmaps.append(substation_map) busmaps.append(substation_map)
# treatment of outliers (nodes without a profile for considered carrier): # treatment of outliers (nodes without a profile for considered carrier):
@@ -592,7 +587,9 @@ if __name__ == "__main__":
logger.info( logger.info(
f"clustering preparation (hac): aggregating {len(buses_i)} buses of type {carrier}." f"clustering preparation (hac): aggregating {len(buses_i)} buses of type {carrier}."
) )
n, busmap_hac = aggregate_to_substations(n, aggregation_strategies, buses_i) n, busmap_hac = aggregate_to_substations(
n, params.aggregation_strategies, buses_i
)
busmaps.append(busmap_hac) busmaps.append(busmap_hac)
if snakemake.wildcards.simpl: if snakemake.wildcards.simpl:
@@ -603,20 +600,22 @@ if __name__ == "__main__":
solver_name, solver_name,
params.simplify_network["algorithm"], params.simplify_network["algorithm"],
params.simplify_network["feature"], params.simplify_network["feature"],
aggregation_strategies, params.aggregation_strategies,
) )
busmaps.append(cluster_map) busmaps.append(cluster_map)
# some entries in n.buses are not updated in previous functions, therefore can be wrong. as they are not needed # some entries in n.buses are not updated in previous functions, therefore can be wrong. as they are not needed
# and are lost when clustering (for example with the simpl wildcard), we remove them for consistency: # and are lost when clustering (for example with the simpl wildcard), we remove them for consistency:
buses_c = { remove = [
"symbol", "symbol",
"tags", "tags",
"under_construction", "under_construction",
"substation_lv", "substation_lv",
"substation_off", "substation_off",
}.intersection(n.buses.columns) "geometry",
n.buses = n.buses.drop(buses_c, axis=1) ]
n.buses.drop(remove, axis=1, inplace=True, errors="ignore")
n.lines.drop(remove, axis=1, errors="ignore", inplace=True)
update_p_nom_max(n) update_p_nom_max(n)