Merge branch 'master' into introduce_hac_clustering

This commit is contained in:
Martha Frysztacki 2022-06-27 20:45:40 +02:00 committed by GitHub
commit 50a518a155
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 119 additions and 64 deletions

View File

@ -21,12 +21,22 @@ countries: ['AL', 'AT', 'BA', 'BE', 'BG', 'CH', 'CZ', 'DE', 'DK', 'EE', 'ES', 'F
clustering:
simplify_network:
to_substations: true # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections)
to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections)
algorithm: hac
feature: solar+onwind-time
cluster_network:
algorithm: hac # choose from: [hac, kmeans]
feature: solar+onwind-time # only for hac. choose from: [solar+onwind-time, solar+onwind-cap, solar-time, solar-cap, solar+offwind-cap] etc.
aggregation_strategies:
generators:
p_nom_max: sum # use "min" for more conservative assumptions
p_nom_min: sum
p_min_pu: mean
marginal_cost: mean
committable: any
ramp_limit_up: max
ramp_limit_down: max
efficiency: mean
snapshots:
start: "2013-01-01"

View File

@ -21,12 +21,22 @@ countries: ['BE']
clustering:
simplify_network:
to_substations: true # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections)
to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections)
algorithm: hac
feature: solar+onwind-time
cluster_network:
algorithm: hac # choose from: [hac, kmeans]
feature: solar+onwind-time # only for hac. choose from: [solar+onwind-time, solar+onwind-cap, solar-time, solar-cap, solar+offwind-cap] etc.
aggregation_strategies:
generators:
p_nom_max: sum # use "min" for more conservative assumptions
p_nom_min: sum
p_min_pu: mean
marginal_cost: mean
committable: any
ramp_limit_up: max
ramp_limit_down: max
efficiency: mean
snapshots:
start: "2013-03-01"

View File

@ -1,8 +1,13 @@
,Unit,Values,Description
simplify_network,,,
-- to_substations,bool,"One of {'true','false'}","Aggregates all nodes without power injection (positive or negative, i.e. demand or generation) to electrically closest ones"
-- to_substations,bool,"{'true','false'}","Aggregates all nodes without power injection (positive or negative, i.e. demand or generation) to electrically closest ones"
-- algorithm,str,"One of {kmeans, hac}",
-- feature,str,"Str in the format carrier1+carrier2+...+carrierN-X, where CarrierI can be from {solar, onwind, offwind, ror} and X is one of {cap, time}.",
cluster_network,,,
cluster_network,,,
-- algorithm,str,"One of {kmeans, hac}",
-- feature,str,"Str in the format carrier1+carrier2+...+carrierN-X, where CarrierI can be from {solar, onwind, offwind, ror} and X is one of {cap, time}.",
aggregation_strategies,,,
-- generators,,,
-- -- {key},str,"{key} can be any of the component of the generator (str). Its value can be any that can be converted to pandas.Series using getattr(). For example one of {min, max, sum}.","Aggregates the component according to the given strategy. For example, if sum, then all values within each cluster are summed to represent the new generator."
-- buses,,,
-- -- {key},str,"{key} can be any of the component of the bus (str). Its value can be any that can be converted to pandas.Series using getattr(). For example one of {min, max, sum}.","Aggregates the component according to the given strategy. For example, if sum, then all values within each cluster are summed to represent the new bus."

Can't render this file because it has a wrong number of fields in line 6.

View File

@ -74,6 +74,7 @@ Upcoming Release
* Update rasterio version to correctly calculate exclusion raster
* Clustering strategies for generators and buses have moved from distinct scripts to configurables to unify the process and make it more transparent.
PyPSA-Eur 0.4.0 (22nd September 2021)
=====================================

View File

@ -210,6 +210,22 @@ def progress_retrieve(url, file):
urllib.request.urlretrieve(url, file, reporthook=dlProgress)
def get_aggregation_strategies(aggregation_strategies):
    """Assemble the bus and generator aggregation-strategy mappings.

    Defaults that cannot be expressed in .yaml (they are callables) are
    created inside the function body rather than as default arguments, so
    that user-supplied config entries extend them instead of replacing the
    whole mapping.

    Parameters
    ----------
    aggregation_strategies : dict
        May contain "buses" and/or "generators" sub-dicts with custom
        per-attribute strategies that override the defaults below.

    Returns
    -------
    tuple of (dict, dict)
        ``(bus_strategies, generator_strategies)`` ready to be passed to
        the pypsa network-clustering helpers.
    """
    import numpy as np
    from pypsa.networkclustering import _make_consense

    # default first, then overlay any user-configured bus strategies
    bus_strategies = {"country": _make_consense("Bus", "country")}
    custom_bus = aggregation_strategies.get("buses", {})
    bus_strategies.update(custom_bus)

    # callable defaults (not expressible in yaml), then user overrides
    generator_strategies = {'build_year': lambda x: 0, 'lifetime': lambda x: np.inf}
    custom_gen = aggregation_strategies.get("generators", {})
    generator_strategies.update(custom_gen)

    return bus_strategies, generator_strategies
def mock_snakemake(rulename, **wildcards):
"""

View File

@ -11,11 +11,10 @@ Relevant Settings
.. code:: yaml
focus_weights:
clustering:
aggregation_strategies:
renewable: (keys)
{technology}:
potential:
focus_weights:
solving:
solver:
@ -122,7 +121,7 @@ Exemplary unsolved network clustered to 37 nodes:
"""
import logging
from _helpers import configure_logging, update_p_nom_max
from _helpers import configure_logging, update_p_nom_max, get_aggregation_strategies
import pypsa
import os
@ -334,17 +333,10 @@ def busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights=None, algori
def clustering_for_n_clusters(n, n_clusters, custom_busmap=False, aggregate_carriers=None,
line_length_factor=1.25, potential_mode='simple', solver_name="cbc",
line_length_factor=1.25, aggregation_strategies=dict(), solver_name="cbc",
algorithm="hac", feature=None, extended_link_costs=0, focus_weights=None):
logger.info(f"Clustering network using algorithm `{algorithm}` and feature `{feature}`...")
if potential_mode == 'simple':
p_nom_max_strategy = pd.Series.sum
elif potential_mode == 'conservative':
p_nom_max_strategy = pd.Series.min
else:
raise AttributeError(f"potential_mode should be one of 'simple' or 'conservative' but is '{potential_mode}'")
bus_strategies, generator_strategies = get_aggregation_strategies(aggregation_strategies)
if not isinstance(custom_busmap, pd.Series):
busmap = busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights, algorithm, feature)
@ -353,19 +345,12 @@ def clustering_for_n_clusters(n, n_clusters, custom_busmap=False, aggregate_carr
clustering = get_clustering_from_busmap(
n, busmap,
bus_strategies=dict(country=_make_consense("Bus", "country")),
bus_strategies=bus_strategies,
aggregate_generators_weighted=True,
aggregate_generators_carriers=aggregate_carriers,
aggregate_one_ports=["Load", "StorageUnit"],
line_length_factor=line_length_factor,
generator_strategies={'p_nom_max': p_nom_max_strategy,
'p_nom_min': pd.Series.sum,
'p_min_pu': pd.Series.mean,
'marginal_cost': pd.Series.mean,
'committable': np.any,
'ramp_limit_up': pd.Series.max,
'ramp_limit_down': pd.Series.max,
},
generator_strategies=generator_strategies,
scale_link_capital_costs=False)
if not n.links.empty:
@ -452,8 +437,13 @@ if __name__ == "__main__":
"The `potential` configuration option must agree for all renewable carriers, for now!"
)
return v
potential_mode = consense(pd.Series([snakemake.config['renewable'][tech]['potential']
for tech in renewable_carriers]))
aggregation_strategies = snakemake.config["clustering"].get("aggregation_strategies", {})
# translate str entries of aggregation_strategies to pd.Series functions:
aggregation_strategies = {
p: {k: getattr(pd.Series, v) for k,v in aggregation_strategies[p].items()}
for p in aggregation_strategies.keys()
}
custom_busmap = snakemake.config["enable"].get("custom_busmap", False)
if custom_busmap:
custom_busmap = pd.read_csv(snakemake.input.custom_busmap, index_col=0, squeeze=True)
@ -462,14 +452,14 @@ if __name__ == "__main__":
cluster_config = snakemake.config.get('clustering', {}).get('cluster_network', {})
clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap, aggregate_carriers,
line_length_factor, potential_mode,
line_length_factor, aggregation_strategies,
snakemake.config['solving']['solver']['name'],
cluster_config.get("algorithm", "hac"),
cluster_config.get("feature", "solar+onwind-time"),
hvac_overhead_cost, focus_weights)
update_p_nom_max(n)
update_p_nom_max(clustering.network)
clustering.network.export_to_netcdf(snakemake.output.network)
for attr in ('busmap', 'linemap'): #also available: linemap_positive, linemap_negative
getattr(clustering, attr).to_csv(snakemake.output[attr])

View File

@ -13,6 +13,10 @@ Relevant Settings
.. code:: yaml
clustering:
simplify:
aggregation_strategies:
costs:
USD2013_to_EUR2013:
discountrate:
@ -22,10 +26,6 @@ Relevant Settings
electricity:
max_hours:
renewables: (keys)
{technology}:
potential:
lines:
length_factor:
@ -83,7 +83,7 @@ The rule :mod:`simplify_network` does up to four things:
"""
import logging
from _helpers import configure_logging, update_p_nom_max
from _helpers import configure_logging, update_p_nom_max, get_aggregation_strategies
from cluster_network import clustering_for_n_clusters, cluster_regions
from add_electricity import load_costs
@ -189,7 +189,10 @@ def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus, out
def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, output, aggregate_one_ports={"Load", "StorageUnit"}):
def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, output,
aggregate_one_ports={"Load", "StorageUnit"},
aggregation_strategies=dict()):
def replace_components(n, c, df, pnl):
n.mremove(c, n.df(c).index)
@ -200,7 +203,11 @@ def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, output, a
_adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus, output)
generators, generators_pnl = aggregategenerators(n, busmap, custom_strategies={'p_nom_min': np.sum})
_, generator_strategies = get_aggregation_strategies(aggregation_strategies)
generators, generators_pnl = aggregategenerators(
n, busmap, custom_strategies=generator_strategies
)
replace_components(n, "Generator", generators, generators_pnl)
for one_port in aggregate_one_ports:
@ -214,7 +221,7 @@ def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, output, a
n.mremove(c, df.index[df.bus0.isin(buses_to_del) | df.bus1.isin(buses_to_del)])
def simplify_links(n, costs, config, output):
def simplify_links(n, costs, config, output, aggregation_strategies=dict()):
## Complex multi-node links are folded into end-points
logger.info("Simplifying connected link components")
@ -306,21 +313,23 @@ def simplify_links(n, costs, config, output):
logger.debug("Collecting all components using the busmap")
_aggregate_and_move_components(n, busmap, connection_costs_to_bus, output)
_aggregate_and_move_components(n, busmap, connection_costs_to_bus, output,
aggregation_strategies=aggregation_strategies)
return n, busmap
def remove_stubs(n, costs, config, output):
def remove_stubs(n, costs, config, output, aggregation_strategies=dict()):
logger.info("Removing stubs")
busmap = busmap_by_stubs(n) # ['country'])
connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap, costs, config)
_aggregate_and_move_components(n, busmap, connection_costs_to_bus, output)
_aggregate_and_move_components(n, busmap, connection_costs_to_bus, output,
aggregation_strategies=aggregation_strategies)
return n, busmap
def aggregate_to_substations(n, buses_i=None):
def aggregate_to_substations(n, aggregation_strategies=dict(), buses_i=None):
# can be used to aggregate a selection of buses to electrically closest neighbors
# if no buses are given, nodes that are no substations or without offshore connection are aggregated
@ -345,18 +354,20 @@ def aggregate_to_substations(n, buses_i=None):
busmap = n.buses.index.to_series()
busmap.loc[buses_i] = dist.idxmin(1)
bus_strategies, generator_strategies = get_aggregation_strategies(aggregation_strategies)
clustering = get_clustering_from_busmap(n, busmap,
bus_strategies=dict(country=_make_consense("Bus", "country")),
bus_strategies=bus_strategies,
aggregate_generators_weighted=True,
aggregate_generators_carriers=None,
aggregate_one_ports=["Load", "StorageUnit"],
line_length_factor=1.0,
generator_strategies={'p_nom_max': 'sum', 'p_nom_min': 'sum'},
generator_strategies=generator_strategies,
scale_link_capital_costs=False)
return clustering.network, busmap
def cluster(n, n_clusters, config, algorithm="hac", feature=None):
def cluster(n, n_clusters, config, algorithm="hac", feature=None, aggregation_strategies=dict()):
logger.info(f"Clustering to {n_clusters} buses")
focus_weights = config.get('focus_weights', None)
@ -364,17 +375,9 @@ def cluster(n, n_clusters, config, algorithm="hac", feature=None):
renewable_carriers = pd.Index([tech
for tech in n.generators.carrier.unique()
if tech.split('-', 2)[0] in config['renewable']])
def consense(x):
v = x.iat[0]
assert ((x == v).all() or x.isnull().all()), (
"The `potential` configuration option must agree for all renewable carriers, for now!"
)
return v
potential_mode = (consense(pd.Series([config['renewable'][tech]['potential']
for tech in renewable_carriers]))
if len(renewable_carriers) > 0 else 'conservative')
clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap=False, potential_mode=potential_mode,
clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap=False,
aggregation_strategies=aggregation_strategies,
solver_name=config['solving']['solver']['name'],
algorithm=algorithm, feature=feature,
focus_weights=focus_weights)
@ -390,21 +393,30 @@ if __name__ == "__main__":
n = pypsa.Network(snakemake.input.network)
aggregation_strategies = snakemake.config["clustering"].get("aggregation_strategies", {})
# translate str entries of aggregation_strategies to pd.Series functions:
aggregation_strategies = {
p: {k: getattr(pd.Series, v) for k,v in aggregation_strategies[p].items()}
for p in aggregation_strategies.keys()
}
n, trafo_map = simplify_network_to_380(n)
Nyears = n.snapshot_weightings.objective.sum() / 8760
technology_costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears)
n, simplify_links_map = simplify_links(n, technology_costs, snakemake.config, snakemake.output)
n, simplify_links_map = simplify_links(n, technology_costs, snakemake.config, snakemake.output,
aggregation_strategies)
n, stub_map = remove_stubs(n, technology_costs, snakemake.config, snakemake.output)
n, stub_map = remove_stubs(n, technology_costs, snakemake.config, snakemake.output,
aggregation_strategies=aggregation_strategies)
busmaps = [trafo_map, simplify_links_map, stub_map]
cluster_config = snakemake.config.get('clustering', {}).get('simplify_network', {})
if cluster_config.get('to_substations', False):
n, substation_map = aggregate_to_substations(n)
if cluster_config.get('clustering', {}).get('simplify', {}).get('to_substations', False):
n, substation_map = aggregate_to_substations(n, aggregation_strategies)
busmaps.append(substation_map)
# treatment of outliers (nodes without a profile for considered carrier):
@ -422,8 +434,9 @@ if __name__ == "__main__":
if snakemake.wildcards.simpl:
n, cluster_map = cluster(n, int(snakemake.wildcards.simpl), snakemake.config,
algorithm=cluster_config.get('algorithm', 'hac'),
feature=cluster_config.get('feature', None))
cluster_config.get('algorithm', 'hac'),
cluster_config.get('feature', None),
aggregation_strategies)
busmaps.append(cluster_map)
# some entries in n.buses are not updated in previous functions, therefore can be wrong. as they are not needed

View File

@ -20,12 +20,22 @@ countries: ['BE']
clustering:
simplify_network:
to_substations: true # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections)
to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections)
algorithm: hac
feature: solar+onwind-time
cluster_network:
algorithm: hac # choose from: [hac, kmeans]
feature: solar+onwind-time # only for hac. choose from: [solar+onwind-time, solar+onwind-cap, solar-time, solar-cap, solar+offwind-cap] etc.
aggregation_strategies:
generators:
p_nom_max: sum # use "min" for more conservative assumptions
p_nom_min: sum
p_min_pu: mean
marginal_cost: mean
committable: any
ramp_limit_up: max
ramp_limit_down: max
efficiency: mean
snapshots:
start: "2013-03-01"