add options to aggregate snapshots temporally

lisazeyen 2022-08-08 08:53:07 +02:00
parent 8dd143f369
commit ce6b92dd6d


@@ -2413,6 +2413,84 @@ def limit_individual_line_extension(n, maxext):
    hvdc = n.links.index[n.links.carrier == 'DC']
    n.links.loc[hvdc, 'p_nom_max'] = n.links.loc[hvdc, 'p_nom'] + maxext

def apply_time_segmentation(n, segments, solver_name="cbc",
                            overwrite_time_dependent=False):
    """Aggregate the time series to segments of varying length.

    Input:
        n: pypsa Network
        segments: (int) number of segments in which the typical period should be
            subdivided
        solver_name: (str) name of the solver
        overwrite_time_dependent: (bool) overwrite the time-dependent data of the
            pypsa network with the typical time series created by tsam
    """
    try:
        import tsam.timeseriesaggregation as tsam
    except ImportError:
        raise ModuleNotFoundError("Optional dependency 'tsam' not found. "
                                  "Install via 'pip install tsam'")

    # get all time-dependent data
    columns = pd.MultiIndex.from_tuples([], names=['component', 'key', 'asset'])
    raw = pd.DataFrame(index=n.snapshots, columns=columns)
    for component in n.all_components:
        pnl = n.pnl(component)
        for key in pnl.keys():
            if not pnl[key].empty:
                df = pnl[key].copy()
                df.columns = pd.MultiIndex.from_product([[component], [key], df.columns])
                raw = pd.concat([raw, df], axis=1)

    # normalise all time-dependent data
    annual_max = raw.max().replace(0, 1)
    raw = raw.div(annual_max, level=0)

    # get representative segments
    agg = tsam.TimeSeriesAggregation(raw, hoursPerPeriod=len(raw),
                                     noTypicalPeriods=1, noSegments=int(segments),
                                     segmentation=True, solver=solver_name)
    segmented = agg.createTypicalPeriods()

    weightings = segmented.index.get_level_values("Segment Duration")
    offsets = np.insert(np.cumsum(weightings[:-1]), 0, 0)
    timesteps = [raw.index[0] + pd.Timedelta(f"{offset}h") for offset in offsets]
    snapshots = pd.DatetimeIndex(timesteps)
    sn_weightings = pd.Series(weightings, index=snapshots, name="weightings", dtype="float64")

    n.set_snapshots(sn_weightings.index)
    n.snapshot_weightings = n.snapshot_weightings.mul(sn_weightings, axis=0)

    # overwrite time-dependent data with the time series created by tsam
    if overwrite_time_dependent:
        values_t = segmented.mul(annual_max).set_index(snapshots)
        for component, key in values_t.columns.droplevel(2).unique():
            n.pnl(component)[key] = values_t[component, key]

    return n
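A minimal usage sketch of the new helper, assuming tsam is installed and a prepared network file is at hand; the file name and segment count below are illustrative placeholders, not values taken from this commit:

# usage sketch only (not part of the diff); path and segment count are placeholders
import pypsa

n = pypsa.Network("prenetworks/elec_s_37.nc")   # hypothetical prepared network
n = apply_time_segmentation(n, segments=4380, solver_name="cbc",
                            overwrite_time_dependent=True)
print(n.snapshot_weightings.head())             # variable segment durations in hours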

def set_temporal_aggregation(n, opts, solver_name):
    """Aggregate the network temporally according to the wildcard options."""
    for o in opts:
        # temporal averaging
        m = re.match(r"^\d+h$", o, re.IGNORECASE)
        if m is not None:
            n = average_every_nhours(n, m.group(0))
        # representative snapshots
        m = re.match(r"^\d+sn$", o, re.IGNORECASE)
        if m is not None:
            sn = int(m.group(0).split("sn")[0])
            logger.info("use every {} snapshot as representative".format(sn))
            n.set_snapshots(n.snapshots[::sn])
            n.snapshot_weightings *= sn
        # segments with package tsam
        if "SEG" in o:
            segments = int(o.replace("SEG", ""))
            logger.info("use temporal segmentation with {} segments".format(segments))
            n = apply_time_segmentation(n, segments, solver_name=solver_name)
    return n
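For context, the opts list is parsed from the scenario wildcard; a short sketch of the three spellings the new dispatcher accepts (the numeric values are examples only, not defaults from this commit):

# option string sketch (example values):
#   "25h"     -> average_every_nhours: resample to 25-hourly means
#   "730sn"   -> keep every 730th snapshot and scale up its weighting
#   "4380SEG" -> apply_time_segmentation with 4380 tsam segments
n = set_temporal_aggregation(n, ["4380SEG"], solver_name="cbc")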
#%%
if __name__ == "__main__":
    if 'snakemake' not in globals():
@@ -2514,11 +2592,8 @@ if __name__ == "__main__":
if options["co2_network"]:
add_co2_network(n, costs)
for o in opts:
m = re.match(r'^\d+h$', o, re.IGNORECASE)
if m is not None:
n = average_every_nhours(n, m.group(0))
break
solver_name = snakemake.config["solving"]["solver"]["name"]
n = set_temporal_aggregation(n, opts, solver_name)
limit_type = "config"
limit = get(snakemake.config["co2_budget"], investment_year)