Merge branch 'master' of github.com:PyPSA/pypsa-eur into eurostat-march2024

Fabian Neumann 2024-05-21 18:49:41 +02:00
commit c600d327b0
2 changed files with 17 additions and 14 deletions


@@ -132,22 +132,20 @@ def disable_grid_expansion_if_limit_hit(n):
     minimum and extendable is turned off; the corresponding global
     constraint is then dropped.
     """
-    cols = {"cost": "capital_cost", "volume": "length"}
-    for limit_type in ["cost", "volume"]:
-        glcs = n.global_constraints.query(
-            f"type == 'transmission_expansion_{limit_type}_limit'"
-        )
+    types = {"expansion_cost": "capital_cost", "volume_expansion": "length"}
+    for limit_type in types:
+        glcs = n.global_constraints.query(f"type == 'transmission_{limit_type}_limit'")
 
         for name, glc in glcs.iterrows():
             total_expansion = (
                 (
                     n.lines.query("s_nom_extendable")
-                    .eval(f"s_nom_min * {cols[limit_type]}")
+                    .eval(f"s_nom_min * {types[limit_type]}")
                     .sum()
                 )
                 + (
                     n.links.query("carrier == 'DC' and p_nom_extendable")
-                    .eval(f"p_nom_min * {cols[limit_type]}")
+                    .eval(f"p_nom_min * {types[limit_type]}")
                     .sum()
                 )
             ).sum()
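The rename fixes the constraint lookup: PyPSA's transmission limits are typed 'transmission_expansion_cost_limit' and 'transmission_volume_expansion_limit', so the old template f"type == 'transmission_expansion_{limit_type}_limit'" could never match the volume constraint. A minimal sketch of the query strings the new mapping produces (standalone, no network required):

    # Sketch: the dict keys now slot directly into the constraint type names,
    # and the values pick the column used to weight the expansion.
    types = {"expansion_cost": "capital_cost", "volume_expansion": "length"}
    for limit_type in types:
        print(f"type == 'transmission_{limit_type}_limit'  (weighted by {types[limit_type]})")
    # type == 'transmission_expansion_cost_limit'  (weighted by capital_cost)
    # type == 'transmission_volume_expansion_limit'  (weighted by length)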


@@ -3634,15 +3634,13 @@ def set_temporal_aggregation(n, resolution, snapshot_weightings):
         logger.info("Use every %s snapshot as representative", sn)
         n.set_snapshots(n.snapshots[::sn])
         n.snapshot_weightings *= sn
         return n
     else:
         # Otherwise, use the provided snapshots
         snapshot_weightings = pd.read_csv(
             snapshot_weightings, index_col=0, parse_dates=True
         )
-        n.set_snapshots(snapshot_weightings.index)
-        n.snapshot_weightings = snapshot_weightings
 
         # Define a series used for aggregation, mapping each hour in
         # n.snapshots to the closest previous timestep in
         # snapshot_weightings.index
@@ -3656,16 +3654,23 @@
             .map(lambda i: snapshot_weightings.index[i])
         )
 
+        m = n.copy(with_time=False)
+        m.set_snapshots(snapshot_weightings.index)
+        m.snapshot_weightings = snapshot_weightings
+
         # Aggregate all time-varying data.
         for c in n.iterate_components():
+            pnl = getattr(m, c.list_name + "_t")
             for k, df in c.pnl.items():
                 if not df.empty:
                     if c.list_name == "stores" and k == "e_max_pu":
-                        c.pnl[k] = df.groupby(aggregation_map).min()
+                        pnl[k] = df.groupby(aggregation_map).min()
                     elif c.list_name == "stores" and k == "e_min_pu":
-                        c.pnl[k] = df.groupby(aggregation_map).max()
+                        pnl[k] = df.groupby(aggregation_map).max()
                     else:
-                        c.pnl[k] = df.groupby(aggregation_map).mean()
+                        pnl[k] = df.groupby(aggregation_map).mean()
 
+        return m
 
 
 def lossy_bidirectional_links(n, carrier, efficiencies={}):
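To make the aggregation concrete, here is a small self-contained sketch (made-up timestamps, not PyPSA-Eur data) of what aggregation_map does: every native snapshot is mapped to the closest previous representative timestep, and groupby then reduces each time series onto the coarser index. The min/max special-casing for stores mirrors the diff above: an aggregated upper bound (e_max_pu) must hold in every hour it represents, so the tightest value is kept, and symmetrically for the lower bound.

    import pandas as pd

    # Hypothetical native snapshots and representative timesteps.
    hours = pd.date_range("2013-01-01", periods=6, freq="h")
    representative = pd.DatetimeIndex(["2013-01-01 00:00", "2013-01-01 03:00"])

    # Map each hour to the closest previous representative timestep.
    aggregation_map = pd.Series(
        representative.searchsorted(hours, side="right") - 1, index=hours
    ).map(lambda i: representative[i])

    # Aggregate a toy hourly series onto the coarser index.
    load = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], index=hours)
    print(load.groupby(aggregation_map).mean())  # 2.0 at 00:00, 5.0 at 03:00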
@@ -3818,7 +3823,7 @@ if __name__ == "__main__":
     if options["allam_cycle"]:
         add_allam(n, costs)
 
-    set_temporal_aggregation(
+    n = set_temporal_aggregation(
         n, snakemake.params.time_resolution, snakemake.input.snapshot_weightings
     )
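The call-site change follows from the refactor above: in the weightings-CSV branch the function now returns a freshly built copy m instead of mutating n in place, so discarding the return value would silently leave the unaggregated network in use. A toy illustration of the pitfall (generic Python, not PyPSA-Eur code):

    # A function that returns a new object instead of mutating its argument.
    def aggregate(xs):
        return xs[::3]

    xs = list(range(6))
    aggregate(xs)        # old pattern: result silently discarded
    assert xs == [0, 1, 2, 3, 4, 5]
    xs = aggregate(xs)   # new pattern: rebind to the returned object
    assert xs == [0, 3]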