# -*- coding: utf-8 -*-
# SPDX-FileCopyrightText: 2020-2023 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: MIT
"""
Prepares brownfield data from previous planning horizon.
"""

import logging

import numpy as np
import pandas as pd
import pypsa
from _helpers import update_config_with_sector_opts
from add_existing_baseyear import add_build_year_to_new_assets

logger = logging.getLogger(__name__)

idx = pd.IndexSlice


def add_brownfield(n, n_p, year):
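    """
    Add brownfield capacities from the previous planning horizon.

    The optimised transmission capacities of the solved network ``n_p``
    (previous horizon) become minimum capacities in ``n`` (current horizon
    ``year``); surviving generators, links and stores are carried over with
    their optimised capacities fixed.
    """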
    logger.info(f"Preparing brownfield for the year {year}")

    # electric transmission grid: set optimised capacities of the previous
    # horizon as minimum capacities for this horizon
    n.lines.s_nom_min = n_p.lines.s_nom_opt
    dc_i = n.links[n.links.carrier == "DC"].index
    n.links.loc[dc_i, "p_nom_min"] = n_p.links.loc[dc_i, "p_nom_opt"]

    for c in n_p.iterate_components(["Link", "Generator", "Store"]):
        attr = "e" if c.name == "Store" else "p"

        # first, remove generators, links and stores that track
        # CO2 or global EU values since these are already in n
        n_p.mremove(c.name, c.df.index[c.df.lifetime == np.inf])

        # remove assets whose build_year + lifetime < year
        n_p.mremove(c.name, c.df.index[c.df.build_year + c.df.lifetime < year])

        # remove assets if their optimized nominal capacity is lower than a threshold
        # since CHP heat Link is proportional to CHP electric Link, make sure threshold is compatible
        chp_heat = c.df.index[
            (c.df[f"{attr}_nom_extendable"] & c.df.index.str.contains("urban central"))
            & c.df.index.str.contains("CHP")
            & c.df.index.str.contains("heat")
        ]

        threshold = snakemake.params.threshold_capacity

        if not chp_heat.empty:
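            # scale the threshold for the CHP heat links by the efficiency and
            # p_nom_ratio of the corresponding electric links, so that heat and
            # electric CHP links are removed consistently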
            threshold_chp_heat = (
                threshold
                * c.df.efficiency[chp_heat.str.replace("heat", "electric")].values
                * c.df.p_nom_ratio[chp_heat.str.replace("heat", "electric")].values
                / c.df.efficiency[chp_heat].values
            )
            n_p.mremove(
                c.name,
                chp_heat[c.df.loc[chp_heat, f"{attr}_nom_opt"] < threshold_chp_heat],
            )

        n_p.mremove(
            c.name,
            c.df.index[
                (c.df[f"{attr}_nom_extendable"] & ~c.df.index.isin(chp_heat))
                & (c.df[f"{attr}_nom_opt"] < threshold)
            ],
        )

        # copy over assets but fix their capacity
        c.df[f"{attr}_nom"] = c.df[f"{attr}_nom_opt"]
        c.df[f"{attr}_nom_extendable"] = False

        n.import_components_from_dataframe(c.df, c.name)

        # copy time-dependent
        selection = n.component_attrs[c.name].type.str.contains(
            "series"
        ) & n.component_attrs[c.name].status.str.contains("Input")
        for tattr in n.component_attrs[c.name].index[selection]:
            n.import_series_from_dataframe(c.pnl[tattr], c.name, tattr)

    # deal with gas network
    pipe_carrier = ["gas pipeline"]
    if snakemake.params.H2_retrofit:
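        # with H2 retrofitting enabled, part of the gas grid of the previous
        # horizon may already have been converted to hydrogen pipelines; that
        # converted capacity must not be carried over as gas capacity again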
        # drop capacities of previous year to avoid duplicating
        to_drop = n.links.carrier.isin(pipe_carrier) & (n.links.build_year != year)
        n.mremove("Link", n.links.loc[to_drop].index)

        # subtract the already retrofitted from today's gas grid capacity
        h2_retrofitted_fixed_i = n.links[
            (n.links.carrier == "H2 pipeline retrofitted")
            & (n.links.build_year != year)
        ].index
        gas_pipes_i = n.links[n.links.carrier.isin(pipe_carrier)].index
        CH4_per_H2 = 1 / snakemake.params.H2_retrofit_capacity_per_CH4
        fr = "H2 pipeline retrofitted"
        to = "gas pipeline"
        # today's pipe capacity
        pipe_capacity = n.links.loc[gas_pipes_i, "p_nom"]
        # already retrofitted capacity from gas -> H2
        already_retrofitted = (
            n.links.loc[h2_retrofitted_fixed_i, "p_nom"]
            .rename(lambda x: x.split("-2")[0].replace(fr, to))
            .groupby(level=0)
            .sum()
        )
        remaining_capacity = (
            pipe_capacity
            - CH4_per_H2
            * already_retrofitted.reindex(index=pipe_capacity.index).fillna(0)
        )
        n.links.loc[gas_pipes_i, "p_nom"] = remaining_capacity
    else:
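        # without H2 retrofitting, reset the capacity of gas pipelines newly
        # added in this horizon to zero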
        new_pipes = n.links.carrier.isin(pipe_carrier) & (
            n.links.build_year == year
        )
        n.links.loc[new_pipes, "p_nom"] = 0.0
        n.links.loc[new_pipes, "p_nom_min"] = 0.0


def disable_grid_expansion_if_LV_limit_hit(n):
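    """
    Fix transmission expansion and drop the line volume constraint if the
    lower capacity bounds of the grid already exhaust the ``lv_limit``.
    """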
    if "lv_limit" not in n.global_constraints.index:
        return

    # calculate minimum line volume (LV) implied by the lower capacity bounds
    attr = "nom_min"
    dc = n.links.index[n.links.carrier == "DC"]
    tot = (n.lines["s_" + attr] * n.lines["length"]).sum() + (
        n.links.loc[dc, "p_" + attr] * n.links.loc[dc, "length"]
    ).sum()

    diff = n.global_constraints.at["lv_limit", "constant"] - tot

    # allow small numerical differences
    limit = 1

    if diff < limit:
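        # the limit is (almost) binding already: fix AC and DC capacities at
        # their lower bounds and remove the constraint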
        logger.info(
            f"LV limit is already reached (gap {diff}), disabling expansion and LV limit"
        )
        expandable_acs = n.lines.index[n.lines.s_nom_extendable]
        n.lines.loc[expandable_acs, "s_nom_extendable"] = False
        n.lines.loc[expandable_acs, "s_nom"] = n.lines.loc[expandable_acs, "s_nom_min"]

        expandable_dcs = n.links.index[
            n.links.p_nom_extendable & (n.links.carrier == "DC")
        ]
        n.links.loc[expandable_dcs, "p_nom_extendable"] = False
        n.links.loc[expandable_dcs, "p_nom"] = n.links.loc[expandable_dcs, "p_nom_min"]

        n.global_constraints.drop("lv_limit", inplace=True)


if __name__ == "__main__":
    if "snakemake" not in globals():
        from _helpers import mock_snakemake

        snakemake = mock_snakemake(
            "add_brownfield",
            simpl="",
            clusters="37",
            opts="",
            ll="v1.0",
            sector_opts="168H-T-H-B-I-solar+p3-dist1",
            planning_horizons=2030,
        )

    logging.basicConfig(level=snakemake.config["logging"]["level"])

    update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts)

    logger.info(f"Preparing brownfield from the file {snakemake.input.network_p}")

    year = int(snakemake.wildcards.planning_horizons)
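    # n: network of the current planning horizon;
    # n_p: solved network of the previous planning horizon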
    n = pypsa.Network(snakemake.input.network)

    add_build_year_to_new_assets(n, year)

    n_p = pypsa.Network(snakemake.input.network_p)

    add_brownfield(n, n_p, year)

    disable_grid_expansion_if_LV_limit_hit(n)

    n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
    n.export_to_netcdf(snakemake.output[0])