Add suffix -year to optimized components before solving

This simplifies the structure of add_brownfield.py dramatically.

Some other changes need to be made elsewhere because of name
changes (e.g. battery constraints in solve_network.py).
This commit is contained in:
Tom Brown 2020-08-12 18:08:01 +02:00
parent 9012125585
commit 663e1195e1
3 changed files with 71 additions and 137 deletions

View File

@ -18,9 +18,7 @@ import yaml
import pytz import pytz
from vresutils.costdata import annuity from add_existing_baseyear import add_build_year_to_new_assets
from prepare_sector_network import prepare_costs
#First tell PyPSA that links can have multiple outputs by #First tell PyPSA that links can have multiple outputs by
#overriding the component_attrs. This can be done for #overriding the component_attrs. This can be done for
@ -43,121 +41,39 @@ override_component_attrs["Store"].loc["build_year"] = ["integer","year",np.nan,"
override_component_attrs["Store"].loc["lifetime"] = ["float","years",np.nan,"build year","Input (optional)"] override_component_attrs["Store"].loc["lifetime"] = ["float","years",np.nan,"build year","Input (optional)"]
def add_brownfield(n, n_p, year): def add_brownfield(n, n_p, year):
print("adding brownfield") print("adding brownfield")
#first, remove generators, links and stores that track CO2 or global EU values for c in n_p.iterate_components(["Link", "Generator", "Store"]):
n_p.mremove("Generator", [index for index in n_p.generators.index.to_list() if 'ror' in index])
n_p.mremove("Generator", ['EU fossil gas', 'fossil oil'] )
n_p.mremove("Store", ['co2 atmosphere', 'co2 stored', 'EU gas Store'] )
n_p.mremove("Link", ['co2 vent'] )
if "H" in opts: attr = "e" if c.name == "Store" else "p"
n_p.mremove("Link", [index for index in n_p.links.index.to_list() if 'water tanks charger' in index])
n_p.mremove("Link", [index for index in n_p.links.index.to_list() if 'water tanks discharger' in index])
if "B" in opts:
n_p.mremove("Store", ['EU biogas', 'EU solid biomass'])
n_p.mremove("Link", ['biogas to gas'])
if "I" in opts:
n_p.mremove("Store", ['Fischer-Tropsch Store'])
n_p.mremove("Link", ['process emissions' , 'gas for industry', 'solid biomass for industry'])
if "T" in opts:
n_p.mremove("Store", [index for index in n_p.stores.index.to_list() if 'battery storage' in index])
n_p.mremove("Link", [index for index in n_p.links.index.to_list() if 'BEV charger' in index])
n_p.mremove("Link", [index for index in n_p.links.index.to_list() if 'V2G' in index])
previous_timestep=snakemake.config['scenario']['planning_horizons'][snakemake.config['scenario']['planning_horizons'].index(year)-1] #first, remove generators, links and stores that track CO2 or global EU values
previous_timesteps=snakemake.config['scenario']['planning_horizons'][0:snakemake.config['scenario']['planning_horizons'].index(year)] #since these are already in n
grouping_years=snakemake.config['existing_capacities']['grouping_years'] n_p.mremove(c.name,
c.df.index[c.df.lifetime.isna()])
#remove assets whose build_year + lifetime < year are removed
n_p.mremove(c.name,
c.df.index[c.df.build_year + c.df.lifetime < year])
### GENERATORS ### #remove assets if their optimized nominal capacity is lower than a threshold
# generators whose build_year + lifetime < year are removed n_p.mremove(c.name,
n_p.mremove("Generator", [index for index in n_p.generators.index.to_list() c.df.index[c.df[attr + "_nom_opt"] < snakemake.config['existing_capacities']['threshold_capacity']])
if (n_p.generators.loc[index, 'build_year']+n_p.generators.loc[index, 'lifetime'] < int(year))])
# remove generators if their optimized nominal capacity is lower than a threshold #copy over assets but fix their capacity
n_p.mremove("Generator", [index for index in n_p.generators.index.to_list() c.df[attr + "_nom"] = c.df[attr + "_nom_opt"]
if (n_p.generators.loc[index, 'p_nom_opt'] < snakemake.config['existing_capacities']['threshold_capacity'])]) c.df[attr + "_nom_extendable"] = False
n.import_components_from_dataframe(c.df,
c.name)
# generators whose capacity was optimized in the previous year are renamed and build year is added #copy time-dependent
n_p.generators.index=np.where(n_p.generators.index.str[-4:].isin(previous_timesteps+grouping_years)==False, for tattr in n.component_attrs[c.name].index[(n.component_attrs[c.name].type.str.contains("series") &
n_p.generators.index + '-' + previous_timestep, n.component_attrs[c.name].status.str.contains("Input"))]:
n_p.generators.index) n.import_series_from_dataframe(c.pnl[tattr],
n_p.generators.loc[[index for index in n_p.generators.index.to_list() c.name,
if previous_timestep in index], 'build_year']=int(previous_timestep) tattr)
#add generators from previous step
n.madd("Generator",
n_p.generators.index,
bus=n_p.generators.bus,
carrier=n_p.generators.carrier,
p_nom=n_p.generators.p_nom_opt,
marginal_cost=n_p.generators.marginal_cost,
capital_cost=n_p.generators.capital_cost,
efficiency=n_p.generators.efficiency,
p_max_pu=n_p.generators_t.p_max_pu,
build_year=n_p.generators.build_year,
lifetime=n_p.generators.lifetime)
### STORES ###
# stores whose installationYear + lifetime < year are removed
n_p.mremove("Store", [index for index in n_p.stores.index.to_list()
if (n_p.stores.loc[index, 'build_year']+n_p.stores.loc[index, 'lifetime'] < int(year))])
# remove stores if their optimized nominal capacity is lower than a threshold
n_p.mremove("Store", [index for index in n_p.stores.index.to_list()
if (n_p.stores.loc[index, 'e_nom_opt'] < snakemake.config['existing_capacities']['threshold_capacity'])])
# stores whose capacity was optimized in the previous year are renamed and the build year is added
n_p.stores.index=np.where(n_p.stores.index.str[-4:].isin(previous_timesteps+grouping_years)==False,
n_p.stores.index + '-' + previous_timestep,
n_p.stores.index)
n_p.stores.loc[[index for index in n_p.stores.index.to_list()
if previous_timestep in index], 'build_year']=int(previous_timestep)
#add stores from previous steps
n.madd("Store",
n_p.stores.index,
bus=n_p.stores.bus,
carrier=n_p.stores.carrier,
e_nom=n_p.stores.e_nom_opt,
e_cyclic=True,
capital_cost=n_p.stores.capital_cost,
build_year=n_p.stores.build_year,
lifetime=n_p.stores.lifetime)
### LINKS ###
# TODO: add_chp_constraint() in solve_network needs to be adjusted
n_p.mremove("Link", [index for index in n_p.links.index.to_list() if 'CHP' in index])
# links whose installationYear + lifetime < year are removed
n_p.mremove("Link", [index for index in n_p.links.index.to_list()
if (n_p.links.loc[index, 'build_year']+n_p.links.loc[index, 'lifetime'] < int(year))])
# delete links if their optimized nominal capacity is lower than a threshold
n_p.mremove("Link", [index for index in n_p.links.index.to_list()
if (n_p.links.loc[index, 'p_nom_opt'] < snakemake.config['existing_capacities']['threshold_capacity'])])
# links whose capacity was optimized in the previous year are renamed and the build year is added
n_p.links.index=np.where(n_p.links.index.str[-4:].isin(previous_timesteps+grouping_years)==False,
n_p.links.index + '-' + previous_timestep,
n_p.links.index)
n_p.links.loc[[index for index in n_p.links.index.to_list()
if previous_timestep in index], 'build_year']=int(previous_timestep)
#add links from previous steps
n.madd("Link",
n_p.links.index,
bus0=n_p.links.bus0,
bus1=n_p.links.bus1,
bus2=n_p.links.bus2,
carrier=n_p.links.carrier,
p_nom=n_p.links.p_nom_opt,
marginal_cost=n_p.links.marginal_cost,
capital_cost=n_p.links.capital_cost,
efficiency=n_p.links.efficiency,
efficiency2=n_p.links.efficiency2,
build_year=n_p.links.build_year,
lifetime=n_p.links.lifetime)
if __name__ == "__main__": if __name__ == "__main__":
@ -183,27 +99,16 @@ if __name__ == "__main__":
print(snakemake.input.network_p) print(snakemake.input.network_p)
logging.basicConfig(level=snakemake.config['logging_level']) logging.basicConfig(level=snakemake.config['logging_level'])
options = snakemake.config["sector"] year=int(snakemake.wildcards.planning_horizons)
opts = snakemake.wildcards.sector_opts.split('-')
year=snakemake.wildcards.planning_horizons
n = pypsa.Network(snakemake.input.network, n = pypsa.Network(snakemake.input.network,
override_component_attrs=override_component_attrs) override_component_attrs=override_component_attrs)
add_build_year_to_new_assets(n, year)
n_p = pypsa.Network(snakemake.input.network_p, n_p = pypsa.Network(snakemake.input.network_p,
override_component_attrs=override_component_attrs) override_component_attrs=override_component_attrs)
#%% #%%
add_brownfield(n, n_p, year) add_brownfield(n, n_p, year)
Nyears = n.snapshot_weightings.sum()/8760.
costs = prepare_costs(snakemake.input.costs,
snakemake.config['costs']['USD2013_to_EUR2013'],
snakemake.config['costs']['discountrate'],
Nyears)
baseyear = snakemake.config['scenario']["planning_horizons"][0]
n.export_to_netcdf(snakemake.output[0]) n.export_to_netcdf(snakemake.output[0])

View File

@ -43,6 +43,33 @@ override_component_attrs["Store"].loc["build_year"] = ["integer","year",np.nan,"
override_component_attrs["Store"].loc["lifetime"] = ["float","years",np.nan,"build year","Input (optional)"] override_component_attrs["Store"].loc["lifetime"] = ["float","years",np.nan,"build year","Input (optional)"]
def add_build_year_to_new_assets(n, baseyear):
"""
Parameters
----------
n : network
baseyear: year in which optimized assets are built
"""
#Give assets with lifetimes and no build year the build year baseyear
for c in n.iterate_components(["Link", "Generator", "Store"]):
assets = c.df.index[~c.df.lifetime.isna() & c.df.build_year.isna()]
c.df.loc[assets, "build_year"] = baseyear
#add -baseyear to name
rename = pd.Series(c.df.index, c.df.index)
rename[assets] += "-" + str(baseyear)
c.df.rename(index=rename, inplace=True)
#rename time-dependent
for attr in n.component_attrs[c.name].index[(n.component_attrs[c.name].type.str.contains("series") &
n.component_attrs[c.name].status.str.contains("Input"))]:
c.pnl[attr].rename(columns=rename, inplace=True)
def add_existing_renewables(df_agg): def add_existing_renewables(df_agg):
""" """
Append existing renewables to the df_agg pd.DataFrame Append existing renewables to the df_agg pd.DataFrame
@ -82,7 +109,7 @@ def add_existing_renewables(df_agg):
nodal_fraction = pd.Series(0.,elec_buses) nodal_fraction = pd.Series(0.,elec_buses)
for country in n.buses.loc[elec_buses,"country"].unique(): for country in n.buses.loc[elec_buses,"country"].unique():
gens = [c for c in n.generators_t.p_max_pu.columns if c[:2] == country and c[-len(carrier):] == carrier] gens = n.generators.index[(n.generators.index.str[:2] == country) & (n.generators.carrier == carrier)]
cfs = n.generators_t.p_max_pu[gens].mean() cfs = n.generators_t.p_max_pu[gens].mean()
cfs_key = cfs/cfs.sum() cfs_key = cfs/cfs.sum()
nodal_fraction.loc[n.generators.loc[gens,"bus"]] = cfs_key.values nodal_fraction.loc[n.generators.loc[gens,"bus"]] = cfs_key.values
@ -101,7 +128,7 @@ def add_existing_renewables(df_agg):
df_agg.at[name,"YearCommissioned"] = year df_agg.at[name,"YearCommissioned"] = year
df_agg.at[name,"cluster_bus"] = node df_agg.at[name,"cluster_bus"] = node
def add_power_capacities_installed_before_baseyear(n, grouping_years, costs): def add_power_capacities_installed_before_baseyear(n, grouping_years, costs, baseyear):
""" """
Parameters Parameters
@ -179,9 +206,9 @@ def add_power_capacities_installed_before_baseyear(n, grouping_years, costs):
if generator in ['solar', 'onwind', 'offwind']: if generator in ['solar', 'onwind', 'offwind']:
if generator =='offwind': if generator =='offwind':
p_max_pu=n.generators_t.p_max_pu[capacity.index + ' offwind-ac'] p_max_pu=n.generators_t.p_max_pu[capacity.index + ' offwind-ac' + '-' + str(baseyear)]
else: else:
p_max_pu=n.generators_t.p_max_pu[capacity.index + ' ' + generator] p_max_pu=n.generators_t.p_max_pu[capacity.index + ' ' + generator + '-' + str(baseyear)]
n.madd("Generator", n.madd("Generator",
capacity.index, capacity.index,
@ -408,6 +435,8 @@ if __name__ == "__main__":
n = pypsa.Network(snakemake.input.network, n = pypsa.Network(snakemake.input.network,
override_component_attrs=override_component_attrs) override_component_attrs=override_component_attrs)
add_build_year_to_new_assets(n, baseyear)
Nyears = n.snapshot_weightings.sum()/8760. Nyears = n.snapshot_weightings.sum()/8760.
costs = prepare_costs(snakemake.input.costs, costs = prepare_costs(snakemake.input.costs,
snakemake.config['costs']['USD2013_to_EUR2013'], snakemake.config['costs']['USD2013_to_EUR2013'],
@ -415,7 +444,7 @@ if __name__ == "__main__":
Nyears) Nyears)
grouping_years=snakemake.config['existing_capacities']['grouping_years'] grouping_years=snakemake.config['existing_capacities']['grouping_years']
add_power_capacities_installed_before_baseyear(n, grouping_years, costs) add_power_capacities_installed_before_baseyear(n, grouping_years, costs, baseyear)
if "H" in opts: if "H" in opts:
time_dep_hp_cop = options["time_dep_hp_cop"] time_dep_hp_cop = options["time_dep_hp_cop"]

View File

@ -109,13 +109,14 @@ def add_eps_storage_constraint(n):
def add_battery_constraints(n): def add_battery_constraints(n):
nodes = n.buses.index[n.buses.carrier.isin(["battery","home battery"])] chargers = n.links.index[n.links.carrier.str.contains("battery charger") & n.links.p_nom_extendable]
dischargers = chargers.str.replace("charger","discharger")
link_p_nom = get_var(n, "Link", "p_nom") link_p_nom = get_var(n, "Link", "p_nom")
lhs = linexpr((1,link_p_nom[nodes + " charger"]), lhs = linexpr((1,link_p_nom[chargers]),
(-n.links.loc[nodes + " discharger", "efficiency"].values, (-n.links.loc[dischargers, "efficiency"].values,
link_p_nom[nodes + " discharger"].values)) link_p_nom[dischargers].values))
define_constraints(n, lhs, "=", 0, 'Link', 'charger_ratio') define_constraints(n, lhs, "=", 0, 'Link', 'charger_ratio')
@ -363,4 +364,3 @@ if __name__ == "__main__":
n.export_to_netcdf(snakemake.output[0]) n.export_to_netcdf(snakemake.output[0])
logger.info("Maximum memory usage: {}".format(mem.mem_usage)) logger.info("Maximum memory usage: {}".format(mem.mem_usage))