remove operations rules to reduce the number of simultaneous changes

Fabian Neumann 2024-03-04 17:42:35 +01:00
parent 1ff8321c5e
commit e6347621f9
4 changed files with 0 additions and 386 deletions

@@ -828,21 +828,6 @@ solving:
  walltime: "12:00:00"

operations:
  rolling_horizon:
    window: 10 # days
    overlap: 8 # days
  bidding_prices: # EUR/MWh
    H2 Electrolysis: -10
    H2 Fuel Cell: 200
    urban central water tanks charger: -10
    urban central water tanks discharger: 10
    hydro: 70
    solid biomass: 150
    biogas: 100
  co2_price: 500 # EUR/t
  co2_sequestration_limit: 200 # Mt/a

# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#plotting
plotting:
  map:
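
Note that window and overlap are given in days; the myopic operations script below converts them to snapshot counts, so each rolling-horizon solve advances by window - overlap days. A minimal sketch of that conversion, assuming 3-hourly snapshots (the actual resolution follows from the sector_opts wildcard):

freq = 3                  # hours per snapshot, e.g. from a "3H" resolution
window = 10 * 24 // freq  # 80 snapshots per optimisation window
overlap = 8 * 24 // freq  # 64 snapshots shared with the next window
kept = window - overlap   # 16 snapshots (2 days) kept per iteration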

@@ -41,61 +41,3 @@ rule solve_sector_network:
        "../envs/environment.yaml"
    script:
        "../scripts/solve_network.py"


rule solve_operations_network_other_year:
    input:
        pre=RDIR
        + "/prenetworks/elec{weather_year}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc",
        post=RDIR
        + "/postnetworks/elec{capacity_year}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc",
    output:
        RDIR
        + "/operations/elec{capacity_year}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_{weather_year}.nc",
    shadow:
        "shallow"
    log:
        solver=RDIR
        + "/logs/elec{capacity_year}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_{weather_year}_solver.log",
        python=RDIR
        + "/logs/elec{capacity_year}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_{weather_year}_python.log",
    threads: 4
    resources:
        mem_mb=10000,
    benchmark:
        (
            RDIR
            + "/benchmarks/solve_operations_network/elec{capacity_year}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_{weather_year}"
        )
    script:
        "../scripts/solve_operations_network_other_year.py"
# rule solve_operations_network_other_year_myopic:
#     input:
#         overrides="data/override_component_attrs",
#         pre=RDIR
#         + "/prenetworks/elec{weather_year}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc",
#         post=RDIR
#         + "/postnetworks/elec{capacity_year}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc",
#         previous=solved_previous_year,
#     output:
#         RDIR
#         + "/operations/elec{capacity_year}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_{weather_year}_myopic.nc",
#     shadow:
#         "shallow"
#     log:
#         solver=RDIR
#         + "/logs/elec{capacity_year}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_{weather_year}_solver.log",
#         python=RDIR
#         + "/logs/elec{capacity_year}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_{weather_year}_python.log",
#     threads: 4
#     resources:
#         mem_mb=10000,
#     benchmark:
#         (
#             RDIR
#             + "/benchmarks/solve_operations_network_myopic/elec{capacity_year}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_{weather_year}"
#         )
#     script:
#         "../scripts/solve_operations_network_other_year_myopic.py"

@@ -1,147 +0,0 @@
# -*- coding: utf-8 -*-
# SPDX-FileCopyrightText: : 2017-2024 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: MIT
"""
Solve operations network.
"""

import logging

import numpy as np
import pypsa
from solve_network import prepare_network, solve_network

logger = logging.getLogger(__name__)
pypsa.pf.logger.setLevel(logging.WARNING)

def set_parameters_from_optimized(n, n_optim):
    # transfer optimised capacities of typed lines via the number of
    # parallel circuits
    lines_typed_i = n.lines.index[n.lines.type != ""]
    n.lines.loc[lines_typed_i, "num_parallel"] = n_optim.lines["num_parallel"].reindex(
        lines_typed_i, fill_value=0.0
    )
    # thermal rating of typed lines: s_nom = sqrt(3) * i_nom * v_nom * num_parallel
    n.lines.loc[lines_typed_i, "s_nom"] = (
        np.sqrt(3)
        * n.lines["type"].map(n.line_types.i_nom)
        * n.lines.bus0.map(n.buses.v_nom)
        * n.lines.num_parallel
    )

    # untyped lines carry their electrical parameters directly
    lines_untyped_i = n.lines.index[n.lines.type == ""]
    for attr in ("s_nom", "r", "x"):
        n.lines.loc[lines_untyped_i, attr] = n_optim.lines[attr].reindex(
            lines_untyped_i, fill_value=0.0
        )
    n.lines["s_nom_extendable"] = False

    # fix optimised capacities of links, generators, storage units and stores
    links_dc_i = n.links.index[n.links.p_nom_extendable]
    n.links.loc[links_dc_i, "p_nom"] = n_optim.links["p_nom_opt"].reindex(
        links_dc_i, fill_value=0.0
    )
    n.links.loc[links_dc_i, "p_nom_extendable"] = False

    gen_extend_i = n.generators.index[n.generators.p_nom_extendable]
    n.generators.loc[gen_extend_i, "p_nom"] = n_optim.generators["p_nom_opt"].reindex(
        gen_extend_i, fill_value=0.0
    )
    n.generators.loc[gen_extend_i, "p_nom_extendable"] = False

    stor_units_extend_i = n.storage_units.index[n.storage_units.p_nom_extendable]
    n.storage_units.loc[stor_units_extend_i, "p_nom"] = n_optim.storage_units[
        "p_nom_opt"
    ].reindex(stor_units_extend_i, fill_value=0.0)
    n.storage_units.loc[stor_units_extend_i, "p_nom_extendable"] = False

    stor_extend_i = n.stores.index[n.stores.e_nom_extendable]
    n.stores.loc[stor_extend_i, "e_nom"] = n_optim.stores["e_nom_opt"].reindex(
        stor_extend_i, fill_value=0.0
    )
    n.stores.loc[stor_extend_i, "e_nom_extendable"] = False

    return n
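
The typed-line branch above sets the thermal rating from the standard three-phase relation S = sqrt(3) * V * I, scaled by the number of parallel circuits. A quick check with illustrative values (not taken from any real line type table):

import numpy as np

# illustrative values, not from a PyPSA line type table
i_nom = 2.0    # kA, rated current of one circuit
v_nom = 380.0  # kV, line-to-line voltage at bus0
num_parallel = 2

s_nom = np.sqrt(3) * i_nom * v_nom * num_parallel  # kA * kV = MVA
print(f"{s_nom:.0f} MVA")  # -> 2633 MVA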

def remove_unused_components(n, threshold=50):
    logger.info("Remove assets that are barely used to speed things up.")

    for c in n.iterate_components({"Store", "Link", "Generator"}):
        attr = "e_nom" if c.name == "Store" else "p_nom"
        to_remove = c.df.loc[c.df[attr] < threshold].index
        logger.info(f"Removing barely used {c.name}s:\n{to_remove}")
        n.mremove(c.name, to_remove)

    return n

def add_load_shedding(n, voll=1e4):
    logger.info("Add load shedding to all buses.")

    if "load" in n.generators.carrier.unique():
        to_remove = n.generators.query("carrier == 'load'").index
        logger.info(f"Removing pre-existing load shedding:\n{to_remove}")
        n.mremove("Generator", to_remove)

    n.madd(
        "Generator",
        n.buses.index,
        suffix=" load",
        bus=n.buses.index,
        carrier="load",
        marginal_cost=voll,
        p_nom=1e6,
    )

    return n

if __name__ == "__main__":
    if "snakemake" not in globals():
        from helper import mock_snakemake

        snakemake = mock_snakemake(
            "solve_operations_network_other_year",
            capacity_year=1952,
            simpl="",
            opts="",
            clusters=37,
            lv=2.0,
            sector_opts="Co2L0-25H-T-H-B-I-A",
            planning_horizons=2030,
            weather_year=2013,
        )

    logging.basicConfig(
        filename=snakemake.log.python, level=snakemake.config["logging_level"]
    )

    tmpdir = snakemake.config["solving"].get("tmpdir")
    if tmpdir is not None:
        from pathlib import Path

        Path(tmpdir).mkdir(parents=True, exist_ok=True)

    n = pypsa.Network(snakemake.input.pre)
    n_post = pypsa.Network(snakemake.input.post)
    n = set_parameters_from_optimized(n, n_post)
    del n_post

    n = remove_unused_components(n)
    n = add_load_shedding(n)

    opts = snakemake.wildcards.sector_opts.split("-")
    solve_opts = snakemake.config["solving"]["options"]
    solve_opts["skip_iterations"] = True

    n = prepare_network(n, solve_opts)
    n = solve_network(
        n,
        config=snakemake.config,
        opts=opts,
        solver_dir=tmpdir,
        solver_logfile=snakemake.log.solver,
    )
    n.export_to_netcdf(snakemake.output[0])

@@ -1,166 +0,0 @@
# -*- coding: utf-8 -*-
# SPDX-FileCopyrightText: : 2017-2024 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: MIT
"""
Solve myopic operations network.
"""

import logging

import pandas as pd
import pypsa
from solve_network import prepare_network, solve_network
from solve_operations_network_other_year import (
    add_load_shedding,
    remove_unused_components,
    set_parameters_from_optimized,
)

logger = logging.getLogger(__name__)
pypsa.pf.logger.setLevel(logging.WARNING)

def prepare_myopic(n, config, store_soc, storage_unit_soc):
    n.stores.e_cyclic = False
    n.storage_units.cyclic_state_of_charge = False

    biomass_stores = n.stores.carrier.isin(["solid biomass", "biogas"])
    biomass_potential = n.stores.loc[biomass_stores, "e_initial"]

    # storage level contiguity across years
    n.stores.e_initial = store_soc
    n.storage_units.state_of_charge_initial = storage_unit_soc

    # replace co2 limit with co2 price
    n.remove("GlobalConstraint", "CO2Limit")
    n.stores.at["co2 atmosphere", "marginal_cost"] = -config["co2_price"]

    # handle co2 sequestration
    assert (
        sum(n.stores.carrier == "co2 stored") == 1
    ), "Myopic operation not implemented for spatially resolved CO2 sequestration."
    n.stores.at["co2 stored", "e_nom"] = config["co2_sequestration_limit"] * 1e6  # Mt/a -> t

    # reset co2 emissions
    n.stores.loc[n.stores.carrier == "co2 stored", "e_initial"] = 0.0
    n.stores.at["co2 atmosphere", "e_initial"] = 0.0

    # replenish fossil gas and oil with 1000 TWh each
    fossil_stores = n.stores.carrier.isin(["gas", "oil"])
    n.stores.loc[fossil_stores, "e_initial"] = 1e9
    n.stores.loc[fossil_stores, "e_nom"] = 10e9

    # replenish annual solid biomass and biogas potentials
    n.stores.loc[biomass_stores, "e_initial"] = biomass_potential

    # set storage bidding prices
    bidding_prices = config["bidding_prices"]
    for c in n.iterate_components({"Store", "Link", "StorageUnit"}):
        c.df.marginal_cost.update(c.df.carrier.map(bidding_prices).dropna())

    # deduct industry solid biomass
    assert (
        sum(n.stores.carrier == "solid biomass") == 1
    ), "Myopic operation not implemented for spatially resolved solid biomass."
    n.stores.at["EU solid biomass", "e_initial"] -= (
        n.loads.at["solid biomass for industry", "p_set"] * 8760
    )
    n.remove("Load", "solid biomass for industry")

    return n
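
The bidding-price override above relies on a pandas pattern worth spelling out: Series.map on a dict yields NaN for carriers without a configured price, dropna removes those, and Series.update then overwrites marginal costs only for the rows that received a price. A minimal standalone sketch with toy data (not from the network above):

import pandas as pd

marginal_cost = pd.Series([0.0, 5.0, 0.0], index=["a", "b", "c"])
carrier = pd.Series(["hydro", "gas", "biogas"], index=["a", "b", "c"])
bidding_prices = {"hydro": 70, "biogas": 100}  # no price configured for "gas"

# map() yields NaN for "gas", dropna() removes it, update() then
# overwrites only the rows that received a price
marginal_cost.update(carrier.map(bidding_prices).dropna())
print(marginal_cost.tolist())  # [70.0, 5.0, 100.0]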

def solve_network_myopic(n, config, opts="", **kwargs):
    rolling_horizon = config["operations"]["rolling_horizon"]

    # convert window and overlap from days to numbers of snapshots
    freq = int(pd.infer_freq(n.snapshots)[:-1])  # e.g. "3H" -> 3
    window = rolling_horizon["window"] * 24 // freq
    overlap = rolling_horizon["overlap"] * 24 // freq
    kept = window - overlap
    length = len(n.snapshots)

    assert (
        kept > 0
    ), f"Overlap ({overlap} snapshots) must be smaller than the window ({window} snapshots)."

    for i in range(length // kept):
        snapshots = n.snapshots[i * kept : (i + 1) * kept + overlap]
        logger.info(f"Optimising operations from {snapshots[0]} to {snapshots[-1]}")

        n = solve_network(n, config, opts=opts, snapshots=snapshots, **kwargs)

        last_kept = n.snapshots[(i + 1) * kept - 1]
        logger.info(f"Setting initial SOCs from {last_kept} for next iteration.\n")

        n.stores.e_initial = n.stores_t.e.loc[last_kept]
        n.storage_units.state_of_charge_initial = n.storage_units_t.state_of_charge.loc[
            last_kept
        ]

    # final segment until end of year
    snapshots = n.snapshots[(i + 1) * kept :]
    n = solve_network(n, config, opts=opts, snapshots=snapshots, **kwargs)

    return n

if __name__ == "__main__":
    if "snakemake" not in globals():
        from helper import mock_snakemake

        snakemake = mock_snakemake(
            "solve_operations_network_other_year_myopic",
            capacity_year=1952,
            simpl="",
            opts="",
            clusters=37,
            lv=2.0,
            sector_opts="Co2L0-25H-T-H-B-I-A",
            planning_horizons=2030,
            weather_year=2013,
        )

    logging.basicConfig(
        filename=snakemake.log.python, level=snakemake.config["logging_level"]
    )

    tmpdir = snakemake.config["solving"].get("tmpdir")
    if tmpdir is not None:
        from pathlib import Path

        Path(tmpdir).mkdir(parents=True, exist_ok=True)

    config = snakemake.config["operations"]

    n = pypsa.Network(snakemake.input.pre)
    n_post = pypsa.Network(snakemake.input.post)
    n = set_parameters_from_optimized(n, n_post)
    del n_post

    n_previous = pypsa.Network(snakemake.input.previous)
    store_soc = n_previous.stores_t.e.iloc[-1]
    storage_unit_soc = n_previous.storage_units_t.state_of_charge.iloc[-1]
    del n_previous

    n = remove_unused_components(n)
    n = add_load_shedding(n)
    n = prepare_myopic(n, config, store_soc, storage_unit_soc)

    opts = snakemake.wildcards.sector_opts.split("-")
    solve_opts = snakemake.config["solving"]["options"]
    solve_opts["skip_iterations"] = True

    n = prepare_network(n, solve_opts)
    n = solve_network_myopic(
        n,
        config=snakemake.config,
        opts=opts,
        solver_dir=tmpdir,
        solver_logfile=snakemake.log.solver,
    )
    n.export_to_netcdf(snakemake.output[0])