add rules to solve operations with perfect and myopic foresight

Fabian Neumann 2022-08-01 11:55:31 +02:00
parent d4fa922a2a
commit 64e1ef895d
4 changed files with 314 additions and 5 deletions

Snakefile

@@ -13,6 +13,7 @@ configfile: "config.yaml"
 
 wildcard_constraints:
     weather_year="[0-9]{4}|",
+    capacity_year="[0-9]{4}|",
     lv="[a-z0-9\.]+",
     simpl="[a-zA-Z0-9]*",
     clusters="[0-9]+m?",
@@ -680,3 +681,39 @@ if config["foresight"] == "myopic":
         resources: mem_mb=config['solving']['mem']
         benchmark: RDIR + "/benchmarks/solve_network/elec{weather_year}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}"
         script: "scripts/solve_network.py"
+
+
+rule solve_operations_network:
+    input:
+        pre=RDIR + "/prenetworks/elec{weather_year}_s_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc",
+        post=RDIR + "/postnetworks/elec{capacity_year}_s_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc",
+    output: "elec{capacity_year}_s_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_{weather_year}.nc"
+    shadow: "shallow"
+    log:
+        solver=RDIR + "/logs/elec{capacity_year}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_{weather_year}_solver.log",
+        python=RDIR + "/logs/elec{capacity_year}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_{weather_year}_python.log",
+    threads: 4
+    resources: mem_mb=10000
+    benchmark: RDIR + "/benchmarks/solve_operations_network/elec{capacity_year}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_{weather_year}"
+    script: "scripts/solve_operations_network.py"
+
+
+def solved_previous_year(wildcards):
+    previous_year = int(wildcards.weather_year) - 1
+    return RDIR + "/postnetworks/elec" + str(previous_year) + "_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc"
+
+
+rule solve_operations_network_myopic:
+    input:
+        pre=RDIR + "/prenetworks/elec{weather_year}_s_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc",
+        post=RDIR + "/postnetworks/elec{capacity_year}_s_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc",
+        previous=solved_previous_year
+    output: "elec{capacity_year}_s_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_{weather_year}_myopic.nc"
+    shadow: "shallow"
+    log:
+        solver=RDIR + "/logs/elec{capacity_year}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_{weather_year}_solver.log",
+        python=RDIR + "/logs/elec{capacity_year}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_{weather_year}_python.log",
+    threads: 4
+    resources: mem_mb=10000
+    benchmark: RDIR + "/benchmarks/solve_operations_network_myopic/elec{capacity_year}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_{weather_year}"
+    script: "scripts/solve_operations_network_myopic.py"

scripts/solve_network.py

@@ -71,7 +71,7 @@ def prepare_network(n, solve_opts=None):
             df.where(df>solve_opts['clip_p_max_pu'], other=0., inplace=True)
 
     if solve_opts.get('load_shedding'):
-        n.add("Carrier", "Load")
+        n.add("Carrier", "load")
         n.madd("Generator", n.buses.index, " load",
                bus=n.buses.index,
                carrier='load',
@@ -246,7 +246,7 @@ def extra_functionality(n, snapshots):
         add_co2_sequestration_limit(n, snapshots)
 
 
-def solve_network(n, config, opts='', **kwargs):
+def solve_network(n, config, opts='', snapshots=None, **kwargs):
     solver_options = config['solving']['solver'].copy()
     solver_name = solver_options.pop('name')
     cf_solving = config['solving']['options']
@@ -259,12 +259,15 @@ def solve_network(n, config, opts='', **kwargs):
     n.config = config
     n.opts = opts
 
+    if snapshots is None:
+        snapshots = n.snapshots
+
     if cf_solving.get('skip_iterations', False):
-        network_lopf(n, solver_name=solver_name, solver_options=solver_options,
+        network_lopf(n, snapshots, solver_name=solver_name, solver_options=solver_options,
                      extra_functionality=extra_functionality,
                      keep_shadowprices=keep_shadowprices, **kwargs)
     else:
-        ilopf(n, solver_name=solver_name, solver_options=solver_options,
+        ilopf(n, snapshots, solver_name=solver_name, solver_options=solver_options,
               track_iterations=track_iterations,
               min_iterations=min_iterations,
               max_iterations=max_iterations,
@@ -295,7 +298,7 @@ if __name__ == "__main__":
     if tmpdir is not None:
         from pathlib import Path
         Path(tmpdir).mkdir(parents=True, exist_ok=True)
 
-    opts = snakemake.wildcards.opts.split('-')
+    opts = snakemake.wildcards.sector_opts.split('-')
     solve_opts = snakemake.config['solving']['options']
     fn = getattr(snakemake.log, 'memory', None)
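
The new optional `snapshots` argument defaults to all snapshots, preserving the previous behaviour, while letting callers optimise only a subset; the rolling-horizon solver added below relies on this. A hypothetical call from within the script's __main__ block (hourly resolution assumed):

n = solve_network(n, config=snakemake.config, opts=opts,
                  snapshots=n.snapshots[:168])  # dispatch only the first week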

scripts/solve_operations_network.py

@@ -0,0 +1,126 @@
"""Solve operations network."""

import pypsa
import numpy as np

from solve_network import solve_network, prepare_network
from helper import override_component_attrs

import logging
logger = logging.getLogger(__name__)

pypsa.pf.logger.setLevel(logging.WARNING)


def set_parameters_from_optimized(n, n_optim):
    # fix line capacities to optimised values; for typed lines, recompute
    # s_nom from the optimised number of parallel circuits
    lines_typed_i = n.lines.index[n.lines.type != '']
    n.lines.loc[lines_typed_i, 'num_parallel'] = \
        n_optim.lines['num_parallel'].reindex(lines_typed_i, fill_value=0.)
    n.lines.loc[lines_typed_i, 's_nom'] = (
        np.sqrt(3) * n.lines['type'].map(n.line_types.i_nom) *
        n.lines.bus0.map(n.buses.v_nom) * n.lines.num_parallel)

    lines_untyped_i = n.lines.index[n.lines.type == '']
    for attr in ('s_nom', 'r', 'x'):
        n.lines.loc[lines_untyped_i, attr] = \
            n_optim.lines[attr].reindex(lines_untyped_i, fill_value=0.)
    n.lines['s_nom_extendable'] = False

    # fix link, generator, storage unit and store capacities to optimised values
    links_dc_i = n.links.index[n.links.p_nom_extendable]
    n.links.loc[links_dc_i, 'p_nom'] = \
        n_optim.links['p_nom_opt'].reindex(links_dc_i, fill_value=0.)
    n.links.loc[links_dc_i, 'p_nom_extendable'] = False

    gen_extend_i = n.generators.index[n.generators.p_nom_extendable]
    n.generators.loc[gen_extend_i, 'p_nom'] = \
        n_optim.generators['p_nom_opt'].reindex(gen_extend_i, fill_value=0.)
    n.generators.loc[gen_extend_i, 'p_nom_extendable'] = False

    stor_units_extend_i = n.storage_units.index[n.storage_units.p_nom_extendable]
    n.storage_units.loc[stor_units_extend_i, 'p_nom'] = \
        n_optim.storage_units['p_nom_opt'].reindex(stor_units_extend_i, fill_value=0.)
    n.storage_units.loc[stor_units_extend_i, 'p_nom_extendable'] = False

    stor_extend_i = n.stores.index[n.stores.e_nom_extendable]
    n.stores.loc[stor_extend_i, 'e_nom'] = \
        n_optim.stores['e_nom_opt'].reindex(stor_extend_i, fill_value=0.)
    n.stores.loc[stor_extend_i, 'e_nom_extendable'] = False

    return n


def remove_unused_components(n, threshold=50):
    # threshold in MW (p_nom) or MWh (e_nom)
    logger.info("Remove assets that are barely used to speed things up.")

    for c in n.iterate_components({"Store", "Link", "Generator"}):
        attr = "e_nom" if c.name == "Store" else "p_nom"
        to_remove = c.df.loc[c.df[attr] < threshold].index
        logger.info(f"Removing barely used {c.name}s:\n{to_remove}")
        n.mremove(c.name, to_remove)

    return n


def add_load_shedding(n, voll=1e4):
    # voll: value of lost load in EUR/MWh, applied as marginal cost
    logger.info("Add load shedding to all buses.")

    if "load" in n.generators.carrier.unique():
        to_remove = n.generators.query("carrier == 'load'").index
        logger.info(f"Removing pre-existing load shedding:\n{to_remove}")
        n.mremove("Generator", to_remove)

    n.madd("Generator", n.buses.index,
           suffix=" load",
           bus=n.buses.index,
           carrier='load',
           marginal_cost=voll,
           p_nom=1e6
    )

    return n
if __name__ == "__main__":
    if 'snakemake' not in globals():
        from helper import mock_snakemake
        snakemake = mock_snakemake(
            'solve_operations_network',
            capacity_year=1952,
            simpl='',
            opts='',
            clusters=37,
            lv=2.0,
            sector_opts='Co2L0-25H-T-H-B-I-A',
            planning_horizons=2030,
            weather_year=2013
        )

    logging.basicConfig(filename=snakemake.log.python,
                        level=snakemake.config['logging_level'])

    tmpdir = snakemake.config['solving'].get('tmpdir')
    if tmpdir is not None:
        from pathlib import Path
        Path(tmpdir).mkdir(parents=True, exist_ok=True)

    overrides = override_component_attrs(snakemake.input.overrides)
    n = pypsa.Network(snakemake.input.pre, override_component_attrs=overrides)

    n_post = pypsa.Network(snakemake.input.post, override_component_attrs=overrides)
    n = set_parameters_from_optimized(n, n_post)
    del n_post

    n = remove_unused_components(n)
    n = add_load_shedding(n)

    opts = snakemake.wildcards.sector_opts.split('-')
    solve_opts = snakemake.config['solving']['options']
    solve_opts['skip_iterations'] = True

    n = prepare_network(n, solve_opts)

    n = solve_network(n, config=snakemake.config, opts=opts,
                      solver_dir=tmpdir,
                      solver_logfile=snakemake.log.solver)

    n.export_to_netcdf(snakemake.output[0])
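
Aside on `set_parameters_from_optimized` above: for typed lines, `s_nom` is not copied from the optimised network but recomputed as sqrt(3) * i_nom * v_nom * num_parallel. A worked example with illustrative numbers (not taken from the dataset):

import numpy as np

i_nom = 2.01          # kA, thermal current rating of the line type (illustrative)
v_nom = 380           # kV, nominal voltage at bus0 (illustrative)
num_parallel = 2.5    # optimised, possibly fractional, number of parallel circuits

s_nom = np.sqrt(3) * i_nom * v_nom * num_parallel  # kA * kV -> MVA
print(round(s_nom))   # ~3307 MVA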

scripts/solve_operations_network_myopic.py

@@ -0,0 +1,143 @@
"""Solve myopic operations network."""

import pypsa
import pandas as pd

from solve_network import solve_network, prepare_network
from solve_operations_network import set_parameters_from_optimized, remove_unused_components, add_load_shedding
from helper import override_component_attrs

import logging
logger = logging.getLogger(__name__)

pypsa.pf.logger.setLevel(logging.WARNING)


def prepare_myopic(n, config, store_soc, storage_unit_soc):
    n.stores.e_cyclic = False
    n.storage_units.cyclic_state_of_charge = False

    # remember annual biomass potentials before overwriting e_initial
    biomass_stores = n.stores.carrier.isin(["solid biomass", "biogas"])
    biomass_potential = n.stores.loc[biomass_stores, "e_initial"]

    # set storage levels from the end of the previous weather year
    n.stores.e_initial = store_soc
    n.storage_units.state_of_charge_initial = storage_unit_soc

    n.remove("GlobalConstraint", "CO2Limit")
    n.stores.at["co2 atmosphere", "marginal_cost"] = -config["co2_price"]

    # handle CO2 sequestration
    assert sum(n.stores.carrier == "co2 stored") == 1, \
        "Myopic operation not implemented for spatially resolved CO2 sequestration."
    n.stores.at["co2 stored", 'e_nom'] = config['co2_sequestration_limit'] * 1e6  # t/a

    # reset CO2 emissions
    n.stores.loc[n.stores.carrier == 'co2 stored', "e_initial"] = 0.
    n.stores.at["co2 atmosphere", "e_initial"] = 0.

    # replenish fossil gas and oil with 1000 TWh each
    fossil_stores = n.stores.carrier.isin(["gas", "oil"])
    n.stores.loc[fossil_stores, 'e_initial'] = 1e9
    n.stores.loc[fossil_stores, 'e_nom'] = 10e9

    # replenish annual solid biomass and biogas potentials
    n.stores.loc[biomass_stores, "e_initial"] = biomass_potential

    # set storage bidding prices
    # TODO bidding prices for Fischer-Tropsch, methanation for H2?
    bidding_prices = config["bidding_prices"]
    for c in n.iterate_components({"Store", "Link", "StorageUnit"}):
        c.df.marginal_cost.update(c.df.carrier.map(bidding_prices).dropna())

    # deduct solid biomass for industry from the annual potential
    assert sum(n.stores.carrier == "solid biomass") == 1, \
        "Myopic operation not implemented for spatially resolved solid biomass."
    n.stores.at["EU solid biomass", "e_initial"] -= n.loads.at["solid biomass for industry", "p_set"] * 8760
    n.remove("Load", "solid biomass for industry")

    return n


def solve_network_myopic(n, config, opts='', **kwargs):
    rolling_horizon = config["operations"]["rolling_horizon"]

    # convert window and overlap from days to numbers of snapshots,
    # e.g. an inferred frequency of '3H' gives freq = 3
    freq = int(pd.infer_freq(n.snapshots)[:-1])
    window = rolling_horizon["window"] * 24 // freq
    overlap = rolling_horizon["overlap"] * 24 // freq
    kept = window - overlap
    length = len(n.snapshots)

    assert kept > 0, \
        f"Overlap ({overlap} snapshots) must be smaller than the window ({window} snapshots)."

    for i in range(length // kept):
        snapshots = n.snapshots[i * kept:(i + 1) * kept + overlap]
        logger.info(f"Optimising operations from {snapshots[0]} to {snapshots[-1]}")

        n = solve_network(n, config, opts=opts, snapshots=snapshots, **kwargs)

        last_kept = n.snapshots[(i + 1) * kept - 1]
        logger.info(f"Setting initial SOCs from {last_kept} for next iteration.\n")

        n.stores.e_initial = n.stores_t.e.loc[last_kept]
        n.storage_units.state_of_charge_initial = n.storage_units_t.state_of_charge.loc[last_kept]

    return n
if __name__ == "__main__":
    if 'snakemake' not in globals():
        from helper import mock_snakemake
        snakemake = mock_snakemake(
            'solve_operations_network_myopic',
            capacity_year=1952,
            simpl='',
            opts='',
            clusters=37,
            lv=2.0,
            sector_opts='Co2L0-25H-T-H-B-I-A',
            planning_horizons=2030,
            weather_year=2013
        )

    logging.basicConfig(filename=snakemake.log.python,
                        level=snakemake.config['logging_level'])

    tmpdir = snakemake.config['solving'].get('tmpdir')
    if tmpdir is not None:
        from pathlib import Path
        Path(tmpdir).mkdir(parents=True, exist_ok=True)

    config = snakemake.config["operations"]

    overrides = override_component_attrs(snakemake.input.overrides)
    n = pypsa.Network(snakemake.input.pre, override_component_attrs=overrides)

    n_post = pypsa.Network(snakemake.input.post, override_component_attrs=overrides)
    n = set_parameters_from_optimized(n, n_post)
    del n_post

    n_previous = pypsa.Network(snakemake.input.previous, override_component_attrs=overrides)
    store_soc = n_previous.stores_t.e.iloc[-1]
    storage_unit_soc = n_previous.storage_units_t.state_of_charge.iloc[-1]
    del n_previous

    n = remove_unused_components(n)
    n = add_load_shedding(n)
    n = prepare_myopic(n, config, store_soc, storage_unit_soc)

    opts = snakemake.wildcards.sector_opts.split('-')
    solve_opts = snakemake.config['solving']['options']
    solve_opts['skip_iterations'] = True

    n = prepare_network(n, solve_opts)

    n = solve_network_myopic(
        n,
        config=snakemake.config,
        opts=opts,
        solver_dir=tmpdir,
        solver_logfile=snakemake.log.solver
    )

    n.export_to_netcdf(snakemake.output[0])
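
To make the rolling-horizon arithmetic in `solve_network_myopic` concrete, a sketch with assumed settings (3-hourly snapshots, 5-day window, 1-day overlap; illustrative values, not taken from config.yaml):

freq = 3                   # snapshot resolution in hours, e.g. inferred from '3H'
window = 5 * 24 // freq    # 40 snapshots optimised per iteration
overlap = 1 * 24 // freq   # 8 snapshots re-optimised at the start of the next window
kept = window - overlap    # 32 snapshots whose dispatch is final
length = 8760 // freq      # 2920 snapshots in a non-leap year

print(length // kept)      # 91 iterations; the last window ends exactly at snapshot 2920
# After each iteration, storage levels at the last kept snapshot seed
# e_initial / state_of_charge_initial for the next window.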