# SPDX-FileCopyrightText: 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: MIT

"""
Solves linear optimal power flow for a network iteratively while updating reactances.

Relevant Settings
-----------------

.. code:: yaml

    solving:
        tmpdir:
        options:
            formulation:
            clip_p_max_pu:
            load_shedding:
            noisy_costs:
            nhours:
            min_iterations:
            max_iterations:
            skip_iterations:
            track_iterations:
        solver:
            name:
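
A minimal illustrative example of this block (the values shown are placeholders
rather than recommended defaults) could look like:

.. code:: yaml

    solving:
        tmpdir: /tmp
        options:
            formulation: kirchhoff
            clip_p_max_pu: 0.01
            load_shedding: false
            noisy_costs: true
            min_iterations: 4
            max_iterations: 6
            skip_iterations: false
            track_iterations: false
        solver:
            name: gurobi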

.. seealso::
    Documentation of the configuration file ``config.yaml`` at
    :ref:`electricity_cf`, :ref:`solving_cf`, :ref:`plotting_cf`

Inputs
------

- ``networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: confer :ref:`prepare`

Outputs
-------

- ``results/networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: Solved PyPSA network including optimisation results

    .. image:: ../img/results.png
        :scale: 40 %

Description
-----------

Total annual system costs are minimised with PyPSA. The full formulation of the
linear optimal power flow (plus investment planning)
is provided in the
`documentation of PyPSA <https://pypsa.readthedocs.io/en/latest/optimal_power_flow.html#linear-optimal-power-flow>`_.
The optimisation is based on the ``pyomo=False`` setting in the :func:`network.lopf` and :func:`pypsa.linopf.ilopf` functions.
Additionally, some extra constraints specified in :mod:`prepare_network` are added.

Solving the network in multiple iterations is motivated by the mutual dependence of transmission line capacities and impedances.
As lines are expanded their electrical parameters change, which renders the optimisation bilinear even if the power flow
equations are linearized.
To retain the computational advantage of continuous linear programming, a sequential linear programming technique
is used, where in between iterations the line impedances are updated.
Details (and the errors introduced by this heuristic) are discussed in the paper

- Fabian Neumann and Tom Brown. `Heuristics for Transmission Expansion Planning in Low-Carbon Energy System Models <https://arxiv.org/abs/1907.10548>`_, *16th International Conference on the European Energy Market*, 2019. `arXiv:1907.10548 <https://arxiv.org/abs/1907.10548>`_.

.. warning::
    Capital costs of existing network components are not included in the objective function,
    since for the optimisation problem they are just a constant term (no influence on the optimal result).

    Therefore, these capital costs are not included in ``network.objective``!

    If you want to calculate the full total annual system costs, add these to the objective value.

.. tip::
    The rule :mod:`solve_all_networks` runs the rule :mod:`solve_network`
    for all ``scenario`` definitions in the configuration file.

"""

import logging
from _helpers import configure_logging

import numpy as np
import pandas as pd
import re

import pypsa
from pypsa.linopf import (get_var, define_constraints, linexpr, join_exprs,
                          network_lopf, ilopf)

from pathlib import Path
from vresutils.benchmark import memory_logger

logger = logging.getLogger(__name__)


def prepare_network(n, solve_opts):
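    """
    Apply the ``solving: options`` settings to the network before solving.

    Depending on the given ``solve_opts`` this sets availabilities and inflows
    below ``clip_p_max_pu`` to zero, adds load-shedding generators at all AC
    buses, adds small random noise to marginal and capital costs, and truncates
    the time horizon to the first ``nhours`` snapshots.
    """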
|
2018-02-01 11:42:56 +00:00
|
|
|
|
2017-12-18 19:31:27 +00:00
|
|
|
if 'clip_p_max_pu' in solve_opts:
|
|
|
|
for df in (n.generators_t.p_max_pu, n.storage_units_t.inflow):
|
|
|
|
df.where(df>solve_opts['clip_p_max_pu'], other=0., inplace=True)
|
|
|
|
|
|
|
|
if solve_opts.get('load_shedding'):
|
|
|
|
n.add("Carrier", "Load")
|
2021-08-18 08:38:51 +00:00
|
|
|
buses_i = n.buses.query("carrier == 'AC'").index
|
|
|
|
n.madd("Generator", buses_i, " load",
|
|
|
|
bus=buses_i,
|
2018-01-29 21:28:33 +00:00
|
|
|
carrier='load',
|
2018-10-22 21:20:51 +00:00
|
|
|
sign=1e-3, # Adjust sign to measure p and p_nom in kW instead of MW
|
|
|
|
marginal_cost=1e2, # Eur/kWh
|
2018-01-29 21:28:33 +00:00
|
|
|
# intersect between macroeconomic and surveybased
|
|
|
|
# willingness to pay
|
|
|
|
# http://journal.frontiersin.org/article/10.3389/fenrg.2015.00055/full
|
2018-10-22 21:20:51 +00:00
|
|
|
p_nom=1e9 # kW
|
2020-02-10 11:06:43 +00:00
|
|
|
)

    if solve_opts.get('noisy_costs'):
        for t in n.iterate_components(n.one_port_components):
            #if 'capital_cost' in t.df:
            #    t.df['capital_cost'] += 1e1 + 2.*(np.random.random(len(t.df)) - 0.5)
            if 'marginal_cost' in t.df:
                t.df['marginal_cost'] += (1e-2 + 2e-3 *
                                          (np.random.random(len(t.df)) - 0.5))

        for t in n.iterate_components(['Line', 'Link']):
            t.df['capital_cost'] += (1e-1 +
                2e-2 * (np.random.random(len(t.df)) - 0.5)) * t.df['length']

    if solve_opts.get('nhours'):
        nhours = solve_opts['nhours']
        n.set_snapshots(n.snapshots[:nhours])
        n.snapshot_weightings[:] = 8760. / nhours

    return n


def add_CCL_constraints(n, config):
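    """
    Add minimum and maximum installable capacities per carrier and country
    ("CCL" constraints), read from the ``.csv`` file given in
    ``config['electricity']['agg_p_nom_limits']``.
    """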
    agg_p_nom_limits = config['electricity'].get('agg_p_nom_limits')

    try:
        agg_p_nom_minmax = pd.read_csv(agg_p_nom_limits,
                                       index_col=list(range(2)))
    except IOError:
        logger.exception("Need to specify the path to a .csv file containing "
                         "aggregate capacity limits per country in "
                         "config['electricity']['agg_p_nom_limits'].")
    logger.info("Adding per carrier generation capacity constraints for "
                "individual countries")

    gen_country = n.generators.bus.map(n.buses.country)
    # cc means country and carrier
    p_nom_per_cc = (pd.DataFrame(
                    {'p_nom': linexpr((1, get_var(n, 'Generator', 'p_nom'))),
                     'country': gen_country, 'carrier': n.generators.carrier})
                    .dropna(subset=['p_nom'])
                    .groupby(['country', 'carrier']).p_nom
                    .apply(join_exprs))
    minimum = agg_p_nom_minmax['min'].dropna()
    if not minimum.empty:
        minconstraint = define_constraints(n, p_nom_per_cc[minimum.index],
                                           '>=', minimum, 'agg_p_nom', 'min')
    maximum = agg_p_nom_minmax['max'].dropna()
    if not maximum.empty:
        maxconstraint = define_constraints(n, p_nom_per_cc[maximum.index],
                                           '<=', maximum, 'agg_p_nom', 'max')


def add_EQ_constraints(n, o, scaling=1e-1):
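    """
    Require each bus (or, if the wildcard ends in ``c``, each country) to cover
    at least a share ``level`` of its annual demand from local generation, i.e.
    for every region r

        sum_t w_t * (generation_{r,t} - spillage_{r,t}) >= level * load_r - inflow_r

    where ``level`` is parsed from the ``EQ`` wildcard, e.g. ``EQ0.7`` for 70%
    per bus or ``EQ0.7c`` for 70% per country. Both sides are multiplied by
    ``scaling`` to improve the numerics.
    """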
    float_regex = r"[0-9]*\.?[0-9]+"
    level = float(re.findall(float_regex, o)[0])
    if o[-1] == 'c':
        ggrouper = n.generators.bus.map(n.buses.country)
        lgrouper = n.loads.bus.map(n.buses.country)
        sgrouper = n.storage_units.bus.map(n.buses.country)
    else:
        ggrouper = n.generators.bus
        lgrouper = n.loads.bus
        sgrouper = n.storage_units.bus
    load = n.snapshot_weightings.generators @ \
           n.loads_t.p_set.groupby(lgrouper, axis=1).sum()
    inflow = n.snapshot_weightings.stores @ \
             n.storage_units_t.inflow.groupby(sgrouper, axis=1).sum()
    inflow = inflow.reindex(load.index).fillna(0.)
    rhs = scaling * (level * load - inflow)
    lhs_gen = linexpr((n.snapshot_weightings.generators * scaling,
                       get_var(n, "Generator", "p").T)
              ).T.groupby(ggrouper, axis=1).apply(join_exprs)
    lhs_spill = linexpr((-n.snapshot_weightings.stores * scaling,
                         get_var(n, "StorageUnit", "spill").T)
                ).T.groupby(sgrouper, axis=1).apply(join_exprs)
    lhs_spill = lhs_spill.reindex(lhs_gen.index).fillna("")
    lhs = lhs_gen + lhs_spill
    define_constraints(n, lhs, ">=", rhs, "equity", "min")


def add_BAU_constraints(n, config):
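    """
    Enforce a minimum total installed capacity per carrier as given in
    ``config['electricity']['BAU_mincapacities']``.
    """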
    mincaps = pd.Series(config['electricity']['BAU_mincapacities'])
    lhs = (linexpr((1, get_var(n, 'Generator', 'p_nom')))
           .groupby(n.generators.carrier).apply(join_exprs))
    define_constraints(n, lhs, '>=', mincaps[lhs.index], 'Carrier', 'bau_mincaps')


def add_SAFE_constraints(n, config):
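    """
    Require the total capacity of conventional generators to cover the peak
    demand scaled by ``1 + SAFE_reservemargin``; only extendable conventional
    generators are expanded to close the gap left by existing capacity.
    """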
    peakdemand = (1. + config['electricity']['SAFE_reservemargin']) *\
                  n.loads_t.p_set.sum(axis=1).max()
    conv_techs = config['plotting']['conv_techs']
    exist_conv_caps = n.generators.query('~p_nom_extendable & carrier in @conv_techs')\
                       .p_nom.sum()
    ext_gens_i = n.generators.query('carrier in @conv_techs & p_nom_extendable').index
    lhs = linexpr((1, get_var(n, 'Generator', 'p_nom')[ext_gens_i])).sum()
    rhs = peakdemand - exist_conv_caps
    define_constraints(n, lhs, '>=', rhs, 'Safe', 'mintotalcap')


def add_minRenew_constraints(n, config, o):
    '''
    Add a constraint for a minimum share of renewable energy production.

    The renewable carriers are taken from the keys of
    ``config['electricity']['renewable_aim']``.

    To use this constraint add the wildcard ``RE{share}`` to the ``opts``
    wildcard, e.g. ``RE0.8`` for an 80% renewable share. If no share is given
    (plain ``RE``), the per-carrier capacity targets from ``renewable_aim`` are
    enforced instead.
    '''
    renewables = list(config["electricity"]["renewable_aim"].keys())
    if len(o) > 2:
        share = float(o[2:])
        renewables_b = n.generators.carrier.str.contains("|".join(renewables))
        renewables_i = n.generators[renewables_b].index
        conventionals_i = n.generators[~renewables_b].index
        weightings = n.snapshot_weightings.generators
        coeff_vres = pd.DataFrame({c: weightings for c in renewables_i})
        coeff_conv = pd.DataFrame({c: weightings for c in conventionals_i})
        vres = get_var(n, "Generator", "p")[renewables_i]
        conv = get_var(n, "Generator", "p")[conventionals_i]
        # share * (vres + conv) <= vres, rearranged to (1 - share) * vres - share * conv >= 0
        lhs = linexpr(((1 - share) * coeff_vres, vres)).sum().sum()
        lhs += linexpr((- share * coeff_conv, conv)).sum().sum()
        rhs = 0
        define_constraints(n, lhs, '>=', rhs, 'Carrier', 'min_generation_renewables')
    else:
        for tech in renewables:
            tech_i = n.generators.index[n.generators.carrier.str.match(tech)]
            lhs = linexpr((1, get_var(n, "Generator", "p_nom").loc[tech_i])).sum()
            rhs = config["electricity"]["renewable_aim"][tech] * 1000  # target given in GW, p_nom in MW
            define_constraints(n, lhs, '>=', rhs, 'Carrier', f'min_capacity_{tech}')


def add_base_load_constraint(n, config):
    '''
    Force the conventional carriers listed in ``config['electricity']['base_load']``
    to run at a minimum base load by setting their ``p_min_pu``.

    To use this constraint add the wildcard ``BL`` to the ``opts`` wildcard.
    '''
    carriers = config["electricity"]["base_load"].keys()
    for carrier in carriers:
        gens_i = n.generators.query("carrier == @carrier").index
        n.generators.loc[gens_i, "p_min_pu"] = config["electricity"]["base_load"][carrier]


def add_battery_constraints(n):
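    """
    Couple the charging and discharging power ratings of battery links: for
    every battery bus the charger ``p_nom`` must equal the discharger
    ``p_nom`` times the discharger efficiency.
    """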
    nodes = n.buses.index[n.buses.carrier == "battery"]
    if nodes.empty or ('Link', 'p_nom') not in n.variables.index:
        return
    link_p_nom = get_var(n, "Link", "p_nom")
    lhs = linexpr((1, link_p_nom[nodes + " charger"]),
                  (-n.links.loc[nodes + " discharger", "efficiency"].values,
                   link_p_nom[nodes + " discharger"].values))
    define_constraints(n, lhs, "=", 0, 'Link', 'charger_ratio')


def extra_functionality(n, snapshots):
    """
    Collects supplementary constraints which will be passed to ``pypsa.linopf.network_lopf``.

    If you want to enforce additional custom constraints, this is a good location to add them.
    The arguments ``opts`` and ``snakemake.config`` are expected to be attached to the network.
    """
    opts = n.opts
    config = n.config
    if 'BAU' in opts and n.generators.p_nom_extendable.any():
        add_BAU_constraints(n, config)
    if 'SAFE' in opts and n.generators.p_nom_extendable.any():
        add_SAFE_constraints(n, config)
    if 'CCL' in opts and n.generators.p_nom_extendable.any():
        add_CCL_constraints(n, config)
    if "BL" in opts:
        add_base_load_constraint(n, config)
    for o in opts:
        if "EQ" in o:
            add_EQ_constraints(n, o)
        if 'RE' in o:
            add_minRenew_constraints(n, config, o)
    add_battery_constraints(n)


def solve_network(n, config, opts='', **kwargs):
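    """
    Solve the network with the solver and options given in ``config['solving']``.

    The config and the activated ``opts`` wildcards are attached to the network
    so that :func:`extra_functionality` can access them. If ``skip_iterations``
    is set, a single :func:`pypsa.linopf.network_lopf` run is performed;
    otherwise :func:`pypsa.linopf.ilopf` iterates between solving and updating
    the line impedances.
    """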
    solver_options = config['solving']['solver'].copy()
    solver_name = solver_options.pop('name')
    cf_solving = config['solving']['options']
    track_iterations = cf_solving.get('track_iterations', False)
    min_iterations = cf_solving.get('min_iterations', 4)
    max_iterations = cf_solving.get('max_iterations', 6)

    # add to network for extra_functionality
    n.config = config
    n.opts = opts

    if cf_solving.get('skip_iterations', False):
        network_lopf(n, solver_name=solver_name, solver_options=solver_options,
                     extra_functionality=extra_functionality, **kwargs)
    else:
        ilopf(n, solver_name=solver_name, solver_options=solver_options,
              track_iterations=track_iterations,
              min_iterations=min_iterations,
              max_iterations=max_iterations,
              extra_functionality=extra_functionality, **kwargs)
    return n


if __name__ == "__main__":
    if 'snakemake' not in globals():
        from _helpers import mock_snakemake
        snakemake = mock_snakemake('solve_network', network='elec', simpl='',
                                   clusters='40', ll='v1.0', opts='Co2L-4H-RE')
    configure_logging(snakemake)

    tmpdir = snakemake.config['solving'].get('tmpdir')
    if tmpdir is not None:
        Path(tmpdir).mkdir(parents=True, exist_ok=True)
    opts = snakemake.wildcards.opts.split('-')
    solve_opts = snakemake.config['solving']['options']

    fn = getattr(snakemake.log, 'memory', None)
    with memory_logger(filename=fn, interval=30.) as mem:
        n = pypsa.Network(snakemake.input[0])
        n = prepare_network(n, solve_opts)
        n = solve_network(n, snakemake.config, opts, solver_dir=tmpdir,
                          solver_logfile=snakemake.log.solver)
        n.export_to_netcdf(snakemake.output[0])

    logger.info("Maximum memory usage: {}".format(mem.mem_usage))