pypsa-eur/scripts/prepare_network.py
Chiara Anselmetti ed4fdc9150
Keep links to store components when using ATK wildcard (#214)
* Update prepare_network.py

The new ATK wildcard removes all lines and links without further distinction; however, since storage options are now modelled as Store components, the links to and from these stores for charging and discharging are eliminated as well. The storage options therefore drop out of the optimisation.
Especially when only renewables are allowed as generation sources, the optimisation may become infeasible at high temporal resolution (capacity factors of 0 in certain hours leave no remaining options to serve the load).
This issue does not arise with the ATKc wildcard, since bus0 and bus1 of the (dis)charge links share the same country code.

The proposed change is a quick fix in the enforce_autarky function that removes only DC links.
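
In essence, the selection of links to remove in the full-autarky branch changes from all links to DC links only; a sketch of the relevant lines (the "before" line is inferred from the description above):

    # before: every link was removed, including Store (dis)charge links
    links_rm = n.links.index
    # after: only DC transmission links are removed
    links_rm = n.links.loc[n.links.carrier == "DC"].index
    n.mremove("Link", links_rm)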

* Update scripts/prepare_network.py

Co-authored-by: Fabian Neumann <fabian.neumann@outlook.de>

* Update prepare_network.py

Co-authored-by: Fabian Neumann <fabian.neumann@outlook.de>
2021-01-11 10:27:27 +01:00


# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# coding: utf-8
"""
Prepare PyPSA network for solving according to :ref:`opts` and :ref:`ll`, such as
- adding an annual **limit** of carbon-dioxide emissions,
- adding an exogenous **price** per tonne emissions of carbon-dioxide (or other kinds),
- setting an **N-1 security margin** factor for transmission line capacities,
- specifying an expansion limit on the **cost** of transmission expansion,
- specifying an expansion limit on the **volume** of transmission expansion, and
- reducing the **temporal** resolution by averaging over multiple hours
or segmenting time series into chunks of varying lengths using ``tsam``.
Relevant Settings
-----------------
.. code:: yaml
costs:
emission_prices:
USD2013_to_EUR2013:
discountrate:
marginal_cost:
capital_cost:
electricity:
co2limit:
max_hours:
.. seealso::
Documentation of the configuration file ``config.yaml`` at
:ref:`costs_cf`, :ref:`electricity_cf`
Inputs
------
- ``data/costs.csv``: The database of cost assumptions for all included technologies for specific years from various sources; e.g. discount rate, lifetime, investment (CAPEX), fixed operation and maintenance (FOM), variable operation and maintenance (VOM), fuel costs, efficiency, carbon-dioxide intensity.
- ``networks/elec_s{simpl}_{clusters}.nc``: confer :ref:`cluster`
Outputs
-------
- ``networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: Complete PyPSA network that will be handed to the ``solve_network`` rule.
Description
-----------
.. tip::
The rule :mod:`prepare_all_networks` runs
for all ``scenario`` s in the configuration file
the rule :mod:`prepare_network`.
"""
import logging
from _helpers import configure_logging
import re
import pypsa
import numpy as np
import pandas as pd
from six import iteritems
from add_electricity import load_costs, update_transmission_costs
idx = pd.IndexSlice
logger = logging.getLogger(__name__)
def add_co2limit(n, Nyears=1., factor=None):
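    # Scale the annual CO2 budget by Nyears so that model horizons shorter or
    # longer than one year keep a consistent emissions cap.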
if factor is not None:
annual_emissions = factor*snakemake.config['electricity']['co2base']
else:
annual_emissions = snakemake.config['electricity']['co2limit']
n.add("GlobalConstraint", "CO2Limit",
carrier_attribute="co2_emissions", sense="<=",
constant=annual_emissions * Nyears)
def add_emission_prices(n, emission_prices=None, exclude_co2=False):
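    # Translate emission prices (per tonne) into a marginal cost adder per MWh;
    # dividing by efficiency converts from fuel input to electric output terms.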
if emission_prices is None:
emission_prices = snakemake.config['costs']['emission_prices']
if exclude_co2: emission_prices.pop('co2')
ep = (pd.Series(emission_prices).rename(lambda x: x+'_emissions') *
n.carriers.filter(like='_emissions')).sum(axis=1)
gen_ep = n.generators.carrier.map(ep) / n.generators.efficiency
n.generators['marginal_cost'] += gen_ep
su_ep = n.storage_units.carrier.map(ep) / n.storage_units.efficiency_dispatch
n.storage_units['marginal_cost'] += su_ep
def set_line_s_max_pu(n):
s_max_pu = snakemake.config['lines']['s_max_pu']
n.lines['s_max_pu'] = s_max_pu
logger.info(f"N-1 security margin of lines set to {s_max_pu}")
def set_transmission_limit(n, ll_type, factor, Nyears=1):
links_dc_b = n.links.carrier == 'DC' if not n.links.empty else pd.Series()
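    # For lines built from standard line types, derive the thermal rating from
    # three-phase apparent power: S = sqrt(3) * I_nom * v_nom * num_parallel.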
_lines_s_nom = (np.sqrt(3) * n.lines.type.map(n.line_types.i_nom) *
n.lines.num_parallel * n.lines.bus0.map(n.buses.v_nom))
lines_s_nom = n.lines.s_nom.where(n.lines.type == '', _lines_s_nom)
col = 'capital_cost' if ll_type == 'c' else 'length'
ref = (lines_s_nom @ n.lines[col] +
n.links.loc[links_dc_b, "p_nom"] @ n.links.loc[links_dc_b, col])
costs = load_costs(Nyears, snakemake.input.tech_costs,
snakemake.config['costs'],
snakemake.config['electricity'])
update_transmission_costs(n, costs, simple_hvdc_costs=False)
if factor == 'opt' or float(factor) > 1.0:
n.lines['s_nom_min'] = lines_s_nom
n.lines['s_nom_extendable'] = True
n.links.loc[links_dc_b, 'p_nom_min'] = n.links.loc[links_dc_b, 'p_nom']
n.links.loc[links_dc_b, 'p_nom_extendable'] = True
if factor != 'opt':
con_type = 'expansion_cost' if ll_type == 'c' else 'volume_expansion'
rhs = float(factor) * ref
n.add('GlobalConstraint', f'l{ll_type}_limit',
type=f'transmission_{con_type}_limit',
sense='<=', constant=rhs, carrier_attribute='AC, DC')
return n
def average_every_nhours(n, offset):
logger.info(f"Resampling the network to {offset}")
m = n.copy(with_time=False)
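    # Sum (rather than average) the snapshot weightings so the total number of
    # represented hours is preserved after resampling.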
snapshot_weightings = n.snapshot_weightings.resample(offset).sum()
m.set_snapshots(snapshot_weightings.index)
m.snapshot_weightings = snapshot_weightings
for c in n.iterate_components():
pnl = getattr(m, c.list_name+"_t")
for k, df in iteritems(c.pnl):
if not df.empty:
pnl[k] = df.resample(offset).mean()
return m
def apply_time_segmentation(n, segments):
logger.info(f"Aggregating time series to {segments} segments.")
    try:
        import tsam.timeseriesaggregation as tsam
    except ImportError:
        raise ModuleNotFoundError("Optional dependency 'tsam' not found. "
                                  "Install via 'pip install tsam'.")
p_max_pu_norm = n.generators_t.p_max_pu.max()
p_max_pu = n.generators_t.p_max_pu / p_max_pu_norm
load_norm = n.loads_t.p_set.max()
load = n.loads_t.p_set / load_norm
inflow_norm = n.storage_units_t.inflow.max()
inflow = n.storage_units_t.inflow / inflow_norm
raw = pd.concat([p_max_pu, load, inflow], axis=1, sort=False)
solver_name = snakemake.config["solving"]["solver"]["name"]
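    # Treat the whole horizon as a single typical period and split it into
    # `segments` chunks of varying length.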
agg = tsam.TimeSeriesAggregation(raw, hoursPerPeriod=len(raw),
noTypicalPeriods=1, noSegments=int(segments),
segmentation=True, solver=solver_name)
segmented = agg.createTypicalPeriods()
weightings = segmented.index.get_level_values("Segment Duration")
offsets = np.insert(np.cumsum(weightings[:-1]), 0, 0)
snapshots = [n.snapshots[0] + pd.Timedelta(f"{offset}h") for offset in offsets]
n.set_snapshots(pd.DatetimeIndex(snapshots, name='name'))
n.snapshot_weightings = pd.Series(weightings, index=snapshots, name="weightings", dtype="float64")
segmented.index = snapshots
n.generators_t.p_max_pu = segmented[n.generators_t.p_max_pu.columns] * p_max_pu_norm
n.loads_t.p_set = segmented[n.loads_t.p_set.columns] * load_norm
n.storage_units_t.inflow = segmented[n.storage_units_t.inflow.columns] * inflow_norm
return n
def enforce_autarky(n, only_crossborder=False):
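    # Remove cross-border lines and links only, or, for full autarky, all AC
    # lines and all DC links; links with other carriers (e.g. Store charging
    # and discharging) are kept in the network.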
if only_crossborder:
lines_rm = n.lines.loc[
n.lines.bus0.map(n.buses.country) !=
n.lines.bus1.map(n.buses.country)
].index
links_rm = n.links.loc[
n.links.bus0.map(n.buses.country) !=
n.links.bus1.map(n.buses.country)
].index
else:
lines_rm = n.lines.index
links_rm = n.links.loc[n.links.carrier=="DC"].index
n.mremove("Line", lines_rm)
n.mremove("Link", links_rm)
def set_line_nom_max(n):
    s_nom_max_set = snakemake.config["lines"].get("s_nom_max", np.inf)
p_nom_max_set = snakemake.config["links"].get("p_nom_max", np.inf)
n.lines.s_nom_max.clip(upper=s_nom_max_set, inplace=True)
n.links.p_nom_max.clip(upper=p_nom_max_set, inplace=True)
if __name__ == "__main__":
if 'snakemake' not in globals():
from _helpers import mock_snakemake
snakemake = mock_snakemake('prepare_network', network='elec', simpl='',
clusters='40', ll='v0.3', opts='Co2L-24H')
configure_logging(snakemake)
opts = snakemake.wildcards.opts.split('-')
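    # The {opts} wildcard is a dash-separated list of options, e.g. "Co2L-24H".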
n = pypsa.Network(snakemake.input[0])
Nyears = n.snapshot_weightings.sum() / 8760.
set_line_s_max_pu(n)
for o in opts:
m = re.match(r'^\d+h$', o, re.IGNORECASE)
if m is not None:
n = average_every_nhours(n, m.group(0))
break
for o in opts:
m = re.match(r'^\d+seg$', o, re.IGNORECASE)
if m is not None:
n = apply_time_segmentation(n, m.group(0)[:-3])
break
for o in opts:
if "Co2L" in o:
            m = re.findall(r"[0-9]*\.?[0-9]+$", o)
if len(m) > 0:
add_co2limit(n, Nyears, float(m[0]))
else:
add_co2limit(n, Nyears)
break
for o in opts:
oo = o.split("+")
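        # Options of the form "<carrier>+p<factor>" scale the carrier's
        # p_nom_max; "<carrier>+c<factor>" scales its capital cost.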
suptechs = map(lambda c: c.split("-", 2)[0], n.carriers.index)
if oo[0].startswith(tuple(suptechs)):
carrier = oo[0]
# handles only p_nom_max as stores and lines have no potentials
attr_lookup = {"p": "p_nom_max", "c": "capital_cost"}
attr = attr_lookup[oo[1][0]]
factor = float(oo[1][1:])
if carrier == "AC": # lines do not have carrier
n.lines[attr] *= factor
else:
comps = {"Generator", "Link", "StorageUnit", "Store"}
for c in n.iterate_components(comps):
sel = c.df.carrier.str.contains(carrier)
c.df.loc[sel,attr] *= factor
if 'Ep' in opts:
add_emission_prices(n)
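    # The {ll} wildcard encodes the transmission limit: the first character
    # selects the limit type ('c' for cost, 'v' for volume), the remainder is
    # a float factor or 'opt'.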
ll_type, factor = snakemake.wildcards.ll[0], snakemake.wildcards.ll[1:]
set_transmission_limit(n, ll_type, factor, Nyears)
set_line_nom_max(n)
if "ATK" in opts:
enforce_autarky(n)
elif "ATKc" in opts:
enforce_autarky(n, only_crossborder=True)
n.export_to_netcdf(snakemake.output[0])