Merge pull request #625 from PyPSA/linopy-eur

add Linopy to PyPSA-Eur
Fabian Hofmann 2023-03-10 16:49:15 +01:00 committed by GitHub
commit ecb6dfbab2
15 changed files with 608 additions and 686 deletions

View File

@ -629,15 +629,7 @@ solving:
track_iterations: false
min_iterations: 4
max_iterations: 6
keep_shadowprices:
- Bus
- Line
- Link
- Transformer
- GlobalConstraint
- Generator
- Store
- StorageUnit
seed: 123
solver:
name: gurobi

View File

@ -5,9 +5,9 @@
rule solve_network:
input:
RESOURCES + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
network=RESOURCES + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
output:
RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
log:
solver=normpath(
LOGS + "solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_solver.log"
@ -31,10 +31,9 @@ rule solve_network:
rule solve_operations_network:
input:
unprepared=RESOURCES + "networks/elec_s{simpl}_{clusters}_ec.nc",
optimized=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
output:
RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op.nc",
network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op.nc",
log:
solver=normpath(
LOGS
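With inputs and outputs now named, the solver scripts can address files by key instead of by position. A minimal sketch of the consuming side, assuming the snakemake object that Snakemake injects into script scope:

import pypsa

n = pypsa.Network(snakemake.input.network)    # instead of snakemake.input[0]
# ... prepare and solve ...
n.export_to_netcdf(snakemake.output.network)  # instead of snakemake.output[0]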

View File

@ -103,4 +103,4 @@ rule solve_sector_network_myopic:
conda:
"../envs/environment.yaml"
script:
"../scripts/solve_sector_network.py"
"../scripts/solve_network.py"

View File

@ -35,4 +35,4 @@ rule solve_sector_network:
conda:
"../envs/environment.yaml"
script:
"../scripts/solve_sector_network.py"
"../scripts/solve_network.py"

View File

@ -339,11 +339,12 @@ def mock_snakemake(rulename, configfiles=[], **wildcards):
kwargs = (
dict(rerun_triggers=[]) if parse(sm.__version__) > Version("7.7.0") else {}
)
workflow = sm.Workflow(snakefile, **kwargs)
workflow.include(snakefile)
if isinstance(configfiles, str):
configfiles = [configfiles]
workflow = sm.Workflow(snakefile, overwrite_configfiles=configfiles, **kwargs)
workflow.include(snakefile)
if configfiles:
for f in configfiles:
if not os.path.exists(f):
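The reordering matters because the config files must reach the Workflow constructor before include() parses the snakefile; otherwise rules are evaluated against the default config only. A condensed sketch of the corrected sequence, using the names from the diff (snakefile, configfiles and kwargs are assumed to be set up as above):

import snakemake as sm

if isinstance(configfiles, str):
    configfiles = [configfiles]
# pass config files at construction time, then parse the snakefile
workflow = sm.Workflow(snakefile, overwrite_configfiles=configfiles, **kwargs)
workflow.include(snakefile)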

View File

@ -83,7 +83,7 @@ def build_transport_demand(traffic_fn, airtemp_fn, nodes, nodal_transport_data):
)
transport = (
(transport_shape.multiply(energy_totals_transport) * 1e6 * Nyears)
(transport_shape.multiply(energy_totals_transport) * 1e6 * nyears)
.divide(efficiency_gain * ice_correction)
.multiply(1 + dd_EV)
)
@ -181,7 +181,7 @@ if __name__ == "__main__":
snapshots = pd.date_range(freq="h", **snakemake.config["snapshots"], tz="UTC")
Nyears = 1
nyears = len(snapshots) / 8760
nodal_transport_data = build_nodal_transport_data(
snakemake.input.transport_data, pop_layout
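The lower-case nyears replaces the hard-coded Nyears = 1 and scales annual energy totals to the actual model horizon. A small numeric sketch with hypothetical values:

import pandas as pd

snapshots = pd.date_range("2013-01-01", "2013-03-31 23:00", freq="h")
nyears = len(snapshots) / 8760        # ~0.25 for a three-month horizon
annual_transport_twh = 1.2            # hypothetical yearly total
demand_mwh = annual_transport_twh * 1e6 * nyears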

View File

@ -442,9 +442,13 @@ def calculate_metrics(n, label, metrics):
["line_volume_AC", "line_volume_DC"], label
].sum()
if hasattr(n, "line_volume_limit"):
metrics.at["line_volume_limit", label] = n.line_volume_limit
metrics.at["line_volume_shadow", label] = n.line_volume_limit_dual
if "lv_limit" in n.global_constraints.index:
metrics.at["line_volume_limit", label] = n.global_constraints.at[
"lv_limit", "constant"
]
metrics.at["line_volume_shadow", label] = n.global_constraints.at[
"lv_limit", "mu"
]
if "CO2Limit" in n.global_constraints.index:
metrics.at["co2_shadow", label] = n.global_constraints.at["CO2Limit", "mu"]
@ -695,7 +699,7 @@ if __name__ == "__main__":
for planning_horizon in snakemake.config["scenario"]["planning_horizons"]
}
Nyears = 1
Nyears = len(pd.date_range(freq="h", **snakemake.config["snapshots"])) / 8760
costs_db = prepare_costs(
snakemake.input.costs,
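With Linopy, the line-volume limit and its shadow price are no longer stored as ad-hoc network attributes but read from n.global_constraints, just like the CO2 limit. A hedged sketch for a solved network n:

if "lv_limit" in n.global_constraints.index:
    lv_cap = n.global_constraints.at["lv_limit", "constant"]  # volume cap
    lv_mu = n.global_constraints.at["lv_limit", "mu"]         # its dual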

View File

@ -394,8 +394,7 @@ def plot_h2_map(network, regions):
link_widths=link_widths_retro,
branch_components=["Link"],
ax=ax,
color_geomap=False,
boundaries=map_opts["boundaries"],
**map_opts,
)
regions.plot(
@ -922,11 +921,11 @@ if __name__ == "__main__":
snakemake = mock_snakemake(
"plot_network",
simpl="",
clusters="181",
ll="vopt",
opts="",
sector_opts="Co2L0-730H-T-H-B-I-A-solar+p3-linemaxext10",
planning_horizons="2050",
clusters="5",
ll="v1.5",
sector_opts="CO2L0-1H-T-H-B-I-A-solar+p3-dist1",
planning_horizons="2030",
)
logging.basicConfig(level=snakemake.config["logging"]["level"])
@ -938,6 +937,9 @@ if __name__ == "__main__":
map_opts = snakemake.config["plotting"]["map"]
if map_opts["boundaries"] is None:
map_opts["boundaries"] = regions.total_bounds[[0, 2, 1, 3]] + [-1, 1, -1, 1]
plot_map(
n,
components=["generators", "links", "stores", "storage_units"],
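The fallback boundary computation reorders geopandas' total_bounds from (minx, miny, maxx, maxy) into the (xmin, xmax, ymin, ymax) layout the map plotting expects, with one degree of padding. A toy check:

import numpy as np

total_bounds = np.array([-10.0, 35.0, 30.0, 70.0])  # hypothetical extent
boundaries = total_bounds[[0, 2, 1, 3]] + [-1, 1, -1, 1]
# -> array([-11., 31., 34., 71.])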

View File

@ -676,7 +676,7 @@ def add_dac(n, costs):
)
def add_co2limit(n, Nyears=1.0, limit=0.0):
def add_co2limit(n, nyears=1.0, limit=0.0):
logger.info(f"Adding CO2 budget limit as per unit of 1990 levels of {limit}")
countries = snakemake.config["countries"]
@ -688,7 +688,7 @@ def add_co2limit(n, Nyears=1.0, limit=0.0):
co2_limit = co2_totals.loc[countries, sectors].sum().sum()
co2_limit *= limit * Nyears
co2_limit *= limit * nyears
n.add(
"GlobalConstraint",
@ -732,7 +732,7 @@ def cycling_shift(df, steps=1):
return df
def prepare_costs(cost_file, config, Nyears):
def prepare_costs(cost_file, config, nyears):
# set all asset costs and other parameters
costs = pd.read_csv(cost_file, index_col=[0, 1]).sort_index()
@ -750,7 +750,7 @@ def prepare_costs(cost_file, config, Nyears):
return annuity(v["lifetime"], v["discount rate"]) + v["FOM"] / 100
costs["fixed"] = [
annuity_factor(v) * v["investment"] * Nyears for i, v in costs.iterrows()
annuity_factor(v) * v["investment"] * nyears for i, v in costs.iterrows()
]
return costs
@ -1409,6 +1409,7 @@ def add_land_transport(n, costs):
# TODO options?
logger.info("Add land transport")
nhours = n.snapshot_weightings.generators.sum()
transport = pd.read_csv(
snakemake.input.transport_demand, index_col=0, parse_dates=True
@ -1558,7 +1559,7 @@ def add_land_transport(n, costs):
ice_share
/ ice_efficiency
* transport[nodes].sum().sum()
/ 8760
/ nhours
* costs.at["oil", "CO2 intensity"]
)
@ -2333,11 +2334,13 @@ def add_industry(n, costs):
logger.info("Add industrial demand")
nodes = pop_layout.index
nhours = n.snapshot_weightings.generators.sum()
nyears = nhours / 8760
# 1e6 to convert TWh to MWh
industrial_demand = (
pd.read_csv(snakemake.input.industrial_demand, index_col=0) * 1e6
)
) * nyears
n.madd(
"Bus",
@ -2352,10 +2355,10 @@ def add_industry(n, costs):
industrial_demand.loc[spatial.biomass.locations, "solid biomass"].rename(
index=lambda x: x + " solid biomass for industry"
)
/ 8760
/ nhours
)
else:
p_set = industrial_demand["solid biomass"].sum() / 8760
p_set = industrial_demand["solid biomass"].sum() / nhours
n.madd(
"Load",
@ -2402,7 +2405,7 @@ def add_industry(n, costs):
unit="MWh_LHV",
)
gas_demand = industrial_demand.loc[nodes, "methane"] / 8760.0
gas_demand = industrial_demand.loc[nodes, "methane"] / nhours
if options["gas_network"]:
spatial_gas_demand = gas_demand.rename(index=lambda x: x + " gas for industry")
@ -2454,7 +2457,7 @@ def add_industry(n, costs):
suffix=" H2 for industry",
bus=nodes + " H2",
carrier="H2 for industry",
p_set=industrial_demand.loc[nodes, "hydrogen"] / 8760,
p_set=industrial_demand.loc[nodes, "hydrogen"] / nhours,
)
shipping_hydrogen_share = get(options["shipping_hydrogen_share"], investment_year)
@ -2470,11 +2473,11 @@ def add_industry(n, costs):
domestic_navigation = pop_weighted_energy_totals.loc[
nodes, "total domestic navigation"
].squeeze()
international_navigation = pd.read_csv(
snakemake.input.shipping_demand, index_col=0
).squeeze()
international_navigation = (
pd.read_csv(snakemake.input.shipping_demand, index_col=0).squeeze() * nyears
)
all_navigation = domestic_navigation + international_navigation
p_set = all_navigation * 1e6 / 8760
p_set = all_navigation * 1e6 / nhours
if shipping_hydrogen_share:
oil_efficiency = options.get(
@ -2681,7 +2684,7 @@ def add_industry(n, costs):
)
demand_factor = options.get("HVC_demand_factor", 1)
p_set = demand_factor * industrial_demand.loc[nodes, "naphtha"].sum() / 8760
p_set = demand_factor * industrial_demand.loc[nodes, "naphtha"].sum() / nhours
if demand_factor != 1:
logger.warning(f"Changing HVC demand by {demand_factor*100-100:+.2f}%.")
@ -2699,7 +2702,7 @@ def add_industry(n, costs):
demand_factor
* pop_weighted_energy_totals.loc[nodes, all_aviation].sum(axis=1).sum()
* 1e6
/ 8760
/ nhours
)
if demand_factor != 1:
logger.warning(f"Changing aviation demand by {demand_factor*100-100:+.2f}%.")
@ -2718,7 +2721,7 @@ def add_industry(n, costs):
co2_release = ["naphtha for industry", "kerosene for aviation"]
co2 = (
n.loads.loc[co2_release, "p_set"].sum() * costs.at["oil", "CO2 intensity"]
- industrial_demand.loc[nodes, "process emission from feedstock"].sum() / 8760
- industrial_demand.loc[nodes, "process emission from feedstock"].sum() / nhours
)
n.add(
@ -2741,12 +2744,13 @@ def add_industry(n, costs):
for node in nodes
],
carrier="low-temperature heat for industry",
p_set=industrial_demand.loc[nodes, "low-temperature heat"] / 8760,
p_set=industrial_demand.loc[nodes, "low-temperature heat"] / nhours,
)
# remove today's industrial electricity demand by scaling down total electricity demand
for ct in n.buses.country.dropna().unique():
# TODO map onto n.bus.country
loads_i = n.loads.index[
(n.loads.index.str[:2] == ct) & (n.loads.carrier == "electricity")
]
@ -2765,7 +2769,7 @@ def add_industry(n, costs):
suffix=" industry electricity",
bus=nodes,
carrier="industry electricity",
p_set=industrial_demand.loc[nodes, "electricity"] / 8760,
p_set=industrial_demand.loc[nodes, "electricity"] / nhours,
)
n.madd(
@ -2782,10 +2786,10 @@ def add_industry(n, costs):
-industrial_demand.loc[nodes, sel]
.sum(axis=1)
.rename(index=lambda x: x + " process emissions")
/ 8760
/ nhours
)
else:
p_set = -industrial_demand.loc[nodes, sel].sum(axis=1).sum() / 8760
p_set = -industrial_demand.loc[nodes, sel].sum(axis=1).sum() / nhours
# this should be process emissions fossil+feedstock
# then need load on atmosphere for feedstock emissions that are currently going to atmosphere via Link Fischer-Tropsch demand
@ -2829,10 +2833,10 @@ def add_industry(n, costs):
industrial_demand.loc[spatial.ammonia.locations, "ammonia"].rename(
index=lambda x: x + " NH3"
)
/ 8760
/ nhours
)
else:
p_set = industrial_demand["ammonia"].sum() / 8760
p_set = industrial_demand["ammonia"].sum() / nhours
n.madd(
"Load",
@ -2884,6 +2888,7 @@ def add_agriculture(n, costs):
logger.info("Add agriculture, forestry and fishing sector.")
nodes = pop_layout.index
nhours = n.snapshot_weightings.generators.sum()
# electricity
@ -2895,7 +2900,7 @@ def add_agriculture(n, costs):
carrier="agriculture electricity",
p_set=pop_weighted_energy_totals.loc[nodes, "total agriculture electricity"]
* 1e6
/ 8760,
/ nhours,
)
# heat
@ -2908,7 +2913,7 @@ def add_agriculture(n, costs):
carrier="agriculture heat",
p_set=pop_weighted_energy_totals.loc[nodes, "total agriculture heat"]
* 1e6
/ 8760,
/ nhours,
)
# machinery
@ -2944,7 +2949,7 @@ def add_agriculture(n, costs):
/ efficiency_gain
* machinery_nodal_energy
* 1e6
/ 8760,
/ nhours,
)
if oil_share > 0:
@ -2953,14 +2958,14 @@ def add_agriculture(n, costs):
["agriculture machinery oil"],
bus=spatial.oil.nodes,
carrier="agriculture machinery oil",
p_set=oil_share * machinery_nodal_energy.sum() * 1e6 / 8760,
p_set=oil_share * machinery_nodal_energy.sum() * 1e6 / nhours,
)
co2 = (
oil_share
* machinery_nodal_energy.sum()
* 1e6
/ 8760
/ nhours
* costs.at["oil", "CO2 intensity"]
)
@ -3229,19 +3234,19 @@ def set_temporal_aggregation(n, opts, solver_name):
return n
# %%
if __name__ == "__main__":
if "snakemake" not in globals():
from _helpers import mock_snakemake
snakemake = mock_snakemake(
"prepare_sector_network",
configfiles="test/config.overnight.yaml",
simpl="",
opts="",
clusters="37",
clusters="5",
ll="v1.5",
sector_opts="cb40ex0-365H-T-H-B-I-A-solar+p3-dist1",
planning_horizons="2020",
sector_opts="CO2L0-24H-T-H-B-I-A-solar+p3-dist1",
planning_horizons="2030",
)
logging.basicConfig(level=snakemake.config["logging"]["level"])
@ -3258,16 +3263,17 @@ if __name__ == "__main__":
n = pypsa.Network(snakemake.input.network, override_component_attrs=overrides)
pop_layout = pd.read_csv(snakemake.input.clustered_pop_layout, index_col=0)
Nyears = n.snapshot_weightings.generators.sum() / 8760
nhours = n.snapshot_weightings.generators.sum()
nyears = nhours / 8760
costs = prepare_costs(
snakemake.input.costs,
snakemake.config["costs"],
Nyears,
nyears,
)
pop_weighted_energy_totals = pd.read_csv(
snakemake.input.pop_weighted_energy_totals, index_col=0
pop_weighted_energy_totals = (
pd.read_csv(snakemake.input.pop_weighted_energy_totals, index_col=0) * nyears
)
patch_electricity_network(n)
@ -3369,7 +3375,7 @@ if __name__ == "__main__":
limit = float(limit.replace("p", ".").replace("m", "-"))
break
logger.info(f"Add CO2 limit from {limit_type}")
add_co2limit(n, Nyears, limit)
add_co2limit(n, nyears, limit)
for o in opts:
if not o[:10] == "linemaxext":
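Throughout the script, dividing annual energies by nhours = n.snapshot_weightings.generators.sum() rather than the literal 8760 keeps loads consistent under temporal aggregation, since the weightings still sum to the full horizon. A small sketch with hypothetical numbers:

# 24-hourly aggregation: 365 snapshots, each weighted with 24 hours
weights = [24.0] * 365
nhours = sum(weights)               # 8760.0, as for the unaggregated year
annual_energy_mwh = 1.0e7           # hypothetical industrial demand
p_set = annual_energy_mwh / nhours  # ~1141.6 MW of constant load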

scripts/solve_network.py (660 changed lines) Executable file → Normal file
View File

@ -4,8 +4,11 @@
# SPDX-License-Identifier: MIT
"""
Solves linear optimal power flow for a network iteratively while updating
reactances.
Solves optimal operation and capacity for a network with the option to
iteratively optimize while updating line reactances.
This script is used for optimizing the electrical network as well as the
sector coupled network.
Relevant Settings
-----------------
@ -13,7 +16,6 @@ Relevant Settings
.. code:: yaml
solving:
tmpdir:
options:
formulation:
clip_p_max_pu:
@ -26,6 +28,7 @@ Relevant Settings
track_iterations:
solver:
name:
options:
.. seealso::
Documentation of the configuration file ``config.yaml`` at
@ -51,6 +54,7 @@ Total annual system costs are minimised with PyPSA. The full formulation of the
linear optimal power flow (plus investment planning)
is provided in the
`documentation of PyPSA <https://pypsa.readthedocs.io/en/latest/optimal_power_flow.html#linear-optimal-power-flow>`_.
The optimization is based on the ``pyomo=False`` setting in the :func:`network.lopf` and :func:`pypsa.linopf.ilopf` functions.
Additionally, some extra constraints specified in :mod:`prepare_network` are added.
@ -64,69 +68,166 @@ Details (and errors made through this heuristic) are discussed in the paper
- Fabian Neumann and Tom Brown. `Heuristics for Transmission Expansion Planning in Low-Carbon Energy System Models <https://arxiv.org/abs/1907.10548>`_, *16th International Conference on the European Energy Market*, 2019. `arXiv:1907.10548 <https://arxiv.org/abs/1907.10548>`_.
.. warning::
Capital costs of existing network components are not included in the objective function,
since for the optimisation problem they are just a constant term (no influence on optimal result).
Therefore, these capital costs are not included in ``network.objective``!
If you want to calculate the full total annual system costs, add these to the objective value.
.. tip::
The rule :mod:`solve_all_networks` runs
the rule :mod:`solve_network` for all ``scenario`` s
in the configuration file.
"""
import logging
import re
from pathlib import Path
import numpy as np
import pandas as pd
import pypsa
from _helpers import configure_logging
from pypsa.descriptors import get_switchable_as_dense as get_as_dense
from pypsa.linopf import (
define_constraints,
define_variables,
get_var,
ilopf,
join_exprs,
linexpr,
network_lopf,
import xarray as xr
from _helpers import (
configure_logging,
override_component_attrs,
update_config_with_sector_opts,
)
from vresutils.benchmark import memory_logger
logger = logging.getLogger(__name__)
pypsa.pf.logger.setLevel(logging.WARNING)
def prepare_network(n, solve_opts):
def add_land_use_constraint(n, config):
if "m" in snakemake.wildcards.clusters:
_add_land_use_constraint_m(n, config)
else:
_add_land_use_constraint(n, config)
def _add_land_use_constraint(n, config):
# warning: this will miss existing offwind which is not classed AC-DC and has carrier 'offwind'
for carrier in ["solar", "onwind", "offwind-ac", "offwind-dc"]:
ext_i = (n.generators.carrier == carrier) & ~n.generators.p_nom_extendable
existing = (
n.generators.loc[ext_i, "p_nom"]
.groupby(n.generators.bus.map(n.buses.location))
.sum()
)
existing.index += " " + carrier + "-" + snakemake.wildcards.planning_horizons
n.generators.loc[existing.index, "p_nom_max"] -= existing
# check if existing capacities are larger than technical potential
existing_large = n.generators[
n.generators["p_nom_min"] > n.generators["p_nom_max"]
].index
if len(existing_large):
logger.warning(
f"Existing capacities larger than technical potential for {existing_large},\
adjust technical potential to existing capacities"
)
n.generators.loc[existing_large, "p_nom_max"] = n.generators.loc[
existing_large, "p_nom_min"
]
n.generators.p_nom_max.clip(lower=0, inplace=True)
def _add_land_use_constraint_m(n, config):
# if generator clustering is finer than network clustering, land-use accounting is done at the generator clusters
planning_horizons = config["scenario"]["planning_horizons"]
grouping_years = config["existing_capacities"]["grouping_years"]
current_horizon = snakemake.wildcards.planning_horizons
for carrier in ["solar", "onwind", "offwind-ac", "offwind-dc"]:
existing = n.generators.loc[n.generators.carrier == carrier, "p_nom"]
ind = list(
set(
[
i.split(sep=" ")[0] + " " + i.split(sep=" ")[1]
for i in existing.index
]
)
)
previous_years = [
str(y)
for y in planning_horizons + grouping_years
if y < int(snakemake.wildcards.planning_horizons)
]
for p_year in previous_years:
ind2 = [
i for i in ind if i + " " + carrier + "-" + p_year in existing.index
]
sel_current = [i + " " + carrier + "-" + current_horizon for i in ind2]
sel_p_year = [i + " " + carrier + "-" + p_year for i in ind2]
n.generators.loc[sel_current, "p_nom_max"] -= existing.loc[
sel_p_year
].rename(lambda x: x[:-4] + current_horizon)
n.generators.p_nom_max.clip(lower=0, inplace=True)
def add_co2_sequestration_limit(n, limit=200):
"""
Add a global constraint on the amount of CO2 (in Mt) that can be sequestered.
"""
n.carriers.loc["co2 stored", "co2_absorptions"] = -1
n.carriers.co2_absorptions = n.carriers.co2_absorptions.fillna(0)
limit = limit * 1e6
for o in opts:
if "seq" not in o:
continue
limit = float(o[o.find("seq") + 3 :]) * 1e6
break
n.add(
"GlobalConstraint",
"co2_sequestration_limit",
sense="<=",
constant=limit,
type="primary_energy",
carrier_attribute="co2_absorptions",
)
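The loop over opts lets a wildcard such as seq300 override the default sequestration potential. A toy parse of such a token:

o = "seq300"                                  # hypothetical opts entry
limit = float(o[o.find("seq") + 3 :]) * 1e6   # 300 Mt -> 3.0e8 tCO2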
def prepare_network(n, solve_opts=None, config=None):
if "clip_p_max_pu" in solve_opts:
for df in (n.generators_t.p_max_pu, n.storage_units_t.inflow):
for df in (
n.generators_t.p_max_pu,
n.generators_t.p_min_pu, # TODO: check if this can be removed
n.storage_units_t.inflow,
):
df.where(df > solve_opts["clip_p_max_pu"], other=0.0, inplace=True)
load_shedding = solve_opts.get("load_shedding")
if load_shedding:
if solve_opts.get("load_shedding"):
# intersect between macroeconomic and survey-based willingness to pay
# http://journal.frontiersin.org/article/10.3389/fenrg.2015.00055/full
# TODO: retrieve color and nice name from config
n.add("Carrier", "load", color="#dd2e23", nice_name="Load shedding")
buses_i = n.buses.query("carrier == 'AC'").index
if not np.isscalar(load_shedding):
# TODO: do not scale via sign attribute (use Eur/MWh instead of Eur/kWh)
load_shedding = 1e2 # Eur/kWh
# intersect between macroeconomic and surveybased
# willingness to pay
# http://journal.frontiersin.org/article/10.3389/fenrg.2015.00055/full)
n.madd(
"Generator",
buses_i,
" load",
bus=buses_i,
bus=n.buses.index,
carrier="load",
sign=1e-3, # Adjust sign to measure p and p_nom in kW instead of MW
marginal_cost=load_shedding,
marginal_cost=load_shedding, # Eur/kWh
p_nom=1e9, # kW
)
if solve_opts.get("noisy_costs"):
for t in n.iterate_components(n.one_port_components):
for t in n.iterate_components():
# if 'capital_cost' in t.df:
# t.df['capital_cost'] += 1e1 + 2.*(np.random.random(len(t.df)) - 0.5)
if "marginal_cost" in t.df:
@ -144,61 +245,96 @@ def prepare_network(n, solve_opts):
n.set_snapshots(n.snapshots[:nhours])
n.snapshot_weightings[:] = 8760.0 / nhours
if config["foresight"] == "myopic":
add_land_use_constraint(n, config)
if n.stores.carrier.eq("co2 stored").any():
limit = config["sector"].get("co2_sequestration_potential", 200)
add_co2_sequestration_limit(n, limit=limit)
return n
def add_CCL_constraints(n, config):
agg_p_nom_limits = config["electricity"].get("agg_p_nom_limits")
"""
Add CCL (country & carrier limit) constraint to the network.
try:
agg_p_nom_minmax = pd.read_csv(agg_p_nom_limits, index_col=list(range(2)))
except IOError:
logger.exception(
"Need to specify the path to a .csv file containing "
"aggregate capacity limits per country in "
"config['electricity']['agg_p_nom_limit']."
Add minimum and maximum levels of generator nominal capacity per carrier
for individual countries. Opts and path for agg_p_nom_minmax.csv must be defined
in config.yaml. Default file is available at data/agg_p_nom_minmax.csv.
Parameters
----------
n : pypsa.Network
config : dict
Example
-------
scenario:
opts: [Co2L-CCL-24H]
electricity:
agg_p_nom_limits: data/agg_p_nom_minmax.csv
"""
agg_p_nom_minmax = pd.read_csv(
config["electricity"]["agg_p_nom_limits"], index_col=[0, 1]
)
logger.info(
"Adding per carrier generation capacity constraints for " "individual countries"
logger.info("Adding generation capacity constraints per carrier and country")
p_nom = n.model["Generator-p_nom"]
gens = n.generators.query("p_nom_extendable").rename_axis(index="Generator-ext")
grouper = [gens.bus.map(n.buses.country), gens.carrier]
grouper = xr.DataArray(pd.MultiIndex.from_arrays(grouper), dims=["Generator-ext"])
lhs = p_nom.groupby(grouper).sum().rename(bus="country")
minimum = xr.DataArray(agg_p_nom_minmax["min"].dropna()).rename(dim_0="group")
index = minimum.indexes["group"].intersection(lhs.indexes["group"])
if not index.empty:
n.model.add_constraints(
lhs.sel(group=index) >= minimum.loc[index], name="agg_p_nom_min"
)
gen_country = n.generators.bus.map(n.buses.country)
# cc means country and carrier
p_nom_per_cc = (
pd.DataFrame(
{
"p_nom": linexpr((1, get_var(n, "Generator", "p_nom"))),
"country": gen_country,
"carrier": n.generators.carrier,
}
)
.dropna(subset=["p_nom"])
.groupby(["country", "carrier"])
.p_nom.apply(join_exprs)
)
minimum = agg_p_nom_minmax["min"].dropna()
if not minimum.empty:
minconstraint = define_constraints(
n, p_nom_per_cc[minimum.index], ">=", minimum, "agg_p_nom", "min"
)
maximum = agg_p_nom_minmax["max"].dropna()
if not maximum.empty:
maxconstraint = define_constraints(
n, p_nom_per_cc[maximum.index], "<=", maximum, "agg_p_nom", "max"
maximum = xr.DataArray(agg_p_nom_minmax["max"].dropna()).rename(dim_0="group")
index = maximum.indexes["group"].intersection(lhs.indexes["group"])
if not index.empty:
n.model.add_constraints(
lhs.sel(group=index) <= maximum.loc[index], name="agg_p_nom_max"
)
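This rewrite shows the general Linopy pattern that replaces the string-based linexpr/join_exprs machinery: variables are grouped via an xarray-wrapped pandas grouper and compared against bounds directly. A stripped-down sketch, assuming a network n whose model already defines Generator-p_nom:

import pandas as pd
import xarray as xr

p_nom = n.model["Generator-p_nom"]
gens = n.generators.query("p_nom_extendable").rename_axis(index="Generator-ext")
grouper = xr.DataArray(
    pd.MultiIndex.from_arrays([gens.bus.map(n.buses.country), gens.carrier]),
    dims=["Generator-ext"],
)
lhs = p_nom.groupby(grouper).sum()  # one expression per (country, carrier)
n.model.add_constraints(lhs >= 1000.0, name="example_min_cap")  # toy bound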
def add_EQ_constraints(n, o, scaling=1e-1):
"""
Add equity constraints to the network.
Currently this is implemented for the electricity sector only.
Opts must be specified in the config.yaml.
Parameters
----------
n : pypsa.Network
o : str
Example
-------
scenario:
opts: [Co2L-EQ0.7-24H]
Require each country or node to produce on average a minimal share
of its total electricity consumption itself. Example: EQ0.7c demands each country
to produce on average at least 70% of its consumption; EQ0.7 demands
each node to produce on average at least 70% of its consumption.
"""
# TODO: Generalize to cover myopic and other sectors?
float_regex = r"[0-9]*\.?[0-9]+"
level = float(re.findall(float_regex, o)[0])
if o[-1] == "c":
ggrouper = n.generators.bus.map(n.buses.country)
lgrouper = n.loads.bus.map(n.buses.country)
sgrouper = n.storage_units.bus.map(n.buses.country)
ggrouper = n.generators.bus.map(n.buses.country).to_xarray()
lgrouper = n.loads.bus.map(n.buses.country).to_xarray()
sgrouper = n.storage_units.bus.map(n.buses.country).to_xarray()
else:
ggrouper = n.generators.bus
lgrouper = n.loads.bus
sgrouper = n.storage_units.bus
ggrouper = n.generators.bus.to_xarray()
lgrouper = n.loads.bus.to_xarray()
sgrouper = n.storage_units.bus.to_xarray()
load = (
n.snapshot_weightings.generators
@ n.loads_t.p_set.groupby(lgrouper, axis=1).sum()
@ -209,147 +345,271 @@ def add_EQ_constraints(n, o, scaling=1e-1):
)
inflow = inflow.reindex(load.index).fillna(0.0)
rhs = scaling * (level * load - inflow)
p = n.model["Generator-p"]
lhs_gen = (
linexpr(
(n.snapshot_weightings.generators * scaling, get_var(n, "Generator", "p").T)
)
.T.groupby(ggrouper, axis=1)
.apply(join_exprs)
(p * (n.snapshot_weightings.generators * scaling))
.groupby(ggrouper)
.sum()
.sum("snapshot")
)
# TODO: double check that this is really needed, why do we have to subtract the spillage
if not n.storage_units_t.inflow.empty:
spillage = n.model["StorageUnit-spill"]
lhs_spill = (
linexpr(
(
-n.snapshot_weightings.stores * scaling,
get_var(n, "StorageUnit", "spill").T,
(spillage * (-n.snapshot_weightings.stores * scaling))
.groupby(sgrouper)
.sum()
.sum("snapshot")
)
)
.T.groupby(sgrouper, axis=1)
.apply(join_exprs)
)
lhs_spill = lhs_spill.reindex(lhs_gen.index).fillna("")
lhs = lhs_gen + lhs_spill
else:
lhs = lhs_gen
define_constraints(n, lhs, ">=", rhs, "equity", "min")
n.model.add_constraints(lhs >= rhs, name="equity_min")
def add_BAU_constraints(n, config):
"""
Add a per-carrier minimal overall capacity.
BAU_mincapacities and opts must be adjusted in the config.yaml.
Parameters
----------
n : pypsa.Network
config : dict
Example
-------
scenario:
opts: [Co2L-BAU-24H]
electricity:
BAU_mincapacities:
solar: 0
onwind: 0
OCGT: 100000
offwind-ac: 0
offwind-dc: 0
This sets a minimum capacity summed over all nodes, e.g. 100 GW of OCGT
in Europe: OCGT bus 1 + OCGT bus 2 + ... >= 100000 MW.
"""
mincaps = pd.Series(config["electricity"]["BAU_mincapacities"])
lhs = (
linexpr((1, get_var(n, "Generator", "p_nom")))
.groupby(n.generators.carrier)
.apply(join_exprs)
)
define_constraints(n, lhs, ">=", mincaps[lhs.index], "Carrier", "bau_mincaps")
p_nom = n.model["Generator-p_nom"]
ext_i = n.generators.query("p_nom_extendable")
ext_carrier_i = xr.DataArray(ext_i.carrier.rename_axis("Generator-ext"))
lhs = p_nom.groupby(ext_carrier_i).sum()
index = mincaps.index.intersection(lhs.indexes["carrier"])
rhs = mincaps[index].rename_axis("carrier")
n.model.add_constraints(lhs >= rhs, name="bau_mincaps")
# TODO: think about removing or make per country
def add_SAFE_constraints(n, config):
peakdemand = (
1.0 + config["electricity"]["SAFE_reservemargin"]
) * n.loads_t.p_set.sum(axis=1).max()
"""
Add a capacity reserve margin of a certain fraction above the peak demand.
Renewable generators and storage do not contribute. Ignores network.
Parameters
----------
n : pypsa.Network
config : dict
Example
-------
config.yaml requires opts to be specified:
scenario:
opts: [Co2L-SAFE-24H]
electricity:
SAFE_reservemargin: 0.1
This sets a reserve margin of 10% above the peak demand.
"""
peakdemand = n.loads_t.p_set.sum(axis=1).max()
margin = 1.0 + config["electricity"]["SAFE_reservemargin"]
reserve_margin = peakdemand * margin
# TODO: do not take this from the plotting config!
conv_techs = config["plotting"]["conv_techs"]
ext_gens_i = n.generators.query("carrier in @conv_techs & p_nom_extendable").index
p_nom = n.model["Generator-p_nom"].loc[ext_gens_i]
lhs = p_nom.sum()
exist_conv_caps = n.generators.query(
"~p_nom_extendable & carrier in @conv_techs"
).p_nom.sum()
ext_gens_i = n.generators.query("carrier in @conv_techs & p_nom_extendable").index
lhs = linexpr((1, get_var(n, "Generator", "p_nom")[ext_gens_i])).sum()
rhs = peakdemand - exist_conv_caps
define_constraints(n, lhs, ">=", rhs, "Safe", "mintotalcap")
def add_operational_reserve_margin_constraint(n, config):
reserve_config = config["electricity"]["operational_reserve"]
EPSILON_LOAD = reserve_config["epsilon_load"]
EPSILON_VRES = reserve_config["epsilon_vres"]
CONTINGENCY = reserve_config["contingency"]
# Reserve Variables
reserve = get_var(n, "Generator", "r")
lhs = linexpr((1, reserve)).sum(1)
# Share of extendable renewable capacities
ext_i = n.generators.query("p_nom_extendable").index
vres_i = n.generators_t.p_max_pu.columns
if not ext_i.empty and not vres_i.empty:
capacity_factor = n.generators_t.p_max_pu[vres_i.intersection(ext_i)]
renewable_capacity_variables = get_var(n, "Generator", "p_nom")[
vres_i.intersection(ext_i)
]
lhs += linexpr(
(-EPSILON_VRES * capacity_factor, renewable_capacity_variables)
).sum(1)
# Total demand at t
demand = n.loads_t.p_set.sum(1)
# VRES potential of non extendable generators
capacity_factor = n.generators_t.p_max_pu[vres_i.difference(ext_i)]
renewable_capacity = n.generators.p_nom[vres_i.difference(ext_i)]
potential = (capacity_factor * renewable_capacity).sum(1)
# Right-hand-side
rhs = EPSILON_LOAD * demand + EPSILON_VRES * potential + CONTINGENCY
define_constraints(n, lhs, ">=", rhs, "Reserve margin")
def update_capacity_constraint(n):
gen_i = n.generators.index
ext_i = n.generators.query("p_nom_extendable").index
fix_i = n.generators.query("not p_nom_extendable").index
dispatch = get_var(n, "Generator", "p")
reserve = get_var(n, "Generator", "r")
capacity_fixed = n.generators.p_nom[fix_i]
p_max_pu = get_as_dense(n, "Generator", "p_max_pu")
lhs = linexpr((1, dispatch), (1, reserve))
if not ext_i.empty:
capacity_variable = get_var(n, "Generator", "p_nom")
lhs += linexpr((-p_max_pu[ext_i], capacity_variable)).reindex(
columns=gen_i, fill_value=""
)
rhs = (p_max_pu[fix_i] * capacity_fixed).reindex(columns=gen_i, fill_value=0)
define_constraints(n, lhs, "<=", rhs, "Generators", "updated_capacity_constraint")
rhs = reserve_margin - exist_conv_caps
n.model.add_constraints(lhs >= rhs, name="safe_mintotalcap")
def add_operational_reserve_margin(n, sns, config):
"""
Build reserve margin constraints based on the formulation given in
https://genxproject.github.io/GenX/dev/core/#Reserves.
Parameters
----------
n : pypsa.Network
sns: pd.DatetimeIndex
config : dict
Example:
--------
config.yaml requires operational_reserve to be specified:
operational_reserve: # like https://genxproject.github.io/GenX/dev/core/#Reserves
activate: true
epsilon_load: 0.02 # percentage of load at each snapshot
epsilon_vres: 0.02 # percentage of VRES at each snapshot
contingency: 400000 # MW
"""
define_variables(n, 0, np.inf, "Generator", "r", axes=[sns, n.generators.index])
reserve_config = config["electricity"]["operational_reserve"]
EPSILON_LOAD = reserve_config["epsilon_load"]
EPSILON_VRES = reserve_config["epsilon_vres"]
CONTINGENCY = reserve_config["contingency"]
add_operational_reserve_margin_constraint(n, config)
# Reserve Variables
n.model.add_variables(
0, np.inf, coords=[sns, n.generators.index], name="Generator-r"
)
reserve = n.model["Generator-r"]
lhs = reserve.sum("Generator")
update_capacity_constraint(n)
# Share of extendable renewable capacities
ext_i = n.generators.query("p_nom_extendable").index
vres_i = n.generators_t.p_max_pu.columns
if not ext_i.empty and not vres_i.empty:
capacity_factor = n.generators_t.p_max_pu[vres_i.intersection(ext_i)]
p_nom_vres = (
n.model["Generator-p_nom"]
.loc[vres_i.intersection(ext_i)]
.rename({"Generator-ext": "Generator"})
)
lhs = lhs + (p_nom_vres * (-EPSILON_VRES * capacity_factor)).sum()
# Total demand per t
demand = n.loads_t.p_set.sum(axis=1)
# VRES potential of non extendable generators
capacity_factor = n.generators_t.p_max_pu[vres_i.difference(ext_i)]
renewable_capacity = n.generators.p_nom[vres_i.difference(ext_i)]
potential = (capacity_factor * renewable_capacity).sum(axis=1)
# Right-hand-side
rhs = EPSILON_LOAD * demand + EPSILON_VRES * potential + CONTINGENCY
n.model.add_constraints(lhs >= rhs, name="reserve_margin")
reserve = n.model["Generator-r"]
lhs = n.model.constraints["Generator-fix-p-upper"].lhs
lhs = lhs + reserve.loc[:, lhs.coords["Generator-fix"]].drop("Generator")
rhs = n.model.constraints["Generator-fix-p-upper"].rhs
n.model.add_constraints(lhs <= rhs, name="Generator-fix-p-upper-reserve")
lhs = n.model.constraints["Generator-ext-p-upper"].lhs
lhs = lhs + reserve.loc[:, lhs.coords["Generator-ext"]].drop("Generator")
rhs = n.model.constraints["Generator-ext-p-upper"].rhs
n.model.add_constraints(lhs >= rhs, name="Generator-ext-p-upper-reserve")
def add_battery_constraints(n):
nodes = n.buses.index[n.buses.carrier == "battery"]
if nodes.empty or ("Link", "p_nom") not in n.variables.index:
"""
Add constraint ensuring that charger = discharger, i.e.
1 * charger_size - efficiency * discharger_size = 0
"""
if not n.links.p_nom_extendable.any():
return
link_p_nom = get_var(n, "Link", "p_nom")
lhs = linexpr(
(1, link_p_nom[nodes + " charger"]),
(
-n.links.loc[nodes + " discharger", "efficiency"].values,
link_p_nom[nodes + " discharger"].values,
),
discharger_bool = n.links.index.str.contains("battery discharger")
charger_bool = n.links.index.str.contains("battery charger")
dischargers_ext = n.links[discharger_bool].query("p_nom_extendable").index
chargers_ext = n.links[charger_bool].query("p_nom_extendable").index
eff = n.links.efficiency[dischargers_ext].values
lhs = (
n.model["Link-p_nom"].loc[chargers_ext]
- n.model["Link-p_nom"].loc[dischargers_ext] * eff
)
define_constraints(n, lhs, "=", 0, "Link", "charger_ratio")
n.model.add_constraints(lhs == 0, name="Link-charger_ratio")
def add_chp_constraints(n):
electric = (
n.links.index.str.contains("urban central")
& n.links.index.str.contains("CHP")
& n.links.index.str.contains("electric")
)
heat = (
n.links.index.str.contains("urban central")
& n.links.index.str.contains("CHP")
& n.links.index.str.contains("heat")
)
electric_ext = n.links[electric].query("p_nom_extendable").index
heat_ext = n.links[heat].query("p_nom_extendable").index
electric_fix = n.links[electric].query("~p_nom_extendable").index
heat_fix = n.links[heat].query("~p_nom_extendable").index
p = n.model["Link-p"] # dimension: [time, link]
# output ratio between heat and electricity and top_iso_fuel_line for extendable
if not electric_ext.empty:
p_nom = n.model["Link-p_nom"]
lhs = (
p_nom.loc[electric_ext]
* (n.links.p_nom_ratio * n.links.efficiency)[electric_ext].values
- p_nom.loc[heat_ext] * n.links.efficiency[heat_ext].values
)
n.model.add_constraints(lhs == 0, name="chplink-fix_p_nom_ratio")
rename = {"Link-ext": "Link"}
lhs = (
p.loc[:, electric_ext]
+ p.loc[:, heat_ext]
- p_nom.rename(rename).loc[electric_ext]
)
n.model.add_constraints(lhs <= 0, name="chplink-top_iso_fuel_line_ext")
# top_iso_fuel_line for fixed
if not electric_fix.empty:
lhs = p.loc[:, electric_fix] + p.loc[:, heat_fix]
rhs = n.links.p_nom[electric_fix]
n.model.add_constraints(lhs <= rhs, name="chplink-top_iso_fuel_line_fix")
# back-pressure
if electric.any():
lhs = (
p.loc[:, heat] * (n.links.efficiency[heat] * n.links.c_b[electric].values)
- p.loc[:, electric] * n.links.efficiency[electric]
)
n.model.add_constraints(lhs <= rhs, name="chplink-backpressure")
def add_pipe_retrofit_constraint(n):
"""
Add constraint for retrofitting existing CH4 pipelines to H2 pipelines.
"""
gas_pipes_i = n.links.query("carrier == 'gas pipeline' and p_nom_extendable").index
h2_retrofitted_i = n.links.query(
"carrier == 'H2 pipeline retrofitted' and p_nom_extendable"
).index
if h2_retrofitted_i.empty or gas_pipes_i.empty:
return
p_nom = n.model["Link-p_nom"]
CH4_per_H2 = 1 / n.config["sector"]["H2_retrofit_capacity_per_CH4"]
lhs = p_nom.loc[gas_pipes_i] + CH4_per_H2 * p_nom.loc[h2_retrofitted_i]
rhs = n.links.p_nom[gas_pipes_i].rename_axis("Link-ext")
n.model.add_constraints(lhs == rhs, name="Link-pipe_retrofit")
def extra_functionality(n, snapshots):
"""
Collects supplementary constraints which will be passed to
``pypsa.linopf.network_lopf``.
``pypsa.optimization.optimize``.
If you want to enforce additional custom constraints, this is a good
location to add them. The arguments ``opts`` and
@ -370,6 +630,7 @@ def extra_functionality(n, snapshots):
if "EQ" in o:
add_EQ_constraints(n, o)
add_battery_constraints(n)
add_pipe_retrofit_constraint(n)
def solve_network(n, config, opts="", **kwargs):
@ -393,19 +654,30 @@ def solve_network(n, config, opts="", **kwargs):
logger.info("No expandable lines found. Skipping iterative solving.")
if skip_iterations:
network_lopf(
n, solver_name=solver_name, solver_options=solver_options, **kwargs
status, condition = n.optimize(
solver_name=solver_name,
extra_functionality=extra_functionality,
**solver_options,
**kwargs,
)
else:
ilopf(
n,
status, condition = n.optimize.optimize_transmission_expansion_iteratively(
solver_name=solver_name,
solver_options=solver_options,
track_iterations=track_iterations,
min_iterations=min_iterations,
max_iterations=max_iterations,
**kwargs
extra_functionality=extra_functionality,
**solver_options,
**kwargs,
)
if status != "ok":
logger.warning(
f"Solving status '{status}' with termination condition '{condition}'"
)
if "infeasible" in condition:
raise RuntimeError("Solving status 'infeasible'")
return n
@ -414,28 +686,40 @@ if __name__ == "__main__":
from _helpers import mock_snakemake
snakemake = mock_snakemake(
"solve_network", simpl="", clusters="5", ll="v1.5", opts=""
"solve_sector_network",
configfiles="test/config.overnight.yaml",
simpl="",
opts="",
clusters="5",
ll="v1.5",
sector_opts="CO2L0-24H-T-H-B-I-A-solar+p3-dist1",
planning_horizons="2030",
)
configure_logging(snakemake)
update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts)
tmpdir = snakemake.config["solving"].get("tmpdir")
if tmpdir is not None:
Path(tmpdir).mkdir(parents=True, exist_ok=True)
opts = snakemake.wildcards.opts.split("-")
opts = (snakemake.wildcards.opts + "-" + snakemake.wildcards.sector_opts).split("-")
opts = [o for o in opts if o != ""]
solve_opts = snakemake.config["solving"]["options"]
np.random.seed(solve_opts.get("seed", 123))
fn = getattr(snakemake.log, "memory", None)
with memory_logger(filename=fn, interval=30.0) as mem:
n = pypsa.Network(snakemake.input[0])
n = prepare_network(n, solve_opts)
n = solve_network(
n,
snakemake.config,
opts,
extra_functionality=extra_functionality,
solver_dir=tmpdir,
solver_logfile=snakemake.log.solver,
if "overrides" in snakemake.input.keys():
overrides = override_component_attrs(snakemake.input.overrides)
n = pypsa.Network(
snakemake.input.network, override_component_attrs=overrides
)
else:
n = pypsa.Network(snakemake.input.network)
n = prepare_network(n, solve_opts, config=snakemake.config)
n = solve_network(
n, config=snakemake.config, opts=opts, log_fn=snakemake.log.solver
)
n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
n.export_to_netcdf(snakemake.output[0])

View File

@ -46,98 +46,60 @@ Description
"""
import logging
from pathlib import Path
import numpy as np
import pypsa
from _helpers import configure_logging
from _helpers import (
configure_logging,
override_component_attrs,
update_config_with_sector_opts,
)
from solve_network import prepare_network, solve_network
from vresutils.benchmark import memory_logger
logger = logging.getLogger(__name__)
def set_parameters_from_optimized(n, n_optim):
lines_typed_i = n.lines.index[n.lines.type != ""]
n.lines.loc[lines_typed_i, "num_parallel"] = n_optim.lines["num_parallel"].reindex(
lines_typed_i, fill_value=0.0
)
n.lines.loc[lines_typed_i, "s_nom"] = (
np.sqrt(3)
* n.lines["type"].map(n.line_types.i_nom)
* n.lines.bus0.map(n.buses.v_nom)
* n.lines.num_parallel
)
lines_untyped_i = n.lines.index[n.lines.type == ""]
for attr in ("s_nom", "r", "x"):
n.lines.loc[lines_untyped_i, attr] = n_optim.lines[attr].reindex(
lines_untyped_i, fill_value=0.0
)
n.lines["s_nom_extendable"] = False
links_dc_i = n.links.index[n.links.p_nom_extendable]
n.links.loc[links_dc_i, "p_nom"] = n_optim.links["p_nom_opt"].reindex(
links_dc_i, fill_value=0.0
)
n.links.loc[links_dc_i, "p_nom_extendable"] = False
gen_extend_i = n.generators.index[n.generators.p_nom_extendable]
n.generators.loc[gen_extend_i, "p_nom"] = n_optim.generators["p_nom_opt"].reindex(
gen_extend_i, fill_value=0.0
)
n.generators.loc[gen_extend_i, "p_nom_extendable"] = False
stor_units_extend_i = n.storage_units.index[n.storage_units.p_nom_extendable]
n.storage_units.loc[stor_units_extend_i, "p_nom"] = n_optim.storage_units[
"p_nom_opt"
].reindex(stor_units_extend_i, fill_value=0.0)
n.storage_units.loc[stor_units_extend_i, "p_nom_extendable"] = False
stor_extend_i = n.stores.index[n.stores.e_nom_extendable]
n.stores.loc[stor_extend_i, "e_nom"] = n_optim.stores["e_nom_opt"].reindex(
stor_extend_i, fill_value=0.0
)
n.stores.loc[stor_extend_i, "e_nom_extendable"] = False
return n
if __name__ == "__main__":
if "snakemake" not in globals():
from _helpers import mock_snakemake
snakemake = mock_snakemake(
"solve_operations_network",
configfiles="test/config.test1.yaml",
simpl="",
opts="",
clusters="5",
ll="copt",
opts="Co2L-BAU-24H",
ll="v1.5",
sector_opts="",
planning_horizons="",
)
configure_logging(snakemake)
update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts)
tmpdir = snakemake.config["solving"].get("tmpdir")
if tmpdir is not None:
Path(tmpdir).mkdir(parents=True, exist_ok=True)
opts = (snakemake.wildcards.opts + "-" + snakemake.wildcards.sector_opts).split("-")
opts = [o for o in opts if o != ""]
solve_opts = snakemake.config["solving"]["options"]
n = pypsa.Network(snakemake.input.unprepared)
n_optim = pypsa.Network(snakemake.input.optimized)
n = set_parameters_from_optimized(n, n_optim)
del n_optim
opts = snakemake.wildcards.opts.split("-")
snakemake.config["solving"]["options"]["skip_iterations"] = False
np.random.seed(solve_opts.get("seed", 123))
fn = getattr(snakemake.log, "memory", None)
with memory_logger(filename=fn, interval=30.0) as mem:
n = prepare_network(n, snakemake.config["solving"]["options"])
n = solve_network(
n,
snakemake.config,
opts,
solver_dir=tmpdir,
solver_logfile=snakemake.log.solver,
if "overrides" in snakemake.input:
overrides = override_component_attrs(snakemake.input.overrides)
n = pypsa.Network(
snakemake.input.network, override_component_attrs=overrides
)
else:
n = pypsa.Network(snakemake.input.network)
n.optimize.fix_optimal_capacities()
n = prepare_network(n, solve_opts, config=snakemake.config)
n = solve_network(
n, config=snakemake.config, opts=opts, log_fn=snakemake.log.solver
)
n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
n.export_to_netcdf(snakemake.output[0])
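n.optimize.fix_optimal_capacities() replaces the removed set_parameters_from_optimized helper. A hedged sketch of what it amounts to for generators (the accessor treats all extendable components, lines and stores included, analogously):

ext_i = n.generators.index[n.generators.p_nom_extendable]
n.generators.loc[ext_i, "p_nom"] = n.generators.loc[ext_i, "p_nom_opt"]
n.generators.loc[ext_i, "p_nom_extendable"] = False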

View File

@ -1,365 +0,0 @@
# -*- coding: utf-8 -*-
# SPDX-FileCopyrightText: : 2020-2023 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: MIT
"""
Solves sector-coupled network.
"""
import logging
import numpy as np
import pypsa
from _helpers import override_component_attrs, update_config_with_sector_opts
from vresutils.benchmark import memory_logger
logger = logging.getLogger(__name__)
pypsa.pf.logger.setLevel(logging.WARNING)
def add_land_use_constraint(n):
if "m" in snakemake.wildcards.clusters:
_add_land_use_constraint_m(n)
else:
_add_land_use_constraint(n)
def _add_land_use_constraint(n):
# warning: this will miss existing offwind which is not classed AC-DC and has carrier 'offwind'
for carrier in ["solar", "onwind", "offwind-ac", "offwind-dc"]:
ext_i = (n.generators.carrier == carrier) & ~n.generators.p_nom_extendable
existing = (
n.generators.loc[ext_i, "p_nom"]
.groupby(n.generators.bus.map(n.buses.location))
.sum()
)
existing.index += " " + carrier + "-" + snakemake.wildcards.planning_horizons
n.generators.loc[existing.index, "p_nom_max"] -= existing
# check if existing capacities are larger than technical potential
existing_large = n.generators[
n.generators["p_nom_min"] > n.generators["p_nom_max"]
].index
if len(existing_large):
logger.warning(
f"Existing capacities larger than technical potential for {existing_large},\
adjust technical potential to existing capacities"
)
n.generators.loc[existing_large, "p_nom_max"] = n.generators.loc[
existing_large, "p_nom_min"
]
n.generators.p_nom_max.clip(lower=0, inplace=True)
def _add_land_use_constraint_m(n):
# if generator clustering is finer than network clustering, land-use accounting is done at the generator clusters
planning_horizons = snakemake.config["scenario"]["planning_horizons"]
grouping_years = snakemake.config["existing_capacities"]["grouping_years"]
current_horizon = snakemake.wildcards.planning_horizons
for carrier in ["solar", "onwind", "offwind-ac", "offwind-dc"]:
existing = n.generators.loc[n.generators.carrier == carrier, "p_nom"]
ind = list(
set(
[
i.split(sep=" ")[0] + " " + i.split(sep=" ")[1]
for i in existing.index
]
)
)
previous_years = [
str(y)
for y in planning_horizons + grouping_years
if y < int(snakemake.wildcards.planning_horizons)
]
for p_year in previous_years:
ind2 = [
i for i in ind if i + " " + carrier + "-" + p_year in existing.index
]
sel_current = [i + " " + carrier + "-" + current_horizon for i in ind2]
sel_p_year = [i + " " + carrier + "-" + p_year for i in ind2]
n.generators.loc[sel_current, "p_nom_max"] -= existing.loc[
sel_p_year
].rename(lambda x: x[:-4] + current_horizon)
n.generators.p_nom_max.clip(lower=0, inplace=True)
def add_co2_sequestration_limit(n, limit=200):
"""
Add a global constraint on the amount of CO2 (in Mt) that can be sequestered.
"""
n.carriers.loc["co2 stored", "co2_absorptions"] = -1
n.carriers.co2_absorptions = n.carriers.co2_absorptions.fillna(0)
limit = limit * 1e6
for o in opts:
if "seq" not in o:
continue
limit = float(o[o.find("seq") + 3 :]) * 1e6
break
n.add(
"GlobalConstraint",
"co2_sequestration_limit",
sense="<=",
constant=limit,
type="primary_energy",
carrier_attribute="co2_absorptions",
)
def prepare_network(n, solve_opts=None, config=None):
if "clip_p_max_pu" in solve_opts:
for df in (
n.generators_t.p_max_pu,
n.generators_t.p_min_pu,
n.storage_units_t.inflow,
):
df.where(df > solve_opts["clip_p_max_pu"], other=0.0, inplace=True)
if solve_opts.get("load_shedding"):
# intersect between macroeconomic and survey-based willingness to pay
# http://journal.frontiersin.org/article/10.3389/fenrg.2015.00055/full
n.add("Carrier", "Load")
n.madd(
"Generator",
n.buses.index,
" load",
bus=n.buses.index,
carrier="load",
sign=1e-3, # Adjust sign to measure p and p_nom in kW instead of MW
marginal_cost=1e2, # Eur/kWh
p_nom=1e9, # kW
)
if solve_opts.get("noisy_costs"):
for t in n.iterate_components():
# if 'capital_cost' in t.df:
# t.df['capital_cost'] += 1e1 + 2.*(np.random.random(len(t.df)) - 0.5)
if "marginal_cost" in t.df:
np.random.seed(174)
t.df["marginal_cost"] += 1e-2 + 2e-3 * (
np.random.random(len(t.df)) - 0.5
)
for t in n.iterate_components(["Line", "Link"]):
np.random.seed(123)
t.df["capital_cost"] += (
1e-1 + 2e-2 * (np.random.random(len(t.df)) - 0.5)
) * t.df["length"]
if solve_opts.get("nhours"):
nhours = solve_opts["nhours"]
n.set_snapshots(n.snapshots[:nhours])
n.snapshot_weightings[:] = 8760.0 / nhours
if snakemake.config["foresight"] == "myopic":
add_land_use_constraint(n)
if n.stores.carrier.eq("co2 stored").any():
limit = config["sector"].get("co2_sequestration_potential", 200)
add_co2_sequestration_limit(n, limit=limit)
return n
def add_battery_constraints(n):
"""
Add constraint ensuring that charger = discharger:
1 * charger_size - efficiency * discharger_size = 0
"""
discharger_bool = n.links.index.str.contains("battery discharger")
charger_bool = n.links.index.str.contains("battery charger")
dischargers_ext = n.links[discharger_bool].query("p_nom_extendable").index
chargers_ext = n.links[charger_bool].query("p_nom_extendable").index
eff = n.links.efficiency[dischargers_ext].values
lhs = (
n.model["Link-p_nom"].loc[chargers_ext]
- n.model["Link-p_nom"].loc[dischargers_ext] * eff
)
n.model.add_constraints(lhs == 0, name="Link-charger_ratio")
def add_chp_constraints(n):
electric = (
n.links.index.str.contains("urban central")
& n.links.index.str.contains("CHP")
& n.links.index.str.contains("electric")
)
heat = (
n.links.index.str.contains("urban central")
& n.links.index.str.contains("CHP")
& n.links.index.str.contains("heat")
)
electric_ext = n.links[electric].query("p_nom_extendable").index
heat_ext = n.links[heat].query("p_nom_extendable").index
electric_fix = n.links[electric].query("~p_nom_extendable").index
heat_fix = n.links[heat].query("~p_nom_extendable").index
p = n.model["Link-p"] # dimension: [time, link]
# output ratio between heat and electricity and top_iso_fuel_line for extendable
if not electric_ext.empty:
p_nom = n.model["Link-p_nom"]
lhs = (
p_nom.loc[electric_ext]
* (n.links.p_nom_ratio * n.links.efficiency)[electric_ext].values
- p_nom.loc[heat_ext] * n.links.efficiency[heat_ext].values
)
n.model.add_constraints(lhs == 0, name="chplink-fix_p_nom_ratio")
rename = {"Link-ext": "Link"}
lhs = (
p.loc[:, electric_ext]
+ p.loc[:, heat_ext]
- p_nom.rename(rename).loc[electric_ext]
)
n.model.add_constraints(lhs <= 0, name="chplink-top_iso_fuel_line_ext")
# top_iso_fuel_line for fixed
if not electric_fix.empty:
lhs = p.loc[:, electric_fix] + p.loc[:, heat_fix]
rhs = n.links.p_nom[electric_fix]
n.model.add_constraints(lhs <= rhs, name="chplink-top_iso_fuel_line_fix")
# back-pressure
if electric.any():
lhs = (
p.loc[:, heat] * (n.links.efficiency[heat] * n.links.c_b[electric].values)
- p.loc[:, electric] * n.links.efficiency[electric]
)
n.model.add_constraints(lhs <= rhs, name="chplink-backpressure")
def add_pipe_retrofit_constraint(n):
"""
Add constraint for retrofitting existing CH4 pipelines to H2 pipelines.
"""
gas_pipes_i = n.links.query("carrier == 'gas pipeline' and p_nom_extendable").index
h2_retrofitted_i = n.links.query(
"carrier == 'H2 pipeline retrofitted' and p_nom_extendable"
).index
if h2_retrofitted_i.empty or gas_pipes_i.empty:
return
p_nom = n.model["Link-p_nom"]
CH4_per_H2 = 1 / n.config["sector"]["H2_retrofit_capacity_per_CH4"]
lhs = p_nom.loc[gas_pipes_i] + CH4_per_H2 * p_nom.loc[h2_retrofitted_i]
rhs = n.links.p_nom[gas_pipes_i].rename_axis("Link-ext")
n.model.add_constraints(lhs == rhs, name="Link-pipe_retrofit")
def extra_functionality(n, snapshots):
add_battery_constraints(n)
add_pipe_retrofit_constraint(n)
def solve_network(n, config, opts="", **kwargs):
set_of_options = config["solving"]["solver"]["options"]
solver_options = (
config["solving"]["solver_options"][set_of_options] if set_of_options else {}
)
solver_name = config["solving"]["solver"]["name"]
cf_solving = config["solving"]["options"]
track_iterations = cf_solving.get("track_iterations", False)
min_iterations = cf_solving.get("min_iterations", 4)
max_iterations = cf_solving.get("max_iterations", 6)
# add to network for extra_functionality
n.config = config
n.opts = opts
skip_iterations = cf_solving.get("skip_iterations", False)
if not n.lines.s_nom_extendable.any():
skip_iterations = True
logger.info("No expandable lines found. Skipping iterative solving.")
if skip_iterations:
status, condition = n.optimize(
solver_name=solver_name,
extra_functionality=extra_functionality,
**solver_options,
**kwargs,
)
else:
status, condition = n.optimize.optimize_transmission_expansion_iteratively(
solver_name=solver_name,
track_iterations=track_iterations,
min_iterations=min_iterations,
max_iterations=max_iterations,
extra_functionality=extra_functionality,
**solver_options,
**kwargs,
)
if status != "ok":
logger.warning(
f"Solving status '{status}' with termination condition '{condition}'"
)
return n
# %%
if __name__ == "__main__":
if "snakemake" not in globals():
from _helpers import mock_snakemake
snakemake = mock_snakemake(
"solve_network_myopic",
simpl="",
opts="",
clusters="45",
ll="v1.0",
sector_opts="8760H-T-H-B-I-A-solar+p3-dist1",
planning_horizons="2020",
)
logging.basicConfig(
filename=snakemake.log.python, level=snakemake.config["logging"]["level"]
)
update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts)
tmpdir = snakemake.config["solving"].get("tmpdir")
if tmpdir is not None:
from pathlib import Path
Path(tmpdir).mkdir(parents=True, exist_ok=True)
opts = snakemake.wildcards.sector_opts.split("-")
solve_opts = snakemake.config["solving"]["options"]
fn = getattr(snakemake.log, "memory", None)
with memory_logger(filename=fn, interval=30.0) as mem:
overrides = override_component_attrs(snakemake.input.overrides)
n = pypsa.Network(snakemake.input.network, override_component_attrs=overrides)
n = prepare_network(n, solve_opts, config=snakemake.config)
n = solve_network(
n, config=snakemake.config, opts=opts, log_fn=snakemake.log.solver
)
if "lv_limit" in n.global_constraints.index:
n.line_volume_limit = n.global_constraints.at["lv_limit", "constant"]
n.line_volume_limit_dual = n.global_constraints.at["lv_limit", "mu"]
n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
n.export_to_netcdf(snakemake.output[0])
logger.info("Maximum memory usage: {}".format(mem.mem_usage))

View File

@ -58,3 +58,15 @@ solving:
name: glpk
options: glpk-default
mem: 4000
plotting:
map:
boundaries:
eu_node_location:
x: -5.5
y: 46.
costs_max: 1000
costs_threshold: 0.0000001
energy_max:
energy_min:
energy_threshold: 0.000001

View File

@ -59,3 +59,15 @@ solving:
name: glpk
options: glpk-default
mem: 4000
plotting:
map:
boundaries:
eu_node_location:
x: -5.5
y: 46.
costs_max: 1000
costs_threshold: 0.0000001
energy_max:
energy_min:
energy_threshold: 0.000001

View File

@ -65,3 +65,16 @@ solving:
solver:
name: glpk
options: "glpk-default"
plotting:
map:
boundaries:
eu_node_location:
x: -5.5
y: 46.
costs_max: 1000
costs_threshold: 0.0000001
energy_max:
energy_min:
energy_threshold: 0.000001