add Linopy to PyPSA
parent 3a80ac2027
commit 4cb21f05ec
@@ -78,6 +78,7 @@ Details (and errors made through this heuristic) are discussed in the paper
 """

 import logging
+import os
 import re
 from pathlib import Path

@@ -85,16 +86,10 @@ import numpy as np
 import pandas as pd
 import pypsa
 from _helpers import configure_logging
+from linopy import merge
 from pypsa.descriptors import get_switchable_as_dense as get_as_dense
-from pypsa.linopf import (
-    define_constraints,
-    define_variables,
-    get_var,
-    ilopf,
-    join_exprs,
-    linexpr,
-    network_lopf,
-)
+from pypsa.optimization.abstract import optimize_transmission_expansion_iteratively
+from pypsa.optimization.optimize import optimize
 from vresutils.benchmark import memory_logger

 logger = logging.getLogger(__name__)
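The import swap above captures the whole migration: the string-based helpers from pypsa.linopf (get_var, linexpr, join_exprs, define_constraints, ...) give way to linopy, where n.model holds array-valued variables that support arithmetic and comparisons directly and constraints are registered with n.model.add_constraints. A minimal sketch of the new pattern as this script uses it; the toy network, component names, constraint name and solver name below are illustrative and not part of the commit:

    import pypsa
    from pypsa.optimization.optimize import optimize  # same entry point as in this diff


    def extra_functionality(n, snapshots):
        # constraints are now written against n.model, a linopy Model
        p_nom = n.model["Generator-p_nom"]  # replaces get_var(n, "Generator", "p_nom")
        n.model.add_constraints(p_nom.sum() >= 100, name="total_capacity_min")


    n = pypsa.Network()
    n.add("Bus", "b")
    n.add("Generator", "gas", bus="b", p_nom_extendable=True, capital_cost=50, marginal_cost=60)
    n.add("Load", "demand", bus="b", p_set=80)

    # solver_name is only an example; any installed LP solver works
    optimize(n, solver_name="glpk", extra_functionality=extra_functionality)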
@@ -148,47 +143,90 @@ def prepare_network(n, solve_opts):


 def add_CCL_constraints(n, config):
-    agg_p_nom_limits = config["electricity"].get("agg_p_nom_limits")
+    """
+    Add CCL (country & carrier limit) constraint to the network.

+    Add minimum and maximum levels of generator nominal capacity per carrier
+    for individual countries. Opts and path for agg_p_nom_minmax.csv must be defined
+    in config.yaml. Default file is available at data/agg_p_nom_minmax.csv.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    config : dict
+
+    Example
+    -------
+    scenario:
+        opts: [Co2L-CCL-24H]
+    electricity:
+        agg_p_nom_limits: data/agg_p_nom_minmax.csv
+    """
+    pypsa_eur_path = os.path.dirname(os.getcwd())
+    agg_p_nom_limits = os.path.join(
+        pypsa_eur_path, config["electricity"].get("agg_p_nom_limits")
+    )
     try:
         agg_p_nom_minmax = pd.read_csv(agg_p_nom_limits, index_col=list(range(2)))
     except IOError:
         logger.exception(
             "Need to specify the path to a .csv file containing "
-            "aggregate capacity limits per country in "
-            "config['electricity']['agg_p_nom_limit']."
+            "aggregate capacity limits per country. "
+            "Path specified in config['electricity']['agg_p_nom_limit']. "
+            f"Currently read path is 'pypsa-eur/{agg_p_nom_limits}'."
         )
     logger.info(
         "Adding per carrier generation capacity constraints for " "individual countries"
     )
+    capacity_variable = n.model["Generator-p_nom"]

-    gen_country = n.generators.bus.map(n.buses.country)
-    # cc means country and carrier
-    p_nom_per_cc = (
-        pd.DataFrame(
-            {
-                "p_nom": linexpr((1, get_var(n, "Generator", "p_nom"))),
-                "country": gen_country,
-                "carrier": n.generators.carrier,
-            }
-        )
-        .dropna(subset=["p_nom"])
-        .groupby(["country", "carrier"])
-        .p_nom.apply(join_exprs)
-    )
-    minimum = agg_p_nom_minmax["min"].dropna()
-    if not minimum.empty:
-        minconstraint = define_constraints(
-            n, p_nom_per_cc[minimum.index], ">=", minimum, "agg_p_nom", "min"
-        )
-    maximum = agg_p_nom_minmax["max"].dropna()
-    if not maximum.empty:
-        maxconstraint = define_constraints(
-            n, p_nom_per_cc[maximum.index], "<=", maximum, "agg_p_nom", "max"
-        )
+    lhs = []
+    ext_carriers = n.generators.query("p_nom_extendable").carrier.unique()
+    for c in ext_carriers:
+        ext_carrier = n.generators.query("p_nom_extendable and carrier == @c")
+        country_grouper = (
+            ext_carrier.bus.map(n.buses.country)
+            .rename_axis("Generator-ext")
+            .rename("country")
+        )
+        ext_carrier_per_country = capacity_variable.loc[
+            country_grouper.index
+        ].groupby_sum(country_grouper)
+        lhs.append(ext_carrier_per_country)
+    lhs = merge(lhs, dim=pd.Index(ext_carriers, name="carrier"))
+
+    min_matrix = agg_p_nom_minmax["min"].to_xarray().unstack().reindex_like(lhs)
+    max_matrix = agg_p_nom_minmax["max"].to_xarray().unstack().reindex_like(lhs)
+
+    n.model.add_constraints(
+        lhs >= min_matrix, name="agg_p_nom_min", mask=min_matrix.notnull()
+    )
+    n.model.add_constraints(
+        lhs <= max_matrix, name="agg_p_nom_max", mask=max_matrix.notnull()
+    )


 def add_EQ_constraints(n, o, scaling=1e-1):
+    """
+    Add equality constraints to the network.
+
+    Opts must be specified in the config.yaml.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    o : str
+
+    Example
+    -------
+    scenario:
+        opts: [Co2L-EQ0.7-24H]
+
+    Require each country or node to on average produce a minimal share
+    of its total consumption itself. Example: EQ0.7c demands each country
+    to produce on average at least 70% of its consumption; EQ0.7 demands
+    each node to produce on average at least 70% of its consumption.
+    """
     float_regex = "[0-9]*\.?[0-9]+"
     level = float(re.findall(float_regex, o)[0])
     if o[-1] == "c":
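For reference, add_CCL_constraints now resolves agg_p_nom_limits relative to the parent pypsa-eur directory and expects the CSV to carry a two-level (country, carrier) index with min and max columns, matching pd.read_csv(..., index_col=list(range(2))) and the notnull() masks above. A sketch of the expected shape, with made-up countries, carriers and bounds:

    import numpy as np
    import pandas as pd

    # Illustrative values only; NaN means "no bound" and is skipped via the mask= arguments.
    agg_p_nom_minmax = pd.DataFrame(
        {"min": [1000.0, np.nan], "max": [np.nan, 5000.0]},
        index=pd.MultiIndex.from_tuples(
            [("DE", "onwind"), ("DE", "solar")], names=["country", "carrier"]
        ),
    )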
@@ -209,78 +247,142 @@ def add_EQ_constraints(n, o, scaling=1e-1):
     )
     inflow = inflow.reindex(load.index).fillna(0.0)
     rhs = scaling * (level * load - inflow)
+    dispatch_variable = n.model["Generator-p"].T
     lhs_gen = (
-        linexpr(
-            (n.snapshot_weightings.generators * scaling, get_var(n, "Generator", "p").T)
-        )
-        .T.groupby(ggrouper, axis=1)
-        .apply(join_exprs)
+        (dispatch_variable * (n.snapshot_weightings.generators * scaling))
+        .groupby_sum(ggrouper)
+        .sum("snapshot")
     )
     if not n.storage_units_t.inflow.empty:
+        spillage_variable = n.model["StorageUnit-spill"]
         lhs_spill = (
-            linexpr(
-                (
-                    -n.snapshot_weightings.stores * scaling,
-                    get_var(n, "StorageUnit", "spill").T,
-                )
-            )
-            .T.groupby(sgrouper, axis=1)
-            .apply(join_exprs)
+            (spillage_variable * (-n.snapshot_weightings.stores * scaling))
+            .groupby_sum(sgrouper)
+            .sum("snapshot")
         )
-        lhs_spill = lhs_spill.reindex(lhs_gen.index).fillna("")
-        lhs = lhs_gen + lhs_spill
+        lhs = merge(lhs_gen, lhs_spill)
     else:
         lhs = lhs_gen
-    define_constraints(n, lhs, ">=", rhs, "equity", "min")
+    n.model.add_constraints(lhs >= rhs, name="equity_min")


 def add_BAU_constraints(n, config):
+    """
+    Add a per-carrier minimal overall capacity.
+
+    BAU_mincapacities and opts must be adjusted in the config.yaml.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    config : dict
+
+    Example
+    -------
+    scenario:
+        opts: [Co2L-BAU-24H]
+    electricity:
+        BAU_mincapacities:
+            solar: 0
+            onwind: 0
+            OCGT: 100000
+            offwind-ac: 0
+            offwind-dc: 0
+    Which sets minimum expansion across all nodes e.g. in Europe to 100GW.
+    OCGT bus 1 + OCGT bus 2 + ... > 100000
+    """
     mincaps = pd.Series(config["electricity"]["BAU_mincapacities"])
-    lhs = (
-        linexpr((1, get_var(n, "Generator", "p_nom")))
-        .groupby(n.generators.carrier)
-        .apply(join_exprs)
-    )
-    define_constraints(n, lhs, ">=", mincaps[lhs.index], "Carrier", "bau_mincaps")
+    capacity_variable = n.model["Generator-p_nom"]
+    ext_i = n.generators.query("p_nom_extendable")
+    ext_carrier_i = ext_i.carrier.rename_axis("Generator-ext")
+    lhs = capacity_variable.groupby_sum(ext_carrier_i)
+    rhs = mincaps[lhs.coords["carrier"].values].rename_axis("carrier")
+    n.model.add_constraints(lhs >= rhs, name="bau_mincaps")


 def add_SAFE_constraints(n, config):
-    peakdemand = (
-        1.0 + config["electricity"]["SAFE_reservemargin"]
-    ) * n.loads_t.p_set.sum(axis=1).max()
+    """
+    Add a capacity reserve margin of a certain fraction above the peak demand.
+
+    Renewable generators and storage do not contribute. Ignores network.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    config : dict
+
+    Example
+    -------
+    config.yaml requires to specify opts:
+
+    scenario:
+        opts: [Co2L-SAFE-24H]
+    electricity:
+        SAFE_reservemargin: 0.1
+    Which sets a reserve margin of 10% above the peak demand.
+    """
+    peakdemand = n.loads_t.p_set.sum(axis=1).max()
+    margin = 1.0 + config["electricity"]["SAFE_reservemargin"]
+    reserve_margin = peakdemand * margin
     conv_techs = config["plotting"]["conv_techs"]
+    ext_gens_i = n.generators.query("carrier in @conv_techs & p_nom_extendable").index
+    capacity_variable = n.model["Generator-p_nom"]
+    ext_cap_var = capacity_variable.sel({"Generator-ext": ext_gens_i})
+    lhs = ext_cap_var.sum()
     exist_conv_caps = n.generators.query(
         "~p_nom_extendable & carrier in @conv_techs"
     ).p_nom.sum()
-    ext_gens_i = n.generators.query("carrier in @conv_techs & p_nom_extendable").index
-    lhs = linexpr((1, get_var(n, "Generator", "p_nom")[ext_gens_i])).sum()
-    rhs = peakdemand - exist_conv_caps
-    define_constraints(n, lhs, ">=", rhs, "Safe", "mintotalcap")
+    rhs = reserve_margin - exist_conv_caps
+    n.model.add_constraints(lhs >= rhs, name="safe_mintotalcap")


-def add_operational_reserve_margin_constraint(n, config):
+def add_operational_reserve_margin_constraint(n, sns, config):
+    """
+    Define minimum operational reserve margin for a given snapshot.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    config : dict
+
+    Example:
+    --------
+    config.yaml requires to specify operational_reserve:
+    operational_reserve: # like https://genxproject.github.io/GenX/dev/core/#Reserves
+        activate: true
+        epsilon_load: 0.02 # percentage of load at each snapshot
+        epsilon_vres: 0.02 # percentage of VRES at each snapshot
+        contingency: 400000 # MW
+    """
     reserve_config = config["electricity"]["operational_reserve"]
     EPSILON_LOAD = reserve_config["epsilon_load"]
     EPSILON_VRES = reserve_config["epsilon_vres"]
     CONTINGENCY = reserve_config["contingency"]

     # Reserve Variables
-    reserve = get_var(n, "Generator", "r")
-    lhs = linexpr((1, reserve)).sum(1)
+    n.model.add_variables(
+        0, np.inf, coords=[sns, n.generators.index], name="Generator-r"
+    )
+    reserve = n.model["Generator-r"]
+    lhs = reserve.sum("Generator")

     # Share of extendable renewable capacities
     ext_i = n.generators.query("p_nom_extendable").index
     vres_i = n.generators_t.p_max_pu.columns
     if not ext_i.empty and not vres_i.empty:
         capacity_factor = n.generators_t.p_max_pu[vres_i.intersection(ext_i)]
-        renewable_capacity_variables = get_var(n, "Generator", "p_nom")[
-            vres_i.intersection(ext_i)
-        ]
-        lhs += linexpr(
-            (-EPSILON_VRES * capacity_factor, renewable_capacity_variables)
-        ).sum(1)
+        renewable_capacity_variables = (
+            n.model["Generator-p_nom"]
+            .sel({"Generator-ext": vres_i.intersection(ext_i)})
+            .rename({"Generator-ext": "Generator"})
+        )
+        lhs = merge(
+            lhs,
+            (renewable_capacity_variables * (-EPSILON_VRES * capacity_factor)).sum(
+                ["Generator"]
+            ),
+        )

-    # Total demand at t
+    # Total demand per t
     demand = n.loads_t.p_set.sum(1)

     # VRES potential of non extendable generators
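The rewritten add_SAFE_constraints keeps the original requirement: extendable conventional capacity has to close the gap between the reserve margin and the existing conventional fleet. A worked example with invented numbers:

    # Illustrative numbers in MW, following the formulas added above.
    peakdemand = 100_000.0
    SAFE_reservemargin = 0.1
    reserve_margin = peakdemand * (1.0 + SAFE_reservemargin)  # 110,000 MW
    exist_conv_caps = 80_000.0  # installed, non-extendable conventional capacity
    rhs = reserve_margin - exist_conv_caps  # 30,000 MW
    # safe_mintotalcap then requires: sum of extendable conventional Generator-p_nom >= 30,000 MW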
@@ -291,59 +393,76 @@ def add_operational_reserve_margin_constraint(n, config):
     # Right-hand-side
     rhs = EPSILON_LOAD * demand + EPSILON_VRES * potential + CONTINGENCY

-    define_constraints(n, lhs, ">=", rhs, "Reserve margin")
+    n.model.add_constraints(lhs >= rhs, name="reserve_margin")


 def update_capacity_constraint(n):
+    """
+    Update the capacity constraint to include the new capacity variables.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    """
     gen_i = n.generators.index
     ext_i = n.generators.query("p_nom_extendable").index
     fix_i = n.generators.query("not p_nom_extendable").index

-    dispatch = get_var(n, "Generator", "p")
-    reserve = get_var(n, "Generator", "r")
+    dispatch = n.model["Generator-p"]
+    reserve = n.model["Generator-r"]
+    p_max_pu = get_as_dense(n, "Generator", "p_max_pu")
     capacity_fixed = n.generators.p_nom[fix_i]

-    p_max_pu = get_as_dense(n, "Generator", "p_max_pu")
-
-    lhs = linexpr((1, dispatch), (1, reserve))
+    lhs = merge(
+        dispatch * 1,
+        reserve * 1,
+    )

     if not ext_i.empty:
-        capacity_variable = get_var(n, "Generator", "p_nom")
-        lhs += linexpr((-p_max_pu[ext_i], capacity_variable)).reindex(
-            columns=gen_i, fill_value=""
+        capacity_variable = n.model["Generator-p_nom"]
+        lhs = merge(
+            lhs,
+            capacity_variable.rename({"Generator-ext": "Generator"}) * -p_max_pu[ext_i],
         )

-    rhs = (p_max_pu[fix_i] * capacity_fixed).reindex(columns=gen_i, fill_value=0)
-    define_constraints(n, lhs, "<=", rhs, "Generators", "updated_capacity_constraint")
+    rhs = (p_max_pu[fix_i] * capacity_fixed).reindex(columns=gen_i)
+    n.model.add_constraints(
+        lhs <= rhs, name="gen_updated_capacity_constraint", mask=rhs.notnull()
+    )


 def add_operational_reserve_margin(n, sns, config):
     """
     Build reserve margin constraints based on the formulation given in
     https://genxproject.github.io/GenX/dev/core/#Reserves.
+
+    Parameters
+    ----------
+    n : pypsa.Network
+    sns: pd.DatetimeIndex
+    config : dict
     """
-    define_variables(n, 0, np.inf, "Generator", "r", axes=[sns, n.generators.index])
-
-    add_operational_reserve_margin_constraint(n, config)
+    add_operational_reserve_margin_constraint(n, sns, config)

     update_capacity_constraint(n)


 def add_battery_constraints(n):
+    """
+    Add constraints to ensure that the ratio between the charger and
+    discharger.
+
+    1 * charger_size - efficiency * discharger_size = 0
+    """
     nodes = n.buses.index[n.buses.carrier == "battery"]
-    if nodes.empty or ("Link", "p_nom") not in n.variables.index:
+    if nodes.empty:
         return
-    link_p_nom = get_var(n, "Link", "p_nom")
-    lhs = linexpr(
-        (1, link_p_nom[nodes + " charger"]),
-        (
-            -n.links.loc[nodes + " discharger", "efficiency"].values,
-            link_p_nom[nodes + " discharger"].values,
-        ),
+    vars_link = n.model["Link-p_nom"]
+    eff = n.links.loc[nodes + " discharger", "efficiency"]
+    lhs = merge(
+        vars_link.sel({"Link-ext": nodes + " charger"}) * 1,
+        vars_link.sel({"Link-ext": nodes + " discharger"}) * -eff,
     )
-    define_constraints(n, lhs, "=", 0, "Link", "charger_ratio")
+    n.model.add_constraints(lhs == 0, name="link_charger_ratio")


 def extra_functionality(n, snapshots):
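The reserve requirement follows the GenX formulation referenced in the docstrings: in every snapshot the Generator-r variables must jointly cover a share of load, a share of VRES potential and a fixed contingency, while update_capacity_constraint keeps dispatch plus reserve within p_max_pu * p_nom. A numeric sketch of the right-hand side with invented values:

    # Illustrative snapshot values in MW for the rhs built in add_operational_reserve_margin_constraint.
    EPSILON_LOAD, EPSILON_VRES, CONTINGENCY = 0.02, 0.02, 4000.0
    demand = 80_000.0     # total load in the snapshot
    potential = 30_000.0  # VRES potential of non-extendable generators in the snapshot
    rhs = EPSILON_LOAD * demand + EPSILON_VRES * potential + CONTINGENCY
    # rhs = 1,600 + 600 + 4,000 = 6,200 MW of reserve to be shared across all generators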
@@ -373,11 +492,8 @@ def extra_functionality(n, snapshots):


 def solve_network(n, config, opts="", **kwargs):
-    set_of_options = config["solving"]["solver"]["options"]
-    solver_options = (
-        config["solving"]["solver_options"][set_of_options] if set_of_options else {}
-    )
-    solver_name = config["solving"]["solver"]["name"]
+    solver_options = config["solving"]["solver"].copy()
+    solver_name = solver_options.pop("name")
     cf_solving = config["solving"]["options"]
     track_iterations = cf_solving.get("track_iterations", False)
     min_iterations = cf_solving.get("min_iterations", 4)
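solve_network now reads the solver name and its options from a single config['solving']['solver'] mapping instead of the named solver_options sets used before. A sketch of the layout the new code expects (solver name and option keys are illustrative):

    # Mirrors the two lines added above; only the dict layout matters here.
    config = {"solving": {"solver": {"name": "gurobi", "threads": 4, "crossover": 0}}}

    solver_options = config["solving"]["solver"].copy()
    solver_name = solver_options.pop("name")  # "gurobi"
    # solver_options now holds only keyword arguments passed through to the solver:
    # {"threads": 4, "crossover": 0}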
@@ -393,19 +509,25 @@ def solve_network(n, config, opts="", **kwargs):
         logger.info("No expandable lines found. Skipping iterative solving.")

     if skip_iterations:
-        network_lopf(
-            n, solver_name=solver_name, solver_options=solver_options, **kwargs
+        optimize(
+            n,
+            solver_name=solver_name,
+            solver_options=solver_options,
+            extra_functionality=extra_functionality,
+            **kwargs,
         )
     else:
-        ilopf(
+        optimize_transmission_expansion_iteratively(
             n,
             solver_name=solver_name,
             solver_options=solver_options,
             track_iterations=track_iterations,
             min_iterations=min_iterations,
             max_iterations=max_iterations,
-            **kwargs
+            extra_functionality=extra_functionality,
+            **kwargs,
         )

     return n


@@ -413,8 +535,13 @@ if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake

+        os.chdir(os.path.dirname(os.path.abspath(__file__)))
         snakemake = mock_snakemake(
-            "solve_network", simpl="", clusters="5", ll="v1.5", opts=""
+            "solve_network",
+            simpl="",
+            clusters="5",
+            ll="copt",
+            opts="Co2L-BAU-24H",
         )
     configure_logging(snakemake)

@@ -432,11 +559,10 @@ if __name__ == "__main__":
             n,
             snakemake.config,
             opts,
-            extra_functionality=extra_functionality,
             solver_dir=tmpdir,
             solver_logfile=snakemake.log.solver,
         )
         n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
         n.export_to_netcdf(snakemake.output[0])

     logger.info("Maximum memory usage: {}".format(mem.mem_usage))