scripts: Add first version of base_network and solve_network scripts
parent ce28787f7e
commit 5f4dc833b8
330  scripts/base_network.py  Normal file
@@ -0,0 +1,330 @@
# coding: utf-8

import yaml
import pandas as pd
import numpy as np
import scipy as sp, scipy.spatial
from scipy.sparse import csgraph
from operator import attrgetter
from six import iteritems
from itertools import count, chain

import shapely, shapely.prepared, shapely.wkt
from shapely.geometry import Point

from vresutils import shapes as vshapes

import logging
logger = logging.getLogger(__name__)

import pypsa

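# Return the index label of the bus closest to position `pos`. The cKDTree over
# all bus coordinates is cached as a function attribute and only rebuilt when
# the number of buses changes.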
def _find_closest_bus(buses, pos):
    if (not hasattr(_find_closest_bus, 'kdtree')) or len(_find_closest_bus.kdtree.data) != len(buses.index):
        _find_closest_bus.kdtree = sp.spatial.cKDTree(buses.loc[:, ["x", "y"]].values)
    return buses.index[_find_closest_bus.kdtree.query(pos)[1]]

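# Load the gridkit bus table, keep only buses lying inside the covered European
# countries (the country cover includes exclusive economic zones, i.e. offshore
# buses) and restrict to the voltage levels listed in the config.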
def _load_buses_from_eg():
    buses = (pd.read_csv(snakemake.input.eg_buses, quotechar="'",
                         true_values='t', false_values='f',
                         dtype=dict(bus_id="str"))
             .set_index("bus_id")
             .drop(['under_construction', 'station_id'], axis=1)
             .rename(columns=dict(voltage='v_nom')))

    buses['carrier'] = buses.pop('dc').map({True: 'DC', False: 'AC'})

    # remove all buses outside of all countries including exclusive economic zones (offshore)
    europe_shape = vshapes.country_cover(snakemake.config['countries'])
    europe_shape_exterior = shapely.geometry.Polygon(shell=europe_shape.exterior)  # no holes
    europe_shape_exterior_prepped = shapely.prepared.prep(europe_shape_exterior)
    buses_in_europe_b = buses[['x', 'y']].apply(lambda p: europe_shape_exterior_prepped.contains(Point(p)), axis=1)

    buses_with_v_nom_to_keep_b = buses.v_nom.isin(snakemake.config['electricity']['voltages']) | buses.v_nom.isnull()
    logger.info("Removing buses with voltages {}".format(pd.Index(buses.v_nom.unique()).dropna().difference(snakemake.config['electricity']['voltages'])))

    return pd.DataFrame(buses.loc[buses_in_europe_b & buses_with_v_nom_to_keep_b])

def _load_transformers_from_eg(buses):
    transformers = (pd.read_csv(snakemake.input.eg_transformers, quotechar="'",
                                true_values='t', false_values='f',
                                dtype=dict(transformer_id='str', bus0='str', bus1='str'))
                    .set_index('transformer_id'))

    transformers = _remove_dangling_branches(transformers, buses)

    return transformers

def _load_converters_from_eg(buses):
    converters = (pd.read_csv(snakemake.input.eg_converters, quotechar="'",
                              true_values='t', false_values='f',
                              dtype=dict(converter_id='str', bus0='str', bus1='str'))
                  .set_index('converter_id'))

    converters = _remove_dangling_branches(converters, buses)

    converters['carrier'] = 'B2B'

    return converters


def _load_links_from_eg(buses):
    links = (pd.read_csv(snakemake.input.eg_links, quotechar="'", true_values='t', false_values='f',
                         dtype=dict(link_id='str', bus0='str', bus1='str', under_construction="bool"))
             .set_index('link_id'))

    links['length'] /= 1e3

    links = _remove_dangling_branches(links, buses)

    # Add DC line parameters
    links['carrier'] = 'DC'

    return links

def _load_lines_from_eg(buses):
    lines = (pd.read_csv(snakemake.input.eg_lines, quotechar="'", true_values='t', false_values='f',
                         dtype=dict(line_id='str', bus0='str', bus1='str',
                                    underground="bool", under_construction="bool"))
             .set_index('line_id')
             .rename(columns=dict(voltage='v_nom', circuits='num_parallel')))

    lines['length'] /= 1e3

    lines = _remove_dangling_branches(lines, buses)

    return lines

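# Some AC lines in the gridkit extract carry circuits at several voltage levels,
# encoded in their "text_" tag as e.g. "300+220". For each such line, split off
# one circuit at the lower voltage onto (possibly new) lower-voltage buses that
# are attached to the original endpoints by auxiliary transformers.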
def _split_aclines_with_several_voltages(buses, lines, transformers):
    ## Split AC lines with multiple voltages
    def namer(string, start=0): return (string.format(x) for x in count(start=start))
    busname = namer("M{:02}")
    trafoname = namer("M{:02}")
    linename = namer("M{:02}")

    def find_or_add_lower_v_nom_bus(bus, v_nom):
        candidates = transformers.loc[(transformers.bus1 == bus) &
                                      (transformers.bus0.map(buses.v_nom) == v_nom),
                                      'bus0']
        if len(candidates):
            return candidates.iloc[0]
        new_bus = next(busname)
        buses.loc[new_bus] = pd.Series({'v_nom': v_nom, 'symbol': 'joint', 'carrier': 'AC',
                                        'x': buses.at[bus, 'x'], 'y': buses.at[bus, 'y'],
                                        'under_construction': buses.at[bus, 'under_construction']})

        transformers.loc[next(trafoname)] = pd.Series({'bus0': new_bus, 'bus1': bus})
        return new_bus

    voltage_levels = lines.v_nom.unique()

    for line in lines.tags.str.extract(r'"text_"=>"\(?(\d+)\+(\d+).*?"', expand=True).dropna().itertuples():
        v_nom = int(line._2)
        if lines.at[line.Index, 'num_parallel'] > 1:
            lines.at[line.Index, 'num_parallel'] -= 1

        if v_nom in voltage_levels:
            bus0 = find_or_add_lower_v_nom_bus(lines.at[line.Index, 'bus0'], v_nom)
            bus1 = find_or_add_lower_v_nom_bus(lines.at[line.Index, 'bus1'], v_nom)
            lines.loc[next(linename)] = pd.Series(
                dict(chain(iteritems({'bus0': bus0, 'bus1': bus1, 'v_nom': v_nom, 'circuits': 1}),
                           iteritems({k: lines.at[line.Index, k]
                                      for k in ('underground', 'under_construction',
                                                'tags', 'geometry', 'length')})))
            )

    return buses, lines, transformers

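# Overwrite selected component attributes with manual fixes from a YAML file.
# The code implies a file layout of component -> attribute -> ('oid'|'index')
# -> {key: value}; a purely illustrative (not actual) example:
#
#   Line:
#     v_nom:
#       oid:
#         "12345": 220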
def _apply_parameter_corrections(n):
    with open(snakemake.input.parameter_corrections) as f:
        corrections = yaml.load(f)

    for component, attrs in iteritems(corrections):
        df = n.df(component)
        for attr, repls in iteritems(attrs):
            for i, r in iteritems(repls):
                if i == 'oid':
                    df["oid"] = df.tags.str.extract(r'"oid"=>"(\d+)"', expand=False)
                    r = df.oid.map(repls["oid"]).dropna()
                elif i == 'index':
                    r = pd.Series(repls["index"])
                else:
                    raise NotImplementedError()
                df.loc[r.index, attr] = r

def _set_electrical_parameters_lines(lines):
    v_noms = snakemake.config['electricity']['voltages']
    linetypes = snakemake.config['lines']['types']

    for v_nom in v_noms:
        lines.loc[lines["v_nom"] == v_nom, 'type'] = linetypes[v_nom]

    lines['s_max_pu'] = snakemake.config['lines']['s_max_pu']

    return lines

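# Set the transfer limits of HVDC links from the config and look up their
# ratings (p_nom) in the links_p_nom table: each link geometry's two endpoints
# are matched against the table's endpoint coordinates with a KDTree query
# (both endpoint orderings are in the tree) within a distance bound of 1.5
# in coordinate units.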
def _set_electrical_parameters_links(links):
    links['p_max_pu'] = snakemake.config['links']['s_max_pu']
    links['p_min_pu'] = -1. * snakemake.config['links']['s_max_pu']

    links_p_nom = pd.read_csv(snakemake.input.links_p_nom)

    tree = sp.spatial.KDTree(np.vstack([
        links_p_nom[['x1', 'y1', 'x2', 'y2']],
        links_p_nom[['x2', 'y2', 'x1', 'y1']]
    ]))

    dist, ind = tree.query(
        np.asarray([np.asarray(shapely.wkt.loads(s))[[0, -1]].flatten()
                    for s in links.geometry]),
        distance_upper_bound=1.5
    )

    links_p_nom["j"] = (
        pd.DataFrame(dict(D=dist, i=links_p_nom.index[ind % len(links_p_nom)]), index=links.index)
        .groupby('i').D.idxmin()
    )

    p_nom = links_p_nom.dropna(subset=["j"]).set_index("j")["Power (MW)"]
    links.loc[p_nom.index, "p_nom"] = p_nom

    links.loc[links.under_construction.astype(bool), "p_nom"] = 0.

    return links

def _set_electrical_parameters_transformers(transformers):
    config = snakemake.config['transformers']

    ## Add transformer parameters
    transformers["x"] = config.get('x', 0.1)
    transformers["s_nom"] = config.get('s_nom', 2000)
    transformers['type'] = config.get('type', '')

    return transformers

def _remove_dangling_branches(branches, buses):
    return pd.DataFrame(branches.loc[branches.bus0.isin(buses.index) & branches.bus1.isin(buses.index)])

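# Stitch together AC buses that lie within `radius` km of each other but are
# not yet connected: buses at equal voltage get an extra line, buses at
# differing voltages an extra transformer. Distances are haversine kilometres.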
def _connect_close_buses(network, radius=1.):
    adj = network.graph(["Line", "Transformer", "Link"]).adj

    n_lines_added = 0
    n_transformers_added = 0
    ac_buses = network.buses[network.buses.carrier == 'AC']

    for i, u in enumerate(ac_buses.index):

        vs = ac_buses[["x", "y"]].iloc[i+1:]
        distance_km = pypsa.geo.haversine(vs, ac_buses.loc[u, ["x", "y"]])

        for j, v in enumerate(vs.index):
            km = distance_km[j, 0]

            if km < radius:
                if u in adj[v]:
                    continue
                # print(u, v, ac_buses.at[u, "v_nom"], ac_buses.at[v, "v_nom"], km)

                if ac_buses.at[u, "v_nom"] != ac_buses.at[v, "v_nom"]:
                    network.add("Transformer", "extra_trafo_{}_{}".format(u, v), s_nom=2000, bus0=u, bus1=v, x=0.1)
                    n_transformers_added += 1
                else:
                    network.add("Line", "extra_line_{}_{}".format(u, v), s_nom=4000, bus0=u, bus1=v, x=0.1)
                    n_lines_added += 1

    logger.info("Added {} lines and {} transformers to connect buses less than {} km apart."
                .format(n_lines_added, n_transformers_added, radius))

    return network

def _remove_connected_components_smaller_than(network, min_size):
    network.determine_network_topology()

    sub_network_sizes = network.buses.groupby('sub_network').size()
    subs_to_remove = sub_network_sizes.index[sub_network_sizes < min_size]

    logger.info("Removing {} small sub_networks (synchronous zones) with fewer than {} buses. In total {} buses."
                .format(len(subs_to_remove), min_size, network.buses.sub_network.isin(subs_to_remove).sum()))

    return network[~network.buses.sub_network.isin(subs_to_remove)]

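# Keep only the largest connected component of the network graph; all smaller
# detached islands from the raw data are dropped.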
def _remove_unconnected_components(network):
    _, labels = csgraph.connected_components(network.adjacency_matrix(), directed=False)
    component = pd.Series(labels, index=network.buses.index)

    component_sizes = component.value_counts()
    components_to_remove = component_sizes.iloc[1:]

    logger.info("Removing {} unconnected network components with at most {} buses. In total {} buses."
                .format(len(components_to_remove), components_to_remove.max(), components_to_remove.sum()))

    return network[component == component_sizes.index[0]]

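# Assemble the base network: load buses, lines, transformers, links and
# converters from the gridkit extracts, attach electrical parameters, import
# everything into a fresh pypsa.Network, prune unconnected parts and apply
# the manual parameter corrections.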
def base_network():
    buses = _load_buses_from_eg()

    links = _load_links_from_eg(buses)
    converters = _load_converters_from_eg(buses)

    lines = _load_lines_from_eg(buses)
    transformers = _load_transformers_from_eg(buses)

    # buses, lines, transformers = _split_aclines_with_several_voltages(buses, lines, transformers)

    lines = _set_electrical_parameters_lines(lines)
    links = _set_electrical_parameters_links(links)
    transformers = _set_electrical_parameters_transformers(transformers)

    n = pypsa.Network()
    n.name = 'PyPSA-Eur'

    n.set_snapshots(pd.date_range(snakemake.config['historical_year'], periods=8760, freq='h'))

    n.import_components_from_dataframe(buses, "Bus")
    n.import_components_from_dataframe(lines, "Line")
    n.import_components_from_dataframe(transformers, "Transformer")
    n.import_components_from_dataframe(links, "Link")
    n.import_components_from_dataframe(converters, "Link")

    if 'T' in snakemake.wildcards.opts.split('-'):
        raise NotImplementedError

    # n = _connect_close_buses(n, radius=1.)

    n = _remove_unconnected_components(n)

    _apply_parameter_corrections(n)

    # Workaround: import_components_from_dataframe does not preserve types of foreign columns
    n.lines['underground'] = n.lines['underground'].astype(bool)
    n.lines['under_construction'] = n.lines['under_construction'].astype(bool)
    n.links['underground'] = n.links['underground'].astype(bool)
    n.links['under_construction'] = n.links['under_construction'].astype(bool)

    return n

if __name__ == "__main__":
    # Detect running outside of snakemake and mock snakemake for testing
    if 'snakemake' not in globals():
        from vresutils import Dict
        import yaml
        snakemake = Dict()
        snakemake.input = Dict(
            eg_buses='../data/entsoegridkit/buses.csv',
            eg_lines='../data/entsoegridkit/lines.csv',
            eg_links='../data/entsoegridkit/links.csv',
            eg_converters='../data/entsoegridkit/converters.csv',
            eg_transformers='../data/entsoegridkit/transformers.csv',
            parameter_corrections='../data/parameter_corrections.yaml',
            links_p_nom='../data/links_p_nom.csv'
        )

        snakemake.wildcards = Dict(opts='LC')
        with open('../config.yaml') as f:
            snakemake.config = yaml.load(f)
        snakemake.output = ['../networks/base_LC.h5']

    logger.setLevel(level=snakemake.config['logging_level'])

    n = base_network()
    n.export_to_hdf5(snakemake.output[0])
220  scripts/solve_network.py  Normal file
@@ -0,0 +1,220 @@
import numpy as np
import pandas as pd

import logging
logging.basicConfig(filename=snakemake.log.python, level=logging.INFO)
logger = logging.getLogger(__name__)

import pypsa

from _helpers import madd

if 'tmpdir' in snakemake.config['solving']:
    # PYOMO should write its lp files into tmp here
    tmpdir = snakemake.config['solving']['tmpdir']
    import os
    if not os.path.isdir(tmpdir):
        os.mkdir(tmpdir)
    from pyutilib.services import TempfileManager
    TempfileManager.tempdir = tmpdir

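# Apply solver-facing tweaks from the config before optimisation: clip tiny
# per-unit availabilities to zero, optionally add high-cost load-shedding
# generators at every bus, optionally perturb marginal costs to break
# degeneracies, and optionally truncate to the first `nhours` snapshots
# (with snapshot weightings rescaled to a full year).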
def prepare_network(n):
    solve_opts = snakemake.config['solving']['options']
    if 'clip_p_max_pu' in solve_opts:
        for df in (n.generators_t.p_max_pu, n.storage_units_t.inflow):
            df.where(df > solve_opts['clip_p_max_pu'], other=0., inplace=True)

    if solve_opts.get('load_shedding'):
        n.add("Carrier", "Load")
        madd(n, "Generator", "Load",
             bus=n.buses.index,
             carrier='load',
             marginal_cost=1.0e5 * snakemake.config['costs']['EUR_to_ZAR'],
             # intersection between macroeconomic and survey-based
             # willingness to pay
             # http://journal.frontiersin.org/article/10.3389/fenrg.2015.00055/full
             p_nom=1e6)

    if solve_opts.get('noisy_costs'):
        for t in n.iterate_components():
            # if 'capital_cost' in t.df:
            #     t.df['capital_cost'] += 1e1 + 2.*(np.random.random(len(t.df)) - 0.5)
            if 'marginal_cost' in t.df:
                t.df['marginal_cost'] += 1e-2 + 2e-3*(np.random.random(len(t.df)) - 0.5)

    if solve_opts.get('nhours'):
        nhours = solve_opts['nhours']
        n = n[:nhours, :]
        n.snapshot_weightings[:] = 8760./nhours

    return n

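# Solve the linear optimal power flow. For extendable lines the impedance
# depends on the optimised capacity, so the LOPF is re-run with updated line
# parameters until the capacities converge (see the iteration loop below),
# followed by a final pass in which zero-capacity lines are fixed and dropped.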
def solve_network(n):
    def add_opts_constraints(n):
        opts = snakemake.wildcards.opts.split('-')

        if 'BAU' in opts:
            mincaps = snakemake.config['electricity']['BAU_mincapacities']
            def bau_mincapacities_rule(model, carrier):
                gens = n.generators.index[n.generators.p_nom_extendable & (n.generators.carrier == carrier)]
                return sum(model.generator_p_nom[gen] for gen in gens) >= mincaps[carrier]
            n.model.bau_mincapacities = pypsa.opt.Constraint(list(mincaps), rule=bau_mincapacities_rule)

        if 'SAFE' in opts:
            peakdemand = (1. + snakemake.config['electricity']['SAFE_reservemargin']) * n.loads_t.p_set.sum(axis=1).max()
            conv_techs = snakemake.config['plotting']['conv_techs']
            exist_conv_caps = n.generators.loc[n.generators.carrier.isin(conv_techs) & ~n.generators.p_nom_extendable, 'p_nom'].sum()
            ext_gens_i = n.generators.index[n.generators.carrier.isin(conv_techs) & n.generators.p_nom_extendable]
            n.model.safe_peakdemand = pypsa.opt.Constraint(expr=sum(n.model.generator_p_nom[gen] for gen in ext_gens_i) >= peakdemand - exist_conv_caps)

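    # Fix the capacity variables of the given lines and links at their current
    # values in the Pyomo model (lines with a type are rated from the line
    # type), and propagate the change if a persistent solver is attached.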
    def fix_lines(n, lines_i=None, links_i=None):  # , fix=True):
        if lines_i is not None and len(lines_i) > 0:
            s_nom = n.lines.s_nom.where(
                n.lines.type == '',
                np.sqrt(3) * n.lines.type.map(n.line_types.i_nom) * n.lines.bus0.map(n.buses.v_nom) * n.lines.num_parallel
            )
            for l in lines_i:
                n.model.passive_branch_s_nom["Line", l].fix(s_nom.at[l])
                # n.model.passive_branch_s_nom[l].fixed = fix
            if isinstance(n.opt, pypsa.opf.PersistentSolver):
                n.opt.update_var(n.model.passive_branch_s_nom)

        if links_i is not None and len(links_i) > 0:
            for l in links_i:
                n.model.link_p_nom[l].fix(n.links.at[l, 'p_nom'])
                # n.model.link_p_nom[l].fixed = fix
            if isinstance(n.opt, pypsa.opf.PersistentSolver):
                n.opt.update_var(n.model.link_p_nom)

        # Not sure if this is needed
        # n.model.preprocess()

    solve_opts = snakemake.config['solving']['options']

    solver_options = snakemake.config['solving']['solver'].copy()
    solver_options['logfile'] = snakemake.log.gurobi
    solver_name = solver_options.pop('name')

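    # Build the Pyomo model if necessary (reusing it with persistent solvers),
    # optionally pin lines/links that came out of the previous solve at zero
    # capacity, and run one LOPF solve. A 'warning' solver status is tolerated
    # only if allow_warning_status is set.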
    def run_lopf(n, allow_warning_status=False, fix_zero_lines=False):
        if not hasattr(n, 'opt') or not isinstance(n.opt, pypsa.opf.PersistentSolver):
            pypsa.opf.network_lopf_build_model(n, formulation=solve_opts['formulation'])
            add_opts_constraints(n)

            pypsa.opf.network_lopf_prepare_solver(n, solver_name=solver_name)

        if fix_zero_lines:
            fix_lines_b = (n.lines.s_nom_opt == 0.) & n.lines.s_nom_extendable
            n.lines.loc[fix_lines_b & (n.lines.type == ''), 's_nom'] = 0.
            n.lines.loc[fix_lines_b & (n.lines.type != ''), 'num_parallel'] = 0.

            fix_links_b = (n.links.p_nom_opt == 0.) & n.links.p_nom_extendable
            n.links.loc[fix_links_b, 'p_nom'] = 0.

            # WARNING: We are not unfixing these later
            fix_lines(n, lines_i=n.lines.index[fix_lines_b], links_i=n.links.index[fix_links_b])

        status, termination_condition = \
            pypsa.opf.network_lopf_solve(n,
                                         solver_options=solver_options,
                                         formulation=solve_opts['formulation'])

        assert status == "ok" or allow_warning_status and status == 'warning', \
            ("network_lopf did abort with status={} "
             "and termination_condition={}"
             .format(status, termination_condition))

        return status, termination_condition

    lines_ext_b = n.lines.s_nom_extendable
    if lines_ext_b.any():
        # we need to iterate, since there is a relation
        # between s_nom/p_nom and r, x for branches
        msq_threshold = 0.01
        lines = pd.DataFrame(n.lines[['r', 'x', 'type', 'num_parallel']])

        lines['s_nom'] = (
            np.sqrt(3) * n.lines['type'].map(n.line_types.i_nom) * n.lines.bus0.map(n.buses.v_nom) * n.lines.num_parallel
        ).where(n.lines.type != '', n.lines['s_nom'])

        lines_ext_typed_b = (n.lines.type != '') & lines_ext_b
        lines_ext_untyped_b = (n.lines.type == '') & lines_ext_b

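        # Zero out negligible optimised capacities, then recompute r and x of
        # untyped extendable lines (resp. num_parallel of typed ones) from the
        # latest s_nom_opt. With a persistent solver, the kirchhoff cycle
        # constraints that depend on the updated impedances are rebuilt in place.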
        def update_line_parameters(n, zero_lines_below=10, fix_zero_lines=False):
            if zero_lines_below > 0:
                n.lines.loc[n.lines.s_nom_opt < zero_lines_below, 's_nom_opt'] = 0.
                n.links.loc[n.links.p_nom_opt < zero_lines_below, 'p_nom_opt'] = 0.

            if lines_ext_untyped_b.any():
                for attr in ('r', 'x'):
                    n.lines.loc[lines_ext_untyped_b, attr] = (
                        lines[attr].multiply(lines['s_nom']/n.lines['s_nom_opt'])
                    )

            if lines_ext_typed_b.any():
                n.lines.loc[lines_ext_typed_b, 'num_parallel'] = (
                    lines['num_parallel'].multiply(n.lines['s_nom_opt']/lines['s_nom'])
                )
                logger.debug("lines.num_parallel={}".format(n.lines.loc[lines_ext_typed_b, 'num_parallel']))

            if isinstance(n.opt, pypsa.opf.PersistentSolver):
                n.calculate_dependent_values()

                assert solve_opts['formulation'] == 'kirchhoff', \
                    "Updating persistent solvers has only been implemented for the kirchhoff formulation for now"

                n.opt.remove_constraint(n.model.cycle_constraints)
                del n.model.cycle_constraints_index
                del n.model.cycle_constraints_index_0
                del n.model.cycle_constraints_index_1
                del n.model.cycle_constraints

                pypsa.opf.define_passive_branch_flows_with_kirchhoff(n, n.snapshots, skip_vars=True)
                n.opt.add_constraint(n.model.cycle_constraints)

        iteration = 1

        lines['s_nom_opt'] = lines['s_nom']
        status, termination_condition = run_lopf(n, allow_warning_status=True)

        def msq_diff(n):
            lines_err = np.sqrt(((n.lines['s_nom_opt'] - lines['s_nom_opt'])**2).mean())/lines['s_nom_opt'].mean()
            logger.info("Mean square difference after iteration {} is {}".format(iteration, lines_err))
            return lines_err

        min_iterations = solve_opts.get('min_iterations', 2)
        max_iterations = solve_opts.get('max_iterations', 999)
        while msq_diff(n) > msq_threshold or iteration < min_iterations:
            if iteration >= max_iterations:
                logger.info("Iteration {} beyond max_iterations {}. Stopping ...".format(iteration, max_iterations))
                break

            update_line_parameters(n)
            lines['s_nom_opt'] = n.lines['s_nom_opt']
            iteration += 1

        # Not really needed, could also be taken out
        n.export_to_hdf5(snakemake.output[0])

        status, termination_condition = run_lopf(n, allow_warning_status=True)

        update_line_parameters(n, zero_lines_below=500)

    status, termination_condition = run_lopf(n, fix_zero_lines=True)

    # Drop zero lines from network
    zero_lines_i = n.lines.index[(n.lines.s_nom_opt == 0.) & n.lines.s_nom_extendable]
    if len(zero_lines_i):
        n.lines.drop(zero_lines_i, inplace=True)
    zero_links_i = n.links.index[(n.links.p_nom_opt == 0.) & n.links.p_nom_extendable]
    if len(zero_links_i):
        n.links.drop(zero_links_i, inplace=True)

    return n

if __name__ == "__main__":
    n = pypsa.Network()
    n.import_from_hdf5(snakemake.input[0])

    n = prepare_network(n)
    n = solve_network(n)

    n.export_to_hdf5(snakemake.output[0])