Fix model for country selections without DC links (fixes #5)

This commit is contained in:
Jonas Hoersch 2019-02-13 19:03:57 +01:00
parent 1b6fe5be80
commit 7f3f096ba6
8 changed files with 44 additions and 19 deletions

View File

@ -151,6 +151,8 @@ def update_transmission_costs(n, costs, length_factor=1.0, simple_hvdc_costs=Fal
n.lines['capital_cost'] = (n.lines['length'] * length_factor * n.lines['capital_cost'] = (n.lines['length'] * length_factor *
costs.at['HVAC overhead', 'capital_cost']) costs.at['HVAC overhead', 'capital_cost'])
if n.links.empty: return
dc_b = n.links.carrier == 'DC' dc_b = n.links.carrier == 'DC'
if simple_hvdc_costs: if simple_hvdc_costs:
n.links.loc[dc_b, 'capital_cost'] = (n.links.loc[dc_b, 'length'] * length_factor * n.links.loc[dc_b, 'capital_cost'] = (n.links.loc[dc_b, 'length'] * length_factor *

View File

@ -20,10 +20,16 @@ logger = logging.getLogger(__name__)
import pypsa import pypsa
def _get_oid(df): def _get_oid(df):
return df.tags.str.extract('"oid"=>"(\d+)"', expand=False) if "tags" in df.columns:
return df.tags.str.extract('"oid"=>"(\d+)"', expand=False)
else:
return pd.Series(np.nan, df.index)
def _get_country(df): def _get_country(df):
return df.tags.str.extract('"country"=>"([A-Z]{2})"', expand=False) if "tags" in df.columns:
return df.tags.str.extract('"country"=>"([A-Z]{2})"', expand=False)
else:
return pd.Series(np.nan, df.index)
def _find_closest_links(links, new_links, distance_upper_bound=1.5): def _find_closest_links(links, new_links, distance_upper_bound=1.5):
tree = sp.spatial.KDTree(np.vstack([ tree = sp.spatial.KDTree(np.vstack([
@ -226,6 +232,8 @@ def _set_lines_s_nom_from_linetypes(n):
) )
def _set_electrical_parameters_links(links): def _set_electrical_parameters_links(links):
if links.empty: return links
p_max_pu = snakemake.config['links'].get('p_max_pu', 1.) p_max_pu = snakemake.config['links'].get('p_max_pu', 1.)
links['p_max_pu'] = p_max_pu links['p_max_pu'] = p_max_pu
links['p_min_pu'] = -p_max_pu links['p_min_pu'] = -p_max_pu
@ -402,6 +410,8 @@ def _replace_b2b_converter_at_country_border_by_link(n):
.format(i, b0, line, linkcntry.at[i], buscntry.at[b1])) .format(i, b0, line, linkcntry.at[i], buscntry.at[b1]))
def _set_links_underwater_fraction(n): def _set_links_underwater_fraction(n):
if n.links.empty: return
offshore_shape = gpd.read_file(snakemake.input.offshore_shapes).unary_union offshore_shape = gpd.read_file(snakemake.input.offshore_shapes).unary_union
links = gpd.GeoSeries(n.links.geometry.dropna().map(shapely.wkt.loads)) links = gpd.GeoSeries(n.links.geometry.dropna().map(shapely.wkt.loads))
n.links['underwater_fraction'] = links.intersection(offshore_shape).length / links.length n.links['underwater_fraction'] = links.intersection(offshore_shape).length / links.length
@ -488,7 +498,10 @@ if __name__ == "__main__":
eg_transformers='data/entsoegridkit/transformers.csv', eg_transformers='data/entsoegridkit/transformers.csv',
parameter_corrections='data/parameter_corrections.yaml', parameter_corrections='data/parameter_corrections.yaml',
links_p_nom='data/links_p_nom.csv', links_p_nom='data/links_p_nom.csv',
links_tyndp='data/links_tyndp.csv' links_tyndp='data/links_tyndp.csv',
country_shapes='resources/country_shapes.geojson',
offshore_shapes='resources/offshore_shapes.geojson',
europe_shape='resources/europe_shape.geojson'
), ),
output = ['networks/base.nc'] output = ['networks/base.nc']
) )

View File

@ -71,6 +71,9 @@ def distribute_clusters(n, n_clusters, solver_name=None):
N = n.buses.groupby(['country', 'sub_network']).size() N = n.buses.groupby(['country', 'sub_network']).size()
assert n_clusters >= len(N) and n_clusters <= N.sum(), \
"Number of clusters must be {} <= n_clusters <= {} for this selection of countries.".format(len(N), N.sum())
m = po.ConcreteModel() m = po.ConcreteModel()
def n_bounds(model, *n_id): def n_bounds(model, *n_id):
return (1, N[n_id]) return (1, N[n_id])

View File

@ -24,6 +24,8 @@ def assign_carriers(n):
n.lines["carrier"] = "AC" n.lines["carrier"] = "AC"
n.lines["carrier"].replace({"AC": "lines"}, inplace=True) n.lines["carrier"].replace({"AC": "lines"}, inplace=True)
if n.links.empty: n.links["carrier"] = pd.Series(dtype=str)
n.links["carrier"].replace({"DC": "lines"}, inplace=True) n.links["carrier"].replace({"DC": "lines"}, inplace=True)
if "EU gas store" in n.stores.index and n.stores.loc["EU gas Store","carrier"] == "": if "EU gas store" in n.stores.index and n.stores.loc["EU gas Store","carrier"] == "":

View File

@ -87,17 +87,17 @@ if snakemake.wildcards.attr == 'p_nom':
# bus_sizes = n.generators_t.p.sum().loc[n.generators.carrier == "load"].groupby(n.generators.bus).sum() # bus_sizes = n.generators_t.p.sum().loc[n.generators.carrier == "load"].groupby(n.generators.bus).sum()
bus_sizes = pd.concat((n.generators.query('carrier != "load"').groupby(['bus', 'carrier']).p_nom_opt.sum(), bus_sizes = pd.concat((n.generators.query('carrier != "load"').groupby(['bus', 'carrier']).p_nom_opt.sum(),
n.storage_units.groupby(['bus', 'carrier']).p_nom_opt.sum())) n.storage_units.groupby(['bus', 'carrier']).p_nom_opt.sum()))
line_widths_exp = pd.concat(dict(Line=n.lines.s_nom_opt, Link=n.links.p_nom_opt)) line_widths_exp = dict(Line=n.lines.s_nom_opt, Link=n.links.p_nom_opt)
line_widths_cur = pd.concat(dict(Line=n.lines.s_nom_min, Link=n.links.p_nom_min)) line_widths_cur = dict(Line=n.lines.s_nom_min, Link=n.links.p_nom_min)
else: else:
raise 'plotting of {} has not been implemented yet'.format(plot) raise 'plotting of {} has not been implemented yet'.format(plot)
line_colors_with_alpha = \ line_colors_with_alpha = \
pd.concat(dict(Line=(line_widths_cur['Line'] / n.lines.s_nom > 1e-3) dict(Line=(line_widths_cur['Line'] / n.lines.s_nom > 1e-3)
.map({True: line_colors['cur'], False: to_rgba(line_colors['cur'], 0.)}), .map({True: line_colors['cur'], False: to_rgba(line_colors['cur'], 0.)}),
Link=(line_widths_cur['Link'] / n.links.p_nom > 1e-3) Link=(line_widths_cur['Link'] / n.links.p_nom > 1e-3)
.map({True: line_colors['cur'], False: to_rgba(line_colors['cur'], 0.)}))) .map({True: line_colors['cur'], False: to_rgba(line_colors['cur'], 0.)}))
## FORMAT ## FORMAT
linewidth_factor = opts['map'][snakemake.wildcards.attr]['linewidth_factor'] linewidth_factor = opts['map'][snakemake.wildcards.attr]['linewidth_factor']
@ -105,15 +105,15 @@ bus_size_factor = opts['map'][snakemake.wildcards.attr]['bus_size_factor']
## PLOT ## PLOT
fig, ax = plt.subplots(figsize=map_figsize) fig, ax = plt.subplots(figsize=map_figsize)
n.plot(line_widths=line_widths_exp/linewidth_factor, n.plot(line_widths=pd.concat(line_widths_exp)/linewidth_factor,
line_colors=dict(Line=line_colors['exp'], Link=line_colors['exp']), line_colors=dict(Line=line_colors['exp'], Link=line_colors['exp']),
bus_sizes=bus_sizes/bus_size_factor, bus_sizes=bus_sizes/bus_size_factor,
bus_colors=tech_colors, bus_colors=tech_colors,
boundaries=map_boundaries, boundaries=map_boundaries,
basemap=True, basemap=True,
ax=ax) ax=ax)
n.plot(line_widths=line_widths_cur/linewidth_factor, n.plot(line_widths=pd.concat(line_widths_cur)/linewidth_factor,
line_colors=line_colors_with_alpha, line_colors=pd.concat(line_colors_with_alpha),
bus_sizes=0, bus_sizes=0,
bus_colors=tech_colors, bus_colors=tech_colors,
boundaries=map_boundaries, boundaries=map_boundaries,
@ -253,7 +253,7 @@ ll = snakemake.wildcards.ll
ll_type = ll[0] ll_type = ll[0]
ll_factor = ll[1:] ll_factor = ll[1:]
lbl = dict(c='line cost', v='line volume')[ll_type] lbl = dict(c='line cost', v='line volume')[ll_type]
amnt = '{lv} x today\'s'.format(ll_factor) if ll_factor != 'opt' else 'optimal' amnt = '{ll} x today\'s'.format(ll=ll_factor) if ll_factor != 'opt' else 'optimal'
fig.suptitle('Expansion to {amount} {label} at {clusters} clusters' fig.suptitle('Expansion to {amount} {label} at {clusters} clusters'
.format(amount=amnt, label=lbl, clusters=snakemake.wildcards.clusters)) .format(amount=amnt, label=lbl, clusters=snakemake.wildcards.clusters))

View File

@ -38,7 +38,7 @@ def set_line_s_max_pu(n):
n.lines['s_max_pu'] = s_max_pu n.lines['s_max_pu'] = s_max_pu
def set_line_cost_limit(n, lc, Nyears=1.): def set_line_cost_limit(n, lc, Nyears=1.):
links_dc_b = n.links.carrier == 'DC' links_dc_b = n.links.carrier == 'DC' if not n.links.empty else pd.Series()
lines_s_nom = n.lines.s_nom.where( lines_s_nom = n.lines.s_nom.where(
n.lines.type == '', n.lines.type == '',
@ -74,7 +74,7 @@ def set_line_cost_limit(n, lc, Nyears=1.):
return n return n
def set_line_volume_limit(n, lv, Nyears=1.): def set_line_volume_limit(n, lv, Nyears=1.):
links_dc_b = n.links.carrier == 'DC' links_dc_b = n.links.carrier == 'DC' if not n.links.empty else pd.Series()
lines_s_nom = n.lines.s_nom.where( lines_s_nom = n.lines.s_nom.where(
n.lines.type == '', n.lines.type == '',

View File

@ -64,6 +64,8 @@ def simplify_network_to_380(n):
return n, trafo_map return n, trafo_map
def _prepare_connection_costs_per_link(n): def _prepare_connection_costs_per_link(n):
if n.links.empty: return {}
costs = load_costs(n.snapshot_weightings.sum() / 8760, snakemake.input.tech_costs, costs = load_costs(n.snapshot_weightings.sum() / 8760, snakemake.input.tech_costs,
snakemake.config['costs'], snakemake.config['electricity']) snakemake.config['costs'], snakemake.config['electricity'])
@ -135,6 +137,9 @@ def simplify_links(n):
## Complex multi-node links are folded into end-points ## Complex multi-node links are folded into end-points
logger.info("Simplifying connected link components") logger.info("Simplifying connected link components")
if n.links.empty:
return n, n.buses.index.to_series()
# Determine connected link components, ignore all links but DC # Determine connected link components, ignore all links but DC
adjacency_matrix = n.adjacency_matrix(branch_components=['Link'], adjacency_matrix = n.adjacency_matrix(branch_components=['Link'],
weights=dict(Link=(n.links.carrier == 'DC').astype(float))) weights=dict(Link=(n.links.carrier == 'DC').astype(float)))

View File

@ -80,24 +80,24 @@ def add_opts_constraints(n, opts=None):
def add_lv_constraint(n):
    """Constrain total extendable line volume (capacity * length).

    Reads ``n.line_volume_limit`` (set elsewhere); does nothing when the
    attribute is missing or infinite. Adds ``line_volume_constraint`` to
    ``n.model`` covering extendable AC lines and extendable DC links.
    """
    line_volume = getattr(n, 'line_volume_limit', None)
    if line_volume is None or np.isinf(line_volume):
        return
    # Guard for networks without any links (country selections without DC):
    # indexing an empty links frame by carrier would fail.
    if n.links.empty:
        links_dc_ext_i = pd.Index([])
    else:
        links_dc_ext_i = n.links.index[(n.links.carrier == 'DC') & n.links.p_nom_extendable]
    lines_ext_i = n.lines.index[n.lines.s_nom_extendable]
    line_volume_expr = sum(n.model.passive_branch_s_nom["Line", ln] * n.lines.at[ln, "length"]
                           for ln in lines_ext_i)
    link_volume_expr = sum(n.model.link_p_nom[lk] * n.links.at[lk, "length"]
                           for lk in links_dc_ext_i)
    n.model.line_volume_constraint = pypsa.opt.Constraint(
        expr=(line_volume_expr + link_volume_expr <= line_volume)
    )
def add_lc_constraint(n):
    """Constrain total extendable line cost.

    Reads ``n.line_cost_limit`` (set elsewhere); does nothing when the
    attribute is missing or infinite. Adds ``line_cost_constraint`` to
    ``n.model`` covering extendable AC lines and extendable DC links,
    weighted by their ``capital_cost_lc`` entries.
    """
    line_cost = getattr(n, 'line_cost_limit', None)
    if line_cost is None or np.isinf(line_cost):
        return
    # Guard for networks without any links (country selections without DC):
    # indexing an empty links frame by carrier would fail.
    if n.links.empty:
        links_dc_ext_i = pd.Index([])
    else:
        links_dc_ext_i = n.links.index[(n.links.carrier == 'DC') & n.links.p_nom_extendable]
    lines_ext_i = n.lines.index[n.lines.s_nom_extendable]
    line_cost_expr = sum(n.model.passive_branch_s_nom["Line", ln] * n.lines.at[ln, "capital_cost_lc"]
                         for ln in lines_ext_i)
    link_cost_expr = sum(n.model.link_p_nom[lk] * n.links.at[lk, "capital_cost_lc"]
                         for lk in links_dc_ext_i)
    n.model.line_cost_constraint = pypsa.opt.Constraint(
        expr=(line_cost_expr + link_cost_expr <= line_cost)
    )