resolve merge conflicts

Fabian Neumann 2019-10-24 16:22:57 +02:00
commit 0fef13ee86
6 changed files with 80 additions and 10 deletions

config.yaml

@@ -24,6 +24,7 @@ enable:
 electricity:
   voltages: [220., 300., 380.]
   co2limit: 7.75e+7 # 0.05 * 3.1e9*0.5
+  agg_p_nom_limits: data/agg_p_nom_minmax.csv

   extendable_carriers:
     Generator: [OCGT]
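The new agg_p_nom_limits key wires a per-country, per-carrier capacity-limit file into the electricity settings. A minimal sketch of how the option can be picked up on the Python side, mirroring the solve_network.py hunk further down (assumes the parsed config.yaml sits in the working directory):

    import pandas as pd
    import yaml

    # Parse config.yaml and fetch the optional limits path; .get() returns None if absent.
    with open('config.yaml') as f:
        config = yaml.safe_load(f)

    path = config['electricity'].get('agg_p_nom_limits')  # 'data/agg_p_nom_minmax.csv'
    if path is not None:
        agg_p_nom_minmax = pd.read_csv(path, index_col=[0, 1])  # (country, carrier) index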

data/agg_p_nom_minmax.csv (new file)

@@ -0,0 +1,31 @@
+country,carrier,min,max
+DE,onwind,0.1,
+DE,offwind-ac,0.1,
+DE,offwind-dc,0.1,
+DE,solar,0.2,
+LU,onwind,,
+LU,solar,,
+NL,onwind,,
+NL,offwind-ac,,
+NL,offwind-dc,,
+NL,solar,,
+GB,onwind,,
+GB,offwind-ac,,
+GB,offwind-dc,,
+GB,solar,,
+IE,onwind,,
+IE,offwind-ac,,
+IE,offwind-dc,,
+IE,solar,,
+FR,onwind,,
+FR,offwind-ac,,
+FR,offwind-dc,,
+FR,solar,,
+DK,onwind,,
+DK,offwind-ac,,
+DK,offwind-dc,,
+DK,solar,,
+BE,onwind,,
+BE,offwind-ac,,
+BE,offwind-dc,,
+BE,solar,,
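Empty min/max cells deserialise as NaN, which the CCL constraint rules below interpret as "no bound in this direction". A quick check of that convention, assuming the file above is on disk:

    import numpy as np
    import pandas as pd

    minmax = pd.read_csv('data/agg_p_nom_minmax.csv', index_col=[0, 1])

    lo = minmax.at[('DE', 'solar'), 'min']  # 0.2 -> lower bound is enforced
    hi = minmax.at[('DE', 'solar'), 'max']  # empty cell -> NaN -> bound skipped
    print(np.isfinite(lo), np.isfinite(hi))  # True False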

environment.yaml

@@ -31,7 +31,7 @@ dependencies:
   # GIS dependencies have to come all from conda-forge
   - conda-forge::cartopy
   - conda-forge::fiona
-  - conda-forge::pyproj=1.9.5.1
+  - conda-forge::pyproj
   - conda-forge::pyshp
   - conda-forge::geopandas
   - conda-forge::rasterio

@@ -39,7 +39,7 @@ dependencies:
   - conda-forge::libgdal
   - pip:
-    - vresutils>=0.2.5
+    - vresutils>=0.3
     - git+https://github.com/FRESNA/atlite.git#egg=atlite
     - git+https://github.com/PyPSA/glaes.git#egg=glaes
     - git+https://github.com/PyPSA/geokit.git#egg=geokit

scripts/base_network.py

@@ -470,6 +470,9 @@ def _replace_b2b_converter_at_country_border_by_link(n):
 def _set_links_underwater_fraction(n):
     if n.links.empty: return

-    offshore_shape = gpd.read_file(snakemake.input.offshore_shapes).unary_union
-    links = gpd.GeoSeries(n.links.geometry.dropna().map(shapely.wkt.loads))
-    n.links['underwater_fraction'] = links.intersection(offshore_shape).length / links.length
+    if not hasattr(n.links, 'geometry'):
+        n.links['underwater_fraction'] = 0.
+    else:
+        offshore_shape = gpd.read_file(snakemake.input.offshore_shapes).unary_union
+        links = gpd.GeoSeries(n.links.geometry.dropna().map(shapely.wkt.loads))
+        n.links['underwater_fraction'] = links.intersection(offshore_shape).length / links.length
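underwater_fraction is the share of each link's route that crosses offshore waters; the new guard defaults it to 0 when the links table carries no geometry column at all. A toy sketch of the geometry operation itself, using shapely with made-up coordinates:

    from shapely.geometry import LineString, Polygon

    # Hypothetical offshore area and a link route crossing it.
    offshore = Polygon([(0, 0), (4, 0), (4, 4), (0, 4)])
    link = LineString([(-2, 2), (6, 2)])  # 8 units long, 4 of them inside the polygon

    frac = link.intersection(offshore).length / link.length
    print(frac)  # 0.5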
@@ -478,6 +481,7 @@ def _adjust_capacities_of_under_construction_branches(n):
     lines_mode = snakemake.config['lines'].get('under_construction', 'undef')
     if lines_mode == 'zero':
         n.lines.loc[n.lines.under_construction, 'num_parallel'] = 0.
+        n.lines.loc[n.lines.under_construction, 's_nom'] = 0.
     elif lines_mode == 'remove':
         n.mremove("Line", n.lines.index[n.lines.under_construction])
     elif lines_mode != 'keep':
@@ -518,6 +522,7 @@ def base_network():
     n.name = 'PyPSA-Eur'

     n.set_snapshots(pd.date_range(freq='h', **snakemake.config['snapshots']))
+    n.snapshot_weightings[:] *= 8760./n.snapshot_weightings.sum()

     n.import_components_from_dataframe(buses, "Bus")
     n.import_components_from_dataframe(lines, "Line")
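The rescaling keeps annualised quantities (e.g. the CO2 budget) comparable when the model covers less than a full year: the snapshot weightings are forced to sum to 8760 hours regardless of how many snapshots were kept. A small numeric check, assuming a hypothetical run clipped to a single 24-hour day:

    import pandas as pd

    # 24 snapshots, each initially weighted 1.0 hour.
    w = pd.Series(1.0, index=range(24))
    w[:] *= 8760. / w.sum()

    print(w.iloc[0], w.sum())  # 365.0 8760.0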

scripts/build_bus_regions.py

@@ -67,16 +67,18 @@ if __name__ == "__main__":
         onshore_shape = country_shapes[country]
         onshore_locs = n.buses.loc[c_b & n.buses.substation_lv, ["x", "y"]]
         onshore_regions.append(gpd.GeoDataFrame({
+                'name': onshore_locs.index,
                 'x': onshore_locs['x'],
                 'y': onshore_locs['y'],
                 'geometry': voronoi_partition_pts(onshore_locs.values, onshore_shape),
                 'country': country
-            }, index=onshore_locs.index))
+            }))

         if country not in offshore_shapes.index: continue
         offshore_shape = offshore_shapes[country]
         offshore_locs = n.buses.loc[c_b & n.buses.substation_off, ["x", "y"]]
         offshore_regions_c = gpd.GeoDataFrame({
+                'name': offshore_locs.index,
                 'x': offshore_locs['x'],
                 'y': offshore_locs['y'],
                 'geometry': voronoi_partition_pts(offshore_locs.values, offshore_shape),

@@ -88,9 +90,8 @@ if __name__ == "__main__":
     def save_to_geojson(s, fn):
         if os.path.exists(fn):
             os.unlink(fn)
-        df = s.reset_index()
-        schema = {**gpd.io.file.infer_schema(df), 'geometry': 'Unknown'}
-        df.to_file(fn, driver='GeoJSON', schema=schema)
+        schema = {**gpd.io.file.infer_schema(s), 'geometry': 'Unknown'}
+        s.to_file(fn, driver='GeoJSON', schema=schema)

     save_to_geojson(pd.concat(onshore_regions), snakemake.output.regions_onshore)
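The two changes belong together: GeoJSON has no notion of a DataFrame index, so bus identifiers must live in an ordinary column to survive the write. Storing them in a 'name' column up front lets save_to_geojson drop the reset_index() round-trip and write the frame directly. A toy sketch with made-up data:

    import geopandas as gpd
    from shapely.geometry import Point

    # Identifiers as a plain 'name' column, as the hunk above now builds them.
    gdf = gpd.GeoDataFrame({
        'name': ['bus0', 'bus1'],
        'geometry': [Point(0, 0), Point(1, 1)],
    })

    # 'name' is stored as a feature property in the output file.
    gdf.to_file('regions.geojson', driver='GeoJSON')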

scripts/solve_network.py

@@ -156,6 +156,36 @@ def add_opts_constraints(n, opts=None):
         ext_gens_i = n.generators.index[n.generators.carrier.isin(conv_techs) & n.generators.p_nom_extendable]
         n.model.safe_peakdemand = pypsa.opt.Constraint(expr=sum(n.model.generator_p_nom[gen] for gen in ext_gens_i) >= peakdemand - exist_conv_caps)

+    # Add constraints on the per-carrier capacity in each country
+    if 'CCL' in opts:
+        agg_p_nom_limits = snakemake.config['electricity'].get('agg_p_nom_limits')
+
+        try:
+            agg_p_nom_minmax = pd.read_csv(agg_p_nom_limits, index_col=list(range(2)))
+        except IOError:
+            logger.exception("Need to specify the path to a .csv file containing aggregate capacity limits per country in config['electricity']['agg_p_nom_limits'].")
+
+        logger.info("Adding per carrier generation capacity constraints for individual countries")
+
+        gen_country = n.generators.bus.map(n.buses.country)
+
+        def agg_p_nom_min_rule(model, country, carrier):
+            min = agg_p_nom_minmax.at[(country, carrier), 'min']
+            return ((sum(model.generator_p_nom[gen]
+                         for gen in n.generators.index[(gen_country == country) & (n.generators.carrier == carrier)])
+                     >= min)
+                    if np.isfinite(min) else pypsa.opt.Constraint.Skip)
+
+        def agg_p_nom_max_rule(model, country, carrier):
+            max = agg_p_nom_minmax.at[(country, carrier), 'max']
+            return ((sum(model.generator_p_nom[gen]
+                         for gen in n.generators.index[(gen_country == country) & (n.generators.carrier == carrier)])
+                     <= max)
+                    if np.isfinite(max) else pypsa.opt.Constraint.Skip)
+
+        n.model.agg_p_nom_min = pypsa.opt.Constraint(list(agg_p_nom_minmax.index), rule=agg_p_nom_min_rule)
+        n.model.agg_p_nom_max = pypsa.opt.Constraint(list(agg_p_nom_minmax.index), rule=agg_p_nom_max_rule)
+

 def add_lv_constraint(n):
     line_volume = getattr(n, 'line_volume_limit', None)
     if line_volume is not None and not np.isinf(line_volume):
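Each rule is evaluated once per (country, carrier) pair from the CSV's MultiIndex; returning Constraint.Skip tells Pyomo (which pypsa.opt wraps here) to build no constraint for pairs whose bound is NaN. A stripped-down Pyomo analogue with made-up capacities, independent of PyPSA:

    import math
    from pyomo.environ import ConcreteModel, Var, Constraint, NonNegativeReals

    # Hypothetical per-country minima; NaN means "no bound", as in agg_p_nom_minmax.csv.
    p_nom_min = {'DE': 0.1, 'LU': math.nan}

    m = ConcreteModel()
    m.p_nom = Var(list(p_nom_min), domain=NonNegativeReals)

    def min_rule(model, country):
        lo = p_nom_min[country]
        # Skip tells Pyomo not to build a constraint for this index at all.
        return model.p_nom[country] >= lo if math.isfinite(lo) else Constraint.Skip

    m.agg_p_nom_min = Constraint(list(p_nom_min), rule=min_rule)
    print(len(m.agg_p_nom_min))  # 1 -- only DE got a constraint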
@@ -224,7 +254,9 @@ def solve_network(n, config=None, solver_log=None, opts=None, callback=None):
     free_output_series_dataframes(n)

     pypsa.opf.network_lopf_build_model(n, formulation=solve_opts['formulation'])
     add_opts_constraints(n, opts)

     if not fix_ext_lines:
         add_lv_constraint(n)
         add_lc_constraint(n)