Merge branch 'master' into introduce_hac_clustering

Commit 4c5ec8e5f1 by Fabian Neumann, 2022-06-24 20:22:47 +02:00, committed by GitHub
GPG Key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
21 changed files with 204 additions and 114 deletions

.gitignore

@@ -19,6 +19,7 @@ gurobi.log
/data
/data/links_p_nom.csv
/cutouts
+/dask-worker-space
doc/_build

Snakefile

@@ -160,7 +160,7 @@ if config['enable'].get('build_cutout', False):
if config['enable'].get('retrieve_cutout', True):
    rule retrieve_cutout:
-        input: HTTP.remote("zenodo.org/record/4709858/files/{cutout}.nc", keep_local=True, static=True)
+        input: HTTP.remote("zenodo.org/record/6382570/files/{cutout}.nc", keep_local=True, static=True)
        output: "cutouts/{cutout}.nc"
        run: move(input[0], output[0])
@@ -201,19 +201,19 @@ rule build_renewable_profiles:
    benchmark: "benchmarks/build_renewable_profiles_{technology}"
    threads: ATLITE_NPROCESSES
    resources: mem_mb=ATLITE_NPROCESSES * 5000
+    wildcard_constraints: technology="(?!hydro).*" # Any technology other than hydro
    script: "scripts/build_renewable_profiles.py"

-if 'hydro' in config['renewable'].keys():
-    rule build_hydro_profile:
-        input:
-            country_shapes='resources/country_shapes.geojson',
-            eia_hydro_generation='data/bundle/EIA_hydro_generation_2000_2014.csv',
-            cutout="cutouts/" + config["renewable"]['hydro']['cutout'] + ".nc"
-        output: 'resources/profile_hydro.nc'
-        log: "logs/build_hydro_profile.log"
-        resources: mem_mb=5000
-        script: 'scripts/build_hydro_profile.py'
+rule build_hydro_profile:
+    input:
+        country_shapes='resources/country_shapes.geojson',
+        eia_hydro_generation='data/bundle/EIA_hydro_generation_2000_2014.csv',
+        cutout=f"cutouts/{config['renewable']['hydro']['cutout']}.nc" if "hydro" in config["renewable"] else "config['renewable']['hydro']['cutout'] not configured",
+    output: 'resources/profile_hydro.nc'
+    log: "logs/build_hydro_profile.log"
+    resources: mem_mb=5000
+    script: 'scripts/build_hydro_profile.py'
rule add_electricity:
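
Snakemake evaluates rule inputs as ordinary Python expressions, which is what lets the rewritten build_hydro_profile rule above swap in a placeholder string whenever no 'hydro' key is configured, so the rule stays defined but never matches a real file. A minimal, self-contained sketch of that conditional-input pattern (the config contents here are hypothetical):

    # Conditional input path, as in the rule above; config values are made up.
    config = {"renewable": {"hydro": {"cutout": "europe-2013-sarah"}}}

    cutout_file = (
        f"cutouts/{config['renewable']['hydro']['cutout']}.nc"
        if "hydro" in config["renewable"]
        else "config['renewable']['hydro']['cutout'] not configured"
    )
    print(cutout_file)  # cutouts/europe-2013-sarah.nc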

config.tutorial.yaml

@@ -17,7 +17,7 @@ scenario:
  clusters: [5]
  opts: [Co2L-24H]

-countries: ['DE']
+countries: ['BE']

clustering:
  simplify_network:

@@ -63,7 +63,7 @@ electricity:
atlite:
  nprocesses: 4
  cutouts:
-    europe-2013-era5-tutorial:
+    be-03-2013-era5:
      module: era5
      x: [4., 15.]
      y: [46., 56.]

@@ -71,7 +71,7 @@ atlite:
renewable:
  onwind:
-    cutout: europe-2013-era5-tutorial
+    cutout: be-03-2013-era5
    resource:
      method: wind
      turbine: Vestas_V112_3MW

@@ -88,7 +88,7 @@ renewable:
    potential: simple # or conservative
    clip_p_max_pu: 1.e-2
  offwind-ac:
-    cutout: europe-2013-era5-tutorial
+    cutout: be-03-2013-era5
    resource:
      method: wind
      turbine: NREL_ReferenceTurbine_5MW_offshore

@@ -100,7 +100,7 @@ renewable:
    potential: simple # or conservative
    clip_p_max_pu: 1.e-2
  offwind-dc:
-    cutout: europe-2013-era5-tutorial
+    cutout: be-03-2013-era5
    resource:
      method: wind
      turbine: NREL_ReferenceTurbine_5MW_offshore

@@ -113,7 +113,7 @@ renewable:
    potential: simple # or conservative
    clip_p_max_pu: 1.e-2
  solar:
-    cutout: europe-2013-era5-tutorial
+    cutout: be-03-2013-era5
    resource:
      method: pv
      panel: CSi

doc/release_notes.rst

@@ -62,6 +62,18 @@ Upcoming Release
* New network topology extracted from the ENTSO-E interactive map.
+* The unused argument ``simple_hvdc_costs`` in :mod:`add_electricity` was removed.
+* Iterative solving with impedance updates is skipped if there are no expandable lines.
+* Switch from Germany to Belgium for continuous integration and the tutorial to save resources.
+* Use updated SARAH-2 and ERA5 cutouts with a slightly wider scope to the east and additional variables.
+* Fix CRS bug: change CRS 4236 to 4326.
+* Update the rasterio version to correctly calculate the exclusion raster.
PyPSA-Eur 0.4.0 (22nd September 2021)
=====================================

@@ -104,7 +116,7 @@ PyPSA-Eur 0.4.0 (22nd September 2021)
  [`#261 <https://github.com/PyPSA/pypsa-eur/pull/261>`_].

* The tutorial cutout was renamed from ``cutouts/europe-2013-era5.nc`` to
-  ``cutouts/europe-2013-era5-tutorial.nc`` to accommodate tutorial and productive
+  ``cutouts/be-03-2013-era5.nc`` to accommodate tutorial and productive
  cutouts side-by-side.
* The flag ``keep_all_available_areas`` in the configuration for renewable

doc/tutorial.rst

@@ -43,7 +43,7 @@ For more information on the data dependencies of PyPSA-Eur, continue reading :re

How to customise PyPSA-Eur?
===========================

-The model can be adapted to only include selected countries (e.g. Germany) instead of all European countries to limit the spatial scope.
+The model can be adapted to only include selected countries (e.g. Belgium) instead of all European countries to limit the spatial scope.

.. literalinclude:: ../config.tutorial.yaml
   :language: yaml

@@ -53,41 +53,43 @@ Likewise, the example's temporal scope can be restricted (e.g. to a single month

.. literalinclude:: ../config.tutorial.yaml
   :language: yaml
-   :lines: 24-27
+   :start-at: snapshots:
+   :end-before: enable:

It is also possible to allow less or more carbon-dioxide emissions. Here, we limit the emissions of Germany to 100 megatonnes per year.

.. literalinclude:: ../config.tutorial.yaml
   :language: yaml
-   :lines: 38,40
+   :lines: 40,42

PyPSA-Eur also includes a database of existing conventional powerplants.
We can select which types of powerplants we would like to include with fixed capacities:

.. literalinclude:: ../config.tutorial.yaml
   :language: yaml
-   :lines: 38,54
+   :lines: 40,56
To accurately model the temporal and spatial availability of renewables such as wind and solar energy, we rely on historical weather data.
It is advisable to adapt the required range of coordinates to the selection of countries.
.. literalinclude:: ../config.tutorial.yaml
   :language: yaml
-   :lines: 56-63
+   :start-at: atlite:
+   :end-before: renewable:
We can also decide which weather data source should be used to calculate potentials and capacity factor time-series for each carrier.
For example, we may want to use the ERA-5 dataset for solar and not the default SARAH-2 dataset.
.. literalinclude:: ../config.tutorial.yaml
   :language: yaml
-   :lines: 65,108-109
+   :lines: 67,110,111
Finally, it is possible to pick a solver. For instance, this tutorial uses the open-source solvers CBC and Ipopt and does not rely
on the commercial solvers Gurobi or CPLEX (for which free academic licenses are available).
.. literalinclude:: ../config.tutorial.yaml
   :language: yaml
-   :lines: 171,181-182
+   :lines: 173,183,184
.. note::
@@ -126,11 +128,6 @@ orders ``snakemake`` to run the script ``solve_network`` that produces the solve
-.. until https://github.com/snakemake/snakemake/issues/46 closed
-.. warning::
-    On Windows the previous command may currently cause a ``MissingRuleException`` due to problems with output files in subfolders.
-    This is an `open issue <https://github.com/snakemake/snakemake/issues/46>`_ at `snakemake <https://snakemake.readthedocs.io/>`_.
-    Windows users should add the option ``--keep-target-files`` to the command or instead run ``snakemake -j 1 solve_all_networks``.
This triggers a workflow of multiple preceding jobs that depend on each rule's inputs and outputs:
.. graphviz::
@@ -271,7 +268,8 @@ the wildcards given in ``scenario`` in the configuration file ``config.yaml`` ar

.. literalinclude:: ../config.tutorial.yaml
   :language: yaml
-   :lines: 14-18
+   :start-at: scenario:
+   :end-before: countries:
In this example we would not only solve a 6-node model of Germany but also a 2-node model.

envs/environment.yaml

@@ -11,7 +11,7 @@ dependencies:
  - pip
  - pypsa>=0.19.1
-  - atlite>=0.2.5
+  - atlite>=0.2.6
  - dask
# Dependencies of the workflow itself
@@ -24,7 +24,7 @@ dependencies:
  - yaml
  - pytables
  - lxml
-  - powerplantmatching>=0.4.8
+  - powerplantmatching>=0.5.3
  - numpy
  - pandas
  - geopandas
@@ -37,6 +37,7 @@ dependencies:
  - pyomo
  - matplotlib
  - proj
+  - fiona<=1.18.20 # until issue https://github.com/Toblerity/Fiona/issues/1085 is solved

  # Keep in conda environment when calling ipython
  - ipython
@@ -44,7 +45,7 @@ dependencies:
  # GIS dependencies:
  - cartopy
  - descartes
-  - rasterio
+  - rasterio<=1.2.9 # 1.2.10 creates error https://github.com/PyPSA/atlite/issues/238

  # PyPSA-Eur-Sec Dependencies
  - geopy

scripts/_helpers.py

@@ -120,7 +120,7 @@ def load_network_for_plots(fn, tech_costs, config, combine_hydro_ps=True):
    # n.storage_units.loc[bus_carrier == "heat","carrier"] = "water tanks"

    Nyears = n.snapshot_weightings.objective.sum() / 8760.
-    costs = load_costs(Nyears, tech_costs, config['costs'], config['electricity'])
+    costs = load_costs(tech_costs, config['costs'], config['electricity'], Nyears)
    update_transmission_costs(n, costs)

    return n
@@ -231,6 +231,7 @@ def mock_snakemake(rulename, **wildcards):
    import os
    from pypsa.descriptors import Dict
    from snakemake.script import Snakemake
+    from packaging.version import Version, parse

    script_dir = Path(__file__).parent.resolve()
    assert Path.cwd().resolve() == script_dir, \

@@ -240,7 +241,8 @@ def mock_snakemake(rulename, **wildcards):
        if os.path.exists(p):
            snakefile = p
            break
-    workflow = sm.Workflow(snakefile, overwrite_configfiles=[])
+    kwargs = dict(rerun_triggers=[]) if parse(sm.__version__) > Version("7.7.0") else {}
+    workflow = sm.Workflow(snakefile, overwrite_configfiles=[], **kwargs)
    workflow.include(snakefile)
    workflow.global_resources = {}
    rule = workflow.get_rule(rulename)
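
The version gate above only passes rerun_triggers=[] to snakemake releases that understand it. A small sketch of the pattern, assuming only the packaging library (the helper name is hypothetical):

    from packaging.version import Version, parse

    def workflow_kwargs(installed_version):
        # Only pass rerun_triggers on snakemake versions newer than 7.7.0.
        return dict(rerun_triggers=[]) if parse(installed_version) > Version("7.7.0") else {}

    assert workflow_kwargs("7.8.1") == {"rerun_triggers": []}
    assert workflow_kwargs("7.6.0") == {}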

scripts/add_electricity.py

@@ -94,7 +94,6 @@ import geopandas as gpd
import powerplantmatching as pm
from powerplantmatching.export import map_country_bus

-from vresutils.costdata import annuity
from vresutils import transfer as vtransfer

idx = pd.IndexSlice
@@ -105,6 +104,18 @@ logger = logging.getLogger(__name__)
def normed(s): return s/s.sum()

+def calculate_annuity(n, r):
+    """Calculate the annuity factor for an asset with lifetime n years and
+    discount rate of r, e.g. annuity(20, 0.05) * 20 = 1.6"""
+
+    if isinstance(r, pd.Series):
+        return pd.Series(1/n, index=r.index).where(r == 0, r/(1. - 1./(1.+r)**n))
+    elif r > 0:
+        return r / (1. - 1./(1.+r)**n)
+    else:
+        return 1 / n
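
The docstring's example can be checked directly: with a 20-year lifetime and a 5 % discount rate, the factor is about 0.0802 per year, i.e. roughly 1.6 times the investment over the asset's life. A quick numeric check of the scalar branch:

    # Scalar branch of calculate_annuity: r / (1 - 1/(1+r)**n).
    r, n = 0.05, 20
    a = r / (1. - 1. / (1. + r) ** n)
    print(round(a, 4))      # 0.0802
    print(round(a * n, 2))  # 1.6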
def _add_missing_carriers_from_costs(n, costs, carriers):
    missing_carriers = pd.Index(carriers).difference(n.carriers.index)
    if missing_carriers.empty: return
@@ -138,7 +149,7 @@ def load_costs(tech_costs, config, elec_config, Nyears=1.):
                          "investment" : 0,
                          "lifetime" : 25})

-    costs["capital_cost"] = ((annuity(costs["lifetime"], costs["discount rate"]) +
+    costs["capital_cost"] = ((calculate_annuity(costs["lifetime"], costs["discount rate"]) +
                              costs["FOM"]/100.) *
                             costs["investment"] * Nyears)
@@ -227,7 +238,7 @@ def attach_load(n, regions, load, nuts3_shapes, countries, scaling=1.):
    n.madd("Load", substation_lv_i, bus=substation_lv_i, p_set=load)

-def update_transmission_costs(n, costs, length_factor=1.0, simple_hvdc_costs=False):
+def update_transmission_costs(n, costs, length_factor=1.0):
    # TODO: line length factor of lines is applied to lines and links.
    # Separate the function to distinguish.
@@ -242,16 +253,12 @@ def update_transmission_costs(n, costs, length_factor=1.0, simple_hvdc_costs=Fal
    # may be missing. Therefore we have to return here.
    if n.links.loc[dc_b].empty: return

-    if simple_hvdc_costs:
-        costs = (n.links.loc[dc_b, 'length'] * length_factor *
-                 costs.at['HVDC overhead', 'capital_cost'])
-    else:
-        costs = (n.links.loc[dc_b, 'length'] * length_factor *
-                ((1. - n.links.loc[dc_b, 'underwater_fraction']) *
-                 costs.at['HVDC overhead', 'capital_cost'] +
-                 n.links.loc[dc_b, 'underwater_fraction'] *
-                 costs.at['HVDC submarine', 'capital_cost']) +
-                 costs.at['HVDC inverter pair', 'capital_cost'])
+    costs = (n.links.loc[dc_b, 'length'] * length_factor *
+            ((1. - n.links.loc[dc_b, 'underwater_fraction']) *
+             costs.at['HVDC overhead', 'capital_cost'] +
+             n.links.loc[dc_b, 'underwater_fraction'] *
+             costs.at['HVDC submarine', 'capital_cost']) +
+             costs.at['HVDC inverter pair', 'capital_cost'])

    n.links.loc[dc_b, 'capital_cost'] = costs
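
With simple_hvdc_costs gone, only the detailed formula remains: overhead cost on the land share of each DC link, submarine cost on the underwater share, plus a fixed inverter-pair cost. A worked example with made-up numbers:

    # Hypothetical cost figures, for illustration only.
    length, length_factor = 400.0, 1.25        # km and detour factor
    underwater_fraction = 0.6
    overhead, submarine, inverter_pair = 400.0, 2000.0, 150000.0

    capital_cost = (length * length_factor *
                    ((1. - underwater_fraction) * overhead +
                     underwater_fraction * submarine) +
                    inverter_pair)
    print(capital_cost)  # 830000.0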

scripts/build_bus_regions.py

@@ -47,9 +47,10 @@ from _helpers import configure_logging
import pypsa
import os
import pandas as pd
+import numpy as np
import geopandas as gpd
-from vresutils.graph import voronoi_partition_pts
+from shapely.geometry import Polygon
+from scipy.spatial import Voronoi
logger = logging.getLogger(__name__)
@@ -61,6 +62,53 @@ def save_to_geojson(s, fn):
    s.to_file(fn, driver='GeoJSON', schema=schema)
+def voronoi_partition_pts(points, outline):
+    """
+    Compute the polygons of a voronoi partition of `points` within the
+    polygon `outline`. Taken from
+    https://github.com/FRESNA/vresutils/blob/master/vresutils/graph.py
+
+    Attributes
+    ----------
+    points : Nx2 - ndarray[dtype=float]
+    outline : Polygon
+
+    Returns
+    -------
+    polygons : N - ndarray[dtype=Polygon|MultiPolygon]
+    """
+    points = np.asarray(points)
+
+    if len(points) == 1:
+        polygons = [outline]
+    else:
+        xmin, ymin = np.amin(points, axis=0)
+        xmax, ymax = np.amax(points, axis=0)
+        xspan = xmax - xmin
+        yspan = ymax - ymin
+
+        # to avoid any network positions outside all Voronoi cells, append
+        # the corners of a rectangle framing these points
+        vor = Voronoi(np.vstack((points,
+                                 [[xmin-3.*xspan, ymin-3.*yspan],
+                                  [xmin-3.*xspan, ymax+3.*yspan],
+                                  [xmax+3.*xspan, ymin-3.*yspan],
+                                  [xmax+3.*xspan, ymax+3.*yspan]])))
+
+        polygons = []
+        for i in range(len(points)):
+            poly = Polygon(vor.vertices[vor.regions[vor.point_region[i]]])
+
+            if not poly.is_valid:
+                poly = poly.buffer(0)
+
+            poly = poly.intersection(outline)
+            polygons.append(poly)
+
+    return np.array(polygons, dtype=object)
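
A usage sketch for the vendored helper, assuming it and its imports (numpy, shapely, scipy) are in scope; with two points in the unit square, the two returned cells tile the square along the perpendicular bisector of the points:

    import numpy as np
    from shapely.geometry import Polygon

    points = np.array([[0.25, 0.25], [0.75, 0.75]])
    outline = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])

    cells = voronoi_partition_pts(points, outline)
    print(len(cells), cells[0].area + cells[1].area)  # 2 cells, total area ~1.0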
if __name__ == "__main__":
    if 'snakemake' not in globals():
        from _helpers import mock_snakemake

scripts/build_load_data.py

@@ -116,14 +116,19 @@ def nan_statistics(df):
                     keys=['total', 'consecutive', 'max_total_per_month'], axis=1)

-def copy_timeslice(load, cntry, start, stop, delta):
+def copy_timeslice(load, cntry, start, stop, delta, fn_load=None):
    start = pd.Timestamp(start)
    stop = pd.Timestamp(stop)
-    if start-delta in load.index and stop in load.index and cntry in load:
-        load.loc[start:stop, cntry] = load.loc[start-delta:stop-delta, cntry].values
+    if (start in load.index and stop in load.index):
+        if start-delta in load.index and stop-delta in load.index and cntry in load:
+            load.loc[start:stop, cntry] = load.loc[start-delta:stop-delta, cntry].values
+        elif fn_load is not None:
+            duration = pd.date_range(freq='h', start=start-delta, end=stop-delta)
+            load_raw = load_timeseries(fn_load, duration, [cntry], powerstatistics)
+            load.loc[start:stop, cntry] = load_raw.loc[start-delta:stop-delta, cntry].values
-def manual_adjustment(load, powerstatistics):
+def manual_adjustment(load, fn_load, powerstatistics):
    """
    Adjust gaps manually for load data from the OPSD time-series package.

@@ -150,6 +155,8 @@ def manual_adjustment(load, powerstatistics):
    powerstatistics: bool
        Whether argument load comprises the electricity consumption data of
        the ENTSOE power statistics or of the ENTSOE transparency map
+    fn_load: str
+        File name or url location (file format .csv)

    Returns
    -------
@@ -175,7 +182,11 @@ def manual_adjustment(load, powerstatistics):
        copy_timeslice(load, 'CH', '2010-11-04 04:00', '2010-11-04 22:00', Delta(days=1))
        copy_timeslice(load, 'NO', '2010-12-09 11:00', '2010-12-09 18:00', Delta(days=1))
        # whole january missing
-        copy_timeslice(load, 'GB', '2009-12-31 23:00', '2010-01-31 23:00', Delta(days=-364))
+        copy_timeslice(load, 'GB', '2010-01-01 00:00', '2010-01-31 23:00', Delta(days=-365), fn_load)
+        # 1.1. at midnight gets special treatment
+        copy_timeslice(load, 'IE', '2016-01-01 00:00', '2016-01-01 01:00', Delta(days=-366), fn_load)
+        copy_timeslice(load, 'PT', '2016-01-01 00:00', '2016-01-01 01:00', Delta(days=-366), fn_load)
+        copy_timeslice(load, 'GB', '2016-01-01 00:00', '2016-01-01 01:00', Delta(days=-366), fn_load)

    else:
        if 'ME' in load:
@@ -206,7 +217,7 @@ if __name__ == "__main__":
    load = load_timeseries(snakemake.input[0], years, countries, powerstatistics)

    if snakemake.config['load']['manual_adjustments']:
-        load = manual_adjustment(load, powerstatistics)
+        load = manual_adjustment(load, snakemake.input[0], powerstatistics)

    logger.info(f"Linearly interpolate gaps of size {interpolate_limit} and less.")
    load = load.interpolate(method='linear', limit=interpolate_limit)
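
The core move in copy_timeslice is overwriting a broken window with the same window shifted by delta; the new fn_load fallback re-reads the raw time series when the shifted window is not in the (possibly year-restricted) frame. A minimal sketch of the shift-and-copy step on synthetic data:

    import pandas as pd

    idx = pd.date_range("2010-01-01", "2010-01-03 23:00", freq="h")
    load = pd.DataFrame({"GB": range(len(idx))}, index=idx, dtype=float)

    start = pd.Timestamp("2010-01-02 04:00")
    stop = pd.Timestamp("2010-01-02 06:00")
    delta = pd.Timedelta(days=1)

    # Overwrite the broken window with the same hours one day earlier.
    load.loc[start:stop, "GB"] = load.loc[start - delta:stop - delta, "GB"].values
    print(load.loc[start:stop, "GB"].tolist())  # [4.0, 5.0, 6.0]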

scripts/build_natura_raster.py

@@ -40,7 +40,7 @@ Description
"""
import logging
-from _helpers import configure_logging, retrieve_snakemake_keys
+from _helpers import configure_logging

import atlite
import geopandas as gpd
@@ -73,20 +73,17 @@ if __name__ == "__main__":
        snakemake = mock_snakemake('build_natura_raster')
    configure_logging(snakemake)

-    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
-
-    cutouts = paths.cutouts
+    cutouts = snakemake.input.cutouts
    xs, Xs, ys, Ys = zip(*(determine_cutout_xXyY(cutout) for cutout in cutouts))
    bounds = transform_bounds(4326, 3035, min(xs), min(ys), max(Xs), max(Ys))
    transform, out_shape = get_transform_and_shape(bounds, res=100)

    # adjusted boundaries
-    shapes = gpd.read_file(paths.natura).to_crs(3035)
+    shapes = gpd.read_file(snakemake.input.natura).to_crs(3035)
    raster = ~geometry_mask(shapes.geometry, out_shape[::-1], transform)
    raster = raster.astype(rio.uint8)

-    with rio.open(out[0], 'w', driver='GTiff', dtype=rio.uint8,
+    with rio.open(snakemake.output[0], 'w', driver='GTiff', dtype=rio.uint8,
                  count=1, transform=transform, crs=3035, compress='lzw',
                  width=raster.shape[1], height=raster.shape[0]) as dst:
        dst.write(raster, indexes=1)

scripts/build_renewable_profiles.py

@@ -189,6 +189,7 @@ import logging
from pypsa.geo import haversine
from shapely.geometry import LineString
import time
+from dask.distributed import Client, LocalCluster

from _helpers import configure_logging
@@ -202,8 +203,8 @@ if __name__ == '__main__':
    configure_logging(snakemake)
    pgb.streams.wrap_stderr()

-    nprocesses = snakemake.config['atlite'].get('nprocesses')
-    noprogress = not snakemake.config['atlite'].get('show_progress', True)
+    nprocesses = int(snakemake.threads)
+    noprogress = not snakemake.config['atlite'].get('show_progress', False)
    config = snakemake.config['renewable'][snakemake.wildcards.technology]
    resource = config['resource'] # pv panel config / wind turbine config
    correction_factor = config.get('correction_factor', 1.)
@@ -216,7 +217,9 @@ if __name__ == '__main__':
    if correction_factor != 1.:
        logger.info(f'correction_factor is set as {correction_factor}')

+    cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
+    client = Client(cluster, asynchronous=True)
+
    cutout = atlite.Cutout(snakemake.input['cutout'])
    regions = gpd.read_file(snakemake.input.regions).set_index('name').rename_axis('bus')
    buses = regions.index
@@ -240,7 +243,7 @@ if __name__ == '__main__':
        # use named function np.greater with partially frozen argument instead
        # and exclude areas where: -max_depth > grid cell depth
        func = functools.partial(np.greater,-config['max_depth'])
-        excluder.add_raster(snakemake.input.gebco, codes=func, crs=4236, nodata=-1000)
+        excluder.add_raster(snakemake.input.gebco, codes=func, crs=4326, nodata=-1000)

    if 'min_shore_distance' in config:
        buffer = config['min_shore_distance']
@@ -266,7 +269,7 @@ if __name__ == '__main__':
    potential = capacity_per_sqkm * availability.sum('bus') * area
    func = getattr(cutout, resource.pop('method'))
-    resource['dask_kwargs'] = {'num_workers': nprocesses}
+    resource['dask_kwargs'] = {"scheduler": client}
    capacity_factor = correction_factor * func(capacity_factor=True, **resource)
    layout = capacity_factor * area * capacity_per_sqkm
    profile, capacities = func(matrix=availability.stack(spatial=['y','x']),
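
Instead of handing atlite a bare worker count, the script now spins up an explicit dask cluster and passes the client in as scheduler. A stripped-down sketch of that setup (worker count hypothetical):

    from dask.distributed import Client, LocalCluster

    if __name__ == "__main__":
        cluster = LocalCluster(n_workers=2, threads_per_worker=1)
        client = Client(cluster)

        # atlite conversion functions accept dask_kwargs and forward them to dask.
        resource = {"dask_kwargs": {"scheduler": client}}
        print(client)

        client.close()
        cluster.close()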

scripts/cluster_network.py

@@ -358,7 +358,14 @@ def clustering_for_n_clusters(n, n_clusters, custom_busmap=False, aggregate_carr
        aggregate_generators_carriers=aggregate_carriers,
        aggregate_one_ports=["Load", "StorageUnit"],
        line_length_factor=line_length_factor,
-        generator_strategies={'p_nom_max': p_nom_max_strategy, 'p_nom_min': pd.Series.sum},
+        generator_strategies={'p_nom_max': p_nom_max_strategy,
+                              'p_nom_min': pd.Series.sum,
+                              'p_min_pu': pd.Series.mean,
+                              'marginal_cost': pd.Series.mean,
+                              'committable': np.any,
+                              'ramp_limit_up': pd.Series.max,
+                              'ramp_limit_down': pd.Series.max,
+                              },
        scale_link_capital_costs=False)
if not n.links.empty:
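
Each entry in generator_strategies names the reduction applied to one generator attribute when several generators collapse into one cluster. The effect is essentially a per-column groupby aggregation; a toy illustration (frame and values hypothetical):

    import numpy as np
    import pandas as pd

    gens = pd.DataFrame({
        "cluster":       ["a", "a", "b"],
        "p_nom_min":     [10., 20., 5.],
        "marginal_cost": [30., 50., 40.],
        "committable":   [False, True, False],
        "ramp_limit_up": [0.5, 0.9, 1.0],
    })
    strategies = {"p_nom_min": "sum", "marginal_cost": "mean",
                  "committable": np.any, "ramp_limit_up": "max"}
    print(gens.groupby("cluster").agg(strategies))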

scripts/make_summary.py

@@ -171,6 +171,9 @@ def calculate_capacity(n,label,capacity):
        if 'p_nom_opt' in c.df.columns:
            c_capacities = abs(c.df.p_nom_opt.multiply(c.df.sign)).groupby(c.df.carrier).sum()
            capacity = include_in_summary(capacity, [c.list_name], label, c_capacities)
+        elif 'e_nom_opt' in c.df.columns:
+            c_capacities = abs(c.df.e_nom_opt.multiply(c.df.sign)).groupby(c.df.carrier).sum()
+            capacity = include_in_summary(capacity, [c.list_name], label, c_capacities)

    for c in n.iterate_components(n.passive_branch_components):
        c_capacities = c.df['s_nom_opt'].groupby(c.df.carrier).sum()
@@ -185,11 +188,11 @@ def calculate_capacity(n,label,capacity):
def calculate_supply(n, label, supply):
    """calculate the max dispatch of each component at the buses where the loads are attached"""

-    load_types = n.loads.carrier.value_counts().index
+    load_types = n.buses.carrier.unique()

    for i in load_types:
-        buses = n.loads.bus[n.loads.carrier == i].values
+        buses = n.buses.query("carrier == @i").index

        bus_map = pd.Series(False,index=n.buses.index)
@@ -232,11 +235,11 @@ def calculate_supply(n, label, supply):
def calculate_supply_energy(n, label, supply_energy):
    """calculate the total dispatch of each component at the buses where the loads are attached"""

-    load_types = n.loads.carrier.value_counts().index
+    load_types = n.buses.carrier.unique()

    for i in load_types:
-        buses = n.loads.bus[n.loads.carrier == i].values
+        buses = n.buses.query("carrier == @i").index

        bus_map = pd.Series(False,index=n.buses.index)
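
Selecting buses by their own carrier instead of via attached loads also covers buses that carry no load. A toy version of the switched selection (frame hypothetical):

    import pandas as pd

    buses = pd.DataFrame({"carrier": ["AC", "AC", "DC"]}, index=["b1", "b2", "b3"])
    for i in buses.carrier.unique():
        # pandas query resolves @i from the surrounding scope.
        print(i, buses.query("carrier == @i").index.tolist())
    # AC ['b1', 'b2'] / DC ['b3']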
@@ -404,7 +407,7 @@ def make_summaries(networks_dict, paths, config, country='all'):
        Nyears = n.snapshot_weightings.objective.sum() / 8760.
        costs = load_costs(paths[0], config['costs'], config['electricity'], Nyears)

-        update_transmission_costs(n, costs, simple_hvdc_costs=False)
+        update_transmission_costs(n, costs)

        assign_carriers(n)

scripts/plot_network.py

@@ -20,8 +20,7 @@ Description
"""
import logging
-from _helpers import (retrieve_snakemake_keys, load_network_for_plots,
-                      aggregate_p, aggregate_costs, configure_logging)
+from _helpers import (load_network_for_plots, aggregate_p, aggregate_costs, configure_logging)

import pandas as pd
import numpy as np
@@ -75,7 +74,7 @@ def set_plot_style():
               }])

-def plot_map(n, ax=None, attribute='p_nom', opts={}):
+def plot_map(n, opts, ax=None, attribute='p_nom'):
    if ax is None:
        ax = plt.gca()
@@ -182,7 +181,7 @@ def plot_map(n, ax=None, attribute='p_nom', opts={}):
    return fig

-def plot_total_energy_pie(n, ax=None):
+def plot_total_energy_pie(n, opts, ax=None):
    if ax is None: ax = plt.gca()

    ax.set_title('Energy per technology', fontdict=dict(fontsize="medium"))
@@ -200,7 +199,7 @@ def plot_total_energy_pie(n, ax=None):
    t1.remove()
    t2.remove()

-def plot_total_cost_bar(n, ax=None):
+def plot_total_cost_bar(n, opts, ax=None):
    if ax is None: ax = plt.gca()

    total_load = (n.snapshot_weightings.generators * n.loads_t.p.sum(axis=1)).sum()
@@ -259,25 +258,25 @@ if __name__ == "__main__":
    set_plot_style()

-    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
+    config, wildcards = snakemake.config, snakemake.wildcards

-    map_figsize = config['map']['figsize']
-    map_boundaries = config['map']['boundaries']
+    map_figsize = config["plotting"]['map']['figsize']
+    map_boundaries = config["plotting"]['map']['boundaries']

-    n = load_network_for_plots(paths.network, paths.tech_costs, config)
+    n = load_network_for_plots(snakemake.input.network, snakemake.input.tech_costs, config)

    scenario_opts = wildcards.opts.split('-')

    fig, ax = plt.subplots(figsize=map_figsize, subplot_kw={"projection": ccrs.PlateCarree()})
-    plot_map(n, ax, wildcards.attr, config)
+    plot_map(n, config["plotting"], ax=ax, attribute=wildcards.attr)

-    fig.savefig(out.only_map, dpi=150, bbox_inches='tight')
+    fig.savefig(snakemake.output.only_map, dpi=150, bbox_inches='tight')

    ax1 = fig.add_axes([-0.115, 0.625, 0.2, 0.2])
-    plot_total_energy_pie(n, ax1)
+    plot_total_energy_pie(n, config["plotting"], ax=ax1)

    ax2 = fig.add_axes([-0.075, 0.1, 0.1, 0.45])
-    plot_total_cost_bar(n, ax2)
+    plot_total_cost_bar(n, config["plotting"], ax=ax2)

    ll = wildcards.ll
    ll_type = ll[0]
@@ -287,4 +286,4 @@ if __name__ == "__main__":
    fig.suptitle('Expansion to {amount} {label} at {clusters} clusters'
                 .format(amount=amnt, label=lbl, clusters=wildcards.clusters))

-    fig.savefig(out.ext, transparent=True, bbox_inches='tight')
+    fig.savefig(snakemake.output.ext, transparent=True, bbox_inches='tight')

scripts/plot_p_nom_max.py

@@ -19,7 +19,7 @@ Description
"""
import logging
-from _helpers import configure_logging, retrieve_snakemake_keys
+from _helpers import configure_logging

import pypsa
import pandas as pd
@@ -53,13 +53,11 @@ if __name__ == "__main__":
                                   clusts= '5,full', country= 'all')
    configure_logging(snakemake)

-    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
-
    plot_kwds = dict(drawstyle="steps-post")

-    clusters = wildcards.clusts.split(',')
-    techs = wildcards.techs.split(',')
-    country = wildcards.country
+    clusters = snakemake.wildcards.clusts.split(',')
+    techs = snakemake.wildcards.techs.split(',')
+    country = snakemake.wildcards.country

    if country == 'all':
        country = None
    else:
@@ -68,7 +66,7 @@ if __name__ == "__main__":
    fig, axes = plt.subplots(1, len(techs))

    for j, cluster in enumerate(clusters):
-        net = pypsa.Network(paths[j])
+        net = pypsa.Network(snakemake.input[j])

        for i, tech in enumerate(techs):
            cum_p_nom_max(net, tech, country).plot(x="p_max_pu", y="cum_p_nom_max",
@@ -81,4 +79,4 @@ if __name__ == "__main__":
    plt.legend(title="Cluster level")

-    fig.savefig(out[0], transparent=True, bbox_inches='tight')
+    fig.savefig(snakemake.output[0], transparent=True, bbox_inches='tight')

scripts/plot_summary.py

@@ -21,7 +21,7 @@ Description
import os
import logging
-from _helpers import configure_logging, retrieve_snakemake_keys
+from _helpers import configure_logging

import pandas as pd
import matplotlib.pyplot as plt
@@ -170,12 +170,12 @@ if __name__ == "__main__":
                                   attr='', ext='png', country='all')
    configure_logging(snakemake)

-    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
+    config = snakemake.config

-    summary = wildcards.summary
+    summary = snakemake.wildcards.summary

    try:
        func = globals()[f"plot_{summary}"]
    except KeyError:
        raise RuntimeError(f"plotting function for {summary} has not been defined")

-    func(os.path.join(paths[0], f"{summary}.csv"), config, out[0])
+    func(os.path.join(snakemake.input[0], f"{summary}.csv"), config, snakemake.output[0])

scripts/prepare_links_p_nom.py

@@ -37,7 +37,7 @@ Description
"""
import logging
-from _helpers import configure_logging, retrieve_snakemake_keys
+from _helpers import configure_logging

import pandas as pd
@@ -63,8 +63,6 @@ if __name__ == "__main__":
        snakemake = mock_snakemake('prepare_links_p_nom', simpl='', network='elec')
    configure_logging(snakemake)

-    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
-
    links_p_nom = pd.read_html('https://en.wikipedia.org/wiki/List_of_HVDC_projects', header=0, match="SwePol")[0]

    mw = "Power (MW)"

@@ -76,4 +74,4 @@ if __name__ == "__main__":
    links_p_nom['x1'], links_p_nom['y1'] = extract_coordinates(links_p_nom['Converterstation 1'])
    links_p_nom['x2'], links_p_nom['y2'] = extract_coordinates(links_p_nom['Converterstation 2'])

-    links_p_nom.dropna(subset=['x1', 'y1', 'x2', 'y2']).to_csv(out[0], index=False)
+    links_p_nom.dropna(subset=['x1', 'y1', 'x2', 'y2']).to_csv(snakemake.output[0], index=False)

scripts/prepare_network.py

@@ -104,7 +104,7 @@ def set_transmission_limit(n, ll_type, factor, costs, Nyears=1):
    ref = (lines_s_nom @ n.lines[col] +
           n.links.loc[links_dc_b, "p_nom"] @ n.links.loc[links_dc_b, col])

-    update_transmission_costs(n, costs, simple_hvdc_costs=False)
+    update_transmission_costs(n, costs)

    if factor == 'opt' or float(factor) > 1.0:
        n.lines['s_nom_min'] = lines_s_nom

scripts/solve_network.py

@@ -100,7 +100,7 @@ def prepare_network(n, solve_opts):
            df.where(df>solve_opts['clip_p_max_pu'], other=0., inplace=True)

    if solve_opts.get('load_shedding'):
-        n.add("Carrier", "Load")
+        n.add("Carrier", "load", color="#dd2e23", nice_name="Load shedding")
        buses_i = n.buses.query("carrier == 'AC'").index
        n.madd("Generator", buses_i, " load",
               bus=buses_i,
@@ -254,7 +254,12 @@ def solve_network(n, config, opts='', **kwargs):
    n.config = config
    n.opts = opts

-    if cf_solving.get('skip_iterations', False):
+    skip_iterations = cf_solving.get('skip_iterations', False)
+    if not n.lines.s_nom_extendable.any():
+        skip_iterations = True
+        logger.info("No expandable lines found. Skipping iterative solving.")
+
+    if skip_iterations:
        network_lopf(n, solver_name=solver_name, solver_options=solver_options,
                     extra_functionality=extra_functionality, **kwargs)
    else:

test/config.test1.yaml

@@ -16,7 +16,7 @@ scenario:
  clusters: [5]
  opts: [Co2L-24H]

-countries: ['DE']
+countries: ['BE']

clustering:
  simplify_network:

@@ -62,7 +62,7 @@ electricity:
atlite:
  nprocesses: 4
  cutouts:
-    europe-2013-era5-tutorial:
+    be-03-2013-era5:
      module: era5
      x: [4., 15.]
      y: [46., 56.]

@@ -70,7 +70,7 @@ atlite:
renewable:
  onwind:
-    cutout: europe-2013-era5-tutorial
+    cutout: be-03-2013-era5
    resource:
      method: wind
      turbine: Vestas_V112_3MW

@@ -87,7 +87,7 @@ renewable:
    potential: simple # or conservative
    clip_p_max_pu: 1.e-2
  offwind-ac:
-    cutout: europe-2013-era5-tutorial
+    cutout: be-03-2013-era5
    resource:
      method: wind
      turbine: NREL_ReferenceTurbine_5MW_offshore

@@ -99,7 +99,7 @@ renewable:
    potential: simple # or conservative
    clip_p_max_pu: 1.e-2
  offwind-dc:
-    cutout: europe-2013-era5-tutorial
+    cutout: be-03-2013-era5
    resource:
      method: wind
      turbine: NREL_ReferenceTurbine_5MW_offshore

@@ -112,7 +112,7 @@ renewable:
    potential: simple # or conservative
    clip_p_max_pu: 1.e-2
  solar:
-    cutout: europe-2013-era5-tutorial
+    cutout: be-03-2013-era5
    resource:
      method: pv
      panel: CSi