Add a summary sentence for each rule taken from scripts

This commit is contained in:
Jonas Hoersch 2019-08-08 15:02:28 +02:00
parent aa34e0406f
commit fb0d44f64e
22 changed files with 270 additions and 99 deletions

doc/preparation.rst

@ -8,36 +8,54 @@ Preparing Networks
Build Shapes
=============================

.. automodule:: build_shapes

Build Cutout
=============================

.. automodule:: build_cutout

Prepare HVDC Links
=============================

.. automodule:: prepare_links_p_nom

Base Network
=============================

.. automodule:: base_network

Build Bus Regions
=============================

.. automodule:: build_bus_regions

Build Country Full Load Hours
=============================

.. automodule:: build_country_flh

Build Hydro Profile
=============================

.. automodule:: build_hydro_profile

Build Natura Raster
=============================

.. automodule:: build_natura_raster

Build Renewable Profiles
========================

.. automodule:: build_renewable_profiles

Build Power Plants
=============================

.. automodule:: build_powerplants

Add Electricity
=============================

.. automodule:: add_electricity

scripts/add_electricity.py

@ -1,4 +1,7 @@
# coding: utf-8
"""
Adds electrical generators and storage units to the base network
"""
import logging
logger = logging.getLogger(__name__)

scripts/base_network.py

@ -1,4 +1,7 @@
# coding: utf-8
"""
Creates the network topology from ENTSO-E map extracts as a PyPSA network
"""
import yaml
import pandas as pd

scripts/build_bus_regions.py

@ -1,3 +1,7 @@
"""
Creates onshore and offshore Voronoi shapes for each bus
"""
import os
from operator import attrgetter
@ -7,48 +11,52 @@ import geopandas as gpd
from vresutils.graph import voronoi_partition_pts
import pypsa
import logging
if __name__ == "__main__":
    logging.basicConfig(level=snakemake.config["logging_level"])

    countries = snakemake.config['countries']

    n = pypsa.Network(snakemake.input.base_network)

    country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index('name')['geometry']
    offshore_shapes = gpd.read_file(snakemake.input.offshore_shapes).set_index('name')['geometry']

    onshore_regions = []
    offshore_regions = []

    for country in countries:
        c_b = n.buses.country == country

        onshore_shape = country_shapes[country]
        onshore_locs = n.buses.loc[c_b & n.buses.substation_lv, ["x", "y"]]
        onshore_regions.append(gpd.GeoDataFrame({
                'x': onshore_locs['x'],
                'y': onshore_locs['y'],
                'geometry': voronoi_partition_pts(onshore_locs.values, onshore_shape),
                'country': country
            }, index=onshore_locs.index))

        if country not in offshore_shapes.index: continue
        offshore_shape = offshore_shapes[country]
        offshore_locs = n.buses.loc[c_b & n.buses.substation_off, ["x", "y"]]
        offshore_regions_c = gpd.GeoDataFrame({
                'x': offshore_locs['x'],
                'y': offshore_locs['y'],
                'geometry': voronoi_partition_pts(offshore_locs.values, offshore_shape),
                'country': country
            }, index=offshore_locs.index)
        offshore_regions_c = offshore_regions_c.loc[offshore_regions_c.area > 1e-2]
        offshore_regions.append(offshore_regions_c)

    def save_to_geojson(s, fn):
        if os.path.exists(fn):
            os.unlink(fn)
        df = s.reset_index()
        schema = {**gpd.io.file.infer_schema(df), 'geometry': 'Unknown'}
        df.to_file(fn, driver='GeoJSON', schema=schema)

    save_to_geojson(pd.concat(onshore_regions), snakemake.output.regions_onshore)
    save_to_geojson(pd.concat(offshore_regions), snakemake.output.regions_offshore)
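For orientation, `voronoi_partition_pts` from vresutils splits a country polygon into one Voronoi cell per bus location, as used above; a minimal standalone sketch (the shape and coordinates are made up):

import numpy as np
from shapely.geometry import Polygon
from vresutils.graph import voronoi_partition_pts

# a square "country" containing two bus locations
shape = Polygon([(0, 0), (4, 0), (4, 4), (0, 4)])
points = np.array([[1., 1.], [3., 3.]])

# returns one polygon per point; together they tile `shape`
cells = voronoi_partition_pts(points, shape)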

scripts/build_country_flh.py

@ -1,4 +1,7 @@
#!/usr/bin/env python
"""
Create csv files and plots comparing per-country full load hours of renewable time series
"""
import os
import atlite
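The full load hours compared by this script are simply the annual sum of a per-unit availability time series; a minimal illustration (numbers made up):

import pandas as pd

# hourly per-unit availability over one year (8760 values)
profile = pd.Series([0.25] * 8760)

# full load hours = annual energy per MW installed
flh = profile.sum()
print(flh)  # 2190.0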

scripts/build_cutout.py

@ -1,17 +1,21 @@
"""
Create cutouts configured in the `atlite` config section
"""
import os
import atlite
import logging
logger = logging.getLogger(__name__)
if __name__ == "__main__":
    logging.basicConfig(level=snakemake.config['logging_level'])

    cutout_params = snakemake.config['atlite']['cutouts'][snakemake.wildcards.cutout]
    for p in ('xs', 'ys', 'years', 'months'):
        if p in cutout_params:
            cutout_params[p] = slice(*cutout_params[p])

    cutout = atlite.Cutout(snakemake.wildcards.cutout,
                           cutout_dir=os.path.dirname(snakemake.output[0]),
                           **cutout_params)

    cutout.prepare(nprocesses=snakemake.config['atlite'].get('nprocesses', 4))
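For illustration, the `slice(*...)` loop above turns two-element lists from the config into coordinate slices; a standalone sketch (parameter values hypothetical):

# hypothetical cutout parameters as they might appear in the config
cutout_params = {'xs': [-12., 35.], 'ys': [33., 72.], 'module': 'era5'}

for p in ('xs', 'ys', 'years', 'months'):
    if p in cutout_params:
        cutout_params[p] = slice(*cutout_params[p])

print(cutout_params['xs'])  # slice(-12.0, 35.0, None)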

scripts/build_hydro_profile.py

@ -1,4 +1,11 @@
#!/usr/bin/env python
"""
Build hydroelectric inflow time-series for each country
See also
--------
build_renewable_profiles
"""
import os
import atlite
@ -6,24 +13,26 @@ import pandas as pd
import geopandas as gpd
from vresutils import hydro as vhydro
import logging
logger = logging.getLogger(__name__)
if __name__ == "__main__":
    logging.basicConfig(level=snakemake.config['logging_level'])

    config = snakemake.config['renewable']['hydro']
    cutout = atlite.Cutout(config['cutout'],
                           cutout_dir=os.path.dirname(snakemake.input.cutout))

    countries = snakemake.config['countries']
    country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index('name')['geometry'].reindex(countries)
    country_shapes.index.name = 'countries'

    eia_stats = vhydro.get_eia_annual_hydro_generation(snakemake.input.eia_hydro_generation).reindex(columns=countries)
    inflow = cutout.runoff(shapes=country_shapes,
                           smooth=True,
                           lower_threshold_quantile=True,
                           normalize_using_yearly=eia_stats)

    if 'clip_min_inflow' in config:
        inflow.values[inflow.values < config['clip_min_inflow']] = 0.

    inflow.to_netcdf(snakemake.output[0])
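The `normalize_using_yearly` argument above rescales the runoff so that each year's total matches the EIA statistics; conceptually it amounts to the following sketch (not atlite's actual implementation):

import pandas as pd

def normalize_yearly(inflow, yearly_stats):
    # rescale so each calendar year's total matches the given statistics
    years = inflow.index.year
    annual_sums = inflow.groupby(years).transform('sum')
    return inflow * yearly_stats.reindex(years).values / annual_sums

idx = pd.date_range('2013-01-01', '2013-12-31 23:00', freq='H')
inflow = pd.Series(1.0, index=idx)  # flat runoff, made up
print(normalize_yearly(inflow, pd.Series({2013: 4380.})).sum())  # 4380.0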

scripts/build_natura_raster.py

@ -1,3 +1,7 @@
"""
Rasterizes the vector data of the NATURA2000 dataset onto all cutout regions
"""
import numpy as np
import atlite
from osgeo import gdal
@ -10,10 +14,11 @@ def determine_cutout_xXyY(cutout_name):
dy = (Y - y) / (cutout.shape[0] - 1)
return [x - dx/2., X + dx/2., y - dy/2., Y + dy/2.]
if __name__ == "__main__":
    cutout_names = np.unique([res['cutout'] for res in snakemake.config['renewable'].values()])
    xs, Xs, ys, Ys = zip(*(determine_cutout_xXyY(cutout) for cutout in cutout_names))
    xXyY = min(xs), max(Xs), min(ys), max(Ys)

    natura = gk.vector.loadVector(snakemake.input[0])
    extent = gk.Extent.from_xXyY(xXyY).castTo(3035).fit(100)
    extent.rasterize(natura, pixelWidth=100, pixelHeight=100, output=snakemake.output[0])

scripts/build_powerplants.py

@ -1,4 +1,7 @@
# coding: utf-8
"""
Get conventional powerplants from `powerplantmatching`, assign them to buses and create a csv file
"""
import logging
import numpy as np

scripts/build_renewable_profiles.py

@ -1,4 +1,79 @@
#!/usr/bin/env python
"""
Summary
-------
The script ``build_renewable_profiles.py`` calculates for each node several geographical properties:
1. the installable capacity (based on land-use),
2. the available generation time series (based on weather data),
3. the average distance from the node, for onshore wind, AC-connected offshore wind, DC-connected offshore wind and solar PV generators, and
4. in addition, for offshore wind only, the fraction of the grid connection which is under water.
.. note:: Hydroelectric profiles are built in the script :mod:`build_hydro_profile`.
Relevant settings
-----------------
config.renewable (describes the parameters for onwind, offwind-ac, offwind-dc
and solar)
config.snapshots (describes the time dimensions of the selection of snapshots)
Inputs
------
base_network
land-use shapes
region shapes for onshore, offshore and countries
cutout
Outputs
-------
profile_{tech}.nc for tech in [onwind,offwind-ac,offwind-dc,solar]
profile_{tech}.nc contains five common fields:

- profile (bus x time): the per-unit hourly availability factors for each node
- weight (bus): the sum of the layout weighting for each node
- p_nom_max (bus): the maximal installable capacity at the node (in MW)
- potential (y, x): the layout of generator units at cutout grid cells inside
  the Voronoi cell (maximal installable capacity at each grid cell multiplied
  by the capacity factor)
- average_distance (bus): the average distance of units in the Voronoi cell to
  the grid node (in km)

For offshore wind there is additionally:

- underwater_fraction (bus): the fraction of the average connection distance
  which is under water
Long description
----------------
First the script computes how much of the technology can be installed at each
cutout grid cell and each node using the library `GLAES
<https://github.com/FZJ-IEK3-VSA/glaes>`_. This uses the CORINE land use data,
Natura2000 nature reserves and GEBCO for bathymetry.
To compute the layout of generators in each node's voronoi cell, the installable
potential in each grid cell is multiplied with the capacity factor at each grid
cell (since we assume more generators are installed at cells with a higher
capacity factor).
This layout is then used to compute the generation availability time series from
the atlite cutout.
Two methods are available to compute the maximal installable potential for a
node (`p_nom_max`): `simple` and `conservative`:
`simple` adds up the installable potentials of the individual grid cells. If
the model comes close to this limit, the time series may slightly overestimate
production, since it is assumed that the geographical distribution of
generators is proportional to the capacity factor.
`conservative` ascertains the nodal limit by increasing capacities
proportionally to the layout until the limit of an individual grid cell is
reached.
"""
import matplotlib.pyplot as plt
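A minimal sketch of the two `p_nom_max` methods described in the docstring, assuming a `potentials` array of per-cell installable capacity and a `layout` proportional to potential times capacity factor (all names and numbers made up):

import numpy as np

def p_nom_max_simple(potentials):
    # `simple`: add up the installable potential of every grid cell
    return potentials.sum()

def p_nom_max_conservative(potentials, layout):
    # `conservative`: scale the layout up uniformly until the first
    # individual grid cell hits its own potential limit
    scale = (potentials / layout).min()
    return (scale * layout).sum()

potentials = np.array([100., 50.])   # MW installable per grid cell
cap_factors = np.array([0.2, 0.4])
layout = potentials * cap_factors    # units distributed proportional to potential * CF

print(p_nom_max_simple(potentials))                # 150.0
print(p_nom_max_conservative(potentials, layout))  # 100.0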

scripts/build_shapes.py

@ -1,3 +1,7 @@
"""
Create GIS shape files for countries on-shore and off-shore, Europe and NUTS3 regions
"""
import os
import numpy as np
from operator import attrgetter

scripts/cluster_network.py

@ -1,4 +1,7 @@
# coding: utf-8
"""
Create networks clustered to `cluster` number of zones with aggregated buses, generators and transmission corridors
"""
import pandas as pd
idx = pd.IndexSlice
@ -243,5 +246,3 @@ if __name__ == "__main__":
store.put(attr, getattr(clustering, attr), format="table", index=False)
cluster_regions((clustering.busmap,))
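For orientation, the busmap passed to `cluster_regions` above is a Series mapping original bus ids to cluster ids, so aggregating a bus attribute reduces to a groupby; a minimal sketch (ids and values made up):

import pandas as pd

# busmap: original bus id -> cluster id
busmap = pd.Series({'bus1': '0', 'bus2': '0', 'bus3': '1'})
bus_x = pd.Series({'bus1': 6.1, 'bus2': 6.3, 'bus3': 10.2})

# aggregating a bus attribute reduces to a groupby over the busmap
print(bus_x.groupby(busmap).mean())  # cluster '0' -> 6.2, '1' -> 10.2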

scripts/make_summary.py

@ -1,14 +1,5 @@
"""
Create summaries of aggregated energy and costs as csv files
"""
import os

scripts/plot_network.py

@ -1,3 +1,14 @@
"""
Plot map with pie charts and cost box plots
"""
# Dirty work-around so that sphinx can import this module and get the
# doc-string. Should be refactored in the style of the other scripts,
# ideally into several functions for the different plots.
if __name__ != "__main__":
import sys
sys.exit(0)
if 'snakemake' not in globals():
from vresutils.snakemake import MockSnakemake, Dict
from snakemake.rules import expand

scripts/plot_p_nom_max.py

@ -1,3 +1,7 @@
"""
Plot renewable installation potentials per capacity factor
"""
import pypsa
import pandas as pd
import matplotlib.pyplot as plt

scripts/plot_summary.py

@ -1,3 +1,7 @@
"""
Plot energy and cost summaries for several solved networks
"""
import os
import pandas as pd
import matplotlib.pyplot as plt

scripts/prepare_links_p_nom.py

@ -1,25 +1,29 @@
#!/usr/bin/env python
"""
Extract capacities for HVDC links from Wikipedia
"""
import pandas as pd
import numpy as np
if __name__ == "__main__":
    links_p_nom = pd.read_html('https://en.wikipedia.org/wiki/List_of_HVDC_projects', header=0, match="SwePol")[0]

    def extract_coordinates(s):
        regex = (r"(\d{1,2})°(\d{1,2})′(\d{1,2})″(N|S) "
                 r"(\d{1,2})°(\d{1,2})′(\d{1,2})″(E|W)")
        e = s.str.extract(regex, expand=True)
        lat = (e[0].astype(float) + (e[1].astype(float) + e[2].astype(float)/60.)/60.)*e[3].map({'N': +1., 'S': -1.})
        lon = (e[4].astype(float) + (e[5].astype(float) + e[6].astype(float)/60.)/60.)*e[7].map({'E': +1., 'W': -1.})
        return lon, lat

    m_b = links_p_nom["Power (MW)"].str.contains('x').fillna(False)
    def multiply(s): return s.str[0].astype(float) * s.str[1].astype(float)
    links_p_nom.loc[m_b, "Power (MW)"] = links_p_nom.loc[m_b, "Power (MW)"].str.split('x').pipe(multiply)
    links_p_nom["Power (MW)"] = links_p_nom["Power (MW)"].str.extract("[-/]?([\d.]+)", expand=False).astype(float)

    links_p_nom['x1'], links_p_nom['y1'] = extract_coordinates(links_p_nom['Converterstation 1'])
    links_p_nom['x2'], links_p_nom['y2'] = extract_coordinates(links_p_nom['Converterstation 2'])

    links_p_nom.dropna(subset=['x1', 'y1', 'x2', 'y2']).to_csv(snakemake.output[0], index=False)
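As a quick check of the coordinate parsing above, the same regex and arithmetic applied to a single made-up converter station position:

import pandas as pd

s = pd.Series(["55°44′12″N 17°03′24″E"])  # hypothetical sample string
regex = (r"(\d{1,2})°(\d{1,2})′(\d{1,2})″(N|S) "
         r"(\d{1,2})°(\d{1,2})′(\d{1,2})″(E|W)")
e = s.str.extract(regex, expand=True)
lat = (e[0].astype(float) + (e[1].astype(float) + e[2].astype(float)/60.)/60.)*e[3].map({'N': +1., 'S': -1.})
lon = (e[4].astype(float) + (e[5].astype(float) + e[6].astype(float)/60.)/60.)*e[7].map({'E': +1., 'W': -1.})
print(lat[0], lon[0])  # approx. 55.7367 17.0567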

scripts/prepare_network.py

@ -1,4 +1,7 @@
# coding: utf-8
"""
Prepare PyPSA network for solving according to the `opts` wildcard
"""
import logging
logger = logging.getLogger(__name__)

scripts/simplify_network.py

@ -1,4 +1,8 @@
# coding: utf-8
"""Bring electrical transmission network to a single 380kV voltage layer,
remove network dead-ends, and reduce multi-hop linear HVDC connections to a
single link
"""
import pandas as pd
idx = pd.IndexSlice
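Of the three steps in the docstring, removing dead-ends is the easiest to picture: repeatedly drop degree-one buses and the lines attached to them. A conceptual sketch only, not the script's actual implementation (which also reattaches loads and generators):

def remove_dead_ends(buses, lines):
    # buses: set of bus ids; lines: list of (bus0, bus1) pairs
    while True:
        degree = {b: 0 for b in buses}
        for b0, b1 in lines:
            degree[b0] += 1
            degree[b1] += 1
        dead = {b for b, d in degree.items() if d == 1}
        if not dead:
            return buses, lines
        buses = {b for b in buses if b not in dead}
        lines = [(a, b) for a, b in lines if a in buses and b in buses]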

scripts/solve_network.py

@ -1,3 +1,7 @@
"""
Solve networks iteratively with linear optimal power flow, updating reactances after each iteration
"""
import numpy as np
import pandas as pd
import logging
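The iteration the docstring refers to follows this pattern: solve the linear optimal power flow, rescale line reactances with the optimised capacities (more parallel circuits means lower reactance), and repeat until the reactances stabilise. A schematic sketch only, not the script's actual implementation; the update rule and threshold are assumptions:

def solve_iteratively(n, snapshots, msq_threshold=0.05, max_iter=10):
    for _ in range(max_iter):
        x_prev = n.lines['x'].copy()
        n.lopf(snapshots)  # linear optimal power flow
        # expanding a line adds parallel circuits and lowers its reactance
        n.lines['x'] = x_prev * n.lines['s_nom'] / n.lines['s_nom_opt']
        msq = (((n.lines['x'] - x_prev) / x_prev) ** 2).mean()
        if msq < msq_threshold:
            break
    return n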

scripts/solve_operations_network.py

@ -1,3 +1,8 @@
"""
Solve linear optimal dispatch in hourly resolution using the capacities of a
previous capacity expansion run
"""
import pypsa
import numpy as np
import re

scripts/trace_solve_network.py

@ -1,3 +1,8 @@
"""
Iteratively solves the expansion problem as in solve_network, but additionally
records intermediate branch capacity steps and objective values
"""
import numpy as np
import pandas as pd
import logging