Use GLAES library by FZJ-IEK-3 for computing GIS potentials

Jonas Hoersch 2018-12-10 18:40:54 +01:00
parent 04a291b91b
commit 802261eedd
5 changed files with 187 additions and 66 deletions

Snakefile

@@ -89,38 +89,31 @@ rule build_bus_regions:
 rule build_cutout:
     output: "cutouts/{cutout}"
-    resources: mem=5000
+    resources: mem=config['atlite'].get('nprocesses', 4) * 1000
     threads: config['atlite'].get('nprocesses', 4)
     benchmark: "benchmarks/build_cutout_{cutout}"
     # group: 'feedin_preparation'
     script: "scripts/build_cutout.py"
 
-def memory_build_renewable_potentials(wildcards):
-    corine_config = config["renewable"][wildcards.technology]["corine"]
-    return 12000 if corine_config.get("distance") is None else 24000
-
-rule build_renewable_potentials:
-    input:
-        cutout=lambda wildcards: "cutouts/" + config["renewable"][wildcards.technology]['cutout'],
-        corine="data/bundle/corine/g250_clc06_V18_5.tif",
-        natura="data/bundle/natura/Natura2000_end2015.shp"
-    output: "resources/potentials_{technology}.nc"
-    resources: mem=memory_build_renewable_potentials
-    benchmark: "benchmarks/build_renewable_potentials_{technology}"
-    # group: 'feedin_preparation'
-    script: "scripts/build_renewable_potentials.py"
+rule build_natura_raster:
+    input: "data/bundle/natura/Natura2000_end2015.shp"
+    output: "resources/natura.tiff"
+    script: "scripts/build_natura_raster.py"
 
 rule build_renewable_profiles:
     input:
         base_network="networks/base.nc",
-        potentials="resources/potentials_{technology}.nc",
+        corine="data/bundle/corine/g250_clc06_V18_5.tif",
+        natura="resources/natura.tiff",
+        gebco="data/bundle/GEBCO_2014_2D.nc",
+        country_shapes='resources/country_shapes.geojson',
         regions=lambda wildcards: ("resources/regions_onshore.geojson"
                                    if wildcards.technology in ('onwind', 'solar')
                                    else "resources/regions_offshore.geojson"),
         cutout=lambda wildcards: "cutouts/" + config["renewable"][wildcards.technology]['cutout']
-    output:
-        profile="resources/profile_{technology}.nc",
-    resources: mem=5000
+    output: profile="resources/profile_{technology}.nc",
+    resources: mem=config['atlite'].get('nprocesses', 2) * 5000
+    threads: config['atlite'].get('nprocesses', 2)
     benchmark: "benchmarks/build_renewable_profiles_{technology}"
     # group: 'feedin_preparation'
     script: "scripts/build_renewable_profiles.py"
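Note: the memory requests above are now expressions of the configured number of atlite worker processes rather than fixed values. A minimal sketch of how they evaluate, assuming a hypothetical config fragment with nprocesses set to 4:

    # hypothetical config fragment; PyPSA-Eur reads this from its config file
    config = {'atlite': {'nprocesses': 4}}
    mem_build_cutout = config['atlite'].get('nprocesses', 4) * 1000    # -> 4000 MB
    mem_build_profiles = config['atlite'].get('nprocesses', 2) * 5000  # -> 20000 MB
    print(mem_build_cutout, mem_build_profiles)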

config.yaml

@@ -58,7 +58,7 @@ renewable:
       method: wind
       turbine: Vestas_V112_3MW
     # ScholzPhd Tab 4.3.1: 10MW/km^2
-    capacity_per_sqm: 3
+    capacity_per_sqkm: 3
     # correction_factor: 0.93
     corine:
       #The selection of CORINE Land Cover [1] types that are allowed for wind and solar are based on [2] p.42 / p.28
@@ -70,19 +70,20 @@ renewable:
                    24, 25, 26, 27, 28, 29, 31, 32]
       distance: 1000
       distance_grid_codes: [1, 2, 3, 4, 5, 6]
     natura: true
+    potential: conservative # or heuristic
   offwind:
     cutout: europe-2013-era5
     resource:
       method: wind
       turbine: NREL_ReferenceTurbine_5MW_offshore
     # ScholzPhd Tab 4.3.1: 10MW/km^2
-    capacity_per_sqm: 3
-    height_cutoff: 50
+    capacity_per_sqkm: 3
     # correction_factor: 0.93
-    corine:
-      grid_codes: [44, 255]
+    corine: [44, 255]
     natura: true
+    max_depth: 50
+    potential: conservative # or heuristic
   solar:
     cutout: europe-2013-sarah
     resource:
@@ -92,12 +93,12 @@ renewable:
       slope: 35.
       azimuth: 180.
     # ScholzPhd Tab 4.3.1: 170 MW/km^2
-    capacity_per_sqm: 1.7
+    capacity_per_sqkm: 1.7
     correction_factor: 0.877
-    corine:
-      grid_codes: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
-                   18, 19, 20, 26, 31, 32]
+    corine: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
+             14, 15, 16, 17, 18, 19, 20, 26, 31, 32]
     natura: true
+    potential: conservative # or heuristic
   hydro:
     cutout: europe-2013-era5
     carriers: [ror, PHS, hydro]
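Note: `corine` now accepts either a plain list of eligible CORINE grid codes (as for offwind and solar above) or a mapping that additionally defines a distance buffer (as for onwind). A minimal sketch of the normalisation applied in scripts/build_renewable_profiles.py, with illustrative values:

    def normalise_corine(corine):
        # a plain list is shorthand for {'grid_codes': [...]}
        if isinstance(corine, list):
            corine = {'grid_codes': corine}
        return corine

    print(normalise_corine([44, 255]))                                    # offwind style
    print(normalise_corine({'grid_codes': [12, 13, 14], 'distance': 1000,
                            'distance_grid_codes': [1, 2, 3, 4, 5, 6]}))  # onwind style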

environment.yaml

@@ -40,6 +40,8 @@ dependencies:
   - pypsa>=0.13
   - vresutils>=0.2.5
   - git+https://github.com/FRESNA/atlite.git
+  - git+https://github.com/FZJ-IEK3-VSA/glaes.git
+  - git+https://github.com/FZJ-IEK3-VSA/geokit.git
   #- git+https://github.com/FRESNA/powerplantmatching.git
   #- https://software.ecmwf.int/wiki/download/attachments/56664858/ecmwf-api-client-python.tgz

scripts/build_natura_raster.py

@@ -0,0 +1,18 @@
+import numpy as np
+import atlite
+from osgeo import gdal
+import geokit as gk
+
+def determine_cutout_xXyY(cutout_name):
+    cutout = atlite.Cutout(cutout_name, cutout_dir="../cutouts")
+    x, X, y, Y = cutout.extent
+    dx = (X - x) / (cutout.shape[1] - 1)
+    dy = (Y - y) / (cutout.shape[0] - 1)
+    return [x - dx/2., X + dx/2., y - dy/2., Y + dy/2.]
+
+cutout_names = np.unique([res['cutout'] for res in snakemake.config['renewable'].values()])
+xs, Xs, ys, Ys = zip(*(determine_cutout_xXyY(cutout) for cutout in cutout_names))
+xXyY = min(xs), max(Xs), min(ys), max(Ys)
+
+natura = gk.vector.loadVector(snakemake.input[0])
+extent = gk.Extent.from_xXyY(xXyY).castTo(3035).fit(100)
+extent.rasterize(natura, pixelWidth=100, pixelHeight=100, output=snakemake.output[0])
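Note: a quick way to sanity-check the generated raster is to inspect its georeferencing with GDAL; this is only a sketch and assumes the rule has already produced "resources/natura.tiff". The raster should be in EPSG:3035 with 100m x 100m pixels covering the combined extent of all configured cutouts:

    from osgeo import gdal, osr

    ds = gdal.Open("resources/natura.tiff")
    gt = ds.GetGeoTransform()
    print("pixel size:", gt[1], abs(gt[5]))   # expected: 100.0 100.0
    srs = osr.SpatialReference(wkt=ds.GetProjection())
    print("EPSG:", srs.GetAttrValue("AUTHORITY", 1))
    print("raster size:", ds.RasterXSize, "x", ds.RasterYSize)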

scripts/build_renewable_profiles.py

@@ -5,51 +5,158 @@ import atlite
 import numpy as np
 import xarray as xr
 import pandas as pd
-import geopandas as gpd
+from multiprocessing import Pool
+
+import glaes as gl
+import geokit as gk
+from osgeo import gdal
+from scipy.sparse import csr_matrix, vstack
+
+from vresutils import landuse as vlanduse
+from vresutils.array import spdiag
+
+import progressbar as pgb
 
 import logging
 logger = logging.getLogger(__name__)
-logging.basicConfig(level=snakemake.config['logging_level'])
 
-config = snakemake.config['renewable'][snakemake.wildcards.technology]
-
-time = pd.date_range(freq='m', **snakemake.config['snapshots'])
-params = dict(years=slice(*time.year[[0, -1]]), months=slice(*time.month[[0, -1]]))
-
-regions = gpd.read_file(snakemake.input.regions).set_index('name')
-regions.index.name = 'bus'
-
-cutout = atlite.Cutout(config['cutout'],
-                       cutout_dir=os.path.dirname(snakemake.input.cutout),
-                       **params)
-
-# Potentials
-potentials = xr.open_dataarray(snakemake.input.potentials)
-
-# Indicatormatrix
-indicatormatrix = cutout.indicatormatrix(regions.geometry)
-
-resource = config['resource']
-func = getattr(cutout, resource.pop('method'))
-correction_factor = config.get('correction_factor', 1.)
-if correction_factor != 1.:
-    logger.warning('correction_factor is set as {}'.format(correction_factor))
-capacity_factor = correction_factor * func(capacity_factor=True, **resource)
-layout = capacity_factor * potentials
-
-profile, capacities = func(matrix=indicatormatrix, index=regions.index,
-                           layout=layout, per_unit=True, return_capacity=True,
-                           **resource)
-
-relativepotentials = (potentials / layout).stack(spatial=('y', 'x')).values
-p_nom_max = xr.DataArray([np.nanmin(relativepotentials[row.nonzero()[1]])
-                          if row.getnnz() > 0 else 0
-                          for row in indicatormatrix.tocsr()],
-                         [capacities.coords['bus']]) * capacities
-
-ds = xr.merge([(correction_factor * profile).rename('profile'),
-               capacities.rename('weight'),
-               p_nom_max.rename('p_nom_max'),
-               layout.rename('potential')])
-
-(ds.sel(bus=ds['profile'].mean('time') > config.get('min_p_max_pu', 0.))
- .to_netcdf(snakemake.output.profile))
+bounds = dx = dy = gebco = clc = natura = None
+def init_globals(n_bounds, n_dx, n_dy):
+    # global in each process of the multiprocessing.Pool
+    global bounds, dx, dy, gebco, clc, natura
+
+    bounds = n_bounds
+    dx = n_dx
+    dy = n_dy
+
+    gebco = gk.raster.loadRaster(snakemake.input.gebco)
+    gebco.SetProjection(gk.srs.loadSRS(4326).ExportToWkt())
+
+    clc = gk.raster.loadRaster(snakemake.input.corine)
+    clc.SetProjection(gk.srs.loadSRS(3035).ExportToWkt())
+
+    natura = gk.raster.loadRaster(snakemake.input.natura)
+
+def downsample_to_coarse_grid(bounds, dx, dy, mask, data):
+    # The GDAL warp function with the 'average' resample algorithm needs a band of zero values of at least
+    # the size of one coarse cell around the original raster or it produces erroneous results
+    orig = mask.createRaster(data=data)
+    padded_extent = mask.extent.castTo(bounds.srs).pad(max(dx, dy)).castTo(mask.srs)
+    padded = padded_extent.fit((mask.pixelWidth, mask.pixelHeight)).warp(orig, mask.pixelWidth, mask.pixelHeight)
+    orig = None # free original raster
+    average = bounds.createRaster(dx, dy, dtype=gdal.GDT_Float32)
+    assert gdal.Warp(average, padded, resampleAlg='average') == 1, "gdal warp failed: %s" % gdal.GetLastErrorMsg()
+    return average
+
+def calculate_potential(gid):
+    feature = gk.vector.extractFeature(snakemake.input.regions, where=gid)
+    ec = gl.ExclusionCalculator(feature.geom, srs=4326, pixelRes=1e-3) # about 100m in EU, it also works in LAEA 3035
+
+    corine = config.get("corine", {})
+    if isinstance(corine, list):
+        corine = {'grid_codes': corine}
+    if "grid_codes" in corine:
+        ec.excludeRasterType(clc, value=corine["grid_codes"], invert=True)
+    if corine.get("distance", 0.) > 0.:
+        ec.excludeRasterType(clc, value=corine["distance_grid_codes"], buffer=corine["distance"])
+
+    if config.get("natura", False):
+        ec.excludeRasterType(natura, value=1)
+    if "max_depth" in config:
+        ec.excludeRasterType(gebco, (None, -config["max_depth"]))
+
+    # TODO compute a distance field as a raster beforehand
+    if 'max_shore_distance' in config:
+        ec.excludeVectorType(snakemake.input.country_shapes, buffer=config['max_shore_distance'], invert=True)
+    if 'min_shore_distance' in config:
+        ec.excludeVectorType(snakemake.input.country_shapes, buffer=config['min_shore_distance'])
+
+    availability = downsample_to_coarse_grid(bounds, dx, dy, ec.region, np.where(ec.region.mask, ec._availability, 0))
+
+    return csr_matrix(gk.raster.extractMatrix(availability).flatten() / 100.)
+
+if __name__ == '__main__':
+    pgb.streams.wrap_stderr()
+    logging.basicConfig(level=snakemake.config['logging_level'])
+
+    config = snakemake.config['renewable'][snakemake.wildcards.technology]
+
+    time = pd.date_range(freq='m', **snakemake.config['snapshots'])
+    params = dict(years=slice(*time.year[[0, -1]]), months=slice(*time.month[[0, -1]]))
+
+    cutout = atlite.Cutout(config['cutout'],
+                           cutout_dir=os.path.dirname(snakemake.input.cutout),
+                           **params)
+
+    minx, maxx, miny, maxy = cutout.extent
+    dx = (maxx - minx) / (cutout.shape[1] - 1)
+    dy = (maxy - miny) / (cutout.shape[0] - 1)
+    bounds = gk.Extent.from_xXyY((minx - dx/2., maxx + dx/2.,
+                                  miny - dy/2., maxy + dy/2.))
+
+    # Use GLAES to compute available potentials and the transition matrix
+    with Pool(initializer=init_globals, initargs=(bounds, dx, dy),
+              maxtasksperchild=20, processes=snakemake.config['atlite'].get('nprocesses', 2)) as pool:
+        features = gk.vector.extractFeatures(snakemake.input.regions, onlyAttr=True) #.iloc[:10]
+        buses = pd.Index(features['name'], name="bus")
+        widgets = [
+            pgb.widgets.Percentage(),
+            ' ', pgb.widgets.SimpleProgress(format='(%s)' % pgb.widgets.SimpleProgress.DEFAULT_FORMAT),
+            ' ', pgb.widgets.Bar(),
+            ' ', pgb.widgets.Timer(),
+            ' ', pgb.widgets.ETA()
+        ]
+        progressbar = pgb.ProgressBar(prefix='Compute GIS potentials: ', widgets=widgets, max_value=len(features))
+        matrix = vstack(list(progressbar(pool.imap(calculate_potential, features.index))))
+
+    potentials = config['capacity_per_sqkm'] * vlanduse._cutout_cell_areas(cutout)
+    potmatrix = matrix * spdiag(potentials.ravel())
+    potmatrix.data[potmatrix.data < 1.] = 0 # ignore weather cells where only less than 1 MW can be installed
+    potmatrix.eliminate_zeros()
+
+    with pgb.ProgressBar(prefix='Compute capacity factors: ', max_value=1) as progressbar:
+        progressbar.update(0)
+        resource = config['resource']
+        func = getattr(cutout, resource.pop('method'))
+        correction_factor = config.get('correction_factor', 1.)
+        if correction_factor != 1.:
+            logger.warning('correction_factor is set as {}'.format(correction_factor))
+        capacity_factor = correction_factor * func(capacity_factor=True, **resource).stack(spatial=('y', 'x')).values
+        layoutmatrix = potmatrix * spdiag(capacity_factor)
+        progressbar.update(1)
+
+    with pgb.ProgressBar(prefix='Compute profiles: ', max_value=1) as progressbar:
+        progressbar.update(0)
+        profile, capacities = func(matrix=layoutmatrix, index=buses, per_unit=True,
+                                   return_capacity=True, **resource)
+        progressbar.update(1)
+
+    p_nom_max_meth = config.get('potential', 'conservative')
+    if p_nom_max_meth == 'conservative':
+        # p_nom_max has to be calculated for each bus and is the minimal ratio
+        # (min over all weather grid cells of the bus region) between the available
+        # potential (potmatrix) and the used normalised layout (layoutmatrix /
+        # capacities), so we would like to calculate i.e. potmatrix / (layoutmatrix /
+        # capacities). Since layoutmatrix = potmatrix * capacity_factor, this
+        # corresponds to capacities/max(capacity factor in the voronoi cell)
+        p_nom_max = xr.DataArray([1./np.max(capacity_factor[inds]) if len(inds) else 0.
+                                  for inds in np.split(potmatrix.indices, potmatrix.indptr[1:-1])], [buses]) * capacities
+    elif p_nom_max_meth == 'heuristic':
+        p_nom_max = 0.8 * xr.DataArray(np.asarray(potmatrix.sum(axis=1)).squeeze(), [buses])
+    else:
+        raise AssertionError('Config key `potential` should be one of "conservative" (default) or "heuristic",'
+                             ' not "{}"'.format(p_nom_max_meth))
+
+    layout = xr.DataArray(np.asarray(potmatrix.sum(axis=0)).reshape(cutout.shape),
+                          [cutout.meta.indexes[ax] for ax in ['y', 'x']])
+
+    ds = xr.merge([(correction_factor * profile).rename('profile'),
+                   capacities.rename('weight'),
+                   p_nom_max.rename('p_nom_max'),
+                   layout.rename('potential')])
+
+    (ds.sel(bus=(ds['profile'].mean('time') > config.get('min_p_max_pu', 0.)) & (ds['p_nom_max'] > 0.))
+     .to_netcdf(snakemake.output.profile))
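Note: a worked toy example of the 'conservative' p_nom_max rule implemented above, with made-up numbers, scipy.sparse.diags standing in for vresutils' spdiag, and capacities simplified to the total deployed layout. It illustrates that the minimum over cells of potmatrix / (layoutmatrix / capacities) equals capacities / max(capacity factor in the region):

    import numpy as np
    from scipy.sparse import csr_matrix, diags

    capacity_factor = np.array([0.2, 0.3, 0.5])         # per weather cell
    potmatrix = csr_matrix(np.array([[10., 20., 5.]]))  # available MW per cell for one bus
    layoutmatrix = potmatrix @ diags(capacity_factor)
    capacities = layoutmatrix.sum()                     # simplified stand-in for the atlite capacities

    ratios = potmatrix.data / (layoutmatrix.data / capacities)
    assert np.isclose(ratios.min(), capacities / capacity_factor.max())
    print(ratios.min())  # conservative p_nom_max for this bus: 21.0 MW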