pypsa-eur/scripts/build_hac_features.py
Fabian Neumann 013b705ee4
Clustering: build renewable profiles and add all assets after clustering (#1201)
* Cluster first: build renewable profiles and add all assets after clustering

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* correction: pass landfall_lengths through functions

* assign landfall_lengths correctly

* remove parameter add_land_use_constraint

* fix network_dict

* calculate distance to shoreline, remove underwater_fraction

* adjust simplification parameter to exclude Crete from offshore wind connections

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* remove unused geth2015 hydro capacities

* removing remaining traces of {simpl} wildcard

* add release notes and update workflow graphics

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: lisazeyen <lisa.zeyen@web.de>
2024-09-13 15:37:01 +02:00

48 lines
1.2 KiB
Python

# -*- coding: utf-8 -*-
# SPDX-FileCopyrightText: : 2024 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: MIT
"""
Aggregate all rastered cutout data to base regions Voronoi cells.
"""
import logging
import atlite
import geopandas as gpd
from _helpers import get_snapshots, set_scenario_config
from atlite.aggregate import aggregate_matrix
from dask.distributed import Client
logger = logging.getLogger(__name__)
if __name__ == "__main__":
    # Allow running the script standalone by constructing a mock
    # Snakemake workflow object with the rule's inputs/outputs/params.
    if "snakemake" not in globals():
        from _helpers import mock_snakemake

        snakemake = mock_snakemake("build_hac_features")
    set_scenario_config(snakemake)

    params = snakemake.params
    nprocesses = int(snakemake.threads)

    # Spin up a local Dask cluster only when more than one thread is
    # available; otherwise `scheduler=None` falls back to the default
    # (synchronous) scheduler in ds.load() below.
    if nprocesses > 1:
        client = Client(n_workers=nprocesses, threads_per_worker=1)
    else:
        client = None

    # Restrict the cutout to the configured snapshots (optionally with
    # leap days dropped) before aggregating.
    time = get_snapshots(params.snapshots, params.drop_leap_day)
    cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time)

    regions = gpd.read_file(snakemake.input.regions).set_index("name")
    # Sparse indicator matrix mapping raster grid cells onto the region
    # polygons; `I` mirrors atlite's naming, hence the noqa for E741.
    I = cutout.indicatormatrix(regions)  # noqa: E741

    # Aggregate every requested raster feature to the base regions.
    ds = cutout.data[params.features].map(
        aggregate_matrix, matrix=I, index=regions.index
    )

    # Trigger the (lazy) computation, distributed if a client exists.
    ds = ds.load(scheduler=client)
    ds.to_netcdf(snakemake.output[0])

    # Shut down the Dask workers once the result has been written.
    if client is not None:
        client.close()