pypsa-eur/scripts/build_shipping_demand.py
Fabian Neumann 013b705ee4
Clustering: build renewable profiles and add all assets after clustering (#1201)
* Cluster first: build renewable profiles and add all assets after clustering

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* correction: pass landfall_lengths through functions

* assign landfall_lengths correctly

* remove parameter add_land_use_constraint

* fix network_dict

* calculate distance to shoreline, remove underwater_fraction

* adjust simplification parameter to exclude Crete from offshore wind connections

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* remove unused geth2015 hydro capacities

* removing remaining traces of {simpl} wildcard

* add release notes and update workflow graphics

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: lisazeyen <lisa.zeyen@web.de>
2024-09-13 15:37:01 +02:00

59 lines
2.1 KiB
Python

# -*- coding: utf-8 -*-
# SPDX-FileCopyrightText: : 2023-2024 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: MIT
"""
Build regional demand for international navigation based on outflow volume of
ports.
"""
import json
import geopandas as gpd
import pandas as pd
from _helpers import set_scenario_config
if __name__ == "__main__":
    if "snakemake" not in globals():
        # Standalone execution (outside Snakemake): build a mock snakemake
        # object with a representative wildcard set for debugging.
        from _helpers import mock_snakemake

        snakemake = mock_snakemake("build_shipping_demand", clusters=48)
    set_scenario_config(snakemake)

    # Geographic scope: first (and only) geometry of the scope shapefile.
    # .iloc[0] is used instead of deprecated positional Series indexing [0].
    scope = gpd.read_file(snakemake.input.scope).geometry.iloc[0]
    regions = gpd.read_file(snakemake.input.regions).set_index("name")

    # Per-country international navigation energy demand, selected for the
    # configured reference year (second index level of the energy totals).
    demand = pd.read_csv(snakemake.input.demand, index_col=[0, 1])[
        "total international navigation"
    ]
    demand = demand.xs(snakemake.params.energy_totals_year, level=1)

    # read port data (GeoJSON feature collection) into GeoDataFrame;
    # the upstream dataset is latin-1 encoded
    with open(snakemake.input.ports, "r", encoding="latin_1") as f:
        ports = json.load(f)
    ports = pd.json_normalize(ports, "features", sep="_")
    coordinates = ports.geometry_coordinates
    geometry = gpd.points_from_xy(coordinates.str[0], coordinates.str[1])
    ports = gpd.GeoDataFrame(ports, geometry=geometry, crs=4326)

    # filter global port data by European ports (within the model scope)
    european_ports = ports[ports.within(scope)]

    # assign ports to nearest region; reproject to EPSG:3857 so that
    # nearest-neighbour distances are computed in metric coordinates,
    # then aggregate outflow volumes per region (kt -> Mt via /1e3)
    projected_ports = european_ports.to_crs(3857)
    projected_regions = regions.to_crs(3857)
    outflows = (
        projected_ports.sjoin_nearest(projected_regions)
        .groupby("name")
        .properties_outflows.sum()
        .div(1e3)
    )

    # calculate fraction of each country's port outflows per region;
    # region names start with the two-letter country code
    countries = outflows.index.str[:2]
    outflows_per_country = outflows.groupby(countries).sum()
    fraction = outflows / countries.map(outflows_per_country)

    # distribute per-country demands to nodes based on these fractions;
    # regions without ports receive zero demand via the final reindex
    nodal_demand = demand.loc[countries].fillna(0.0)
    nodal_demand.index = fraction.index
    nodal_demand = nodal_demand.multiply(fraction, axis=0)
    nodal_demand = nodal_demand.reindex(regions.index, fill_value=0)

    # export nodal international navigation demands
    nodal_demand.to_csv(snakemake.output[0])