Merge remote-tracking branch 'origin/master' into country-specific-dh-forward-temperatures

This commit is contained in:
AmosSchledorn 2024-08-08 17:20:23 +02:00
commit 6c9bcae6f0
15 changed files with 334 additions and 153 deletions

View File

@ -67,7 +67,6 @@ snapshots:
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#enable # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#enable
enable: enable:
retrieve: auto retrieve: auto
prepare_links_p_nom: false
retrieve_databundle: true retrieve_databundle: true
retrieve_cost_data: true retrieve_cost_data: true
build_cutout: false build_cutout: false
@ -370,6 +369,23 @@ biomass:
- Sludge - Sludge
municipal solid waste: municipal solid waste:
- Municipal waste - Municipal waste
share_unsustainable_use_retained:
2020: 1
2025: 0.66
2030: 0.33
2035: 0
2040: 0
2045: 0
2050: 0
share_sustainable_potential_available:
2020: 0
2025: 0.33
2030: 0.66
2035: 1
2040: 1
2045: 1
2050: 1
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#solar-thermal # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#solar-thermal
solar_thermal: solar_thermal:
@ -749,7 +765,7 @@ industry:
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#costs # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#costs
costs: costs:
year: 2030 year: 2030
version: v0.9.0 version: v0.9.1
social_discountrate: 0.02 social_discountrate: 0.02
fill_values: fill_values:
FOM: 0 FOM: 0
@ -1067,6 +1083,7 @@ plotting:
services rural biomass boiler: '#c6cf98' services rural biomass boiler: '#c6cf98'
services urban decentral biomass boiler: '#dde5b5' services urban decentral biomass boiler: '#dde5b5'
biomass to liquid: '#32CD32' biomass to liquid: '#32CD32'
unsustainable bioliquids: '#32CD32'
electrobiofuels: 'red' electrobiofuels: 'red'
BioSNG: '#123456' BioSNG: '#123456'
# power transmission # power transmission

View File

@ -53,7 +53,6 @@ extensions = [
autodoc_mock_imports = [ autodoc_mock_imports = [
"atlite", "atlite",
"snakemake", "snakemake",
"pycountry",
"rioxarray", "rioxarray",
"country_converter", "country_converter",
"tabula", "tabula",

View File

@ -5,3 +5,5 @@ classes ,,,
-- solid biomass,--,Array of biomass comodity,The comodity that are included as solid biomass -- solid biomass,--,Array of biomass comodity,The comodity that are included as solid biomass
-- not included,--,Array of biomass comodity,The comodity that are not included as a biomass potential -- not included,--,Array of biomass comodity,The comodity that are not included as a biomass potential
-- biogas,--,Array of biomass comodity,The comodity that are included as biogas -- biogas,--,Array of biomass comodity,The comodity that are included as biogas
share_unsustainable_use_retained,--,Dictionary with planning horizons as keys.,Share of unsustainable biomass use retained using primary production of Eurostat data as reference
share_sustainable_potential_available,--,Dictionary with planning horizons as keys.,Share determines phase-in of ENSPRESO biomass potentials

1 Unit Values Description
5 -- solid biomass -- Array of biomass comodity The comodity that are included as solid biomass
6 -- not included -- Array of biomass comodity The comodity that are not included as a biomass potential
7 -- biogas -- Array of biomass comodity The comodity that are included as biogas
8 share_unsustainable_use_retained -- Dictionary with planning horizons as keys. Share of unsustainable biomass use retained using primary production of Eurostat data as reference
9 share_sustainable_potential_available -- Dictionary with planning horizons as keys. Share determines phase-in of ENSPRESO biomass potentials

View File

@ -1,6 +1,5 @@
,Unit,Values,Description ,Unit,Values,Description
enable,str or bool,"{auto, true, false}","Switch to include (true) or exclude (false) the retrieve_* rules of snakemake into the workflow; 'auto' sets true|false based on availability of an internet connection to prevent issues with snakemake failing due to lack of internet connection." enable,str or bool,"{auto, true, false}","Switch to include (true) or exclude (false) the retrieve_* rules of snakemake into the workflow; 'auto' sets true|false based on availability of an internet connection to prevent issues with snakemake failing due to lack of internet connection."
prepare_links_p_nom,bool,"{true, false}","Switch to retrieve current HVDC projects from `Wikipedia <https://en.wikipedia.org/wiki/List_of_HVDC_projects>`_"
retrieve_databundle,bool,"{true, false}","Switch to retrieve databundle from zenodo via the rule :mod:`retrieve_databundle` or whether to keep a custom databundle located in the corresponding folder." retrieve_databundle,bool,"{true, false}","Switch to retrieve databundle from zenodo via the rule :mod:`retrieve_databundle` or whether to keep a custom databundle located in the corresponding folder."
retrieve_cost_data,bool,"{true, false}","Switch to retrieve technology cost data from `technology-data repository <https://github.com/PyPSA/technology-data>`_." retrieve_cost_data,bool,"{true, false}","Switch to retrieve technology cost data from `technology-data repository <https://github.com/PyPSA/technology-data>`_."
build_cutout,bool,"{true, false}","Switch to enable the building of cutouts via the rule :mod:`build_cutout`." build_cutout,bool,"{true, false}","Switch to enable the building of cutouts via the rule :mod:`build_cutout`."

1 Unit Values Description
2 enable str or bool {auto, true, false} Switch to include (true) or exclude (false) the retrieve_* rules of snakemake into the workflow; 'auto' sets true|false based on availability of an internet connection to prevent issues with snakemake failing due to lack of internet connection.
prepare_links_p_nom bool {true, false} Switch to retrieve current HVDC projects from `Wikipedia <https://en.wikipedia.org/wiki/List_of_HVDC_projects>`_
3 retrieve_databundle bool {true, false} Switch to retrieve databundle from zenodo via the rule :mod:`retrieve_databundle` or whether to keep a custom databundle located in the corresponding folder.
4 retrieve_cost_data bool {true, false} Switch to retrieve technology cost data from `technology-data repository <https://github.com/PyPSA/technology-data>`_.
5 build_cutout bool {true, false} Switch to enable the building of cutouts via the rule :mod:`build_cutout`.

View File

@ -41,11 +41,6 @@ Rule ``build_cutout``
.. automodule:: build_cutout .. automodule:: build_cutout
Rule ``prepare_links_p_nom``
===============================
.. automodule:: prepare_links_p_nom
.. _base: .. _base:
Rule ``base_network`` Rule ``base_network``

View File

@ -12,6 +12,12 @@ Upcoming Release
* Added option to use country-specific district heating forward and return temperatures. Defaults to lower temperatures in Scandinavia. * Added option to use country-specific district heating forward and return temperatures. Defaults to lower temperatures in Scandinavia.
* Added unsustainable biomass potentials for solid, gaseous, and liquid biomass. The potentials can be phased out and/or
substituted by the phase-in of sustainable biomass types using the config parameters
``biomass: share_unsustainable_use_retained`` and ``biomass: share_sustainable_potential_available``.
* The rule ``prepare_links_p_nom`` was removed since it was outdated and not used.
* Changed heat pump COP approximation for central heating to be based on `Jensen et al. (2018) <https://backend.orbit.dtu.dk/ws/portalfiles/portal/151965635/MAIN_Final.pdf>`__ and a default forward temperature of 90C. This is more realistic for district heating than the previously used approximation method. * Changed heat pump COP approximation for central heating to be based on `Jensen et al. (2018) <https://backend.orbit.dtu.dk/ws/portalfiles/portal/151965635/MAIN_Final.pdf>`__ and a default forward temperature of 90C. This is more realistic for district heating than the previously used approximation method.
* split solid biomass potentials into solid biomass and municipal solid waste. Add option to use municipal solid waste. This option is only activated in combination with the flag ``waste_to_energy`` * split solid biomass potentials into solid biomass and municipal solid waste. Add option to use municipal solid waste. This option is only activated in combination with the flag ``waste_to_energy``

View File

@ -17,7 +17,6 @@ tabula-py
# cartopy # cartopy
scikit-learn scikit-learn
pycountry
pyyaml pyyaml
seaborn seaborn
memory_profiler memory_profiler

View File

@ -18,7 +18,6 @@ dependencies:
# Dependencies of the workflow itself # Dependencies of the workflow itself
- xlrd - xlrd
- openpyxl!=3.1.1 - openpyxl!=3.1.1
- pycountry
- seaborn - seaborn
- snakemake-minimal>=8.14 - snakemake-minimal>=8.14
- memory_profiler - memory_profiler

View File

@ -2,21 +2,6 @@
# #
# SPDX-License-Identifier: MIT # SPDX-License-Identifier: MIT
if config["enable"].get("prepare_links_p_nom", False):
rule prepare_links_p_nom:
output:
"data/links_p_nom.csv",
log:
logs("prepare_links_p_nom.log"),
threads: 1
resources:
mem_mb=1500,
conda:
"../envs/environment.yaml"
script:
"../scripts/prepare_links_p_nom.py"
rule build_electricity_demand: rule build_electricity_demand:
params: params:
@ -106,8 +91,8 @@ rule build_shapes:
params: params:
countries=config_provider("countries"), countries=config_provider("countries"),
input: input:
naturalearth=ancient("data/bundle/naturalearth/ne_10m_admin_0_countries.shp"), naturalearth=ancient("data/naturalearth/ne_10m_admin_0_countries_deu.shp"),
eez=ancient("data/bundle/eez/World_EEZ_v8_2014.shp"), eez=ancient("data/eez/World_EEZ_v12_20231025_gpkg/eez_v12.gpkg"),
nuts3=ancient("data/bundle/NUTS_2013_60M_SH/data/NUTS_RG_60M_2013.shp"), nuts3=ancient("data/bundle/NUTS_2013_60M_SH/data/NUTS_RG_60M_2013.shp"),
nuts3pop=ancient("data/bundle/nama_10r_3popgdp.tsv.gz"), nuts3pop=ancient("data/bundle/nama_10r_3popgdp.tsv.gz"),
nuts3gdp=ancient("data/bundle/nama_10r_3gdp.tsv.gz"), nuts3gdp=ancient("data/bundle/nama_10r_3gdp.tsv.gz"),

View File

@ -347,7 +347,8 @@ rule build_biomass_potentials:
"https://zenodo.org/records/10356004/files/ENSPRESO_BIOMASS.xlsx", "https://zenodo.org/records/10356004/files/ENSPRESO_BIOMASS.xlsx",
keep_local=True, keep_local=True,
), ),
nuts2="data/bundle/nuts/NUTS_RG_10M_2013_4326_LEVL_2.geojson", # https://gisco-services.ec.europa.eu/distribution/v2/nuts/download/#nuts21 eurostat="data/eurostat/Balances-April2023",
nuts2="data/bundle/nuts/NUTS_RG_10M_2013_4326_LEVL_2.geojson",
regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"), regions_onshore=resources("regions_onshore_elec_s{simpl}_{clusters}.geojson"),
nuts3_population=ancient("data/bundle/nama_10r_3popgdp.tsv.gz"), nuts3_population=ancient("data/bundle/nama_10r_3popgdp.tsv.gz"),
swiss_cantons=ancient("data/ch_cantons.csv"), swiss_cantons=ancient("data/ch_cantons.csv"),
@ -360,7 +361,7 @@ rule build_biomass_potentials:
biomass_potentials=resources( biomass_potentials=resources(
"biomass_potentials_s{simpl}_{clusters}_{planning_horizons}.csv" "biomass_potentials_s{simpl}_{clusters}_{planning_horizons}.csv"
), ),
threads: 1 threads: 8
resources: resources:
mem_mb=1000, mem_mb=1000,
log: log:
@ -956,6 +957,7 @@ rule prepare_sector_network:
countries=config_provider("countries"), countries=config_provider("countries"),
adjustments=config_provider("adjustments", "sector"), adjustments=config_provider("adjustments", "sector"),
emissions_scope=config_provider("energy", "emissions"), emissions_scope=config_provider("energy", "emissions"),
biomass=config_provider("biomass"),
RDIR=RDIR, RDIR=RDIR,
heat_pump_sources=config_provider("sector", "heat_pump_sources"), heat_pump_sources=config_provider("sector", "heat_pump_sources"),
heat_systems=config_provider("sector", "heat_systems"), heat_systems=config_provider("sector", "heat_systems"),

View File

@ -4,6 +4,7 @@
import requests import requests
from datetime import datetime, timedelta from datetime import datetime, timedelta
from shutil import move, unpack_archive
if config["enable"].get("retrieve", "auto") == "auto": if config["enable"].get("retrieve", "auto") == "auto":
config["enable"]["retrieve"] = has_internet_access() config["enable"]["retrieve"] = has_internet_access()
@ -15,8 +16,6 @@ if config["enable"]["retrieve"] is False:
if config["enable"]["retrieve"] and config["enable"].get("retrieve_databundle", True): if config["enable"]["retrieve"] and config["enable"].get("retrieve_databundle", True):
datafiles = [ datafiles = [
"je-e-21.03.02.xls", "je-e-21.03.02.xls",
"eez/World_EEZ_v8_2014.shp",
"naturalearth/ne_10m_admin_0_countries.shp",
"NUTS_2013_60M_SH/data/NUTS_RG_60M_2013.shp", "NUTS_2013_60M_SH/data/NUTS_RG_60M_2013.shp",
"nama_10r_3popgdp.tsv.gz", "nama_10r_3popgdp.tsv.gz",
"nama_10r_3gdp.tsv.gz", "nama_10r_3gdp.tsv.gz",
@ -215,6 +214,64 @@ if config["enable"]["retrieve"]:
move(input[0], output[0]) move(input[0], output[0])
if config["enable"]["retrieve"]:

    # Download the Marine Regions world EEZ (v12) geopackage. The download
    # endpoint requires a registration form; random name/organisation values
    # are submitted per request.
    rule retrieve_eez:
        params:
            zip="data/eez/World_EEZ_v12_20231025_gpkg.zip",
        output:
            gpkg="data/eez/World_EEZ_v12_20231025_gpkg/eez_v12.gpkg",
        run:
            import os
            import requests
            from uuid import uuid4

            # Randomised registration details for the download form.
            name = str(uuid4())[:8]
            org = str(uuid4())[:8]
            response = requests.post(
                "https://www.marineregions.org/download_file.php",
                params={"name": "World_EEZ_v12_20231025_gpkg.zip"},
                data={
                    "name": name,
                    "organisation": org,
                    "email": f"{name}@{org}.org",
                    "country": "Germany",
                    "user_category": "academia",
                    "purpose_category": "Research",
                    "agree": "1",
                },
            )
            # Write the downloaded archive, unpack it next to the zip, then
            # remove the archive to keep only the extracted geopackage.
            with open(params["zip"], "wb") as f:
                f.write(response.content)
            output_folder = Path(params["zip"]).parent
            unpack_archive(params["zip"], output_folder)
            os.remove(params["zip"])
if config["enable"]["retrieve"]:

    # Download directly from naciscdn.org which is a redirect from naturalearth.com
    # (https://www.naturalearthdata.com/downloads/10m-cultural-vectors/10m-admin-0-countries/)
    # Use point-of-view (POV) variant of Germany so that Crimea is included.
    rule retrieve_naturalearth_countries:
        input:
            storage(
                "https://naciscdn.org/naturalearth/10m/cultural/ne_10m_admin_0_countries_deu.zip"
            ),
        params:
            zip="data/naturalearth/ne_10m_admin_0_countries_deu.zip",
        output:
            countries="data/naturalearth/ne_10m_admin_0_countries_deu.shp",
        run:
            # `os` is not provided by the workflow globals; import it locally
            # before `os.remove` (the sibling rule retrieve_eez does the same).
            import os

            # Move the fetched archive into place, unpack it beside the
            # expected shapefile, then drop the archive.
            move(input[0], params["zip"])
            output_folder = Path(output["countries"]).parent
            unpack_archive(params["zip"], output_folder)
            os.remove(params["zip"])
if config["enable"]["retrieve"]: if config["enable"]["retrieve"]:
# Some logic to find the correct file URL # Some logic to find the correct file URL
# Sometimes files are released delayed or ahead of schedule, check which file is currently available # Sometimes files are released delayed or ahead of schedule, check which file is currently available

View File

@ -13,11 +13,51 @@ import geopandas as gpd
import numpy as np import numpy as np
import pandas as pd import pandas as pd
from _helpers import configure_logging, set_scenario_config from _helpers import configure_logging, set_scenario_config
from build_energy_totals import build_eurostat
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
AVAILABLE_BIOMASS_YEARS = [2010, 2020, 2030, 2040, 2050] AVAILABLE_BIOMASS_YEARS = [2010, 2020, 2030, 2040, 2050]
def _calc_unsustainable_potential(df, df_unsustainable, share_unsus, resource_type):
"""
Calculate the unsustainable biomass potential for a given resource type or
regex.
Parameters
----------
df : pd.DataFrame
The dataframe with sustainable biomass potentials.
df_unsustainable : pd.DataFrame
The dataframe with unsustainable biomass potentials.
share_unsus : float
The share of unsustainable biomass potential retained.
resource_type : str or regex
The resource type to calculate the unsustainable potential for.
Returns
-------
pd.Series
The unsustainable biomass potential for the given resource type or regex.
"""
if "|" in resource_type:
resource_potential = df_unsustainable.filter(regex=resource_type).sum(axis=1)
else:
resource_potential = df_unsustainable[resource_type]
return (
df.apply(
lambda c: c.sum()
/ df.loc[df.index.str[:2] == c.name[:2]].sum().sum()
* resource_potential.loc[c.name[:2]],
axis=1,
)
.mul(share_unsus)
.clip(lower=0)
)
def build_nuts_population_data(year=2013): def build_nuts_population_data(year=2013):
pop = pd.read_csv( pop = pd.read_csv(
snakemake.input.nuts3_population, snakemake.input.nuts3_population,
@ -211,15 +251,104 @@ def convert_nuts2_to_regions(bio_nuts2, regions):
return bio_regions return bio_regions
def add_unsustainable_potentials(df):
    """
    Add unsustainable biomass potentials to the given dataframe. The difference
    between the data of JRC and Eurostat is assumed to be unsustainable
    biomass.

    Relies on the module-level ``snakemake`` object as well as the globals
    ``params`` and ``investment_year`` set up in the ``__main__`` section.

    Parameters
    ----------
    df : pd.DataFrame
        The dataframe with sustainable biomass potentials.

    Returns
    -------
    pd.DataFrame
        The dataframe with added unsustainable biomass potentials.
    """
    # Eurostat balances for GB are only available up to 2019.
    if "GB" in snakemake.config["countries"]:
        latest_year = 2019
    else:
        latest_year = 2021
    idees_rename = {"GR": "EL", "GB": "UK"}
    df_unsustainable = (
        build_eurostat(
            countries=snakemake.config["countries"],
            input_eurostat=snakemake.input.eurostat,
            nprocesses=int(snakemake.threads),
        )
        # Clamp the requested planning horizon into the available data range
        # [1990, latest_year] before selecting the year level.
        .xs(
            max(min(latest_year, int(snakemake.wildcards.planning_horizons)), 1990),
            level=1,
        )
        .xs("Primary production", level=2)
        .droplevel([1, 2, 3])
    )

    df_unsustainable.index = df_unsustainable.index.str.strip()
    # Map Eurostat country codes back to ISO2 (EL -> GR, UK -> GB).
    df_unsustainable = df_unsustainable.rename(
        {v: k for k, v in idees_rename.items()}, axis=0
    )

    bio_carriers = [
        "Primary solid biofuels",
        "Biogases",
        "Renewable municipal waste",
        "Pure biogasoline",
        "Blended biogasoline",
        "Pure biodiesels",
        "Blended biodiesels",
        "Pure bio jet kerosene",
        "Blended bio jet kerosene",
        "Other liquid biofuels",
    ]

    df_unsustainable = df_unsustainable[bio_carriers]

    # Phase out unsustainable biomass potentials linearly from 2020 to 2035 while phasing in sustainable potentials
    share_unsus = params.get("share_unsustainable_use_retained").get(investment_year)

    # Swiss regions (CH0, CH1, ...) are not covered by Eurostat; exclude them.
    # Use a raw string so "\d" is not an invalid escape sequence.
    df_wo_ch = df.drop(df.filter(regex=r"CH\d", axis=0).index)

    # Calculate unsustainable solid biomass
    df_wo_ch["unsustainable solid biomass"] = _calc_unsustainable_potential(
        df_wo_ch, df_unsustainable, share_unsus, "Primary solid biofuels"
    )

    # Calculate unsustainable biogas
    df_wo_ch["unsustainable biogas"] = _calc_unsustainable_potential(
        df_wo_ch, df_unsustainable, share_unsus, "Biogases"
    )

    # Calculate unsustainable bioliquids
    df_wo_ch["unsustainable bioliquids"] = _calc_unsustainable_potential(
        df_wo_ch,
        df_unsustainable,
        share_unsus,
        resource_type="gasoline|diesel|kerosene|liquid",
    )

    # Scale sustainable potentials by their phase-in share, then append the
    # unsustainable columns (0 for regions without Eurostat coverage).
    share_sus = params.get("share_sustainable_potential_available").get(investment_year)
    df *= share_sus

    df = df.join(df_wo_ch.filter(like="unsustainable")).fillna(0)

    return df
if __name__ == "__main__": if __name__ == "__main__":
if "snakemake" not in globals(): if "snakemake" not in globals():
from _helpers import mock_snakemake from _helpers import mock_snakemake
snakemake = mock_snakemake( snakemake = mock_snakemake(
"build_biomass_potentials", "build_biomass_potentials",
simpl="", simpl="",
clusters="5", clusters="37",
planning_horizons=2050, planning_horizons=2020,
) )
configure_logging(snakemake) configure_logging(snakemake)
@ -269,6 +398,8 @@ if __name__ == "__main__":
grouper = {v: k for k, vv in params["classes"].items() for v in vv} grouper = {v: k for k, vv in params["classes"].items() for v in vv}
df = df.T.groupby(grouper).sum().T df = df.T.groupby(grouper).sum().T
df = add_unsustainable_potentials(df)
df *= 1e6 # TWh/a to MWh/a df *= 1e6 # TWh/a to MWh/a
df.index.name = "MWh/a" df.index.name = "MWh/a"

View File

@ -26,7 +26,7 @@ Inputs
.. image:: img/countries.png .. image:: img/countries.png
:scale: 33 % :scale: 33 %
- ``data/bundle/eez/World_EEZ_v8_2014.shp``: World `exclusive economic zones <https://en.wikipedia.org/wiki/Exclusive_economic_zone>`_ (EEZ) - ``data/eez/World_EEZ_v12_20231025_gpkg/eez_v12.gpkg``: World `exclusive economic zones <https://en.wikipedia.org/wiki/Exclusive_economic_zone>`_ (EEZ)
.. image:: img/eez.png .. image:: img/eez.png
:scale: 33 % :scale: 33 %
@ -73,22 +73,16 @@ from functools import reduce
from itertools import takewhile from itertools import takewhile
from operator import attrgetter from operator import attrgetter
import country_converter as coco
import geopandas as gpd import geopandas as gpd
import numpy as np import numpy as np
import pandas as pd import pandas as pd
import pycountry as pyc
from _helpers import configure_logging, set_scenario_config from _helpers import configure_logging, set_scenario_config
from shapely.geometry import MultiPolygon, Polygon from shapely.geometry import MultiPolygon, Polygon
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
cc = coco.CountryConverter()
def _get_country(target, **keys):
assert len(keys) == 1
try:
return getattr(pyc.countries.get(**keys), target)
except (KeyError, AttributeError):
return np.nan
def _simplify_polys(polys, minarea=0.1, tolerance=None, filterremote=True): def _simplify_polys(polys, minarea=0.1, tolerance=None, filterremote=True):
@ -135,22 +129,15 @@ def countries(naturalearth, country_list):
return s return s
def eez(country_shapes, eez, country_list): def eez(eez, country_list):
df = gpd.read_file(eez) df = gpd.read_file(eez)
df = df.loc[ iso3_list = cc.convert(country_list, src="ISO2", to="ISO3")
df["ISO_3digit"].isin( df = df.query("ISO_TER1 in @iso3_list and POL_TYPE == '200NM'").copy()
[_get_country("alpha_3", alpha_2=c) for c in country_list] df["name"] = cc.convert(df.ISO_TER1, src="ISO3", to="ISO2")
)
]
df["name"] = df["ISO_3digit"].map(lambda c: _get_country("alpha_2", alpha_3=c))
s = df.set_index("name").geometry.map( s = df.set_index("name").geometry.map(
lambda s: _simplify_polys(s, filterremote=False) lambda s: _simplify_polys(s, filterremote=False)
) )
s = gpd.GeoSeries( s = s.to_frame("geometry").set_crs(df.crs)
{k: v for k, v in s.items() if v.distance(country_shapes[k]) < 1e-3},
crs=df.crs,
)
s = s.to_frame("geometry")
s.index.name = "name" s.index.name = "name"
return s return s
@ -262,9 +249,7 @@ if __name__ == "__main__":
country_shapes = countries(snakemake.input.naturalearth, snakemake.params.countries) country_shapes = countries(snakemake.input.naturalearth, snakemake.params.countries)
country_shapes.reset_index().to_file(snakemake.output.country_shapes) country_shapes.reset_index().to_file(snakemake.output.country_shapes)
offshore_shapes = eez( offshore_shapes = eez(snakemake.input.eez, snakemake.params.countries)
country_shapes, snakemake.input.eez, snakemake.params.countries
)
offshore_shapes.reset_index().to_file(snakemake.output.offshore_shapes) offshore_shapes.reset_index().to_file(snakemake.output.offshore_shapes)
europe_shape = gpd.GeoDataFrame( europe_shape = gpd.GeoDataFrame(

View File

@ -1,95 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# SPDX-FileCopyrightText: : 2017-2024 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: MIT
"""
Extracts capacities of HVDC links from `Wikipedia.
<https://en.wikipedia.org/wiki/List_of_HVDC_projects>`_.
Relevant Settings
-----------------
.. code:: yaml
enable:
prepare_links_p_nom:
.. seealso::
Documentation of the configuration file ``config/config.yaml`` at
:ref:`toplevel_cf`
Inputs
------
*None*
Outputs
-------
- ``data/links_p_nom.csv``: A plain download of https://en.wikipedia.org/wiki/List_of_HVDC_projects#Europe plus extracted coordinates.
Description
-----------
*None*
"""
import logging
import pandas as pd
from _helpers import configure_logging, set_scenario_config
logger = logging.getLogger(__name__)
def multiply(s):
    """Return the product of the two string components of each split pair in *s*."""
    first = s.str[0].astype(float)
    second = s.str[1].astype(float)
    return first.mul(second)
def extract_coordinates(s):
    """
    Parse degree-minute-second coordinate strings (e.g. "55°41′12″N 12°34′56″E")
    into decimal degrees.

    Parameters
    ----------
    s : pd.Series
        Series of strings each containing a latitude/longitude pair in DMS
        notation.

    Returns
    -------
    (lon, lat) : tuple of pd.Series
        Decimal-degree longitude and latitude; NaN where no match is found.
    """
    # Include the prime (′) minute markers so that minutes and seconds are
    # captured as separate groups; southern/western hemispheres get a
    # negative sign.
    regex = (
        r"(\d{1,2})°(\d{1,2})′(\d{1,2})″(N|S) " r"(\d{1,2})°(\d{1,2})′(\d{1,2})″(E|W)"
    )
    e = s.str.extract(regex, expand=True)
    lat = (
        e[0].astype(float) + (e[1].astype(float) + e[2].astype(float) / 60.0) / 60.0
    ) * e[3].map({"N": +1.0, "S": -1.0})
    lon = (
        e[4].astype(float) + (e[5].astype(float) + e[6].astype(float) / 60.0) / 60.0
    ) * e[7].map({"E": +1.0, "W": -1.0})
    return lon, lat
if __name__ == "__main__":
    if "snakemake" not in globals():
        from _helpers import mock_snakemake  # rule must be enabled in config

        snakemake = mock_snakemake("prepare_links_p_nom", simpl="")
    configure_logging(snakemake)
    set_scenario_config(snakemake)

    # Scrape the table of European HVDC projects from Wikipedia.
    links_p_nom = pd.read_html(
        "https://en.wikipedia.org/wiki/List_of_HVDC_projects", header=0, match="SwePol"
    )[0]

    mw = "Power (MW)"

    # Entries like "2 x 300" list multiple poles; multiply them out first.
    m_b = links_p_nom[mw].str.contains("x").fillna(False)
    links_p_nom.loc[m_b, mw] = links_p_nom.loc[m_b, mw].str.split("x").pipe(multiply)
    # Use a raw string so "\d" is not an invalid escape sequence.
    links_p_nom[mw] = (
        links_p_nom[mw].str.extract(r"[-/]?([\d.]+)", expand=False).astype(float)
    )

    # Extract decimal coordinates of both converter stations.
    links_p_nom["x1"], links_p_nom["y1"] = extract_coordinates(
        links_p_nom["Converterstation 1"]
    )
    links_p_nom["x2"], links_p_nom["y2"] = extract_coordinates(
        links_p_nom["Converterstation 2"]
    )

    # Keep only links with complete endpoint coordinates.
    links_p_nom.dropna(subset=["x1", "y1", "x2", "y2"]).to_csv(
        snakemake.output[0], index=False
    )

View File

@ -63,6 +63,7 @@ def define_spatial(nodes, options):
if options.get("biomass_spatial", options["biomass_transport"]): if options.get("biomass_spatial", options["biomass_transport"]):
spatial.biomass.nodes = nodes + " solid biomass" spatial.biomass.nodes = nodes + " solid biomass"
spatial.biomass.bioliquids = nodes + " bioliquids"
spatial.biomass.locations = nodes spatial.biomass.locations = nodes
spatial.biomass.industry = nodes + " solid biomass for industry" spatial.biomass.industry = nodes + " solid biomass for industry"
spatial.biomass.industry_cc = nodes + " solid biomass for industry CC" spatial.biomass.industry_cc = nodes + " solid biomass for industry CC"
@ -70,6 +71,7 @@ def define_spatial(nodes, options):
spatial.msw.locations = nodes spatial.msw.locations = nodes
else: else:
spatial.biomass.nodes = ["EU solid biomass"] spatial.biomass.nodes = ["EU solid biomass"]
spatial.biomass.bioliquids = ["EU unsustainable bioliquids"]
spatial.biomass.locations = ["EU"] spatial.biomass.locations = ["EU"]
spatial.biomass.industry = ["solid biomass for industry"] spatial.biomass.industry = ["solid biomass for industry"]
spatial.biomass.industry_cc = ["solid biomass for industry CC"] spatial.biomass.industry_cc = ["solid biomass for industry CC"]
@ -2261,8 +2263,14 @@ def add_biomass(n, costs):
biogas_potentials_spatial = biomass_potentials["biogas"].rename( biogas_potentials_spatial = biomass_potentials["biogas"].rename(
index=lambda x: x + " biogas" index=lambda x: x + " biogas"
) )
unsustainable_biogas_potentials_spatial = biomass_potentials[
"unsustainable biogas"
].rename(index=lambda x: x + " biogas")
else: else:
biogas_potentials_spatial = biomass_potentials["biogas"].sum() biogas_potentials_spatial = biomass_potentials["biogas"].sum()
unsustainable_biogas_potentials_spatial = biomass_potentials[
"unsustainable biogas"
].sum()
if options.get("biomass_spatial", options["biomass_transport"]): if options.get("biomass_spatial", options["biomass_transport"]):
solid_biomass_potentials_spatial = biomass_potentials["solid biomass"].rename( solid_biomass_potentials_spatial = biomass_potentials["solid biomass"].rename(
@ -2271,11 +2279,27 @@ def add_biomass(n, costs):
msw_biomass_potentials_spatial = biomass_potentials[ msw_biomass_potentials_spatial = biomass_potentials[
"municipal solid waste" "municipal solid waste"
].rename(index=lambda x: x + " municipal solid waste") ].rename(index=lambda x: x + " municipal solid waste")
unsustainable_solid_biomass_potentials_spatial = biomass_potentials[
"unsustainable solid biomass"
].rename(index=lambda x: x + " solid biomass")
else: else:
solid_biomass_potentials_spatial = biomass_potentials["solid biomass"].sum() solid_biomass_potentials_spatial = biomass_potentials["solid biomass"].sum()
msw_biomass_potentials_spatial = biomass_potentials[ msw_biomass_potentials_spatial = biomass_potentials[
"municipal solid waste" "municipal solid waste"
].sum() ].sum()
unsustainable_solid_biomass_potentials_spatial = biomass_potentials[
"unsustainable solid biomass"
].sum()
if options["regional_oil_demand"]:
unsustainable_liquid_biofuel_potentials_spatial = biomass_potentials[
"unsustainable bioliquids"
].rename(index=lambda x: x + " bioliquids")
else:
unsustainable_liquid_biofuel_potentials_spatial = biomass_potentials[
"unsustainable bioliquids"
].sum()
n.add("Carrier", "biogas") n.add("Carrier", "biogas")
n.add("Carrier", "solid biomass") n.add("Carrier", "solid biomass")
@ -2400,6 +2424,81 @@ def add_biomass(n, costs):
p_nom_extendable=True, p_nom_extendable=True,
) )
if biomass_potentials.filter(like="unsustainable").sum().sum() > 0:
# Create timeseries to force usage of unsustainable potentials
e_max_pu = pd.DataFrame(1, index=n.snapshots, columns=spatial.gas.biogas)
e_max_pu.iloc[-1] = 0
n.madd(
"Store",
spatial.gas.biogas,
suffix=" unsustainable",
bus=spatial.gas.biogas,
carrier="unsustainable biogas",
e_nom=unsustainable_biogas_potentials_spatial,
marginal_cost=costs.at["biogas", "fuel"],
e_initial=unsustainable_biogas_potentials_spatial,
e_nom_extendable=False,
e_max_pu=e_max_pu,
)
e_max_pu = pd.DataFrame(1, index=n.snapshots, columns=spatial.biomass.nodes)
e_max_pu.iloc[-1] = 0
n.madd(
"Store",
spatial.biomass.nodes,
suffix=" unsustainable",
bus=spatial.biomass.nodes,
carrier="unsustainable solid biomass",
e_nom=unsustainable_solid_biomass_potentials_spatial,
marginal_cost=costs.at["fuelwood", "fuel"],
e_initial=unsustainable_solid_biomass_potentials_spatial,
e_nom_extendable=False,
e_max_pu=e_max_pu,
)
n.madd(
"Bus",
spatial.biomass.bioliquids,
location=spatial.biomass.locations,
carrier="unsustainable bioliquids",
unit="MWh_LHV",
)
e_max_pu = pd.DataFrame(
1, index=n.snapshots, columns=spatial.biomass.bioliquids
)
e_max_pu.iloc[-1] = 0
n.madd(
"Store",
spatial.biomass.bioliquids,
suffix=" unsustainable",
bus=spatial.biomass.bioliquids,
carrier="unsustainable bioliquids",
e_nom=unsustainable_liquid_biofuel_potentials_spatial,
marginal_cost=costs.at["biodiesel crops", "fuel"],
e_initial=unsustainable_liquid_biofuel_potentials_spatial,
e_nom_extendable=False,
e_max_pu=e_max_pu,
)
n.madd(
"Link",
spatial.biomass.bioliquids,
bus0=spatial.biomass.bioliquids,
bus1=spatial.oil.nodes,
bus2="co2 atmosphere",
carrier="unsustainable bioliquids",
efficiency=1,
efficiency2=-costs.at["solid biomass", "CO2 intensity"]
+ costs.at["BtL", "CO2 stored"],
p_nom=unsustainable_liquid_biofuel_potentials_spatial,
marginal_cost=costs.at["BtL", "VOM"],
)
n.madd( n.madd(
"Link", "Link",
spatial.gas.biogas_to_gas, spatial.gas.biogas_to_gas,
@ -4131,6 +4230,7 @@ def add_enhanced_geothermal(n, egs_potentials, egs_overlap, costs):
# %% # %%
if __name__ == "__main__": if __name__ == "__main__":
if "snakemake" not in globals(): if "snakemake" not in globals():
from _helpers import mock_snakemake from _helpers import mock_snakemake
snakemake = mock_snakemake( snakemake = mock_snakemake(