merge branch 'master' into services_heat

This commit is contained in:
martacki 2024-01-04 15:38:10 +01:00
commit d6137f8e93
43 changed files with 995 additions and 333 deletions

View File

@ -50,8 +50,8 @@ repos:
- id: blackdoc
# Formatting with "black" coding style
- repo: https://github.com/psf/black
rev: 23.12.0
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 23.12.1
hooks:
# Format Python files
- id: black
@ -67,7 +67,7 @@ repos:
# Do YAML formatting (before the linter checks it for misses)
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
rev: v2.11.0
rev: v2.12.0
hooks:
- id: pretty-format-yaml
args: [--autofix, --indent, "2", --preserve-quotes]

View File

@ -14,7 +14,7 @@ from snakemake.utils import min_version
min_version("7.7")
if not exists("config/config.yaml"):
if not exists("config/config.yaml") and exists("config/config.default.yaml"):
copyfile("config/config.default.yaml", "config/config.yaml")

View File

@ -158,45 +158,51 @@ renewable:
resource:
method: wind
turbine: Vestas_V112_3MW
add_cutout_windspeed: true
capacity_per_sqkm: 3
# correction_factor: 0.93
corine:
grid_codes: [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 31, 32]
distance: 1000
distance_grid_codes: [1, 2, 3, 4, 5, 6]
luisa: false
# grid_codes: [1111, 1121, 1122, 1123, 1130, 1210, 1221, 1222, 1230, 1241, 1242]
# distance: 1000
# distance_grid_codes: [1111, 1121, 1122, 1123, 1130, 1210, 1221, 1222, 1230, 1241, 1242]
natura: true
excluder_resolution: 100
potential: simple # or conservative
clip_p_max_pu: 1.e-2
offwind-ac:
cutout: europe-2013-era5
resource:
method: wind
turbine: NREL_ReferenceTurbine_5MW_offshore
turbine: NREL_ReferenceTurbine_2020ATB_5.5MW
add_cutout_windspeed: true
capacity_per_sqkm: 2
correction_factor: 0.8855
corine: [44, 255]
luisa: false # [0, 5230]
natura: true
ship_threshold: 400
max_depth: 50
max_shore_distance: 30000
excluder_resolution: 200
potential: simple # or conservative
clip_p_max_pu: 1.e-2
offwind-dc:
cutout: europe-2013-era5
resource:
method: wind
turbine: NREL_ReferenceTurbine_5MW_offshore
turbine: NREL_ReferenceTurbine_2020ATB_5.5MW
add_cutout_windspeed: true
capacity_per_sqkm: 2
correction_factor: 0.8855
corine: [44, 255]
luisa: false # [0, 5230]
natura: true
ship_threshold: 400
max_depth: 50
min_shore_distance: 30000
excluder_resolution: 200
potential: simple # or conservative
clip_p_max_pu: 1.e-2
solar:
cutout: europe-2013-sarah
@ -209,9 +215,9 @@ renewable:
capacity_per_sqkm: 1.7
# correction_factor: 0.854337
corine: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 26, 31, 32]
luisa: false # [1111, 1121, 1122, 1123, 1130, 1210, 1221, 1222, 1230, 1241, 1242, 1310, 1320, 1330, 1410, 1421, 1422, 2110, 2120, 2130, 2210, 2220, 2230, 2310, 2410, 2420, 3210, 3320, 3330]
natura: true
excluder_resolution: 100
potential: simple # or conservative
clip_p_max_pu: 1.e-2
hydro:
cutout: europe-2013-era5
@ -449,7 +455,6 @@ sector:
solar_cf_correction: 0.788457 # = >>> 1/1.2683
marginal_cost_storage: 0. #1e-4
methanation: true
helmeth: false
coal_cc: false
dac: true
co2_vent: false
@ -459,6 +464,8 @@ sector:
hydrogen_turbine: false
SMR: true
SMR_cc: true
regional_methanol_demand: false
regional_oil_demand: false
regional_co2_sequestration_potential:
enable: false
attribute: 'conservative estimate Mt'
@ -478,14 +485,28 @@ sector:
- nearshore # within 50 km of sea
# - offshore
ammonia: false
min_part_load_fischer_tropsch: 0.9
min_part_load_methanolisation: 0.5
min_part_load_fischer_tropsch: 0.7
min_part_load_methanolisation: 0.3
min_part_load_methanation: 0.3
use_fischer_tropsch_waste_heat: true
use_haber_bosch_waste_heat: true
use_methanolisation_waste_heat: true
use_methanation_waste_heat: true
use_fuel_cell_waste_heat: true
use_electrolysis_waste_heat: false
use_electrolysis_waste_heat: true
electricity_distribution_grid: true
electricity_distribution_grid_cost_factor: 1.0
electricity_grid_connection: true
transmission_efficiency:
DC:
efficiency_static: 0.98
efficiency_per_1000km: 0.977
H2 pipeline:
efficiency_per_1000km: 1 # 0.979
compression_per_1000km: 0.019
gas pipeline:
efficiency_per_1000km: 1 #0.977
compression_per_1000km: 0.01
H2_network: true
gas_network: false
H2_retrofit: false
@ -495,6 +516,7 @@ sector:
gas_distribution_grid_cost_factor: 1.0
biomass_spatial: false
biomass_transport: false
biogas_upgrading_cc: false
conventional_generation:
OCGT: gas
biomass_to_liquid: false
@ -545,8 +567,8 @@ industry:
MWh_NH3_per_tNH3: 5.166
MWh_CH4_per_tNH3_SMR: 10.8
MWh_elec_per_tNH3_SMR: 0.7
MWh_H2_per_tNH3_electrolysis: 6.5
MWh_elec_per_tNH3_electrolysis: 1.17
MWh_H2_per_tNH3_electrolysis: 5.93
MWh_elec_per_tNH3_electrolysis: 0.2473
MWh_NH3_per_MWh_H2_cracker: 1.46 # https://github.com/euronion/trace/blob/44a5ff8401762edbef80eff9cfe5a47c8d3c8be4/data/efficiencies.csv
NH3_process_emissions: 24.5
petrochemical_process_emissions: 25.5
@ -628,6 +650,7 @@ solving:
skip_iterations: true
rolling_horizon: false
seed: 123
custom_extra_functionality: "../data/custom_extra_functionality.py"
# options that go into the optimize function
track_iterations: false
min_iterations: 4
@ -776,6 +799,7 @@ plotting:
fossil gas: '#e05b09'
natural gas: '#e05b09'
biogas to gas: '#e36311'
biogas to gas CC: '#e51245'
CCGT: '#a85522'
CCGT marginal: '#a85522'
allam: '#B98F76'
@ -877,6 +901,7 @@ plotting:
# heat demand
Heat load: '#cc1f1f'
heat: '#cc1f1f'
heat vent: '#aa3344'
heat demand: '#cc1f1f'
rural heat: '#ff5c5c'
residential rural heat: '#ff7c7c'
@ -946,7 +971,6 @@ plotting:
Sabatier: '#9850ad'
methanation: '#c44ce6'
methane: '#c44ce6'
helmeth: '#e899ff'
# synfuels
Fischer-Tropsch: '#25c49a'
liquid: '#25c49a'
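
The new ``transmission_efficiency`` block above splits each bidirectional link into two capacity-linked unidirectional links; their efficiency combines a length-independent factor with a per-1000-km factor, and pipelines additionally draw a per-1000-km share of electricity for compression. A minimal arithmetic sketch, not the PyPSA-Eur implementation (function name and example lengths are illustrative):

def transmission_losses(length_km, efficiency_static=1.0, efficiency_per_1000km=1.0, compression_per_1000km=0.0):
    # overall transport efficiency: static factor times the per-1000-km factor
    # raised to the length expressed in units of 1000 km
    efficiency = efficiency_static * efficiency_per_1000km ** (length_km / 1e3)
    # electricity drawn for compression, as a share of the transported energy
    compression = compression_per_1000km * length_km / 1e3
    return efficiency, compression

# 1500 km DC link with the values above (0.98 static, 0.977 per 1000 km) -> efficiency ~0.946
eta_dc, _ = transmission_losses(1500, 0.98, 0.977)
# 1500 km H2 pipeline: lossless transport but 1.9% compression demand per 1000 km -> ~0.029
_, comp_h2 = transmission_losses(1500, 1.0, 1.0, 0.019)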

View File

@ -0,0 +1,11 @@
# -*- coding: utf-8 -*-
# SPDX-FileCopyrightText: : 2023- The PyPSA-Eur Authors
#
# SPDX-License-Identifier: MIT
def custom_extra_functionality(n, snapshots, snakemake):
"""
Add custom extra functionality constraints.
"""
pass
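
The ``custom_extra_functionality`` hook added above is deliberately empty; the solving rules call it with the network, its snapshots and the ``snakemake`` object. A hedged example of what a user-supplied version might contain, assuming the linopy model is available as ``n.model`` and that extendable generators with carrier ``solar`` exist (the 10 GW figure is made up):

# -*- coding: utf-8 -*-
# Hypothetical user-filled version of data/custom_extra_functionality.py (illustrative only).
def custom_extra_functionality(n, snapshots, snakemake):
    """
    Example: require at least 10 GW of extendable solar capacity in total.
    """
    gens = n.generators.query("carrier == 'solar' and p_nom_extendable").index
    if gens.empty:
        return
    p_nom = n.model["Generator-p_nom"].loc[gens]
    n.model.add_constraints(p_nom.sum() >= 10_000, name="min_total_solar")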

View File

@ -7,10 +7,10 @@ capacity_per_sqkm,:math:`MW/km^2`,float,"Allowable density of wind turbine place
correction_factor,--,float,"Correction factor for capacity factor time series."
excluder_resolution,m,float,"Resolution on which to perform geographical eligibility analysis."
corine,--,"Any *realistic* subset of the `CORINE Land Cover code list <http://www.eea.europa.eu/data-and-maps/data/corine-land-cover-2006-raster-1/corine-land-cover-classes-and/clc_legend.csv/at_download/file>`_","Specifies areas according to CORINE Land Cover codes which are generally eligible for AC-connected offshore wind turbine placement."
luisa,--,"Any subset of the `LUISA Base Map codes in Annex 1 <https://publications.jrc.ec.europa.eu/repository/bitstream/JRC124621/technical_report_luisa_basemap_2018_v7_final.pdf>`_","Specifies areas according to the LUISA Base Map codes which are generally eligible for AC-connected offshore wind turbine placement."
natura,bool,"{true, false}","Switch to exclude `Natura 2000 <https://en.wikipedia.org/wiki/Natura_2000>`_ natural protection areas. Area is excluded if ``true``."
ship_threshold,--,float,"Ship density threshold from which areas are excluded."
max_depth,m,float,"Maximum sea water depth at which wind turbines can be built. Maritime areas with deeper waters are excluded in the process of calculating the AC-connected offshore wind potential."
min_shore_distance,m,float,"Minimum distance to the shore below which wind turbines cannot be built. Such areas close to the shore are excluded in the process of calculating the AC-connected offshore wind potential."
max_shore_distance,m,float,"Maximum distance to the shore above which wind turbines cannot be built. Such areas far from the shore are excluded in the process of calculating the AC-connected offshore wind potential."
potential,--,"One of {'simple', 'conservative'}","Method to compute the maximal installable potential for a node; confer :ref:`renewableprofiles`"
clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables' per-unit availability time series, values below this threshold are set to zero."
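
The offshore exclusion settings documented above (land-cover codes, Natura 2000, ship density, water depth, shore distance) are typically combined in an atlite ``ExclusionContainer``. A hedged sketch of that pattern, with placeholder file names and without the exact unit handling used in ``build_renewable_profiles``:

from atlite.gis import ExclusionContainer

excluder = ExclusionContainer(crs=3035, res=200)                 # excluder_resolution: 200
excluder.add_raster("corine.tif", codes=[44, 255], invert=True)  # keep only eligible CORINE codes
excluder.add_raster("natura.tiff", nodata=0)                     # natura: true -> exclude protected areas
excluder.add_raster("shipdensity.tif", codes=lambda x: x > 400, crs=4326)         # ship_threshold: 400
excluder.add_raster("gebco.nc", codes=lambda x: x < -50, crs=4326, nodata=-1000)  # max_depth: 50
excluder.add_geometry("country_shapes.geojson", buffer=30_000, invert=True)       # max_shore_distance: 30000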


View File

@ -7,10 +7,10 @@ capacity_per_sqkm,:math:`MW/km^2`,float,"Allowable density of wind turbine place
correction_factor,--,float,"Correction factor for capacity factor time series."
excluder_resolution,m,float,"Resolution on which to perform geographical eligibility analysis."
corine,--,"Any *realistic* subset of the `CORINE Land Cover code list <http://www.eea.europa.eu/data-and-maps/data/corine-land-cover-2006-raster-1/corine-land-cover-classes-and/clc_legend.csv/at_download/file>`_","Specifies areas according to CORINE Land Cover codes which are generally eligible for AC-connected offshore wind turbine placement."
luisa,--,"Any subset of the `LUISA Base Map codes in Annex 1 <https://publications.jrc.ec.europa.eu/repository/bitstream/JRC124621/technical_report_luisa_basemap_2018_v7_final.pdf>`_","Specifies areas according to the LUISA Base Map codes which are generally eligible for DC-connected offshore wind turbine placement."
natura,bool,"{true, false}","Switch to exclude `Natura 2000 <https://en.wikipedia.org/wiki/Natura_2000>`_ natural protection areas. Area is excluded if ``true``."
ship_threshold,--,float,"Ship density threshold from which areas are excluded."
max_depth,m,float,"Maximum sea water depth at which wind turbines can be built. Maritime areas with deeper waters are excluded in the process of calculating the AC-connected offshore wind potential."
min_shore_distance,m,float,"Minimum distance to the shore below which wind turbines cannot be built."
max_shore_distance,m,float,"Maximum distance to the shore above which wind turbines cannot be built."
potential,--,"One of {'simple', 'conservative'}","Method to compute the maximal installable potential for a node; confer :ref:`renewableprofiles`"
clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables' per-unit availability time series, values below this threshold are set to zero."


View File

@ -8,8 +8,11 @@ corine,,,
-- grid_codes,--,"Any subset of the `CORINE Land Cover code list <http://www.eea.europa.eu/data-and-maps/data/corine-land-cover-2006-raster-1/corine-land-cover-classes-and/clc_legend.csv/at_download/file>`_","Specifies areas according to CORINE Land Cover codes which are generally eligible for wind turbine placement."
-- distance,m,float,"Distance to keep from areas specified in ``distance_grid_codes``"
-- distance_grid_codes,--,"Any subset of the `CORINE Land Cover code list <http://www.eea.europa.eu/data-and-maps/data/corine-land-cover-2006-raster-1/corine-land-cover-classes-and/clc_legend.csv/at_download/file>`_","Specifies areas according to CORINE Land Cover codes to which wind turbines must maintain a distance specified in the setting ``distance``."
luisa,,,
-- grid_codes,--,"Any subset of the `LUISA Base Map codes in Annex 1 <https://publications.jrc.ec.europa.eu/repository/bitstream/JRC124621/technical_report_luisa_basemap_2018_v7_final.pdf>`_","Specifies areas according to the LUISA Base Map codes which are generally eligible for wind turbine placement."
-- distance,m,float,"Distance to keep from areas specified in ``distance_grid_codes``"
-- distance_grid_codes,--,"Any subset of the `LUISA Base Map codes in Annex 1 <https://publications.jrc.ec.europa.eu/repository/bitstream/JRC124621/technical_report_luisa_basemap_2018_v7_final.pdf>`_","Specifies areas according to the LUISA Base Map codes to which wind turbines must maintain a distance specified in the setting ``distance``."
natura,bool,"{true, false}","Switch to exclude `Natura 2000 <https://en.wikipedia.org/wiki/Natura_2000>`_ natural protection areas. Area is excluded if ``true``."
potential,--,"One of {'simple', 'conservative'}","Method to compute the maximal installable potential for a node; confer :ref:`renewableprofiles`"
clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables' per-unit availability time series, values below this threshold are set to zero."
correction_factor,--,float,"Correction factor for capacity factor time series."
excluder_resolution,m,float,"Resolution on which to perform geographical eligibility analysis."
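
For the onshore table above, the ``distance`` / ``distance_grid_codes`` pair keeps turbines away from selected land-cover classes by buffering them. A hedged sketch of the two corresponding exclusion calls, using the default onshore wind values from the configuration shown earlier (the raster path is a placeholder):

from atlite.gis import ExclusionContainer

excluder = ExclusionContainer(crs=3035, res=100)   # excluder_resolution: 100
# keep only the generally eligible CORINE classes (grid_codes)
eligible = [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 31, 32]
excluder.add_raster("corine.tif", codes=eligible, invert=True)
# additionally exclude a 1000 m buffer around artificial surfaces (distance, distance_grid_codes)
excluder.add_raster("corine.tif", codes=[1, 2, 3, 4, 5, 6], buffer=1000)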


View File

@ -72,7 +72,6 @@ solar_thermal,--,"{true, false}",Add option for using solar thermal to generate
solar_cf_correction,--,float,The correction factor for the value provided by the solar thermal profile calculations
marginal_cost_storage,currency/MWh ,float,The marginal cost of discharging batteries in distributed grids
methanation,--,"{true, false}",Add option for transforming hydrogen and CO2 into methane using methanation.
helmeth,--,"{true, false}",Add option for transforming power into gas using HELMETH (Integrated High-Temperature ELectrolysis and METHanation for Effective Power to Gas Conversion)
coal_cc,--,"{true, false}",Add option for coal CHPs with carbon capture
dac,--,"{true, false}",Add option for Direct Air Capture (DAC)
co2_vent,--,"{true, false}",Add option for venting CO2 from storages to the atmosphere.
@ -81,6 +80,8 @@ hydrogen_fuel_cell,--,"{true, false}",Add option to include hydrogen fuel cell f
hydrogen_turbine,--,"{true, false}",Add option to include hydrogen turbine for re-electrification. Assuming OCGT technology costs
SMR,--,"{true, false}",Add option for transforming natural gas into hydrogen and CO2 using Steam Methane Reforming (SMR)
SMR CC,--,"{true, false}",Add option for transforming natural gas into hydrogen and CO2 using Steam Methane Reforming (SMR) and Carbon Capture (CC)
regional_methanol_demand,--,"{true, false}",Spatially resolve methanol demand. Set to true if regional CO2 constraints are needed.
regional_oil_demand,--,"{true, false}",Spatially resolve oil demand. Set to true if regional CO2 constraints are needed.
regional_co2 _sequestration_potential,,,
-- enable,--,"{true, false}",Add option for regionally-resolved geological carbon dioxide sequestration potentials based on `CO2StoP <https://setis.ec.europa.eu/european-co2-storage-database_en>`_.
-- attribute,--,string,Name of the attribute for the sequestration potential
@ -109,6 +110,11 @@ electricity_distribution _grid,--,"{true, false}",Add a simplified representatio
electricity_distribution _grid_cost_factor,,,Multiplies the investment cost of the electricity distribution grid
,,,
electricity_grid _connection,--,"{true, false}",Add the cost of electricity grid connection for onshore wind and solar
transmission_efficiency,,,Section to specify transmission losses or compression energy demands of bidirectional links. Splits them into two capacity-linked unidirectional links.
-- {carrier},--,str,The carrier of the link.
-- -- efficiency_static,p.u.,float,Length-independent transmission efficiency.
-- -- efficiency_per_1000km,p.u. per 1000 km,float,Length-dependent transmission efficiency ($\eta^{\text{length}}$)
-- -- compression_per_1000km,p.u. per 1000 km,float,Length-dependent electricity demand for compression ($\eta \cdot \text{length}$) implemented as multi-link to local electricity bus.
H2_network,--,"{true, false}",Add option for new hydrogen pipelines
gas_network,--,"{true, false}","Add existing natural gas infrastructure, incl. LNG terminals, production and entry-points. The existing gas network is added with a lossless transport model. A length-weighted `k-edge augmentation algorithm <https://networkx.org/documentation/stable/reference/algorithms/generated/networkx.algorithms.connectivity.edge_augmentation.k_edge_augmentation.html#networkx.algorithms.connectivity.edge_augmentation.k_edge_augmentation>`_ can be run to add new candidate gas pipelines such that all regions of the model can be connected to the gas network. When activated, all the gas demands are regionally disaggregated as well."
H2_retrofit,--,"{true, false}",Add option for retrofitting existing pipelines to transport hydrogen.
@ -119,6 +125,7 @@ gas_distribution_grid _cost_factor,,,Multiplier for the investment cost of the g
,,,
biomass_spatial,--,"{true, false}",Add option for resolving biomass demand regionally
biomass_transport,--,"{true, false}",Add option for transporting solid biomass between nodes
biogas_upgrading_cc,--,"{true, false}",Add option to capture CO2 from biomass upgrading
conventional_generation,,,Add a more detailed description of conventional carriers. Any power generation requires the consumption of fuel from nodes representing that fuel.
biomass_to_liquid,--,"{true, false}",Add option for transforming solid biomass into liquid fuel with the same properties as oil
biosng,--,"{true, false}",Add option for transforming solid biomass into synthesis gas with the same properties as natural gas
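
The ``gas_network`` row above refers to a length-weighted k-edge augmentation for proposing candidate pipelines. A toy, self-contained illustration of the underlying networkx call (nodes and weights are invented, not taken from the gas network data):

import networkx as nx
from networkx.algorithms.connectivity.edge_augmentation import k_edge_augmentation

G = nx.Graph([("A", "B"), ("B", "C")])   # existing pipeline topology
G.add_node("D")                          # region without a gas connection
candidates = [                           # candidate pipelines with length weights
    ("A", "C", {"weight": 150}),
    ("C", "D", {"weight": 80}),
    ("B", "D", {"weight": 200}),
]
# cheapest set of candidate edges that makes the graph 1-edge-connected
new_edges = list(k_edge_augmentation(G, k=1, avail=candidates, weight="weight", partial=True))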


View File

@ -9,7 +9,7 @@ resource,,,
capacity_per_sqkm,:math:`MW/km^2`,float,"Allowable density of solar panel placement."
correction_factor,--,float,"A correction factor for the capacity factor (availability) time series."
corine,--,"Any subset of the `CORINE Land Cover code list <http://www.eea.europa.eu/data-and-maps/data/corine-land-cover-2006-raster-1/corine-land-cover-classes-and/clc_legend.csv/at_download/file>`_","Specifies areas according to CORINE Land Cover codes which are generally eligible for solar panel placement."
luisa,--,"Any subset of the `LUISA Base Map codes in Annex 1 <https://publications.jrc.ec.europa.eu/repository/bitstream/JRC124621/technical_report_luisa_basemap_2018_v7_final.pdf>`_","Specifies areas according to the LUISA Base Map codes which are generally eligible for solar panel placement."
natura,bool,"{true, false}","Switch to exclude `Natura 2000 <https://en.wikipedia.org/wiki/Natura_2000>`_ natural protection areas. Area is excluded if ``true``."
potential,--,"One of {'simple', 'conservative'}","Method to compute the maximal installable potential for a node; confer :ref:`renewableprofiles`"
clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables' per-unit availability time series, values below this threshold are set to zero."
excluder_resolution,m,float,"Resolution on which to perform geographical eligibility analysis."


View File

@ -6,6 +6,7 @@ options,,,
-- skip_iterations,bool,"{'true','false'}","Skip iterating, do not update impedances of branches. Defaults to true."
-- rolling_horizon,bool,"{'true','false'}","Whether to optimize the network in a rolling horizon manner, where the snapshot range is split into slices of size `horizon` which are solved consecutively."
-- seed,--,int,Random seed for increased deterministic behaviour.
-- custom_extra_functionality,--,str,Path to a Python file with custom extra functionality code to be injected into the solving rules of the workflow, relative to the ``rules`` directory.
-- track_iterations,bool,"{'true','false'}",Flag whether the intermediate branch capacities and objective function values are recorded for each iteration in ``network.lines['s_nom_opt_X']`` (where ``X`` labels the iteration)
-- min_iterations,--,int,Minimum number of solving iterations in between which resistance and reactance (``x/r``) are updated for branches according to ``s_nom_opt`` of the previous run.
-- max_iterations,--,int,Maximum number of solving iterations in between which resistance and reactance (``x/r``) are updated for branches according to ``s_nom_opt`` of the previous run.


View File

@ -116,7 +116,7 @@ of the individual parts.
topics we are working on. Please feel free to help or make suggestions.
This project is currently maintained by the `Department of Digital
Transformation in Energy Systems <https:/www.ensys.tu-berlin.de>`_ at the
Transformation in Energy Systems <https://www.tu.berlin/en/ensys>`_ at the
`Technische Universität Berlin <https://www.tu.berlin>`_. Previous versions were
developed within the `IAI <http://www.iai.kit.edu>`_ at the `Karlsruhe Institute
of Technology (KIT) <http://www.kit.edu/english/index.php>`_ which was funded by

View File

@ -10,6 +10,30 @@ Release Notes
Upcoming Release
================
* Remove all negative loads on the ``co2 atmosphere`` bus representing emissions
from, e.g., fixed fossil demands for transport oil. Instead, these are handled
more transparently with a fixed transport oil demand and a link taking care of
the emissions to the ``co2 atmosphere`` bus. This is also a preparation for
endogenous transport optimisation, where demand will be subject to
optimisation (e.g. fuel switching in the transport sector).
* Add the option to go from copperplated to regionally resolved methanol and
oil demand with the switches ``sector: regional_methanol_demand: true`` and
``sector: regional_oil_demand: true``. This allows nodal/regional CO2
constraints to be applied.
* Process emissions from steam crackers (i.e. naphtha processing for HVC) are now
piped from the consumption link to the process emissions bus where the model
can decide about carbon capture. Previously the process emissions for naphtha
were a fixed load.
* Add option to specify losses for bidirectional links, e.g. pipelines or HVDC
links, in configuration file under ``sector: transmission_efficiency:``. Users
can specify static or length-dependent values as well as a length-dependent
electricity demand for compression, which is implemented as a multi-link to
the local electricity buses. The bidirectional links will then be split into
two unidirectional links with linked capacities.
* Pin ``snakemake`` version to below 8.0.0, as the new version is not yet
supported by ``pypsa-eur``.
@ -34,14 +58,34 @@ Upcoming Release
* Rule ``retrieve_irena`` gets updated values for renewable capacities.
* Rule ``retrieve_wdpa`` updated to check dataset availability not only for the current and previous month, but also for the next month.
* Split configuration to enable SMR and SMR CC.
* Bugfix: The unit of the capital cost of Haber-Bosch plants was corrected.
* The configuration setting for country focus weights when clustering the
network has been moved from ``focus_weights:`` to ``clustering:
focus_weights:``. Backwards compatibility to old config files is maintained.
* Extend options for waste heat usage from Haber-Bosch, methanolisation and methanation.
* Use electrolysis waste heat by default.
* Add new ``sector_opts`` wildcard option "nowasteheat" to disable all waste heat usage.
* Set minimum part loads for PtX processes to 30% for methanolisation and methanation, and to 70% for Fischer-Tropsch synthesis.
* Add VOM as marginal cost to PtX processes.
* Add pelletizing costs for biomass boilers.
* The ``mock_snakemake`` function can now be used with a Snakefile from a different directory using the new ``root_dir`` argument.
* Switch to using hydrogen and electricity inputs for Haber-Bosch from https://github.com/PyPSA/technology-data.
* Add option to capture CO2 contained in biogas when upgrading (``sector: biogas_upgrading_cc``).
* Merged option to extend geographical scope to Ukraine and Moldova. These
countries are excluded by default, and the extension is currently constrained to the
power-sector-only parts of the workflow. A special config file
@ -52,11 +96,38 @@ Upcoming Release
reconnected to the main Ukrainian grid with the configuration option
`reconnect_crimea`.
* Add option to reference an additional source file where users can specify
custom ``extra_functionality`` constraints in the configuration file. The
default setting points to an empty stub at
``data/custom_extra_functionality.py``.
* Validate downloads from Zenodo using MD5 checksums. This identifies corrupted
or incomplete downloads.
* Add locations, capacities and costs of existing gas storage using Global
Energy Monitor's `Europe Gas Tracker
<https://globalenergymonitor.org/projects/europe-gas-tracker>`_.
* Remove HELMETH option.
* Print Irreducible Infeasible Subset (IIS) if model is infeasible. Only for
solvers with IIS support.
* Add option to use `LUISA Base Map
<https://publications.jrc.ec.europa.eu/repository/handle/JRC124621>`_ 50m land
coverage dataset for land eligibility analysis in
:mod:`build_renewable_profiles`. Settings are analogous to the CORINE dataset
but with the key ``luisa:`` in the configuration file. To leverage the
dataset's full advantages, set the excluder resolution to 50m
(``excluder_resolution: 50``). For land category codes, see `Annex 1 of the
technical documentation
<https://publications.jrc.ec.europa.eu/repository/bitstream/JRC124621/technical_report_luisa_basemap_2018_v7_final.pdf>`_.
**Bugs and Compatibility**
* A bug preventing the use of custom powerplants specified in ``data/custom_powerplants.csv`` was fixed. (https://github.com/PyPSA/pypsa-eur/pull/732)
* Fix nodal fraction in ``add_existing_baseyear`` when using distributed generators
* Fix typo in buses definition for oil boilers in ``add_industry`` in ``prepare_sector_network``
PyPSA-Eur 0.8.1 (27th July 2023)
@ -182,6 +253,8 @@ PyPSA-Eur 0.8.1 (27th July 2023)
(https://github.com/PyPSA/pypsa-eur/pull/672)
* Addressed deprecation warnings for ``pandas=2.0``. ``pandas=2.0`` is now minimum requirement.
PyPSA-Eur 0.8.0 (18th March 2023)
=================================

View File

@ -11,7 +11,7 @@ dependencies:
- pip
- atlite>=0.2.9
- pypsa>=0.26.0
- pypsa>=0.26.1
- linopy
- dask

View File

@ -208,10 +208,9 @@ rule build_ship_raster:
rule determine_availability_matrix_MD_UA:
input:
copernicus=RESOURCES
+ "Copernicus_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif",
wdpa=RESOURCES + f"WDPA_{bYYYY}.gpkg",
wdpa_marine=RESOURCES + f"WDPA_WDOECM_{bYYYY}_marine.gpkg",
copernicus="data/Copernicus_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif",
wdpa="data/WDPA.gpkg",
wdpa_marine="data/WDPA_WDOECM_marine.gpkg",
gebco=lambda w: (
"data/bundle/GEBCO_2014_2D.nc"
if "max_depth" in config["renewable"][w.technology].keys()
@ -269,6 +268,11 @@ rule build_renewable_profiles:
if config["renewable"][w.technology]["natura"]
else []
),
luisa=lambda w: (
"data/LUISA_basemap_020321_50m.tif"
if config["renewable"][w.technology].get("luisa")
else []
),
gebco=ancient(
lambda w: (
"data/bundle/GEBCO_2014_2D.nc"

View File

@ -85,12 +85,12 @@ if config["sector"]["gas_network"] or config["sector"]["H2_retrofit"]:
rule build_gas_input_locations:
input:
lng=HTTP.remote(
gem=HTTP.remote(
"https://globalenergymonitor.org/wp-content/uploads/2023/07/Europe-Gas-Tracker-2023-03-v3.xlsx",
keep_local=True,
),
entry="data/gas_network/scigrid-gas/data/IGGIELGN_BorderPoints.geojson",
production="data/gas_network/scigrid-gas/data/IGGIELGN_Productions.geojson",
storage="data/gas_network/scigrid-gas/data/IGGIELGN_Storages.geojson",
regions_onshore=RESOURCES
+ "regions_onshore_elec_s{simpl}_{clusters}.geojson",
regions_offshore=RESOURCES

View File

@ -2,6 +2,16 @@
#
# SPDX-License-Identifier: MIT
import os, sys, glob
helper_source_path = [match for match in glob.glob('**/_helpers.py', recursive=True)]
for path in helper_source_path:
path = os.path.dirname(os.path.abspath(path))
sys.path.insert(0, os.path.abspath(path))
from _helpers import validate_checksum
def memory(w):
factor = 3.0
@ -23,6 +33,13 @@ def memory(w):
return int(factor * (10000 + 195 * int(w.clusters)))
def input_custom_extra_functionality(w):
path = config["solving"]["options"].get("custom_extra_functionality", False)
if path:
return workflow.source_path(path)
return []
# Check if the workflow has access to the internet by trying to access the HEAD of specified url
def has_internet_access(url="www.zenodo.org") -> bool:
import http.client as http_client

View File

@ -77,6 +77,7 @@ if config["enable"]["retrieve"] and config["enable"].get("retrieve_cutout", True
retries: 2
run:
move(input[0], output[0])
validate_checksum(output[0], input[0])
if config["enable"]["retrieve"] and config["enable"].get("retrieve_cost_data", True):
@ -113,7 +114,7 @@ if config["enable"]["retrieve"] and config["enable"].get(
static=True,
),
output:
protected(RESOURCES + "natura.tiff"),
RESOURCES + "natura.tiff",
log:
LOGS + "retrieve_natura_raster.log",
resources:
@ -121,6 +122,7 @@ if config["enable"]["retrieve"] and config["enable"].get(
retries: 2
run:
move(input[0], output[0])
validate_checksum(output[0], input[0])
if config["enable"]["retrieve"] and config["enable"].get(
@ -167,6 +169,7 @@ if config["enable"]["retrieve"] and (
"IGGIELGN_LNGs.geojson",
"IGGIELGN_BorderPoints.geojson",
"IGGIELGN_Productions.geojson",
"IGGIELGN_Storages.geojson",
"IGGIELGN_PipeSegments.geojson",
]
@ -226,6 +229,7 @@ if config["enable"]["retrieve"]:
retries: 2
run:
move(input[0], output[0])
validate_checksum(output[0], input[0])
if config["enable"]["retrieve"]:
@ -239,29 +243,57 @@ if config["enable"]["retrieve"]:
static=True,
),
output:
RESOURCES
+ "Copernicus_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif",
"data/Copernicus_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif",
run:
move(input[0], output[0])
validate_checksum(output[0], input[0])
if config["enable"]["retrieve"]:
# Downloading LUISA Base Map for land cover and land use:
# Website: https://ec.europa.eu/jrc/en/luisa
rule retrieve_luisa_land_cover:
input:
HTTP.remote(
"jeodpp.jrc.ec.europa.eu/ftp/jrc-opendata/LUISA/EUROPE/Basemaps/LandUse/2018/LATEST/LUISA_basemap_020321_50m.tif",
static=True,
),
output:
"data/LUISA_basemap_020321_50m.tif",
run:
move(input[0], output[0])
if config["enable"]["retrieve"]:
current_month = datetime.now().strftime("%b")
current_year = datetime.now().strftime("%Y")
bYYYY = f"{current_month}{current_year}"
# Some logic to find the correct file URL
# Sometimes files are released delayed or ahead of schedule, check which file is currently available
def check_file_exists(url):
response = requests.head(url)
return response.status_code == 200
url = f"https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip"
# Basic pattern where WDPA files can be found
url_pattern = (
"https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public_shp.zip"
)
if not check_file_exists(url):
prev_month = (datetime.now() - timedelta(30)).strftime("%b")
bYYYY = f"{prev_month}{current_year}"
assert check_file_exists(
f"https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip"
), "The file does not exist."
# 3-letter month + 4 digit year for current/previous/next month to test
current_monthyear = datetime.now().strftime("%b%Y")
prev_monthyear = (datetime.now() - timedelta(30)).strftime("%b%Y")
next_monthyear = (datetime.now() + timedelta(30)).strftime("%b%Y")
# Test prioritised: current month -> previous -> next
for bYYYY in [current_monthyear, prev_monthyear, next_monthyear]:
if check_file_exists(url := url_pattern.format(bYYYY=bYYYY)):
break
else:
# If None of the three URLs are working
url = False
assert (
url
), f"No WDPA files found at {url_pattern} for bY='{current_monthyear}, {prev_monthyear}, or {next_monthyear}'"
# Downloading protected area database from WDPA
# extract the main zip and then merge the contained 3 zipped shapefiles
@ -269,15 +301,15 @@ if config["enable"]["retrieve"]:
rule download_wdpa:
input:
HTTP.remote(
f"d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public_shp.zip",
url,
static=True,
keep_local=True,
),
params:
zip=RESOURCES + f"WDPA_{bYYYY}_shp.zip",
folder=directory(RESOURCES + f"WDPA_{bYYYY}"),
zip="data/WDPA_shp.zip",
folder=directory("data/WDPA"),
output:
gpkg=RESOURCES + f"WDPA_{bYYYY}.gpkg",
gpkg=protected("data/WDPA.gpkg"),
run:
shell("cp {input} {params.zip}")
shell("unzip -o {params.zip} -d {params.folder}")
@ -300,10 +332,10 @@ if config["enable"]["retrieve"]:
keep_local=True,
),
params:
zip=RESOURCES + f"WDPA_WDOECM_{bYYYY}_marine.zip",
folder=directory(RESOURCES + f"WDPA_WDOECM_{bYYYY}_marine"),
zip="data/WDPA_WDOECM_marine.zip",
folder=directory("data/WDPA_WDOECM_marine"),
output:
gpkg=RESOURCES + f"WDPA_WDOECM_{bYYYY}_marine.gpkg",
gpkg=protected("data/WDPA_WDOECM_marine.gpkg"),
run:
shell("cp {input} {params.zip}")
shell("unzip -o {params.zip} -d {params.folder}")

View File

@ -11,6 +11,7 @@ rule solve_network:
co2_sequestration_potential=config["sector"].get(
"co2_sequestration_potential", 200
),
custom_extra_functionality=input_custom_extra_functionality,
input:
network=RESOURCES + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
config=RESULTS + "config.yaml",

View File

@ -88,6 +88,7 @@ rule solve_sector_network_myopic:
co2_sequestration_potential=config["sector"].get(
"co2_sequestration_potential", 200
),
custom_extra_functionality=input_custom_extra_functionality,
input:
network=RESULTS
+ "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",

View File

@ -11,6 +11,7 @@ rule solve_sector_network:
co2_sequestration_potential=config["sector"].get(
"co2_sequestration_potential", 200
),
custom_extra_functionality=input_custom_extra_functionality,
input:
network=RESULTS
+ "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",

View File

@ -118,6 +118,7 @@ rule solve_sector_network_perfect:
co2_sequestration_potential=config["sector"].get(
"co2_sequestration_potential", 200
),
custom_extra_functionality=input_custom_extra_functionality,
input:
network=RESULTS
+ "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc",

View File

@ -4,6 +4,7 @@
# SPDX-License-Identifier: MIT
import contextlib
import hashlib
import logging
import os
import urllib
@ -11,6 +12,7 @@ from pathlib import Path
import pandas as pd
import pytz
import requests
import yaml
from pypsa.components import component_attrs, components
from pypsa.descriptors import Dict
@ -318,3 +320,63 @@ def update_config_with_sector_opts(config, sector_opts):
if o.startswith("CF+"):
l = o.split("+")[1:]
update_config(config, parse(l))
def get_checksum_from_zenodo(file_url):
parts = file_url.split("/")
record_id = parts[parts.index("record") + 1]
filename = parts[-1]
response = requests.get(f"https://zenodo.org/api/records/{record_id}", timeout=30)
response.raise_for_status()
data = response.json()
for file in data["files"]:
if file["key"] == filename:
return file["checksum"]
return None
def validate_checksum(file_path, zenodo_url=None, checksum=None):
"""
Validate file checksum against provided or Zenodo-retrieved checksum.
Calculates the hash of a file using 64KB chunks. Compares it against a
given checksum or one from a Zenodo URL.
Parameters
----------
file_path : str
Path to the file for checksum validation.
zenodo_url : str, optional
URL of the file on Zenodo to fetch the checksum.
checksum : str, optional
Checksum (format 'hash_type:checksum_value') for validation.
Raises
------
AssertionError
If the checksum does not match, or if neither `checksum` nor `zenodo_url` is provided.
Examples
--------
>>> validate_checksum("/path/to/file", checksum="md5:abc123...")
>>> validate_checksum(
... "/path/to/file",
... zenodo_url="https://zenodo.org/record/12345/files/example.txt",
... )
If the checksum is invalid, an AssertionError will be raised.
"""
assert checksum or zenodo_url, "Either checksum or zenodo_url must be provided"
if zenodo_url:
checksum = get_checksum_from_zenodo(zenodo_url)
hash_type, checksum = checksum.split(":")
hasher = hashlib.new(hash_type)
with open(file_path, "rb") as f:
for chunk in iter(lambda: f.read(65536), b""): # 64kb chunks
hasher.update(chunk)
calculated_checksum = hasher.hexdigest()
assert (
calculated_checksum == checksum
), "Checksum is invalid. This may be due to an incomplete download. Delete the file and re-execute the rule."

View File

@ -120,6 +120,33 @@ def add_brownfield(n, n_p, year):
n.links.loc[new_pipes, "p_nom_min"] = 0.0
def disable_grid_expansion_if_LV_limit_hit(n):
if not "lv_limit" in n.global_constraints.index:
return
total_expansion = (
n.lines.eval("s_nom_min * length").sum()
+ n.links.query("carrier == 'DC'").eval("p_nom_min * length").sum()
).sum()
lv_limit = n.global_constraints.at["lv_limit", "constant"]
# allow small numerical differences
if lv_limit - total_expansion < 1:
logger.info(
f"LV is already reached (gap {diff} MWkm), disabling expansion and LV limit"
)
extendable_acs = n.lines.query("s_nom_extendable").index
n.lines.loc[extendable_acs, "s_nom_extendable"] = False
n.lines.loc[extendable_acs, "s_nom"] = n.lines.loc[extendable_acs, "s_nom_min"]
extendable_dcs = n.links.query("carrier == 'DC' and p_nom_extendable").index
n.links.loc[extendable_dcs, "p_nom_extendable"] = False
n.links.loc[extendable_dcs, "p_nom"] = n.links.loc[extendable_dcs, "p_nom_min"]
n.global_constraints.drop("lv_limit", inplace=True)
if __name__ == "__main__":
if "snakemake" not in globals():
from _helpers import mock_snakemake
@ -150,5 +177,7 @@ if __name__ == "__main__":
add_brownfield(n, n_p, year)
disable_grid_expansion_if_LV_limit_hit(n)
n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
n.export_to_netcdf(snakemake.output[0])

View File

@ -305,7 +305,7 @@ def add_power_capacities_installed_before_baseyear(n, grouping_years, costs, bas
else:
bus0 = vars(spatial)[carrier[generator]].nodes
if "EU" not in vars(spatial)[carrier[generator]].locations:
bus0 = bus0.intersection(capacity.index + " gas")
bus0 = bus0.intersection(capacity.index + " " + carrier[generator])
# check for missing bus
missing_bus = pd.Index(bus0).difference(n.buses.index)

View File

@ -134,7 +134,7 @@ def disaggregate_nuts0(bio):
# get population in nuts2
pop_nuts2 = pop.loc[pop.index.str.len() == 4]
by_country = pop_nuts2.total.groupby(pop_nuts2.ct).sum()
pop_nuts2["fraction"] = pop_nuts2.total / pop_nuts2.ct.map(by_country)
pop_nuts2.loc[:, "fraction"] = pop_nuts2.total / pop_nuts2.ct.map(by_country)
# distribute nuts0 data to nuts2 by population
bio_nodal = bio.loc[pop_nuts2.ct]

View File

@ -25,10 +25,7 @@ if __name__ == "__main__":
cutout = atlite.Cutout(snakemake.input.cutout)
clustered_regions = (
gpd.read_file(snakemake.input.regions_onshore)
.set_index("name")
.buffer(0)
.squeeze()
gpd.read_file(snakemake.input.regions_onshore).set_index("name").buffer(0)
)
I = cutout.indicatormatrix(clustered_regions)

View File

@ -81,7 +81,7 @@ def load_timeseries(fn, years, countries, powerstatistics=True):
return s[: -len(pattern)]
return (
pd.read_csv(fn, index_col=0, parse_dates=[0])
pd.read_csv(fn, index_col=0, parse_dates=[0], date_format="%Y-%m-%dT%H:%M:%SZ")
.tz_localize(None)
.filter(like=pattern)
.rename(columns=rename)
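
The change above passes an explicit ``date_format`` to ``pandas.read_csv`` (available since pandas 2.0), so the load time-series timestamps are parsed with a fixed format instead of being inferred. A small, self-contained illustration with made-up data:

import io
import pandas as pd

csv = io.StringIO(
    "utc_timestamp,load\n"
    "2013-01-01T00:00:00Z,42000\n"
    "2013-01-01T01:00:00Z,41000\n"
)
df = pd.read_csv(csv, index_col=0, parse_dates=[0], date_format="%Y-%m-%dT%H:%M:%SZ")
# the index is a DatetimeIndex parsed with the given format, no per-element format inference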

View File

@ -23,11 +23,10 @@ def read_scigrid_gas(fn):
return df
def build_gem_lng_data(lng_fn):
df = pd.read_excel(lng_fn[0], sheet_name="LNG terminals - data")
def build_gem_lng_data(fn):
df = pd.read_excel(fn[0], sheet_name="LNG terminals - data")
df = df.set_index("ComboID")
remove_status = ["Cancelled"]
remove_country = ["Cyprus", "Turkey"]
remove_terminal = ["Puerto de la Luz LNG Terminal", "Gran Canaria LNG Terminal"]
@ -42,9 +41,50 @@ def build_gem_lng_data(lng_fn):
return gpd.GeoDataFrame(df, geometry=geometry, crs="EPSG:4326")
def build_gas_input_locations(lng_fn, entry_fn, prod_fn, countries):
def build_gem_prod_data(fn):
df = pd.read_excel(fn[0], sheet_name="Gas extraction - main")
df = df.set_index("GEM Unit ID")
remove_country = ["Cyprus", "Türkiye"]
remove_fuel_type = ["oil"]
df = df.query(
"Status != 'shut in' \
& 'Fuel type' != 'oil' \
& Country != @remove_country \
& ~Latitude.isna() \
& ~Longitude.isna()"
).copy()
p = pd.read_excel(fn[0], sheet_name="Gas extraction - production")
p = p.set_index("GEM Unit ID")
p = p[p["Fuel description"] == "gas"]
capacities = pd.DataFrame(index=df.index)
for key in ["production", "production design capacity", "reserves"]:
cap = (
p.loc[p["Production/reserves"] == key, "Quantity (converted)"]
.groupby("GEM Unit ID")
.sum()
.reindex(df.index)
)
# assume capacity such that 3% of reserves can be extracted per year (25% quantile)
annualization_factor = 0.03 if key == "reserves" else 1.0
capacities[key] = cap * annualization_factor
df["mcm_per_year"] = (
capacities["production"]
.combine_first(capacities["production design capacity"])
.combine_first(capacities["reserves"])
)
geometry = gpd.points_from_xy(df["Longitude"], df["Latitude"])
return gpd.GeoDataFrame(df, geometry=geometry, crs="EPSG:4326")
def build_gas_input_locations(gem_fn, entry_fn, sto_fn, countries):
# LNG terminals
lng = build_gem_lng_data(lng_fn)
lng = build_gem_lng_data(gem_fn)
# Entry points from outside the model scope
entry = read_scigrid_gas(entry_fn)
@ -55,25 +95,30 @@ def build_gas_input_locations(lng_fn, entry_fn, prod_fn, countries):
| (entry.from_country == "NO") # malformed datapoint # entries from NO to GB
]
sto = read_scigrid_gas(sto_fn)
remove_country = ["RU", "UA", "TR", "BY"]
sto = sto.query("country_code != @remove_country")
# production sites inside the model scope
prod = read_scigrid_gas(prod_fn)
prod = prod.loc[
(prod.geometry.y > 35) & (prod.geometry.x < 30) & (prod.country_code != "DE")
]
prod = build_gem_prod_data(gem_fn)
mcm_per_day_to_mw = 437.5 # MCM/day to MWh/h
mcm_per_year_to_mw = 1.199 # MCM/year to MWh/h
mtpa_to_mw = 1649.224 # mtpa to MWh/h
lng["p_nom"] = lng["CapacityInMtpa"] * mtpa_to_mw
entry["p_nom"] = entry["max_cap_from_to_M_m3_per_d"] * mcm_per_day_to_mw
prod["p_nom"] = prod["max_supply_M_m3_per_d"] * mcm_per_day_to_mw
mcm_to_gwh = 11.36 # MCM to GWh
lng["capacity"] = lng["CapacityInMtpa"] * mtpa_to_mw
entry["capacity"] = entry["max_cap_from_to_M_m3_per_d"] * mcm_per_day_to_mw
prod["capacity"] = prod["mcm_per_year"] * mcm_per_year_to_mw
sto["capacity"] = sto["max_cushionGas_M_m3"] * mcm_to_gwh
lng["type"] = "lng"
entry["type"] = "pipeline"
prod["type"] = "production"
sto["type"] = "storage"
sel = ["geometry", "p_nom", "type"]
sel = ["geometry", "capacity", "type"]
return pd.concat([prod[sel], entry[sel], lng[sel]], ignore_index=True)
return pd.concat([prod[sel], entry[sel], lng[sel], sto[sel]], ignore_index=True)
if __name__ == "__main__":
@ -83,7 +128,7 @@ if __name__ == "__main__":
snakemake = mock_snakemake(
"build_gas_input_locations",
simpl="",
clusters="37",
clusters="128",
)
logging.basicConfig(level=snakemake.config["logging"]["level"])
@ -104,9 +149,9 @@ if __name__ == "__main__":
countries = regions.index.str[:2].unique().str.replace("GB", "UK")
gas_input_locations = build_gas_input_locations(
snakemake.input.lng,
snakemake.input.gem,
snakemake.input.entry,
snakemake.input.production,
snakemake.input.storage,
countries,
)
@ -116,9 +161,13 @@ if __name__ == "__main__":
gas_input_nodes.to_file(snakemake.output.gas_input_nodes, driver="GeoJSON")
ensure_columns = ["lng", "pipeline", "production", "storage"]
gas_input_nodes_s = (
gas_input_nodes.groupby(["bus", "type"])["p_nom"].sum().unstack()
gas_input_nodes.groupby(["bus", "type"])["capacity"]
.sum()
.unstack()
.reindex(columns=ensure_columns)
)
gas_input_nodes_s.columns.name = "p_nom"
gas_input_nodes_s.columns.name = "capacity"
gas_input_nodes_s.to_csv(snakemake.output.gas_input_nodes_simplified)
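
The conversion constants in the script above (437.5 MW per MCM/day, 1.199 MW per MCM/a, 1649.224 MW per mtpa, 11.36 GWh per MCM) are consistent with typical calorific values. The sketch below re-derives their order of magnitude assuming roughly 10.5 kWh per m³ of natural gas and 14.45 MWh per tonne of LNG (assumptions for illustration, not values stated in the script):

# back-of-the-envelope check of the unit conversions; calorific values are assumptions
kwh_per_m3 = 10.5        # assumed energy content of natural gas
mwh_per_t_lng = 14.45    # assumed energy content of LNG

mcm_per_day_to_mw = kwh_per_m3 * 1e6 / 1e3 / 24      # ~437.5 MW per MCM/day
mcm_per_year_to_mw = kwh_per_m3 * 1e6 / 1e3 / 8760   # ~1.199 MW per MCM/a
mtpa_to_mw = mwh_per_t_lng * 1e6 / 8760              # ~1649.5 MW per Mt LNG/a
mcm_to_gwh = kwh_per_m3 * 1e6 / 1e6                  # ~10.5 GWh per MCM (the script uses 11.36, i.e. a higher calorific value)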

View File

@ -31,10 +31,7 @@ if __name__ == "__main__":
cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time)
clustered_regions = (
gpd.read_file(snakemake.input.regions_onshore)
.set_index("name")
.buffer(0)
.squeeze()
gpd.read_file(snakemake.input.regions_onshore).set_index("name").buffer(0)
)
I = cutout.indicatormatrix(clustered_regions)

View File

@ -119,7 +119,7 @@ def calculate_line_rating(n, cutout):
.apply(lambda x: int(re.findall(r"(\d+)-bundle", x)[0]))
)
# Set default number of bundles per line
relevant_lines["n_bundle"].fillna(1, inplace=True)
relevant_lines["n_bundle"] = relevant_lines["n_bundle"].fillna(1)
R *= relevant_lines["n_bundle"]
R = calculate_resistance(T=353, R_ref=R)
Imax = cutout.line_rating(shapes, R, D=0.0218, Ts=353, epsilon=0.8, alpha=0.8)
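
The one-line change above replaces an ``inplace`` ``fillna`` on a selected column with an explicit re-assignment; the former is chained assignment, which pandas warns about and which may silently do nothing under copy-on-write. A small illustration with toy data:

import pandas as pd

lines = pd.DataFrame({"n_bundle": [2.0, None, 3.0]})

# discouraged: chained assignment on a temporary object, warned about by pandas
# lines["n_bundle"].fillna(1, inplace=True)

# preferred: compute the filled column and assign it back
lines["n_bundle"] = lines["n_bundle"].fillna(1)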

View File

@ -26,20 +26,9 @@ Relevant settings
renewable:
{technology}:
cutout:
corine:
grid_codes:
distance:
natura:
max_depth:
max_shore_distance:
min_shore_distance:
capacity_per_sqkm:
correction_factor:
potential:
min_p_max_pu:
clip_p_max_pu:
resource:
cutout: corine: luisa: grid_codes: distance: natura: max_depth:
max_shore_distance: min_shore_distance: capacity_per_sqkm:
correction_factor: min_p_max_pu: clip_p_max_pu: resource:
.. seealso::
Documentation of the configuration file ``config/config.yaml`` at
@ -48,21 +37,37 @@ Relevant settings
Inputs
------
- ``data/bundle/corine/g250_clc06_V18_5.tif``: `CORINE Land Cover (CLC) <https://land.copernicus.eu/pan-european/corine-land-cover>`_ inventory on `44 classes <https://wiki.openstreetmap.org/wiki/Corine_Land_Cover#Tagging>`_ of land use (e.g. forests, arable land, industrial, urban areas).
- ``data/bundle/corine/g250_clc06_V18_5.tif``: `CORINE Land Cover (CLC)
<https://land.copernicus.eu/pan-european/corine-land-cover>`_ inventory on `44
classes <https://wiki.openstreetmap.org/wiki/Corine_Land_Cover#Tagging>`_ of
land use (e.g. forests, arable land, industrial, urban areas) at 100m
resolution.
.. image:: img/corine.png
:scale: 33 %
- ``data/bundle/GEBCO_2014_2D.nc``: A `bathymetric <https://en.wikipedia.org/wiki/Bathymetry>`_ data set with a global terrain model for ocean and land at 15 arc-second intervals by the `General Bathymetric Chart of the Oceans (GEBCO) <https://www.gebco.net/data_and_products/gridded_bathymetry_data/>`_.
- ``data/LUISA_basemap_020321_50m.tif``: `LUISA Base Map
<https://publications.jrc.ec.europa.eu/repository/handle/JRC124621>`_ land
coverage dataset at 50m resolution similar to CORINE. For codes in relation to
CORINE land cover, see `Annex 1 of the technical documentation
<https://publications.jrc.ec.europa.eu/repository/bitstream/JRC124621/technical_report_luisa_basemap_2018_v7_final.pdf>`_.
- ``data/bundle/GEBCO_2014_2D.nc``: A `bathymetric
<https://en.wikipedia.org/wiki/Bathymetry>`_ data set with a global terrain
model for ocean and land at 15 arc-second intervals by the `General
Bathymetric Chart of the Oceans (GEBCO)
<https://www.gebco.net/data_and_products/gridded_bathymetry_data/>`_.
.. image:: img/gebco_2019_grid_image.jpg
:scale: 50 %
**Source:** `GEBCO <https://www.gebco.net/data_and_products/images/gebco_2019_grid_image.jpg>`_
**Source:** `GEBCO
<https://www.gebco.net/data_and_products/images/gebco_2019_grid_image.jpg>`_
- ``resources/natura.tiff``: confer :ref:`natura`
- ``resources/offshore_shapes.geojson``: confer :ref:`shapes`
- ``resources/regions_onshore.geojson``: (if not offshore wind), confer :ref:`busregions`
- ``resources/regions_onshore.geojson``: (if not offshore wind), confer
:ref:`busregions`
- ``resources/regions_offshore.geojson``: (if offshore wind), :ref:`busregions`
- ``"cutouts/" + params["renewable"][{technology}]['cutout']``: :ref:`cutout`
- ``networks/base.nc``: :ref:`base`
@ -128,25 +133,26 @@ Description
This script functions at two main spatial resolutions: the resolution of the
network nodes and their `Voronoi cells
<https://en.wikipedia.org/wiki/Voronoi_diagram>`_, and the resolution of the
cutout grid cells for the weather data. Typically the weather data grid is
finer than the network nodes, so we have to work out the distribution of
generators across the grid cells within each Voronoi cell. This is done by
taking account of a combination of the available land at each grid cell and the
capacity factor there.
cutout grid cells for the weather data. Typically the weather data grid is finer
than the network nodes, so we have to work out the distribution of generators
across the grid cells within each Voronoi cell. This is done by taking account
of a combination of the available land at each grid cell and the capacity factor
there.
First the script computes how much of the technology can be installed at each
cutout grid cell and each node using the `GLAES
<https://github.com/FZJ-IEK3-VSA/glaes>`_ library. This uses the CORINE land use data,
Natura2000 nature reserves and GEBCO bathymetry data.
cutout grid cell and each node using the `atlite
<https://github.com/pypsa/atlite>`_ library. This uses the CORINE land use data,
LUISA land use data, Natura2000 nature reserves, GEBCO bathymetry data, and
shipping lanes.
.. image:: img/eligibility.png
:scale: 50 %
:align: center
To compute the layout of generators in each node's Voronoi cell, the
installable potential in each grid cell is multiplied with the capacity factor
at each grid cell. This is done since we assume more generators are installed
at cells with a higher capacity factor.
To compute the layout of generators in each node's Voronoi cell, the installable
potential in each grid cell is multiplied with the capacity factor at each grid
cell. This is done since we assume more generators are installed at cells with a
higher capacity factor.
.. image:: img/offwinddc-gridcell.png
:scale: 50 %
@ -164,20 +170,14 @@ at cells with a higher capacity factor.
:scale: 50 %
:align: center
This layout is then used to compute the generation availability time series
from the weather data cutout from ``atlite``.
This layout is then used to compute the generation availability time series from
the weather data cutout from ``atlite``.
Two methods are available to compute the maximal installable potential for the
node (`p_nom_max`): ``simple`` and ``conservative``:
- ``simple`` adds up the installable potentials of the individual grid cells.
If the model comes close to this limit, then the time series may slightly
overestimate production since it is assumed the geographical distribution is
proportional to capacity factor.
- ``conservative`` ascertains the nodal limit by increasing capacities
proportional to the layout until the limit of an individual grid cell is
reached.
The maximal installable potential for the node (`p_nom_max`) is computed by
adding up the installable potentials of the individual grid cells. If the model
comes close to this limit, then the time series may slightly overestimate
production since it is assumed the geographical distribution is proportional to
capacity factor.
"""
import functools
import logging
@ -210,10 +210,6 @@ if __name__ == "__main__":
resource = params["resource"] # pv panel params / wind turbine params
correction_factor = params.get("correction_factor", 1.0)
capacity_per_sqkm = params["capacity_per_sqkm"]
p_nom_max_meth = params.get("potential", "conservative")
if isinstance(params.get("corine", {}), list):
params["corine"] = {"grid_codes": params["corine"]}
if correction_factor != 1.0:
logger.info(f"correction_factor is set as {correction_factor}")
@ -240,16 +236,29 @@ if __name__ == "__main__":
if params["natura"]:
excluder.add_raster(snakemake.input.natura, nodata=0, allow_no_overlap=True)
corine = params.get("corine", {})
if "grid_codes" in corine:
codes = corine["grid_codes"]
excluder.add_raster(snakemake.input.corine, codes=codes, invert=True, crs=3035)
if corine.get("distance", 0.0) > 0.0:
codes = corine["distance_grid_codes"]
buffer = corine["distance"]
excluder.add_raster(
snakemake.input.corine, codes=codes, buffer=buffer, crs=3035
)
for dataset in ["corine", "luisa"]:
kwargs = {"nodata": 0} if dataset == "luisa" else {}
settings = params.get(dataset, {})
if not settings:
continue
if dataset == "luisa" and res > 50:
logger.info(
"LUISA data is available at 50m resolution, "
f"but coarser {res}m resolution is used."
)
if isinstance(settings, list):
settings = {"grid_codes": settings}
if "grid_codes" in settings:
codes = settings["grid_codes"]
excluder.add_raster(
snakemake.input[dataset], codes=codes, invert=True, crs=3035, **kwargs
)
if settings.get("distance", 0.0) > 0.0:
codes = settings["distance_grid_codes"]
buffer = settings["distance"]
excluder.add_raster(
snakemake.input[dataset], codes=codes, buffer=buffer, crs=3035, **kwargs
)
if params.get("ship_threshold"):
shipping_threshold = (
@ -277,15 +286,14 @@ if __name__ == "__main__":
snakemake.input.country_shapes, buffer=buffer, invert=True
)
logger.info("Calculate landuse availability...")
start = time.time()
kwargs = dict(nprocesses=nprocesses, disable_progressbar=noprogress)
if noprogress:
logger.info("Calculate landuse availabilities...")
start = time.time()
availability = cutout.availabilitymatrix(regions, excluder, **kwargs)
duration = time.time() - start
logger.info(f"Completed availability calculation ({duration:2.2f}s)")
else:
availability = cutout.availabilitymatrix(regions, excluder, **kwargs)
availability = cutout.availabilitymatrix(regions, excluder, **kwargs)
duration = time.time() - start
logger.info(f"Completed landuse availability calculation ({duration:2.2f}s)")
# For Moldova and Ukraine: Overwrite parts not covered by Corine with
# externally determined available areas
@ -304,8 +312,19 @@ if __name__ == "__main__":
func = getattr(cutout, resource.pop("method"))
if client is not None:
resource["dask_kwargs"] = {"scheduler": client}
logger.info("Calculate average capacity factor...")
start = time.time()
capacity_factor = correction_factor * func(capacity_factor=True, **resource)
layout = capacity_factor * area * capacity_per_sqkm
duration = time.time() - start
logger.info(f"Completed average capacity factor calculation ({duration:2.2f}s)")
logger.info("Calculate weighted capacity factor time series...")
start = time.time()
profile, capacities = func(
matrix=availability.stack(spatial=["y", "x"]),
layout=layout,
@ -315,17 +334,13 @@ if __name__ == "__main__":
**resource,
)
logger.info(f"Calculating maximal capacity per bus (method '{p_nom_max_meth}')")
if p_nom_max_meth == "simple":
p_nom_max = capacity_per_sqkm * availability @ area
elif p_nom_max_meth == "conservative":
max_cap_factor = capacity_factor.where(availability != 0).max(["x", "y"])
p_nom_max = capacities / max_cap_factor
else:
raise AssertionError(
'Config key `potential` should be one of "simple" '
f'(default) or "conservative", not "{p_nom_max_meth}"'
)
duration = time.time() - start
logger.info(
f"Completed weighted capacity factor time series calculation ({duration:2.2f}s)"
)
logger.info(f"Calculating maximal capacity per bus")
p_nom_max = capacity_per_sqkm * availability @ area
logger.info("Calculate average distances.")
layoutmatrix = (layout * availability).stack(spatial=["y", "x"])


@ -880,7 +880,7 @@ def calculate_gain_utilisation_factor(heat_transfer_perm2, Q_ht, Q_gain):
Calculates gain utilisation factor nu.
"""
# time constant of the building tau [h] = c_m [Wh/(m^2K)] * 1 /(H_tr_e+H_tb*H_ve) [m^2 K /W]
tau = c_m / heat_transfer_perm2.groupby(level=1, axis=1).sum()
tau = c_m / heat_transfer_perm2.T.groupby(level=1).sum().T
alpha = alpha_H_0 + (tau / tau_H_0)
# heat balance ratio
gamma = (1 / Q_ht).mul(Q_gain.sum(axis=1), axis=0)


@ -64,7 +64,7 @@ if __name__ == "__main__":
with zipfile.ZipFile(snakemake.input.ship_density) as zip_f:
zip_f.extract("shipdensity_global.tif")
with rioxarray.open_rasterio("shipdensity_global.tif") as ship_density:
ship_density = ship_density.drop(["band"]).sel(
ship_density = ship_density.drop_vars(["band"]).sel(
x=slice(min(xs), max(Xs)), y=slice(max(Ys), min(ys))
)
ship_density.rio.to_raster(snakemake.output[0])


@ -33,10 +33,7 @@ if __name__ == "__main__":
cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time)
clustered_regions = (
gpd.read_file(snakemake.input.regions_onshore)
.set_index("name")
.buffer(0)
.squeeze()
gpd.read_file(snakemake.input.regions_onshore).set_index("name").buffer(0)
)
I = cutout.indicatormatrix(clustered_regions)


@ -31,10 +31,7 @@ if __name__ == "__main__":
cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time)
clustered_regions = (
gpd.read_file(snakemake.input.regions_onshore)
.set_index("name")
.buffer(0)
.squeeze()
gpd.read_file(snakemake.input.regions_onshore).set_index("name").buffer(0)
)
I = cutout.indicatormatrix(clustered_regions)


@ -446,6 +446,10 @@ def calculate_metrics(n, label, metrics):
if "CO2Limit" in n.global_constraints.index:
metrics.at["co2_shadow", label] = n.global_constraints.at["CO2Limit", "mu"]
if "co2_sequestration_limit" in n.global_constraints.index:
metrics.at["co2_storage_shadow", label] = n.global_constraints.at[
"co2_sequestration_limit", "mu"
]
return metrics


@ -31,7 +31,7 @@ def rename_techs_tyndp(tech):
tech = rename_techs(tech)
if "heat pump" in tech or "resistive heater" in tech:
return "power-to-heat"
elif tech in ["H2 Electrolysis", "methanation", "helmeth", "H2 liquefaction"]:
elif tech in ["H2 Electrolysis", "methanation", "H2 liquefaction"]:
return "power-to-gas"
elif tech == "H2":
return "H2 storage"
@ -495,7 +495,7 @@ def plot_ch4_map(network):
# make a fake MultiIndex so that area is correct for legend
fossil_gas.index = pd.MultiIndex.from_product([fossil_gas.index, ["fossil gas"]])
methanation_i = n.links[n.links.carrier.isin(["helmeth", "Sabatier"])].index
methanation_i = n.links.query("carrier == 'Sabatier'").index
methanation = (
abs(
n.links_t.p1.loc[:, methanation_i].mul(


@ -121,7 +121,6 @@ preferred_order = pd.Index(
"gas boiler",
"gas",
"natural gas",
"helmeth",
"methanation",
"ammonia",
"hydrogen storage",


@ -95,12 +95,14 @@ def define_spatial(nodes, options):
spatial.gas.industry = nodes + " gas for industry"
spatial.gas.industry_cc = nodes + " gas for industry CC"
spatial.gas.biogas_to_gas = nodes + " biogas to gas"
spatial.gas.biogas_to_gas_cc = nodes + " biogas to gas CC"
else:
spatial.gas.nodes = ["EU gas"]
spatial.gas.locations = ["EU"]
spatial.gas.biogas = ["EU biogas"]
spatial.gas.industry = ["gas for industry"]
spatial.gas.biogas_to_gas = ["EU biogas to gas"]
spatial.gas.biogas_to_gas_cc = ["EU biogas to gas CC"]
if options.get("co2_spatial", options["co2network"]):
spatial.gas.industry_cc = nodes + " gas for industry CC"
else:
@ -127,15 +129,43 @@ def define_spatial(nodes, options):
spatial.h2.locations = nodes
# methanol
# beware: unlike other carriers, this uses locations rather than locations+carriername,
# which avoids a separation between nodes and locations
spatial.methanol = SimpleNamespace()
spatial.methanol.nodes = ["EU methanol"]
spatial.methanol.locations = ["EU"]
if options["regional_methanol_demand"]:
spatial.methanol.demand_locations = nodes
spatial.methanol.shipping = nodes + " shipping methanol"
else:
spatial.methanol.demand_locations = ["EU"]
spatial.methanol.shipping = ["EU shipping methanol"]
# oil
spatial.oil = SimpleNamespace()
spatial.oil.nodes = ["EU oil"]
spatial.oil.locations = ["EU"]
if options["regional_oil_demand"]:
spatial.oil.demand_locations = nodes
spatial.oil.naphtha = nodes + " naphtha for industry"
spatial.oil.kerosene = nodes + " kerosene for aviation"
spatial.oil.shipping = nodes + " shipping oil"
spatial.oil.agriculture_machinery = nodes + " agriculture machinery oil"
spatial.oil.land_transport = nodes + " land transport oil"
else:
spatial.oil.demand_locations = ["EU"]
spatial.oil.naphtha = ["EU naphtha for industry"]
spatial.oil.kerosene = ["EU kerosene for aviation"]
spatial.oil.shipping = ["EU shipping oil"]
spatial.oil.agriculture_machinery = ["EU agriculture machinery oil"]
spatial.oil.land_transport = ["EU land transport oil"]
# uranium
spatial.uranium = SimpleNamespace()
spatial.uranium.nodes = ["EU uranium"]
@ -452,10 +482,11 @@ def add_carrier_buses(n, carrier, nodes=None):
n.add("Carrier", carrier)
unit = "MWh_LHV" if carrier == "gas" else "MWh_th"
# preliminary value for non-gas carriers to avoid zeros
capital_cost = costs.at["gas storage", "fixed"] if carrier == "gas" else 0.02
n.madd("Bus", nodes, location=location, carrier=carrier, unit=unit)
# capital cost could be corrected to e.g. 0.2 EUR/kWh * annuity and O&M
n.madd(
"Store",
nodes + " Store",
@ -463,8 +494,7 @@ def add_carrier_buses(n, carrier, nodes=None):
e_nom_extendable=True,
e_cyclic=True,
carrier=carrier,
capital_cost=0.2
* costs.at[carrier, "discount rate"], # preliminary value to avoid zeros
capital_cost=capital_cost,
)
if carrier == "gas":
@ -808,14 +838,13 @@ def add_ammonia(n, costs):
bus2=nodes + " H2",
p_nom_extendable=True,
carrier="Haber-Bosch",
efficiency=1
/ (
cf_industry["MWh_elec_per_tNH3_electrolysis"]
/ cf_industry["MWh_NH3_per_tNH3"]
), # output: MW_NH3 per MW_elec
efficiency2=-cf_industry["MWh_H2_per_tNH3_electrolysis"]
/ cf_industry["MWh_elec_per_tNH3_electrolysis"], # input: MW_H2 per MW_elec
capital_cost=costs.at["Haber-Bosch", "fixed"],
efficiency=1 / costs.at["Haber-Bosch", "electricity-input"],
efficiency2=-costs.at["Haber-Bosch", "hydrogen-input"]
/ costs.at["Haber-Bosch", "electricity-input"],
capital_cost=costs.at["Haber-Bosch", "fixed"]
/ costs.at["Haber-Bosch", "electricity-input"],
marginal_cost=costs.at["Haber-Bosch", "VOM"]
/ costs.at["Haber-Bosch", "electricity-input"],
lifetime=costs.at["Haber-Bosch", "lifetime"],
)
@ -1026,7 +1055,7 @@ def insert_gas_distribution_costs(n, costs):
f"Inserting gas distribution grid with investment cost factor of {f_costs}"
)
capital_cost = costs.loc["electricity distribution grid"]["fixed"] * f_costs
capital_cost = costs.at["electricity distribution grid", "fixed"] * f_costs
# gas boilers
gas_b = n.links.index[
@ -1103,6 +1132,7 @@ def add_storage_and_grids(n, costs):
efficiency=costs.at["OCGT", "efficiency"],
capital_cost=costs.at["OCGT", "fixed"]
* costs.at["OCGT", "efficiency"], # NB: fixed cost is per MWel
marginal_cost=costs.at["OCGT", "VOM"],
lifetime=costs.at["OCGT", "lifetime"],
)
@ -1163,7 +1193,7 @@ def add_storage_and_grids(n, costs):
if options["gas_network"]:
logger.info(
"Add natural gas infrastructure, incl. LNG terminals, production and entry-points."
"Add natural gas infrastructure, incl. LNG terminals, production, storage and entry-points."
)
if options["H2_retrofit"]:
@ -1208,10 +1238,25 @@ def add_storage_and_grids(n, costs):
remove_i = n.generators[gas_i & internal_i].index
n.generators.drop(remove_i, inplace=True)
p_nom = gas_input_nodes.sum(axis=1).rename(lambda x: x + " gas")
input_types = ["lng", "pipeline", "production"]
p_nom = gas_input_nodes[input_types].sum(axis=1).rename(lambda x: x + " gas")
n.generators.loc[gas_i, "p_nom_extendable"] = False
n.generators.loc[gas_i, "p_nom"] = p_nom
# add existing gas storage capacity
gas_i = n.stores.carrier == "gas"
e_nom = (
gas_input_nodes["storage"]
.rename(lambda x: x + " gas Store")
.reindex(n.stores.index)
.fillna(0.0)
* 1e3
) # MWh_LHV
e_nom.clip(
upper=e_nom.quantile(0.98), inplace=True
) # limit extremely large storage
n.stores.loc[gas_i, "e_nom_min"] = e_nom
# add candidates for new gas pipelines to achieve full connectivity
G = nx.Graph()
@ -1346,6 +1391,7 @@ def add_storage_and_grids(n, costs):
bus2=spatial.co2.nodes,
p_nom_extendable=True,
carrier="Sabatier",
p_min_pu=options.get("min_part_load_methanation", 0),
efficiency=costs.at["methanation", "efficiency"],
efficiency2=-costs.at["methanation", "efficiency"]
* costs.at["gas", "CO2 intensity"],
@ -1354,23 +1400,6 @@ def add_storage_and_grids(n, costs):
lifetime=costs.at["methanation", "lifetime"],
)
if options["helmeth"]:
n.madd(
"Link",
spatial.nodes,
suffix=" helmeth",
bus0=nodes,
bus1=spatial.gas.nodes,
bus2=spatial.co2.nodes,
carrier="helmeth",
p_nom_extendable=True,
efficiency=costs.at["helmeth", "efficiency"],
efficiency2=-costs.at["helmeth", "efficiency"]
* costs.at["gas", "CO2 intensity"],
capital_cost=costs.at["helmeth", "fixed"],
lifetime=costs.at["helmeth", "lifetime"],
)
if options.get("coal_cc"):
n.madd(
"Link",
@ -1470,8 +1499,8 @@ def add_land_transport(n, costs):
n.madd(
"Bus",
nodes,
location=nodes,
suffix=" EV battery",
location=nodes,
carrier="Li ion",
unit="MWh_el",
)
@ -1563,29 +1592,42 @@ def add_land_transport(n, costs):
ice_efficiency = options["transport_internal_combustion_efficiency"]
n.madd(
"Load",
nodes,
suffix=" land transport oil",
bus=spatial.oil.nodes,
carrier="land transport oil",
p_set=ice_share / ice_efficiency * transport[nodes],
)
co2 = (
p_set_land_transport_oil = (
ice_share
/ ice_efficiency
* transport[nodes].sum().sum()
/ nhours
* costs.at["oil", "CO2 intensity"]
* transport[nodes].rename(columns=lambda x: x + " land transport oil")
)
n.add(
if not options["regional_oil_demand"]:
p_set_land_transport_oil = p_set_land_transport_oil.sum(axis=1).to_frame(
name="EU land transport oil"
)
n.madd(
"Bus",
spatial.oil.land_transport,
location=spatial.oil.demand_locations,
carrier="land transport oil",
unit="land transport",
)
n.madd(
"Load",
"land transport oil emissions",
bus="co2 atmosphere",
carrier="land transport oil emissions",
p_set=-co2,
spatial.oil.land_transport,
bus=spatial.oil.land_transport,
carrier="land transport oil",
p_set=p_set_land_transport_oil,
)
n.madd(
"Link",
spatial.oil.land_transport,
bus0=spatial.oil.nodes,
bus1=spatial.oil.land_transport,
bus2="co2 atmosphere",
carrier="land transport oil",
efficiency2=costs.at["oil", "CO2 intensity"],
p_nom_extendable=True,
)
@ -1700,6 +1742,7 @@ def add_heat(n, costs):
n.madd(
"Generator",
nodes[name] + f" {name} heat vent",
bus=nodes[name] + f" {name} heat",
location=nodes[name],
carrier=name + " heat vent",
p_nom_extendable=True,
@ -2182,12 +2225,42 @@ def add_biomass(n, costs):
bus1=spatial.gas.nodes,
bus2="co2 atmosphere",
carrier="biogas to gas",
capital_cost=costs.loc["biogas upgrading", "fixed"],
marginal_cost=costs.loc["biogas upgrading", "VOM"],
capital_cost=costs.at["biogas", "fixed"]
+ costs.at["biogas upgrading", "fixed"],
marginal_cost=costs.at["biogas upgrading", "VOM"],
efficiency=costs.at["biogas", "efficiency"],
efficiency2=-costs.at["gas", "CO2 intensity"],
p_nom_extendable=True,
)
if options.get("biogas_upgrading_cc"):
# For costs, assume the CO2 from upgrading is pure (as in amine scrubbing), i.e. the cost with and
# without CC is equivalent. Biomass CHP capture costs are added because biogas is often small-scale
# and decentral, hence far from e.g. a CO2 grid or buyers; this serves as a proxy for the added cost
# of e.g. a raw biogas pipeline to a central upgrading facility.
n.madd(
"Link",
spatial.gas.biogas_to_gas_cc,
bus0=spatial.gas.biogas,
bus1=spatial.gas.nodes,
bus2="co2 stored",
bus3="co2 atmosphere",
carrier="biogas to gas CC",
capital_cost=costs.at["biogas CC", "fixed"]
+ costs.at["biogas upgrading", "fixed"]
+ costs.at["biomass CHP capture", "fixed"]
* costs.at["biogas CC", "CO2 stored"],
marginal_cost=costs.at["biogas CC", "VOM"]
+ costs.at["biogas upgrading", "VOM"],
efficiency=costs.at["biogas CC", "efficiency"],
efficiency2=costs.at["biogas CC", "CO2 stored"]
* costs.at["biogas CC", "capture rate"],
efficiency3=-costs.at["gas", "CO2 intensity"]
- costs.at["biogas CC", "CO2 stored"]
* costs.at["biogas CC", "capture rate"],
p_nom_extendable=True,
)
if options["biomass_transport"]:
# add biomass transport
transport_costs = pd.read_csv(
@ -2313,6 +2386,7 @@ def add_biomass(n, costs):
efficiency=costs.at["biomass boiler", "efficiency"],
capital_cost=costs.at["biomass boiler", "efficiency"]
* costs.at["biomass boiler", "fixed"],
marginal_cost=costs.at["biomass boiler", "pelletizing cost"],
lifetime=costs.at["biomass boiler", "lifetime"],
)
@ -2332,7 +2406,7 @@ def add_biomass(n, costs):
+ costs.at["BtL", "CO2 stored"],
p_nom_extendable=True,
capital_cost=costs.at["BtL", "fixed"],
marginal_cost=costs.at["BtL", "efficiency"] * costs.loc["BtL", "VOM"],
marginal_cost=costs.at["BtL", "efficiency"] * costs.at["BtL", "VOM"],
)
# TODO: Update with energy penalty
@ -2353,7 +2427,7 @@ def add_biomass(n, costs):
p_nom_extendable=True,
capital_cost=costs.at["BtL", "fixed"]
+ costs.at["biomass CHP capture", "fixed"] * costs.at["BtL", "CO2 stored"],
marginal_cost=costs.at["BtL", "efficiency"] * costs.loc["BtL", "VOM"],
marginal_cost=costs.at["BtL", "efficiency"] * costs.at["BtL", "VOM"],
)
# BioSNG from solid biomass
@ -2372,7 +2446,7 @@ def add_biomass(n, costs):
+ costs.at["BioSNG", "CO2 stored"],
p_nom_extendable=True,
capital_cost=costs.at["BioSNG", "fixed"],
marginal_cost=costs.at["BioSNG", "efficiency"] * costs.loc["BioSNG", "VOM"],
marginal_cost=costs.at["BioSNG", "efficiency"] * costs.at["BioSNG", "VOM"],
)
# TODO: Update with energy penalty for CC
@ -2396,7 +2470,7 @@ def add_biomass(n, costs):
capital_cost=costs.at["BioSNG", "fixed"]
+ costs.at["biomass CHP capture", "fixed"]
* costs.at["BioSNG", "CO2 stored"],
marginal_cost=costs.at["BioSNG", "efficiency"] * costs.loc["BioSNG", "VOM"],
marginal_cost=costs.at["BioSNG", "efficiency"] * costs.at["BioSNG", "VOM"],
)
@ -2448,9 +2522,14 @@ def add_industry(n, costs):
efficiency=1.0,
)
if len(spatial.biomass.industry_cc) <= 1 and len(spatial.co2.nodes) > 1:
link_names = nodes + " " + spatial.biomass.industry_cc
else:
link_names = spatial.biomass.industry_cc
n.madd(
"Link",
spatial.biomass.industry_cc,
link_names,
bus0=spatial.biomass.nodes,
bus1=spatial.biomass.industry,
bus2="co2 atmosphere",
@ -2629,6 +2708,8 @@ def add_industry(n, costs):
p_min_pu=options.get("min_part_load_methanolisation", 0),
capital_cost=costs.at["methanolisation", "fixed"]
* options["MWh_MeOH_per_MWh_H2"], # EUR/MW_H2/a
marginal_cost=options["MWh_MeOH_per_MWh_H2"]
* costs.at["methanolisation", "VOM"],
lifetime=costs.at["methanolisation", "lifetime"],
efficiency=options["MWh_MeOH_per_MWh_H2"],
efficiency2=-options["MWh_MeOH_per_MWh_H2"] / options["MWh_MeOH_per_MWh_e"],
@ -2638,48 +2719,44 @@ def add_industry(n, costs):
efficiency = (
options["shipping_oil_efficiency"] / options["shipping_methanol_efficiency"]
)
p_set_methanol = shipping_methanol_share * p_set.sum() * efficiency
p_set_methanol = (
shipping_methanol_share
* p_set.rename(lambda x: x + " shipping methanol")
* efficiency
)
if not options["regional_methanol_demand"]:
p_set_methanol = p_set_methanol.sum()
n.madd(
"Bus",
spatial.methanol.shipping,
location=spatial.methanol.demand_locations,
carrier="shipping methanol",
unit="MWh_LHV",
)
n.madd(
"Load",
spatial.methanol.nodes,
suffix=" shipping methanol",
bus=spatial.methanol.nodes,
spatial.methanol.shipping,
bus=spatial.methanol.shipping,
carrier="shipping methanol",
p_set=p_set_methanol,
)
# CO2 intensity methanol based on stoichiometric calculation with 22.7 GJ/t methanol (32 g/mol), CO2 (44 g/mol), 277.78 MWh/TJ = 0.218 t/MWh
co2 = p_set_methanol / options["MWh_MeOH_per_tCO2"]
n.add(
"Load",
"shipping methanol emissions",
bus="co2 atmosphere",
carrier="shipping methanol emissions",
p_set=-co2,
)
if shipping_oil_share:
p_set_oil = shipping_oil_share * p_set.sum()
n.madd(
"Load",
spatial.oil.nodes,
suffix=" shipping oil",
bus=spatial.oil.nodes,
carrier="shipping oil",
p_set=p_set_oil,
)
co2 = p_set_oil * costs.at["oil", "CO2 intensity"]
n.add(
"Load",
"shipping oil emissions",
bus="co2 atmosphere",
carrier="shipping oil emissions",
p_set=-co2,
"Link",
spatial.methanol.shipping,
bus0=spatial.methanol.nodes,
bus1=spatial.methanol.shipping,
bus2="co2 atmosphere",
carrier="shipping methanol",
p_nom_extendable=True,
efficiency2=1
/ options[
"MWh_MeOH_per_tCO2"
], # CO2 intensity methanol based on stoichiometric calculation with 22.7 GJ/t methanol (32 g/mol), CO2 (44 g/mol), 277.78 MWh/TJ = 0.218 t/MWh
)
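Spelling out the stoichiometric figure quoted in the comment above, as a back-of-the-envelope check using only the values stated there.
mwh_per_t_methanol = 22.7 * 277.78 / 1e3   # 22.7 GJ/t * 277.78 MWh/TJ / 1000 GJ/TJ ≈ 6.31 MWh/t
t_co2_per_t_methanol = 44 / 32             # molar masses of CO2 and methanol ≈ 1.375
t_co2_per_mwh = t_co2_per_t_methanol / mwh_per_t_methanol  # ≈ 0.218 t CO2 per MWh_MeOH
mwh_meoh_per_tco2 = 1 / t_co2_per_mwh      # ≈ 4.59, the reciprocal entering efficiency2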
if "oil" not in n.buses.carrier.unique():
@ -2695,7 +2772,8 @@ def add_industry(n, costs):
# could correct to e.g. 0.001 EUR/kWh * annuity and O&M
n.madd(
"Store",
[oil_bus + " Store" for oil_bus in spatial.oil.nodes],
spatial.oil.nodes,
suffix=" Store",
bus=spatial.oil.nodes,
e_nom_extendable=True,
e_cyclic=True,
@ -2712,6 +2790,39 @@ def add_industry(n, costs):
marginal_cost=costs.at["oil", "fuel"],
)
if shipping_oil_share:
p_set_oil = shipping_oil_share * p_set.rename(lambda x: x + " shipping oil")
if not options["regional_oil_demand"]:
p_set_oil = p_set_oil.sum()
n.madd(
"Bus",
spatial.oil.shipping,
location=spatial.oil.demand_locations,
carrier="shipping oil",
unit="MWh_LHV",
)
n.madd(
"Load",
spatial.oil.shipping,
bus=spatial.oil.shipping,
carrier="shipping oil",
p_set=p_set_oil,
)
n.madd(
"Link",
spatial.oil.shipping,
bus0=spatial.oil.nodes,
bus1=spatial.oil.shipping,
bus2="co2 atmosphere",
carrier="shipping oil",
p_nom_extendable=True,
efficiency2=costs.at["oil", "CO2 intensity"],
)
if options["oil_boilers"]:
nodes_heat = create_nodes_for_heat_sector()[0]
@ -2726,7 +2837,7 @@ def add_industry(n, costs):
nodes_heat[name] + f" {name} oil boiler",
p_nom_extendable=True,
bus0=spatial.oil.nodes,
bus1=nodes_heat[name] + f" {name} heat",
bus1=nodes_heat[name] + f" {name} heat",
bus2="co2 atmosphere",
carrier=f"{name} oil boiler",
efficiency=costs.at["decentral oil boiler", "efficiency"],
@ -2746,6 +2857,8 @@ def add_industry(n, costs):
efficiency=costs.at["Fischer-Tropsch", "efficiency"],
capital_cost=costs.at["Fischer-Tropsch", "fixed"]
* costs.at["Fischer-Tropsch", "efficiency"], # EUR/MW_H2/a
marginal_cost=costs.at["Fischer-Tropsch", "efficiency"]
* costs.at["Fischer-Tropsch", "VOM"],
efficiency2=-costs.at["oil", "CO2 intensity"]
* costs.at["Fischer-Tropsch", "efficiency"],
p_nom_extendable=True,
@ -2753,53 +2866,101 @@ def add_industry(n, costs):
lifetime=costs.at["Fischer-Tropsch", "lifetime"],
)
# naphtha
demand_factor = options.get("HVC_demand_factor", 1)
p_set = demand_factor * industrial_demand.loc[nodes, "naphtha"].sum() / nhours
if demand_factor != 1:
logger.warning(f"Changing HVC demand by {demand_factor*100-100:+.2f}%.")
n.madd(
"Load",
["naphtha for industry"],
bus=spatial.oil.nodes,
carrier="naphtha for industry",
p_set=p_set,
)
demand_factor = options.get("aviation_demand_factor", 1)
all_aviation = ["total international aviation", "total domestic aviation"]
p_set = (
p_set_plastics = (
demand_factor
* pop_weighted_energy_totals.loc[nodes, all_aviation].sum(axis=1).sum()
* 1e6
* industrial_demand.loc[nodes, "naphtha"].rename(
lambda x: x + " naphtha for industry"
)
/ nhours
)
if not options["regional_oil_demand"]:
p_set_plastics = p_set_plastics.sum()
n.madd(
"Bus",
spatial.oil.naphtha,
location=spatial.oil.demand_locations,
carrier="naphtha for industry",
unit="MWh_LHV",
)
n.madd(
"Load",
spatial.oil.naphtha,
bus=spatial.oil.naphtha,
carrier="naphtha for industry",
p_set=p_set_plastics,
)
# some of the CO2 from naphtha is accounted as process emissions from the steam cracker;
# the rest of the CO2 is released to the atmosphere, either in waste-to-energy plants or through decay
process_co2_per_naphtha = (
industrial_demand.loc[nodes, "process emission from feedstock"].sum()
/ industrial_demand.loc[nodes, "naphtha"].sum()
)
emitted_co2_per_naphtha = costs.at["oil", "CO2 intensity"] - process_co2_per_naphtha
n.madd(
"Link",
spatial.oil.naphtha,
bus0=spatial.oil.nodes,
bus1=spatial.oil.naphtha,
bus2="co2 atmosphere",
bus3=spatial.co2.process_emissions,
carrier="naphtha for industry",
p_nom_extendable=True,
efficiency2=emitted_co2_per_naphtha,
efficiency3=process_co2_per_naphtha,
)
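A small numeric illustration of the split above; the CO2 intensity and feedstock share below are placeholders, the script derives both from the cost data and the industrial demand.
co2_intensity_oil = 0.26        # t CO2 per MWh_LHV of oil products, illustrative
process_co2_per_naphtha = 0.05  # t CO2 per MWh naphtha booked as feedstock process emissions, illustrative
emitted_co2_per_naphtha = co2_intensity_oil - process_co2_per_naphtha
# 0.21 t/MWh go to "co2 atmosphere" (efficiency2), 0.05 t/MWh to process emissions (efficiency3)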
# aviation
demand_factor = options.get("aviation_demand_factor", 1)
if demand_factor != 1:
logger.warning(f"Changing aviation demand by {demand_factor*100-100:+.2f}%.")
all_aviation = ["total international aviation", "total domestic aviation"]
p_set = (
demand_factor
* pop_weighted_energy_totals.loc[nodes, all_aviation].sum(axis=1)
* 1e6
/ nhours
).rename(lambda x: x + " kerosene for aviation")
if not options["regional_oil_demand"]:
p_set = p_set.sum()
n.madd(
"Bus",
spatial.oil.kerosene,
location=spatial.oil.demand_locations,
carrier="kerosene for aviation",
unit="MWh_LHV",
)
n.madd(
"Load",
["kerosene for aviation"],
bus=spatial.oil.nodes,
spatial.oil.kerosene,
bus=spatial.oil.kerosene,
carrier="kerosene for aviation",
p_set=p_set,
)
# NB: CO2 gets released again to atmosphere when plastics decay or kerosene is burned
# except for the process emissions when naphtha is used for petrochemicals, which can be captured with other industry process emissions
# tco2 per hour
co2_release = ["naphtha for industry", "kerosene for aviation"]
co2 = (
n.loads.loc[co2_release, "p_set"].sum() * costs.at["oil", "CO2 intensity"]
- industrial_demand.loc[nodes, "process emission from feedstock"].sum() / nhours
)
n.add(
"Load",
"oil emissions",
bus="co2 atmosphere",
carrier="oil emissions",
p_set=-co2,
n.madd(
"Link",
spatial.oil.kerosene,
bus0=spatial.oil.nodes,
bus1=spatial.oil.kerosene,
bus2="co2 atmosphere",
carrier="kerosene for aviation",
p_nom_extendable=True,
efficiency2=costs.at["oil", "CO2 intensity"],
)
# TODO simplify bus expression
@ -2850,19 +3011,15 @@ def add_industry(n, costs):
unit="t_co2",
)
sel = ["process emission", "process emission from feedstock"]
if options["co2_spatial"] or options["co2network"]:
p_set = (
-industrial_demand.loc[nodes, sel]
.sum(axis=1)
-industrial_demand.loc[nodes, "process emission"]
.rename(index=lambda x: x + " process emissions")
/ nhours
)
else:
p_set = -industrial_demand.loc[nodes, sel].sum(axis=1).sum() / nhours
p_set = -industrial_demand.loc[nodes, "process emission"].sum() / nhours
# this should be process emissions fossil+feedstock
# then need load on atmosphere for feedstock emissions that are currently going to atmosphere via Link Fischer-Tropsch demand
n.madd(
"Load",
spatial.co2.process_emissions,
@ -2951,8 +3108,13 @@ def add_waste_heat(n):
if not urban_central.empty:
urban_central = urban_central.str[: -len(" urban central heat")]
link_carriers = n.links.carrier.unique()
# TODO what is the 0.95 and should it be a config option?
if options["use_fischer_tropsch_waste_heat"]:
if (
options["use_fischer_tropsch_waste_heat"]
and "Fischer-Tropsch" in link_carriers
):
n.links.loc[urban_central + " Fischer-Tropsch", "bus3"] = (
urban_central + " urban central heat"
)
@ -2960,8 +3122,48 @@ def add_waste_heat(n):
0.95 - n.links.loc[urban_central + " Fischer-Tropsch", "efficiency"]
)
if options["use_methanation_waste_heat"] and "Sabatier" in link_carriers:
n.links.loc[urban_central + " Sabatier", "bus3"] = (
urban_central + " urban central heat"
)
n.links.loc[urban_central + " Sabatier", "efficiency3"] = (
0.95 - n.links.loc[urban_central + " Sabatier", "efficiency"]
)
# DEA quotes 15% of total input (11% of which are high-value heat)
if options["use_haber_bosch_waste_heat"] and "Haber-Bosch" in link_carriers:
n.links.loc[urban_central + " Haber-Bosch", "bus3"] = (
urban_central + " urban central heat"
)
total_energy_input = (
cf_industry["MWh_H2_per_tNH3_electrolysis"]
+ cf_industry["MWh_elec_per_tNH3_electrolysis"]
) / cf_industry["MWh_NH3_per_tNH3"]
electricity_input = (
cf_industry["MWh_elec_per_tNH3_electrolysis"]
/ cf_industry["MWh_NH3_per_tNH3"]
)
n.links.loc[urban_central + " Haber-Bosch", "efficiency3"] = (
0.15 * total_energy_input / electricity_input
)
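For intuition, with purely illustrative specific energy demands (the actual values come from cf_industry in the configuration), the waste-heat coefficient works out roughly as follows.
MWh_H2_per_tNH3 = 6.5    # illustrative, not the config value
MWh_el_per_tNH3 = 1.2    # illustrative, not the config value
MWh_NH3_per_tNH3 = 5.2   # illustrative, cancels out below
total_energy_input = (MWh_H2_per_tNH3 + MWh_el_per_tNH3) / MWh_NH3_per_tNH3
electricity_input = MWh_el_per_tNH3 / MWh_NH3_per_tNH3
efficiency3 = 0.15 * total_energy_input / electricity_input
# ≈ 0.96 MWh of usable waste heat per MWh of electricity drawn at bus0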
if (
options["use_methanolisation_waste_heat"]
and "methanolisation" in link_carriers
):
n.links.loc[urban_central + " methanolisation", "bus4"] = (
urban_central + " urban central heat"
)
n.links.loc[urban_central + " methanolisation", "efficiency4"] = (
costs.at["methanolisation", "heat-output"]
/ costs.at["methanolisation", "hydrogen-input"]
)
# TODO integrate usable waste heat efficiency into technology-data from DEA
if options.get("use_electrolysis_waste_heat", False):
if (
options.get("use_electrolysis_waste_heat", False)
and "H2 Electrolysis" in link_carriers
):
n.links.loc[urban_central + " H2 Electrolysis", "bus2"] = (
urban_central + " urban central heat"
)
@ -2969,7 +3171,7 @@ def add_waste_heat(n):
0.84 - n.links.loc[urban_central + " H2 Electrolysis", "efficiency"]
)
if options["use_fuel_cell_waste_heat"]:
if options["use_fuel_cell_waste_heat"] and "H2 Fuel Cell" in link_carriers:
n.links.loc[urban_central + " H2 Fuel Cell", "bus2"] = (
urban_central + " urban central heat"
)
@ -3023,9 +3225,9 @@ def add_agriculture(n, costs):
f"Total agriculture machinery shares sum up to {total_share:.2%}, corresponding to increased or decreased demand assumptions."
)
machinery_nodal_energy = pop_weighted_energy_totals.loc[
nodes, "total agriculture machinery"
]
machinery_nodal_energy = (
pop_weighted_energy_totals.loc[nodes, "total agriculture machinery"] * 1e6
)
if electric_share > 0:
efficiency_gain = (
@ -3039,36 +3241,44 @@ def add_agriculture(n, costs):
suffix=" agriculture machinery electric",
bus=nodes,
carrier="agriculture machinery electric",
p_set=electric_share
/ efficiency_gain
* machinery_nodal_energy
* 1e6
/ nhours,
p_set=electric_share / efficiency_gain * machinery_nodal_energy / nhours,
)
if oil_share > 0:
p_set = (
oil_share
* machinery_nodal_energy.rename(lambda x: x + " agriculture machinery oil")
/ nhours
)
if not options["regional_oil_demand"]:
p_set = p_set.sum()
n.madd(
"Bus",
spatial.oil.agriculture_machinery,
location=spatial.oil.demand_locations,
carrier="agriculture machinery oil",
unit="MWh_LHV",
)
n.madd(
"Load",
["agriculture machinery oil"],
bus=spatial.oil.nodes,
spatial.oil.agriculture_machinery,
bus=spatial.oil.agriculture_machinery,
carrier="agriculture machinery oil",
p_set=oil_share * machinery_nodal_energy.sum() * 1e6 / nhours,
p_set=p_set,
)
co2 = (
oil_share
* machinery_nodal_energy.sum()
* 1e6
/ nhours
* costs.at["oil", "CO2 intensity"]
)
n.add(
"Load",
"agriculture machinery oil emissions",
bus="co2 atmosphere",
carrier="agriculture machinery oil emissions",
p_set=-co2,
n.madd(
"Link",
spatial.oil.agriculture_machinery,
bus0=spatial.oil.nodes,
bus1=spatial.oil.agriculture_machinery,
bus2="co2 atmosphere",
carrier="agriculture machinery oil",
p_nom_extendable=True,
efficiency2=costs.at["oil", "CO2 intensity"],
)
@ -3327,6 +3537,57 @@ def set_temporal_aggregation(n, opts, solver_name):
return n
def lossy_bidirectional_links(n, carrier, efficiencies={}):
"Split bidirectional links into two unidirectional links to include transmission losses."
carrier_i = n.links.query("carrier == @carrier").index
if (
not any((v != 1.0) or (v >= 0) for v in efficiencies.values())
or carrier_i.empty
):
return
efficiency_static = efficiencies.get("efficiency_static", 1)
efficiency_per_1000km = efficiencies.get("efficiency_per_1000km", 1)
compression_per_1000km = efficiencies.get("compression_per_1000km", 0)
logger.info(
f"Specified losses for {carrier} transmission "
f"(static: {efficiency_static}, per 1000km: {efficiency_per_1000km}, compression per 1000km: {compression_per_1000km}). "
"Splitting bidirectional links."
)
n.links.loc[carrier_i, "p_min_pu"] = 0
n.links.loc[
carrier_i, "efficiency"
] = efficiency_static * efficiency_per_1000km ** (
n.links.loc[carrier_i, "length"] / 1e3
)
rev_links = (
n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0"}, axis=1)
)
rev_links["length_original"] = rev_links["length"]
rev_links["capital_cost"] = 0
rev_links["length"] = 0
rev_links["reversed"] = True
rev_links.index = rev_links.index.map(lambda x: x + "-reversed")
n.links = pd.concat([n.links, rev_links], sort=False)
n.links["reversed"] = n.links["reversed"].fillna(False)
n.links["length_original"] = n.links["length_original"].fillna(n.links.length)
# do compression losses after concatenation to take electricity consumption at bus0 in either direction
carrier_i = n.links.query("carrier == @carrier").index
if compression_per_1000km > 0:
n.links.loc[carrier_i, "bus2"] = n.links.loc[carrier_i, "bus0"].map(
n.buses.location
) # electricity
n.links.loc[carrier_i, "efficiency2"] = (
-compression_per_1000km * n.links.loc[carrier_i, "length_original"] / 1e3
)
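For reference, a worked instance of the length-dependent efficiency and compression demand applied above; the numbers are illustrative, not taken from the transmission_efficiency configuration.
efficiency_static = 1.0
efficiency_per_1000km = 0.98
compression_per_1000km = 0.01
length_km = 1500.0
efficiency = efficiency_static * efficiency_per_1000km ** (length_km / 1e3)
# 0.98 ** 1.5 ≈ 0.970, i.e. roughly 3% of the transported energy is lost over 1500 km
electricity_demand = compression_per_1000km * length_km / 1e3
# 1.5% of the throughput is drawn as electricity at bus0 for compression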
if __name__ == "__main__":
if "snakemake" not in globals():
from _helpers import mock_snakemake
@ -3404,6 +3665,15 @@ if __name__ == "__main__":
if "nodistrict" in opts:
options["district_heating"]["progress"] = 0.0
if "nowasteheat" in opts:
logger.info("Disabling waste heat.")
options["use_fischer_tropsch_waste_heat"] = False
options["use_methanolisation_waste_heat"] = False
options["use_haber_bosch_waste_heat"] = False
options["use_methanation_waste_heat"] = False
options["use_fuel_cell_waste_heat"] = False
options["use_electrolysis_waste_heat"] = False
if "T" in opts:
add_land_transport(n, costs)
@ -3493,6 +3763,18 @@ if __name__ == "__main__":
if options["electricity_grid_connection"]:
add_electricity_grid_connection(n, costs)
for k, v in options["transmission_efficiency"].items():
lossy_bidirectional_links(n, k, v)
# Workaround: Remove lines with conflicting (and unrealistic) properties
# cf. https://github.com/PyPSA/pypsa-eur/issues/444
if snakemake.config["solving"]["options"]["transmission_losses"]:
idx = n.lines.query("num_parallel == 0").index
logger.info(
f"Removing {len(idx)} line(s) with properties conflicting with transmission losses functionality."
)
n.mremove("Line", idx)
first_year_myopic = (snakemake.params.foresight in ["myopic", "perfect"]) and (
snakemake.params.planning_horizons[0] == investment_year
)


@ -36,7 +36,7 @@ import logging
import tarfile
from pathlib import Path
from _helpers import configure_logging, progress_retrieve
from _helpers import configure_logging, progress_retrieve, validate_checksum
logger = logging.getLogger(__name__)
@ -65,6 +65,8 @@ if __name__ == "__main__":
disable_progress = snakemake.config["run"].get("disable_progressbar", False)
progress_retrieve(url, tarball_fn, disable=disable_progress)
validate_checksum(tarball_fn, url)
logger.info("Extracting databundle.")
tarfile.open(tarball_fn).extractall(to_fn)


@ -11,7 +11,7 @@ import logging
import zipfile
from pathlib import Path
from _helpers import progress_retrieve
from _helpers import progress_retrieve, validate_checksum
logger = logging.getLogger(__name__)
@ -35,6 +35,8 @@ if __name__ == "__main__":
disable_progress = snakemake.config["run"].get("disable_progressbar", False)
progress_retrieve(url, zip_fn, disable=disable_progress)
validate_checksum(zip_fn, url)
logger.info("Extracting databundle.")
zipfile.ZipFile(zip_fn).extractall(to_fn)


@ -13,7 +13,7 @@ logger = logging.getLogger(__name__)
import tarfile
from pathlib import Path
from _helpers import configure_logging, progress_retrieve
from _helpers import configure_logging, progress_retrieve, validate_checksum
if __name__ == "__main__":
if "snakemake" not in globals():
@ -34,6 +34,8 @@ if __name__ == "__main__":
disable_progress = snakemake.config["run"].get("disable_progressbar", False)
progress_retrieve(url, tarball_fn, disable=disable_progress)
validate_checksum(tarball_fn, url)
logger.info("Extracting databundle.")
tarfile.open(tarball_fn).extractall(to_fn)


@ -26,8 +26,11 @@ Additionally, some extra constraints specified in :mod:`solve_network` are added
the workflow for all scenarios in the configuration file (``scenario:``)
based on the rule :mod:`solve_network`.
"""
import importlib
import logging
import os
import re
import sys
import numpy as np
import pandas as pd
@ -687,6 +690,35 @@ def add_battery_constraints(n):
n.model.add_constraints(lhs == 0, name="Link-charger_ratio")
def add_lossy_bidirectional_link_constraints(n):
if not n.links.p_nom_extendable.any() or "reversed" not in n.links.columns:
return
n.links["reversed"] = n.links.reversed.fillna(0).astype(bool)
carriers = n.links.loc[n.links.reversed, "carrier"].unique()
forward_i = n.links.query(
"carrier in @carriers and ~reversed and p_nom_extendable"
).index
def get_backward_i(forward_i):
return pd.Index(
[
re.sub(r"-(\d{4})$", r"-reversed-\1", s)
if re.search(r"-\d{4}$", s)
else s + "-reversed"
for s in forward_i
]
)
backward_i = get_backward_i(forward_i)
lhs = n.model["Link-p_nom"].loc[backward_i]
rhs = n.model["Link-p_nom"].loc[forward_i]
n.model.add_constraints(lhs == rhs, name="Link-bidirectional_sync")
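A quick illustration of the index mapping performed by get_backward_i above; the link names are made up for the example.
import re
forward = ["DE0 1 H2 pipeline", "DE0 1 H2 pipeline-2030"]
backward = [
    re.sub(r"-(\d{4})$", r"-reversed-\1", s)
    if re.search(r"-\d{4}$", s)
    else s + "-reversed"
    for s in forward
]
# -> ["DE0 1 H2 pipeline-reversed", "DE0 1 H2 pipeline-reversed-2030"]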
def add_chp_constraints(n):
electric = (
n.links.index.str.contains("urban central")
@ -745,9 +777,13 @@ def add_pipe_retrofit_constraint(n):
"""
Add constraint for retrofitting existing CH4 pipelines to H2 pipelines.
"""
gas_pipes_i = n.links.query("carrier == 'gas pipeline' and p_nom_extendable").index
if "reversed" not in n.links.columns:
n.links["reversed"] = False
gas_pipes_i = n.links.query(
"carrier == 'gas pipeline' and p_nom_extendable and ~reversed"
).index
h2_retrofitted_i = n.links.query(
"carrier == 'H2 pipeline retrofitted' and p_nom_extendable"
"carrier == 'H2 pipeline retrofitted' and p_nom_extendable and ~reversed"
).index
if h2_retrofitted_i.empty or gas_pipes_i.empty:
@ -786,12 +822,22 @@ def extra_functionality(n, snapshots):
if "EQ" in o:
add_EQ_constraints(n, o)
add_battery_constraints(n)
add_lossy_bidirectional_link_constraints(n)
add_pipe_retrofit_constraint(n)
if n._multi_invest:
add_carbon_constraint(n, snapshots)
add_carbon_budget_constraint(n, snapshots)
add_retrofit_gas_boiler_constraint(n, snapshots)
if snakemake.params.custom_extra_functionality:
source_path = snakemake.params.custom_extra_functionality
assert os.path.exists(source_path), f"{source_path} does not exist"
sys.path.append(os.path.dirname(source_path))
module_name = os.path.splitext(os.path.basename(source_path))[0]
module = importlib.import_module(module_name)
custom_extra_functionality = getattr(module, module_name)
custom_extra_functionality(n, snapshots, snakemake)
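A minimal sketch of what such a custom module could contain; the file name and body are hypothetical, the only requirement implied by the loader above is that the file defines a function bearing the same name as the file.
# e.g. data/custom_extra_functionality.py (hypothetical path set via the config)
def custom_extra_functionality(n, snapshots, snakemake):
    # add user-defined constraints to the already-built linopy model; no-op placeholder here
    pass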
def solve_network(n, config, solving, opts="", **kwargs):
set_of_options = solving["solver"]["options"]
@ -809,6 +855,9 @@ def solve_network(n, config, solving, opts="", **kwargs):
)
kwargs["assign_all_duals"] = cf_solving.get("assign_all_duals", False)
if kwargs["solver_name"] == "gurobi":
logging.getLogger("gurobipy").setLevel(logging.CRITICAL)
rolling_horizon = cf_solving.pop("rolling_horizon", False)
skip_iterations = cf_solving.pop("skip_iterations", False)
if not n.lines.s_nom_extendable.any():
@ -839,6 +888,9 @@ def solve_network(n, config, solving, opts="", **kwargs):
f"Solving status '{status}' with termination condition '{condition}'"
)
if "infeasible" in condition:
labels = n.model.compute_infeasibilities()
logger.info("Labels:\n" + labels)
n.model.print_infeasibilities()
raise RuntimeError("Solving status 'infeasible'")
return n