Merge pull request #187 from PyPSA/retrofit-gas-pipelines-fneum

Retrofit gas pipelines - iteration 2
This commit is contained in:
Fabian Neumann 2021-11-19 08:05:09 +01:00 committed by GitHub
commit 87ee8d2e69
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
17 changed files with 741 additions and 2796 deletions

View File

@ -78,16 +78,61 @@ rule build_simplified_population_layouts:
benchmark: "benchmarks/build_clustered_population_layouts/s{simpl}"
script: "scripts/build_clustered_population_layouts.py"
if config["sector"]["gas_network"]:
datafiles = [
"IGGIELGN_LNGs.geojson",
"IGGIELGN_BorderPoints.geojson",
"IGGIELGN_Productions.geojson",
"IGGIELGN_PipeSegments.geojson",
]
rule retrieve_gas_infrastructure_data:
output: expand("data/gas_network/scigrid-gas/data/{files}", files=datafiles)
script: 'scripts/retrieve_gas_infrastructure_data.py'
rule build_gas_network:
input:
gas_network="data/gas_network/gas_network_dataset.csv",
country_shapes=pypsaeur("resources/country_shapes.geojson"),
gas_network="data/gas_network/scigrid-gas/data/IGGIELGN_PipeSegments.geojson"
output:
cleaned_gas_network="resources/gas_network.csv"
resources: mem_mb=4000
script: "scripts/build_gas_network.py"
rule build_gas_input_locations:
input:
lng="data/gas_network/scigrid-gas/data/IGGIELGN_LNGs.geojson",
entry="data/gas_network/scigrid-gas/data/IGGIELGN_BorderPoints.geojson",
production="data/gas_network/scigrid-gas/data/IGGIELGN_Productions.geojson",
planned_lng="data/gas_network/planned_LNGs.csv",
regions_onshore=pypsaeur("resources/regions_onshore_elec_s{simpl}_{clusters}.geojson"),
regions_offshore=pypsaeur('resources/regions_offshore_elec_s{simpl}_{clusters}.geojson')
output:
gas_input_nodes="resources/gas_input_locations_s{simpl}_{clusters}.geojson",
gas_input_nodes_simplified="resources/gas_input_locations_s{simpl}_{clusters}_simplified.csv"
resources: mem_mb=2000,
script: "scripts/build_gas_input_locations.py"
rule cluster_gas_network:
input:
cleaned_gas_network="resources/gas_network.csv",
regions_onshore=pypsaeur("resources/regions_onshore_elec_s{simpl}_{clusters}.geojson"),
regions_offshore=pypsaeur("resources/regions_offshore_elec_s{simpl}_{clusters}.geojson")
output:
clustered_gas_network="resources/gas_network_elec_s{simpl}_{clusters}.csv"
resources: mem_mb=10000
script: "scripts/build_gas_network.py"
resources: mem_mb=4000
script: "scripts/cluster_gas_network.py"
gas_infrastructure = {**rules.cluster_gas_network.output, **rules.build_gas_input_locations.output}
else:
gas_infrastructure = {}
rule build_heat_demands:
input:
@ -354,7 +399,6 @@ rule prepare_sector_network:
energy_totals_name='resources/energy_totals.csv',
co2_totals_name='resources/co2_totals.csv',
transport_name='resources/transport_data.csv',
clustered_gas_network="resources/gas_network_elec_s{simpl}_{clusters}.csv",
traffic_data_KFZ="data/emobility/KFZ__count",
traffic_data_Pkw="data/emobility/Pkw__count",
biomass_potentials='resources/biomass_potentials_s{simpl}_{clusters}.csv',
@ -387,7 +431,8 @@ rule prepare_sector_network:
solar_thermal_urban="resources/solar_thermal_urban_elec_s{simpl}_{clusters}.nc",
solar_thermal_rural="resources/solar_thermal_rural_elec_s{simpl}_{clusters}.nc",
**build_retro_cost_output,
**build_biomass_transport_costs_output
**build_biomass_transport_costs_output,
**gas_infrastructure
output: RDIR + '/prenetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc'
threads: 1
resources: mem_mb=2000

View File

@ -243,12 +243,14 @@ sector:
electricity_distribution_grid: false
electricity_distribution_grid_cost_factor: 1.0 #multiplies cost in data/costs.csv
electricity_grid_connection: true # only applies to onshore wind and utility PV
H2_network: true
gas_network: true
H2_retrofit: true # if set to True existing gas pipes can be retrofitted to H2 pipes
# according to hydrogen backbone strategy (April, 2020) p.15
# https://gasforclimate2050.eu/wp-content/uploads/2020/07/2020_European-Hydrogen-Backbone_Report.pdf
# 60% of original natural gas capacity could be used in cost-optimal case as H2 capacity
H2_retrofit_capacity_per_CH4: 0.6 # ratio for H2 capacity per original CH4 capacity of retrofitted pipelines
gas_network_connectivity_upgrade: 3 # https://networkx.org/documentation/stable/reference/algorithms/generated/networkx.algorithms.connectivity.edge_augmentation.k_edge_augmentation.html#networkx.algorithms.connectivity.edge_augmentation.k_edge_augmentation
gas_distribution_grid: true
gas_distribution_grid_cost_factor: 1.0 #multiplies cost in data/costs.csv
biomass_transport: false # biomass transport between nodes
@ -449,6 +451,7 @@ plotting:
gas boilers: '#db6a25'
gas boiler marginal: '#db6a25'
gas: '#e05b09'
fossil gas: '#e05b09'
natural gas: '#e05b09'
CCGT: '#a85522'
CCGT marginal: '#a85522'
@ -457,7 +460,7 @@ plotting:
gas for industry: '#853403'
gas for industry CC: '#692e0a'
gas pipeline: '#ebbca0'
Gas pipeline: '#ebbca0'
gas pipeline new: '#a87c62'
# oil
oil: '#c9c9c9'
oil boiler: '#adadad'
@ -547,6 +550,7 @@ plotting:
H2 storage: '#bf13a0'
land transport fuel cell: '#6b3161'
H2 pipeline: '#f081dc'
H2 pipeline retrofitted: '#ba99b5'
H2 Fuel Cell: '#c251ae'
H2 Electrolysis: '#ff29d9'
# syngas

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,8 @@
name,geometry,max_cap_store2pipe_M_m3_per_d,source
Wilhelmshaven,"POINT(8.133 53.516)",27.4,https://www.gem.wiki/Wilhelmshaven_LNG_Terminal
Brunsbüttel,"POINT(8.976 53.914)",19.2,https://www.gem.wiki/Brunsb%C3%BCttel_LNG_Terminal
Stade,"POINT(9.510 53.652)",32.9,https://www.gem.wiki/Stade_LNG_Terminal
Alexandroupolis,"POINT(25.843 40.775)",16.7,https://www.gem.wiki/Alexandroupolis_LNG_Terminal
Shannon,"POINT(-9.442 52.581)",22.5,https://www.gem.wiki/Shannon_LNG_Terminal
Gothenburg,"POINT(11.948 57.702)",1.4,https://www.gem.wiki/Gothenburg_LNG_Terminal
Cork,"POINT(-8.323 51.831)",11.0,https://www.gem.wiki/Cork_LNG_Terminal
1 name geometry max_cap_store2pipe_M_m3_per_d source
2 Wilhelmshaven POINT(8.133 53.516) 27.4 https://www.gem.wiki/Wilhelmshaven_LNG_Terminal
3 Brunsbüttel POINT(8.976 53.914) 19.2 https://www.gem.wiki/Brunsb%C3%BCttel_LNG_Terminal
4 Stade POINT(9.510 53.652) 32.9 https://www.gem.wiki/Stade_LNG_Terminal
5 Alexandroupolis POINT(25.843 40.775) 16.7 https://www.gem.wiki/Alexandroupolis_LNG_Terminal
6 Shannon POINT(-9.442 52.581) 22.5 https://www.gem.wiki/Shannon_LNG_Terminal
7 Gothenburg POINT(11.948 57.702) 1.4 https://www.gem.wiki/Gothenburg_LNG_Terminal
8 Cork POINT(-8.323 51.831) 11.0 https://www.gem.wiki/Cork_LNG_Terminal

View File

@ -8,6 +8,52 @@ Future release
.. note::
This unreleased version currently may require the master branches of PyPSA, PyPSA-Eur, and the technology-data repository.
This release includes the addition of the European gas transmission network and
incorporates retrofitting options to hydrogen.
**Gas Transmission Network**
* New rule ``retrieve_gas_infrastructure_data`` that downloads and extracts the
SciGRID_gas `IGGIELGN <https://zenodo.org/record/4767098>`_ dataset from zenodo.
It includes data on the transmission routes, pipe diameters, capacities, pressure,
and whether the pipeline is bidirectional and carries H-Gas or L-Gas.
* New rule ``build_gas_network`` processes and cleans the pipeline data from SciGRID_gas.
Missing or uncertain pipeline capacities can be inferred by diameter.
* New rule ``build_gas_input_locations`` compiles the LNG import capacities
(including planned projects from gem.wiki), pipeline entry capacities and
local production capacities for each region of the model. These are the
regions where fossil gas can eventually enter the model.
* New rule ``cluster_gas_network`` that clusters the gas transmission network
data to the model resolution. Cross-regional pipeline capacities are aggregated
(while pressure and diameter compatibility is ignored), intra-regional pipelines
are dropped. Lengths are recalculated based on the regions' centroids.
* With the option ``sector: gas_network:``, the existing gas network is
added with a lossless transport model. A length-weighted `k-edge augmentation
algorithm
<https://networkx.org/documentation/stable/reference/algorithms/generated/networkx.algorithms.connectivity.edge_augmentation.k_edge_augmentation.html#networkx.algorithms.connectivity.edge_augmentation.k_edge_augmentation>`_
can be run to add new candidate gas pipelines such that all regions of the
model can be connected to the gas network. The number of candidates can be
controlled via the setting ``sector: gas_network_connectivity_upgrade:``. When
the gas network is activated, all the gas demands are regionally disaggregated
as well.
* New constraint allows retrofitting of gas pipelines to hydrogen pipelines.
This option is activated via the setting ``sector: H2_retrofit:``. For every
unit of gas pipeline capacity dismantled, ``sector:
H2_retrofit_capacity_per_CH4`` units are made available as hydrogen pipeline
capacity in the corresponding corridor. These repurposed hydrogen pipelines
have lower costs than new hydrogen pipelines. Both new and repurposed pipelines
can be built simultaneously.
* New hydrogen pipelines can now be built where there are already power or gas
transmission routes. Previously, only the electricity transmission routes were
considered.
PyPSA-Eur-Sec 0.6.0 (4 October 2021)
====================================

View File

@ -41,8 +41,10 @@ locations of industry from `HotMaps database <https://gitlab.com/hotmaps/industr
Hydrogen network: nodal.
Methane network: single node for Europe, since future demand is so
low and no bottlenecks are expected.
Methane network: single node for Europe, since future demand is so low and no
bottlenecks are expected. Optionally, if for example retrofitting from fossil
gas to hydrogen is to be considered, the methane grid can be nodally resolved
based on SciGRID_gas data.
Solid biomass: choice between single node for Europe and nodal where biomass
potential is regionally disaggregated (currently given per country,

View File

@ -151,7 +151,7 @@ Hydrogen demand
Stationary fuel cell CHP.
Transport applications.
Transport applications (heavy-duty road vehicles, liquid H2 in shipping).
Industry (ammonia, precursor to hydrocarbons for chemicals and iron/steel).

View File

@ -175,7 +175,7 @@ def convert_nuts2_to_regions(bio_nuts2, regions):
# calculate area of nuts2 regions
bio_nuts2["area_nuts2"] = area(bio_nuts2)
overlay = gpd.overlay(regions, bio_nuts2)
overlay = gpd.overlay(regions, bio_nuts2, keep_geom_type=True)
# calculate share of nuts2 area inside region
overlay["share"] = area(overlay) / overlay["area_nuts2"]

View File

@ -0,0 +1,105 @@
"""
Build import locations for fossil gas from entry-points, LNG terminals and production sites.
"""
import logging
logger = logging.getLogger(__name__)
import pandas as pd
import geopandas as gpd
from shapely import wkt
from cluster_gas_network import load_bus_regions
def read_scigrid_gas(fn):
    """Read a SciGRID_gas GeoJSON file and flatten its nested 'param' column.

    The expanded parameter columns are appended to the frame; the raw
    'param', 'uncertainty' and 'method' columns are discarded.
    """
    gdf = gpd.read_file(fn)
    params = gdf.param.apply(pd.Series)
    gdf = pd.concat([gdf, params], axis=1)
    return gdf.drop(["param", "uncertainty", "method"], axis=1)
def build_gas_input_locations(lng_fn, planned_lng_fn, entry_fn, prod_fn, countries):
    """Compile the locations where fossil gas can enter the model.

    Combines LNG terminals (SciGRID_gas plus planned terminals from a CSV
    with WKT geometries), pipeline entry points from outside the model
    scope, and domestic production sites, converting capacities from
    MCM/day to MWh/h.

    Parameters
    ----------
    lng_fn : str
        Path to SciGRID_gas LNG terminals GeoJSON.
    planned_lng_fn : str
        Path to CSV of planned LNG terminals with a WKT 'geometry' column.
    entry_fn : str
        Path to SciGRID_gas border points GeoJSON.
    prod_fn : str
        Path to SciGRID_gas production sites GeoJSON.
    countries : iterable of str
        Two-letter country codes considered inside the model scope.

    Returns
    -------
    pd.DataFrame
        Columns 'geometry', 'p_nom' (MWh/h) and 'type' in
        {'production', 'pipeline', 'lng'}.
    """
    # LNG terminals
    lng = read_scigrid_gas(lng_fn)
    planned_lng = pd.read_csv(planned_lng_fn)
    planned_lng.geometry = planned_lng.geometry.apply(wkt.loads)
    planned_lng = gpd.GeoDataFrame(planned_lng, crs=4326)
    lng = lng.append(planned_lng, ignore_index=True)

    # Entry points from outside the model scope
    entry = read_scigrid_gas(entry_fn)
    entry["from_country"] = entry.from_country.str.rstrip()
    # NOTE(review): `&` binds tighter than `|`, so this evaluates as
    # ((not intra-model) and (not Tegelen)) or (from NO) — i.e. the NO
    # condition overrides the Tegelen exclusion; confirm this is intended.
    entry = entry.loc[
        ~(entry.from_country.isin(countries) & entry.to_country.isin(countries)) &  # only take non-EU entries
        ~entry.name.str.contains("Tegelen") |  # malformed datapoint
        (entry.from_country == "NO")  # entries from NO to GB
    ]

    # production sites inside the model scope
    # rough bounding box: keep sites north of 35N and west of 30E,
    # excluding German sites — presumably out of scope; TODO confirm
    prod = read_scigrid_gas(prod_fn)
    prod = prod.loc[
        (prod.geometry.y > 35) &
        (prod.geometry.x < 30) &
        (prod.country_code != "DE")
    ]

    conversion_factor = 437.5  # MCM/day to MWh/h
    lng["p_nom"] = lng["max_cap_store2pipe_M_m3_per_d"] * conversion_factor
    entry["p_nom"] = entry["max_cap_from_to_M_m3_per_d"] * conversion_factor
    prod["p_nom"] = prod["max_supply_M_m3_per_d"] * conversion_factor

    lng["type"] = "lng"
    entry["type"] = "pipeline"
    prod["type"] = "production"

    sel = ["geometry", "p_nom", "type"]

    return pd.concat([prod[sel], entry[sel], lng[sel]], ignore_index=True)
if __name__ == "__main__":
    if 'snakemake' not in globals():
        from helper import mock_snakemake
        # bug fix: the Snakemake rule is named 'build_gas_input_locations'
        # (was 'build_gas_import_locations', which mock_snakemake cannot find)
        snakemake = mock_snakemake(
            'build_gas_input_locations',
            simpl='',
            clusters='37',
        )

    logging.basicConfig(level=snakemake.config['logging_level'])

    regions = load_bus_regions(
        snakemake.input.regions_onshore,
        snakemake.input.regions_offshore
    )

    # add a buffer to eastern countries because some
    # entry points are still in Russian or Ukrainian territory.
    buffer = 9000  # meters
    eastern_countries = ['FI', 'EE', 'LT', 'LV', 'PL', 'SK', 'HU', 'RO']
    add_buffer_b = regions.index.str[:2].isin(eastern_countries)
    # buffer in a metric CRS (EPSG:3035), then project back to lon/lat
    regions.loc[add_buffer_b] = regions[add_buffer_b].to_crs(3035).buffer(buffer).to_crs(4326)

    countries = regions.index.str[:2].unique().str.replace("GB", "UK")

    gas_input_locations = build_gas_input_locations(
        snakemake.input.lng,
        snakemake.input.planned_lng,
        snakemake.input.entry,
        snakemake.input.production,
        countries
    )

    # assign each input location to a model region via spatial join
    gas_input_nodes = gpd.sjoin(gas_input_locations, regions, how='left')

    gas_input_nodes.rename(columns={"index_right": "bus"}, inplace=True)

    gas_input_nodes.to_file(snakemake.output.gas_input_nodes, driver='GeoJSON')

    # simplified table: total capacity per (bus, type)
    gas_input_nodes_s = gas_input_nodes.groupby(["bus", "type"])["p_nom"].sum().unstack()
    # NOTE(review): this labels the *columns* axis "p_nom" — confirm that is
    # the intended CSV header rather than naming the index
    gas_input_nodes_s.columns.name = "p_nom"

    gas_input_nodes_s.to_csv(snakemake.output.gas_input_nodes_simplified)

310
scripts/build_gas_network.py Executable file → Normal file
View File

@ -1,188 +1,16 @@
"""
Builds clustered natural gas network based on data from:
[1] the SciGRID Gas project
(https://www.gas.scigrid.de/)
[2] ENTSOG capacity map
(https://www.entsog.eu/sites/default/files/2019-10/Capacities%20for%20Transmission%20Capacity%20Map%20RTS008_NS%20-%20DWH_final.xlsx)
"""
"""Preprocess gas network based on data from bthe SciGRID Gas project (https://www.gas.scigrid.de/)."""
import logging
logger = logging.getLogger(__name__)
import re
import json
import pandas as pd
import geopandas as gpd
import numpy as np
from shapely.geometry import Point
from pypsa.geo import haversine_pts
def concat_gdf(gdf_list, crs='EPSG:4326'):
"""Convert to gepandas dataframe with given Coordinate Reference System (crs)."""
return gpd.GeoDataFrame(pd.concat(gdf_list),crs=crs)
def string2list(string, with_None=True):
"""Convert string format to a list."""
p = re.compile('(?<!\\\\)\'')
string = p.sub('\"', string)
if with_None:
p2 = re.compile('None')
string = p2.sub('\"None\"', string)
return json.loads(string)
def load_gas_network(df_path):
"""Load and format gas network data."""
df = pd.read_csv(df_path, sep=',')
df.long = df.long.apply(string2list)
df.lat = df.lat.apply(string2list)
df.node_id = df.node_id.apply(string2list)
# pipes which can be used in both directions
both_direct_df = df[df.is_bothDirection == 1].reset_index(drop=True)
both_direct_df.node_id = both_direct_df.node_id.apply(lambda x: [x[1], x[0]])
both_direct_df.long = both_direct_df.long.apply(lambda x: [x[1], x[0]])
both_direct_df.lat = both_direct_df.lat.apply(lambda x: [x[1], x[0]])
df_singledirect = pd.concat([df, both_direct_df]).reset_index(drop=True)
df_singledirect.drop('is_bothDirection', axis=1)
# create shapely geometry points
df['point1'] = df.apply(lambda x: Point((x['long'][0], x['lat'][0])), axis=1)
df['point2'] = df.apply(lambda x: Point((x['long'][1], x['lat'][1])), axis=1)
df['point1_name'] = df.node_id.str[0]
df['point2_name'] = df.node_id.str[1]
part1 = df[['point1', 'point1_name']]
part2 = df[['point2', 'point2_name']]
part1.columns = ['geometry', 'name']
part2.columns = ['geometry', 'name']
points = [part1, part2]
points = concat_gdf(points)
points = points.drop_duplicates()
points.reset_index(drop=True, inplace=True)
return df, points
def load_bus_regions(onshore_path, offshore_path):
"""Load pypsa-eur on- and offshore regions and concat."""
bus_regions_offshore = gpd.read_file(offshore_path)
bus_regions_onshore = gpd.read_file(onshore_path)
bus_regions = concat_gdf([bus_regions_offshore, bus_regions_onshore])
bus_regions = bus_regions.dissolve(by='name', aggfunc='sum')
bus_regions = bus_regions.reset_index()
return bus_regions
def points2buses(input_points, bus_regions):
"""Map gas network points to network buses depending on bus region."""
points = input_points.copy()
points['bus'] = None
buses_list = set(bus_regions.name)
for bus in buses_list:
mask = bus_regions[bus_regions.name == bus]
index = gpd.clip(points, mask).index
points.loc[index, 'bus'] = bus
return points
def build_gas_network_topology(df, points2buses):
"""Create gas network between pypsa buses.
Parameters
----------
df : pd.DataFrame
gas network data
points2buses_map : pd.DataFrame
mapping of gas network points to pypsa buses
Returns
-------
gas_connections : pd.DataFrame
gas network connecting pypsa buses
"""
tmp_df = points2buses[['bus', 'name']]
tmp_df.columns = ['buses_start', 'name']
gas_connections = df.merge(tmp_df, left_on='point1_name', right_on='name')
tmp_df.columns = ['buses_destination', 'name']
gas_connections = gas_connections.merge(tmp_df, left_on='point2_name', right_on='name')
# drop all pipes connecting the same bus
gas_connections = gas_connections[gas_connections.buses_start != gas_connections.buses_destination]
gas_connections.reset_index(drop=True, inplace=True)
gas_connections.drop(['point1', 'point2'], axis=1, inplace=True)
return gas_connections
def check_missing(nodes, gas_connections):
"""Check which nodes are not connected to the gas network."""
start_buses = gas_connections.buses_start.dropna().unique()
end_buses = gas_connections.buses_destination.dropna().unique()
missing_start = nodes[[bus not in start_buses for bus in nodes]]
missing_end = nodes[[bus not in end_buses for bus in nodes]]
logger.info(f"- The following buses are missing in gas network data as a start bus:"
f"\n {', '.join(map(str, missing_start))} \n"
f"- The following buses are missing in gas network data as an end bus:"
f"\n {', '.join(map(str, missing_end))} \n"
f"- The following buses are missing completely:"
f"\n {', '.join(map(str, missing_start.intersection(missing_end)))}")
def clean_dataset(nodes, gas_connections):
"""Convert units and save only necessary data."""
check_missing(nodes, gas_connections)
determine_pipe_capacity(gas_connections)
cols = [
'is_bothDirection',
'capacity_recalculated',
'buses_start',
'buses_destination',
'id',
'length_km'
]
clean_pipes = gas_connections[cols].dropna()
# convert GW -> MW
clean_pipes.loc[:, 'capacity_recalculated'] *= 1e3
# rename columns
to_rename = {
'capacity_recalculated': 'pipe_capacity_MW',
'buses_start': 'bus0',
'buses_destination': 'bus1'
}
clean_pipes.rename(columns=to_rename, inplace=True)
return clean_pipes
def diameter2capacity(pipe_diameter_mm):
"""Calculate pipe capacity based on diameter.
def diameter_to_capacity(pipe_diameter_mm):
"""Calculate pipe capacity in MW based on diameter in mm.
20 inch (500 mm) 50 bar -> 1.5 GW CH4 pipe capacity (LHV)
24 inch (600 mm) 50 bar -> 5 GW CH4 pipe capacity (LHV)
@ -193,69 +21,115 @@ def diameter2capacity(pipe_diameter_mm):
"""
# slopes definitions
m0 = (5 - 1.5) / (600 - 500)
m1 = (11.25 - 5) / (900 - 600)
m2 = (21.7 - 11.25) / (1200 - 900)
m0 = (1500 - 0) / (500 - 0)
m1 = (5000 - 1500) / (600 - 500)
m2 = (11250 - 5000) / (900 - 600)
m3 = (21700 - 11250) / (1200 - 900)
# intercept
a0 = -16
a1 = -7.5
a2 = -20.1
a0 = 0
a1 = -16000
a2 = -7500
a3 = -20100
if pipe_diameter_mm < 500:
return np.nan
elif pipe_diameter_mm < 600:
return a0 + m0 * pipe_diameter_mm
elif pipe_diameter_mm < 900:
elif pipe_diameter_mm < 600:
return a1 + m1 * pipe_diameter_mm
else:
elif pipe_diameter_mm < 900:
return a2 + m2 * pipe_diameter_mm
else:
return a3 + m3 * pipe_diameter_mm
def determine_pipe_capacity(gas_network):
"""Check pipe capacity depending on diameter and pressure."""
def load_dataset(fn):
    """Load SciGRID_gas pipe segments from GeoJSON and flatten nested metadata.

    Expands the nested 'param' dict column into regular columns and, from
    the 'method' dict column, keeps only the provenance of 'diameter_mm'
    and 'max_cap_M_m3_per_d' (suffixed '_method'; used downstream to tell
    raw from inferred values). The raw nested columns are dropped.
    """
    df = gpd.read_file(fn)
    param = df.param.apply(pd.Series)
    method = df.method.apply(pd.Series)[["diameter_mm", "max_cap_M_m3_per_d"]]
    method.columns = method.columns + "_method"
    df = pd.concat([df, param, method], axis=1)
    to_drop = ["param", "uncertainty", "method", "tags"]
    # 'tags' is not always present, so drop only the columns that exist
    to_drop = df.columns.intersection(to_drop)
    df.drop(to_drop, axis=1, inplace=True)
    return df
gas_network["capacity_recalculated"] = gas_network.diameter_mm.apply(diameter2capacity)
# if pipe capacity smaller than 1.5 GW take original pipe capacity
low_cap = gas_network.Capacity_GWh_h < 1.5
gas_network.loc[low_cap, "capacity_recalculated"] = gas_network.loc[low_cap, "capacity_recalculated"].fillna(gas_network.loc[low_cap, "Capacity_GWh_h"])
def prepare_dataset(
df,
length_factor=1.5,
correction_threshold_length=4,
correction_threshold_p_nom=8,
bidirectional_below=10
):
# for pipes without diameter assume 500 mm diameter
gas_network["capacity_recalculated"].fillna(1.5, inplace=True)
# extract start and end from LineString
df["point0"] = df.geometry.apply(lambda x: Point(x.coords[0]))
df["point1"] = df.geometry.apply(lambda x: Point(x.coords[-1]))
# for nord stream take orginal data
nord_stream = gas_network[gas_network.max_pressure_bar==220].index
gas_network.loc[nord_stream, "capacity_recalculated"] = gas_network.loc[nord_stream, "Capacity_GWh_h"]
conversion_factor = 437.5 # MCM/day to MWh/h
df["p_nom"] = df.max_cap_M_m3_per_d * conversion_factor
# for inferred diameters, assume 500 mm rather than 900 mm (more conservative)
df.loc[df.diameter_mm_method != 'raw', "diameter_mm"] = 500.
keep = ["name", "diameter_mm", "is_H_gas", "is_bothDirection",
"length_km", "p_nom", "max_pressure_bar",
"start_year", "point0", "point1", "geometry"]
to_rename = {
"is_bothDirection": "bidirectional",
"is_H_gas": "H_gas",
"start_year": "build_year",
"length_km": "length",
}
df = df[keep].rename(columns=to_rename)
df.bidirectional = df.bidirectional.astype(bool)
df.H_gas = df.H_gas.astype(bool)
# short lines below 10 km are assumed to be bidirectional
short_lines = df["length"] < bidirectional_below
df.loc[short_lines, "bidirectional"] = True
# correct all capacities that deviate correction_threshold factor
# to diameter-based capacities, unless they are NordStream pipelines
# also all capacities below 0.5 GW are now diameter-based capacities
df["p_nom_diameter"] = df.diameter_mm.apply(diameter_to_capacity)
ratio = df.p_nom / df.p_nom_diameter
not_nordstream = df.max_pressure_bar < 220
df.p_nom.update(df.p_nom_diameter.where(
(df.p_nom <= 500) |
((ratio > correction_threshold_p_nom) & not_nordstream) |
((ratio < 1 / correction_threshold_p_nom) & not_nordstream)
))
# lines which have way too discrepant line lengths
# get assigned haversine length * length factor
df["length_haversine"] = df.apply(
lambda p: length_factor * haversine_pts(
[p.point0.x, p.point0.y],
[p.point1.x, p.point1.y]
), axis=1
)
ratio = df.eval("length / length_haversine")
df["length"].update(df.length_haversine.where(
(df["length"] < 20) |
(ratio > correction_threshold_length) |
(ratio < 1 / correction_threshold_length)
))
return df
if __name__ == "__main__":
if 'snakemake' not in globals():
from helper import mock_snakemake
snakemake = mock_snakemake('build_gas_network',
network='elec', simpl='', clusters='37',
lv='1.0', opts='', planning_horizons='2020',
sector_opts='168H-T-H-B-I')
snakemake = mock_snakemake('build_gas_network')
logging.basicConfig(level=snakemake.config['logging_level'])
# import gas network data
gas_network, points = load_gas_network(snakemake.input.gas_network)
gas_network = load_dataset(snakemake.input.gas_network)
# get clustered bus regions
bus_regions = load_bus_regions(
snakemake.input.regions_onshore,
snakemake.input.regions_offshore
)
nodes = pd.Index(bus_regions.name.unique())
gas_network = prepare_dataset(gas_network)
# map gas network points to network buses
points2buses_map = points2buses(points, bus_regions)
# create gas network between pypsa nodes
gas_connections = build_gas_network_topology(gas_network, points2buses_map)
gas_connections = clean_dataset(nodes, gas_connections)
gas_connections.to_csv(snakemake.output.clustered_gas_network)
gas_network.to_csv(snakemake.output.cleaned_gas_network)

View File

@ -3,7 +3,11 @@
import uuid
import pandas as pd
import geopandas as gpd
from itertools import product
from distutils.version import StrictVersion
gpd_version = StrictVersion(gpd.__version__)
def locate_missing_industrial_sites(df):
@ -69,7 +73,8 @@ def prepare_hotmaps_database(regions):
gdf = gpd.GeoDataFrame(df, geometry='coordinates', crs="EPSG:4326")
gdf = gpd.sjoin(gdf, regions, how="inner", op='within')
kws = dict(op="within") if gpd_version < '0.10' else dict(predicate="within")
gdf = gpd.sjoin(gdf, regions, how="inner", **kws)
gdf.rename(columns={"index_right": "bus"}, inplace=True)
gdf["country"] = gdf.bus.str[:2]

124
scripts/cluster_gas_network.py Executable file
View File

@ -0,0 +1,124 @@
"""Cluster gas network."""
import logging
logger = logging.getLogger(__name__)
import pandas as pd
import geopandas as gpd
from shapely import wkt
from pypsa.geo import haversine_pts
from distutils.version import StrictVersion
gpd_version = StrictVersion(gpd.__version__)
def concat_gdf(gdf_list, crs='EPSG:4326'):
    """Concatenate multiple geopandas dataframes with common coordinate reference system (crs)."""
    combined = pd.concat(gdf_list)
    return gpd.GeoDataFrame(combined, crs=crs)
def load_bus_regions(onshore_path, offshore_path):
    """Load pypsa-eur on- and offshore regions and concat."""
    offshore = gpd.read_file(offshore_path)
    onshore = gpd.read_file(onshore_path)
    merged = concat_gdf([offshore, onshore])
    # merge on- and offshore polygons belonging to the same bus name
    return merged.dissolve(by='name', aggfunc='sum')
def build_clustered_gas_network(df, bus_regions, length_factor=1.25):
    """Map pipe endpoints to clustered bus regions and prune the network.

    Parameters
    ----------
    df : pd.DataFrame
        Pipe records with shapely 'point0'/'point1' endpoint columns
        (assumed EPSG:4326 — lon/lat).
    bus_regions : gpd.GeoDataFrame
        Bus regions indexed by bus name (as produced by load_bus_regions).
    length_factor : float
        Multiplier on the centroid-to-centroid haversine distance to
        approximate real routed pipeline length.

    Returns
    -------
    pd.DataFrame
        Pipes with 'bus0'/'bus1' assigned, recalculated 'length', and a
        fresh numbered index. Intra-region pipes and pipes with an
        endpoint outside all regions are dropped.
    """
    for i in [0,1]:
        gdf = gpd.GeoDataFrame(geometry=df[f"point{i}"], crs="EPSG:4326")
        # geopandas >= 0.10 renamed sjoin's 'op' keyword to 'predicate'
        kws = dict(op="within") if gpd_version < '0.10' else dict(predicate="within")
        bus_mapping = gpd.sjoin(gdf, bus_regions, how="left", **kws).index_right
        # a point on a region boundary can match several regions; keep the first
        bus_mapping = bus_mapping.groupby(bus_mapping.index).first()
        df[f"bus{i}"] = bus_mapping
        # snap the endpoint to its region centroid (computed in metric EPSG:3035)
        df[f"point{i}"] = df[f"bus{i}"].map(bus_regions.to_crs(3035).centroid.to_crs(4326))

    # drop pipes where not both buses are inside regions
    df = df.loc[~df.bus0.isna() & ~df.bus1.isna()]

    # drop pipes within the same region
    df = df.loc[df.bus1 != df.bus0]

    # recalculate lengths as center to center * length factor
    df["length"] = df.apply(
        lambda p: length_factor * haversine_pts(
            [p.point0.x, p.point0.y],
            [p.point1.x, p.point1.y]
        ), axis=1
    )

    # tidy and create new numbered index
    df.drop(["point0", "point1"], axis=1, inplace=True)
    df.reset_index(drop=True, inplace=True)

    return df
def reindex_pipes(df):
    """Relabel pipes as 'gas pipeline bus0 <-> bus1' (bidirectional) or
    'gas pipeline bus0 -> bus1' (one-way), derive 'p_min_pu' from the
    bidirectional flag, drop the flag and sort columns. Mutates df in place.
    """
    def _label(row):
        if row.bidirectional:
            sep = " <-> "
        else:
            sep = " -> "
        return "gas pipeline " + row.bus0 + sep + row.bus1

    df.index = df.apply(_label, axis=1)

    # bidirectional pipes may flow backwards (p_min_pu = -1)
    df["p_min_pu"] = df.bidirectional.map(lambda flag: -1 if flag else 0)
    df.drop(columns="bidirectional", inplace=True)

    df.sort_index(axis=1, inplace=True)
def aggregate_parallel_pipes(df):
    """Merge pipes sharing the same index label (parallel pipes of one corridor).

    Capacities are summed, physical attributes averaged, buses taken from
    the first entry, names concatenated, and the most permissive
    (minimum) 'p_min_pu' kept.
    """
    how = dict(
        bus0="first",
        bus1="first",
        p_nom="sum",
        p_nom_diameter="sum",
        max_pressure_bar="mean",
        build_year="mean",
        diameter_mm="mean",
        length="mean",
        name=" ".join,
        p_min_pu="min",
    )
    return df.groupby(level=0).agg(how)
if __name__ == "__main__":
if 'snakemake' not in globals():
from helper import mock_snakemake
snakemake = mock_snakemake(
'cluster_gas_network',
simpl='',
clusters='37'
)
logging.basicConfig(level=snakemake.config['logging_level'])
fn = snakemake.input.cleaned_gas_network
df = pd.read_csv(fn, index_col=0)
for col in ["point0", "point1"]:
df[col] = df[col].apply(wkt.loads)
bus_regions = load_bus_regions(
snakemake.input.regions_onshore,
snakemake.input.regions_offshore
)
gas_network = build_clustered_gas_network(df, bus_regions)
reindex_pipes(gas_network)
gas_network = aggregate_parallel_pipes(gas_network)
gas_network.to_csv(snakemake.output.clustered_gas_network)

View File

@ -89,3 +89,15 @@ def mock_snakemake(rulename, **wildcards):
os.chdir(script_dir)
return snakemake
# from pypsa-eur/_helpers.py
def progress_retrieve(url, file):
    """Download *url* to *file*, displaying a textual progress bar.

    Parameters
    ----------
    url : str
        URL to download.
    file : str
        Destination file path.
    """
    # bug fix: a bare `import urllib` does not guarantee the `urllib.request`
    # submodule is loaded; import it explicitly
    import urllib.request
    from progressbar import ProgressBar

    pbar = ProgressBar(0, 100)

    def dl_progress(count, block_size, total_size):
        # total_size can be -1/0 when the server sends no Content-Length;
        # guard against ZeroDivisionError and clamp to the bar's maximum
        if total_size > 0:
            pbar.update(min(100, int(count * block_size * 100 / total_size)))

    urllib.request.urlretrieve(url, file, reporthook=dl_progress)

View File

@ -236,8 +236,6 @@ def plot_h2_map(network):
linewidth_factor = 1e4
# MW below which not drawn
line_lower_threshold = 1e3
bus_color = "m"
link_color = "c"
# Drop non-electric buses so they don't clutter the plot
n.buses.drop(n.buses.index[n.buses.carrier != "AC"], inplace=True)
@ -251,19 +249,34 @@ def plot_h2_map(network):
n.links.drop(n.links.index[~n.links.carrier.str.contains("H2 pipeline")], inplace=True)
link_widths = n.links.p_nom_opt / linewidth_factor
link_widths[n.links.p_nom_opt < line_lower_threshold] = 0.
link_color = n.links.carrier.map({"H2 pipeline":"red",
"H2 pipeline retrofitted": "blue"})
h2_new = n.links.loc[n.links.carrier=="H2 pipeline", "p_nom_opt"]
h2_retro = n.links.loc[n.links.carrier=='H2 pipeline retrofitted']
positive_order = h2_retro.bus0 < h2_retro.bus1
h2_retro_p = h2_retro[positive_order]
swap_buses = {"bus0": "bus1", "bus1": "bus0"}
h2_retro_n = h2_retro[~positive_order].rename(columns=swap_buses)
h2_retro = pd.concat([h2_retro_p, h2_retro_n])
h2_retro.index = h2_retro.apply(
lambda x: f"H2 pipeline {x.bus0.replace(' H2', '')} -> {x.bus1.replace(' H2', '')}",
axis=1
)
h2_retro = h2_retro["p_nom_opt"]
link_widths_total = (h2_new + h2_retro) / linewidth_factor
link_widths_total = link_widths_total.groupby(level=0).sum().reindex(n.links.index).fillna(0.)
link_widths_total[n.links.p_nom_opt < line_lower_threshold] = 0.
retro = n.links.p_nom_opt.where(n.links.carrier=='H2 pipeline retrofitted', other=0.)
link_widths_retro = retro / linewidth_factor
link_widths_retro[n.links.p_nom_opt < line_lower_threshold] = 0.
n.links.bus0 = n.links.bus0.str.replace(" H2", "")
n.links.bus1 = n.links.bus1.str.replace(" H2", "")
print(link_widths.sort_values())
print(n.links[["bus0", "bus1"]])
fig, ax = plt.subplots(
figsize=(7, 6),
subplot_kw={"projection": ccrs.PlateCarree()}
@ -271,18 +284,28 @@ def plot_h2_map(network):
n.plot(
bus_sizes=bus_sizes,
bus_colors={"H2 Electrolysis": bus_color,
"H2 Fuel Cell": "slateblue"},
link_colors=link_color,
link_widths=link_widths,
bus_colors=snakemake.config['plotting']['tech_colors'],
link_colors='#a2f0f2',
link_widths=link_widths_total,
branch_components=["Link"],
ax=ax, **map_opts
ax=ax,
**map_opts
)
n.plot(
geomap=False,
bus_sizes=0,
link_colors='#72d3d6',
link_widths=link_widths_retro,
branch_components=["Link"],
ax=ax,
**map_opts
)
handles = make_legend_circles_for(
[50000, 10000],
scale=bus_size_factor,
facecolor=bus_color
facecolor='grey'
)
labels = ["{} GW".format(s) for s in (50, 10)]
@ -303,7 +326,7 @@ def plot_h2_map(network):
labels = []
for s in (50, 10):
handles.append(plt.Line2D([0], [0], color="black",
handles.append(plt.Line2D([0], [0], color="grey",
linewidth=s * 1e3 / linewidth_factor))
labels.append("{} GW".format(s))
@ -321,7 +344,6 @@ def plot_h2_map(network):
fig.savefig(
snakemake.output.map.replace("-costs-all","-h2_network"),
transparent=True,
bbox_inches="tight"
)
@ -330,261 +352,129 @@ def plot_ch4_map(network):
n = network.copy()
supply_energy = get_nodal_balance().droplevel([0,1]).sort_index()
if "Gas pipeline" not in n.links.carrier.unique():
if "gas pipeline" not in n.links.carrier.unique():
return
assign_location(n)
bus_size_factor = 1e7
bus_size_factor = 6e7
linewidth_factor = 1e4
# MW below which not drawn
line_lower_threshold = 5e3
bus_color = "maroon"
link_color = "lightcoral"
line_lower_threshold = 500
# Drop non-electric buses so they don't clutter the plot
n.buses.drop(n.buses.index[n.buses.carrier != "AC"], inplace=True)
elec = n.generators[n.generators.carrier=="gas"].index
fossil_gas_i = n.generators[n.generators.carrier=="gas"].index
fossil_gas = n.generators_t.p.loc[:,fossil_gas_i].mul(n.snapshot_weightings.generators, axis=0).sum().groupby(n.generators.loc[fossil_gas_i,"bus"]).sum() / bus_size_factor
fossil_gas.rename(index=lambda x: x.replace(" gas", ""), inplace=True)
fossil_gas = fossil_gas.reindex(n.buses.index).fillna(0)
# make a fake MultiIndex so that area is correct for legend
fossil_gas.index = pd.MultiIndex.from_product([fossil_gas.index, ["fossil gas"]])
methanation_i = n.links[n.links.carrier.isin(["helmeth", "Sabatier"])].index
bus_sizes = n.generators_t.p.loc[:,elec].mul(n.snapshot_weightings, axis=0).sum().groupby(n.generators.loc[elec,"bus"]).sum() / bus_size_factor
bus_sizes.rename(index=lambda x: x.replace(" gas", ""), inplace=True)
bus_sizes = bus_sizes.reindex(n.buses.index).fillna(0)
bus_sizes.index = pd.MultiIndex.from_product(
[bus_sizes.index, ["fossil gas"]])
methanation = abs(n.links_t.p1.loc[:,methanation_i].mul(n.snapshot_weightings, axis=0)).sum().groupby(n.links.loc[methanation_i,"bus1"]).sum() / bus_size_factor
methanation = abs(n.links_t.p1.loc[:,methanation_i].mul(n.snapshot_weightings.generators, axis=0)).sum().groupby(n.links.loc[methanation_i,"bus1"]).sum() / bus_size_factor
methanation = methanation.groupby(methanation.index).sum().rename(index=lambda x: x.replace(" gas", ""))
# make a fake MultiIndex so that area is correct for legend
methanation.index = pd.MultiIndex.from_product(
[methanation.index, ["methanation"]])
methanation.index = pd.MultiIndex.from_product([methanation.index, ["methanation"]])
biogas_i = n.stores[n.stores.carrier=="biogas"].index
biogas = n.stores_t.p.loc[:,biogas_i].mul(n.snapshot_weightings, axis=0).sum().groupby(n.stores.loc[biogas_i,"bus"]).sum() / bus_size_factor
biogas = n.stores_t.p.loc[:,biogas_i].mul(n.snapshot_weightings.generators, axis=0).sum().groupby(n.stores.loc[biogas_i,"bus"]).sum() / bus_size_factor
biogas = biogas.groupby(biogas.index).sum().rename(index=lambda x: x.replace(" biogas", ""))
# make a fake MultiIndex so that area is correct for legend
biogas.index = pd.MultiIndex.from_product(
[biogas.index, ["biogas"]])
biogas.index = pd.MultiIndex.from_product([biogas.index, ["biogas"]])
bus_sizes = pd.concat([bus_sizes, methanation, biogas])
bus_sizes = pd.concat([fossil_gas, methanation, biogas])
bus_sizes.sort_index(inplace=True)
n.links.drop(n.links.index[n.links.carrier != "Gas pipeline"], inplace=True)
to_remove = n.links.index[~n.links.carrier.str.contains("gas pipeline")]
n.links.drop(to_remove, inplace=True)
link_widths = n.links.p_nom_opt / linewidth_factor
link_widths[n.links.p_nom_opt < line_lower_threshold] = 0.
link_widths_orig = n.links.p_nom / linewidth_factor
link_widths_orig[n.links.p_nom < line_lower_threshold] = 0.
link_color = n.links.carrier.map({"gas pipeline": "#f08080",
"gas pipeline new": "#c46868"})
n.links.bus0 = n.links.bus0.str.replace(" gas", "")
n.links.bus1 = n.links.bus1.str.replace(" gas", "")
print(link_widths.sort_values())
tech_colors = snakemake.config['plotting']['tech_colors']
print(n.links[["bus0", "bus1"]])
bus_colors = {
"fossil gas": tech_colors["fossil gas"],
"methanation": tech_colors["methanation"],
"biogas": "seagreen"
}
fig, ax = plt.subplots(subplot_kw={"projection": ccrs.PlateCarree()})
fig, ax = plt.subplots(figsize=(7,6), subplot_kw={"projection": ccrs.PlateCarree()})
fig.set_size_inches(7, 6)
n.plot(
bus_sizes=bus_sizes,
bus_colors=bus_colors,
link_colors='lightgrey',
link_widths=link_widths_orig,
branch_components=["Link"],
ax=ax,
**map_opts
)
n.plot(bus_sizes=bus_sizes,
bus_colors={"fossil gas": bus_color,
"methanation": "steelblue",
"biogas": "seagreen"},
n.plot(
geomap=False,
ax=ax,
bus_sizes=0.,
link_colors=link_color,
link_widths=link_widths,
branch_components=["Link"],
ax=ax, boundaries=(-10, 30, 34, 70))
**map_opts
)
handles = make_legend_circles_for(
[200, 1000], scale=bus_size_factor, facecolor=bus_color)
labels = ["{} MW".format(s) for s in (200, 1000)]
l2 = ax.legend(handles, labels,
loc="upper left", bbox_to_anchor=(0.01, 1.01),
[10e6, 100e6],
scale=bus_size_factor,
facecolor='grey'
)
labels = ["{} TWh".format(s) for s in (10, 100)]
l2 = ax.legend(
handles, labels,
loc="upper left",
bbox_to_anchor=(-0.03, 1.01),
labelspacing=1.0,
framealpha=1.,
title='Biomass potential',
handler_map=make_handler_map_to_scale_circles_as_in(ax))
frameon=False,
title='gas generation',
handler_map=make_handler_map_to_scale_circles_as_in(ax)
)
ax.add_artist(l2)
handles = []
labels = []
for s in (50, 10):
handles.append(plt.Line2D([0], [0], color=link_color,
linewidth=s * 1e3 / linewidth_factor))
handles.append(plt.Line2D([0], [0], color="grey", linewidth=s * 1e3 / linewidth_factor))
labels.append("{} GW".format(s))
l1_1 = ax.legend(handles, labels,
loc="upper left", bbox_to_anchor=(0.30, 1.01),
framealpha=1,
labelspacing=0.8, handletextpad=1.5,
title='CH4 pipeline capacity')
l1_1 = ax.legend(
handles, labels,
loc="upper left",
bbox_to_anchor=(0.28, 1.01),
frameon=False,
labelspacing=0.8,
handletextpad=1.5,
title='gas pipeline capacity'
)
ax.add_artist(l1_1)
fig.savefig(snakemake.output.map.replace("-costs-all","-ch4_network"), transparent=True,
bbox_inches="tight")
fig.savefig(
snakemake.output.map.replace("-costs-all","-ch4_network"),
bbox_inches="tight"
)
##################################################
supply_energy.drop("Gas pipeline", level=1, inplace=True)
supply_energy = supply_energy[abs(supply_energy)>5]
supply_energy.rename(index=lambda x: x.replace(" gas",""), level=0, inplace=True)
demand = supply_energy[supply_energy<0].groupby(level=[0,1]).sum()
supply = supply_energy[supply_energy>0].groupby(level=[0,1]).sum()
#### DEMAND #######################################
bus_size_factor = 2e7
bus_sizes = abs(demand) / bus_size_factor
fig, ax = plt.subplots(subplot_kw={"projection": ccrs.PlateCarree()})
fig.set_size_inches(7, 6)
n.plot(bus_sizes=bus_sizes,
bus_colors={"CHP": "r",
"OCGT": "wheat",
"SMR": "darkkhaki",
"SMR CC": "tan",
"gas boiler": "orange",
"gas for industry": "grey",
'gas for industry CC': "lightgrey"},
link_colors=link_color,
link_widths=link_widths,
branch_components=["Link"],
ax=ax, boundaries=(-10, 30, 34, 70))
handles = make_legend_circles_for(
[10e6, 20e6], scale=bus_size_factor, facecolor=bus_color)
labels = ["{} TWh".format(s) for s in (10, 20)]
l2 = ax.legend(handles, labels,
loc="upper left", bbox_to_anchor=(0.01, 1.01),
labelspacing=1.0,
framealpha=1.,
title='CH4 demand',
handler_map=make_handler_map_to_scale_circles_as_in(ax))
ax.add_artist(l2)
handles = []
labels = []
for s in (50, 10):
handles.append(plt.Line2D([0], [0], color=link_color,
linewidth=s * 1e3 / linewidth_factor))
labels.append("{} GW".format(s))
l1_1 = ax.legend(handles, labels,
loc="upper left", bbox_to_anchor=(0.30, 1.01),
framealpha=1,
labelspacing=0.8, handletextpad=1.5,
title='CH4 pipeline capacity')
ax.add_artist(l1_1)
fig.savefig(snakemake.output.map.replace("-costs-all","-ch4_demand"), transparent=True,
bbox_inches="tight")
#### SUPPLY #######################################
bus_size_factor = 2e7
bus_sizes = supply / bus_size_factor
fig, ax = plt.subplots(subplot_kw={"projection": ccrs.PlateCarree()})
fig.set_size_inches(7, 6)
n.plot(bus_sizes=bus_sizes,
bus_colors={"gas": "maroon",
"methanation": "steelblue",
"helmeth": "slateblue",
"biogas": "seagreen"},
link_colors=link_color,
link_widths=link_widths,
branch_components=["Link"],
ax=ax, boundaries=(-10, 30, 34, 70))
handles = make_legend_circles_for(
[10e6, 20e6], scale=bus_size_factor, facecolor="black")
labels = ["{} TWh".format(s) for s in (10, 20)]
l2 = ax.legend(handles, labels,
loc="upper left", bbox_to_anchor=(0.01, 1.01),
labelspacing=1.0,
framealpha=1.,
title='CH4 supply',
handler_map=make_handler_map_to_scale_circles_as_in(ax))
ax.add_artist(l2)
handles = []
labels = []
for s in (50, 10):
handles.append(plt.Line2D([0], [0], color=link_color,
linewidth=s * 1e3 / linewidth_factor))
labels.append("{} GW".format(s))
l1_1 = ax.legend(handles, labels,
loc="upper left", bbox_to_anchor=(0.30, 1.01),
framealpha=1,
labelspacing=0.8, handletextpad=1.5,
title='CH4 pipeline capacity')
ax.add_artist(l1_1)
fig.savefig(snakemake.output.map.replace("-costs-all","-ch4_supply"), transparent=True,
bbox_inches="tight")
###########################################################################
net = pd.concat([demand.groupby(level=0).sum().rename("demand"),
supply.groupby(level=0).sum().rename("supply")], axis=1).sum(axis=1)
mask = net>0
net_importer = net.mask(mask).rename("net_importer")
net_exporter = net.mask(~mask).rename("net_exporter")
bus_size_factor = 2e7
linewidth_factor = 1e-1
bus_sizes = pd.concat([abs(net_importer), net_exporter], axis=1).fillna(0).stack() / bus_size_factor
link_widths = abs(n.links_t.p0).max().loc[n.links.index] / n.links.p_nom_opt
link_widths /= linewidth_factor
fig, ax = plt.subplots(subplot_kw={"projection": ccrs.PlateCarree()})
fig.set_size_inches(7, 6)
n.plot(bus_sizes=bus_sizes,
bus_colors={"net_importer": "r",
"net_exporter": "darkgreen",
},
link_colors="lightgrey",
link_widths=link_widths,
branch_components=["Link"],
ax=ax, boundaries=(-10, 30, 34, 70))
handles = make_legend_circles_for(
[10e6, 20e6], scale=bus_size_factor, facecolor="black")
labels = ["{} TWh".format(s) for s in (10, 20)]
l2 = ax.legend(handles, labels,
loc="upper left", bbox_to_anchor=(0.01, 1.01),
labelspacing=1.0,
framealpha=1.,
title='Net Import/Export',
handler_map=make_handler_map_to_scale_circles_as_in(ax))
ax.add_artist(l2)
handles = []
labels = []
for s in (0.5, 1):
handles.append(plt.Line2D([0], [0], color="lightgrey",
linewidth=s / linewidth_factor))
labels.append("{} per unit".format(s))
l1_1 = ax.legend(handles, labels,
loc="upper left", bbox_to_anchor=(0.30, 1.01),
framealpha=1,
labelspacing=0.8, handletextpad=1.5,
title='maximum used CH4 pipeline capacity')
ax.add_artist(l1_1)
fig.savefig(snakemake.output.map.replace("-costs-all","-ch4_net_balance"), transparent=True,
bbox_inches="tight")
def plot_map_without(network):
@ -785,51 +675,6 @@ def plot_series(network, carrier="AC", name="test"):
transparent=True)
def get_nodal_balance(carrier="gas"):
    """
    Compute the snapshot-aggregated energy balance per bus for one carrier.

    Scans the module-level network ``n``: first all one-port components
    (generators, loads, stores, ...) attached to buses of ``carrier``, then
    all branch components (links, lines, ...) with any bus end on such a bus.

    Parameters
    ----------
    carrier : str
        Bus carrier to balance (default "gas").

    Returns
    -------
    pd.Series
        MultiIndexed by (carrier, component list_name, bus, technology);
        positive values are supply, negative values are consumption.
    """
    # Boolean mask of buses belonging to the requested carrier; the empty-string
    # entry covers components whose bus field is blank (unattached).
    bus_map = (n.buses.carrier == carrier)
    bus_map.at[""] = False
    supply_energy = pd.Series(dtype="float64")

    # One-port components: energy = power * snapshot weighting, signed by the
    # component's sign convention (+1 supply, -1 withdrawal), rounded.
    for c in n.iterate_components(n.one_port_components):
        items = c.df.index[c.df.bus.map(bus_map).fillna(False)]
        if len(items) == 0:
            continue
        s = round(c.pnl.p.multiply(n.snapshot_weightings,axis=0).sum().multiply(c.df['sign']).loc[items]
        .groupby([c.df.bus, c.df.carrier]).sum())
        s = pd.concat([s], keys=[c.list_name])
        s = pd.concat([s], keys=[carrier])
        # Grow the index union first so new entries are not dropped on assignment.
        supply_energy = supply_energy.reindex(s.index.union(supply_energy.index))
        supply_energy.loc[s.index] = s

    # Branch components: iterate every bus end (bus0, bus1, ...); p{end} is the
    # power withdrawn at that end, so negate to get injection into the bus.
    for c in n.iterate_components(n.branch_components):
        for end in [col[3:] for col in c.df.columns if col[:3] == "bus"]:
            items = c.df.index[c.df["bus" + str(end)].map(bus_map,na_action=False)]
            if len(items) == 0:
                continue
            s = ((-1)*c.pnl["p"+end][items].multiply(n.snapshot_weightings,axis=0).sum()
            .groupby([c.df.loc[items,'bus{}'.format(end)], c.df.loc[items,'carrier']]).sum())
            s.index = s.index  # NOTE(review): no-op assignment, safe to remove
            s = pd.concat([s], keys=[c.list_name])
            s = pd.concat([s], keys=[carrier])
            supply_energy = supply_energy.reindex(s.index.union(supply_energy.index))
            supply_energy.loc[s.index] = s

    # Map raw technology names to the plotting aliases on the innermost level.
    supply_energy = supply_energy.rename(index=lambda x: rename_techs(x), level=3)

    return supply_energy
if __name__ == "__main__":
if 'snakemake' not in globals():
from helper import mock_snakemake

View File

@ -8,6 +8,7 @@ import pytz
import pandas as pd
import numpy as np
import xarray as xr
import networkx as nx
from itertools import product
from scipy.stats import beta
@ -16,6 +17,10 @@ from vresutils.costdata import annuity
from build_energy_totals import build_eea_co2, build_eurostat_co2, build_co2_totals
from helper import override_component_attrs
from networkx.algorithms.connectivity.edge_augmentation import k_edge_augmentation
from networkx.algorithms import complement
from pypsa.geo import haversine_pts
import logging
logger = logging.getLogger(__name__)
@ -131,40 +136,6 @@ def get(item, investment_year=None):
return item
def create_network_topology(n, prefix, connector=" -> "):
    """
    Build a bus-pair topology table from the power transmission network.

    Collects AC lines and DC links, orients every connection so that
    bus0 < bus1, and averages the attributes of parallel connections.

    Parameters
    ----------
    n : pypsa.Network
    prefix : str
    connector : str

    Returns
    -------
    pd.DataFrame with columns bus0, bus1 and length
    """
    line_cols = ["bus0", "bus1", "length"]
    link_cols = ["bus0", "bus1", "length", "underwater_fraction"]

    dc_links = n.links.loc[n.links.carrier == "DC", link_cols]
    # lines have no underwater_fraction; fill the resulting NaNs with 0
    edges = pd.concat([n.lines[line_cols], dc_links]).fillna(0)

    # orient each edge canonically (bus0 < bus1) so that antiparallel
    # connections collapse into the same group
    reverse = ~(edges.bus0 < edges.bus1)
    flipped = edges[reverse].rename(columns={"bus0": "bus1", "bus1": "bus0"})
    edges = pd.concat([edges[~reverse], flipped])

    topo = edges.groupby(["bus0", "bus1"], as_index=False).mean()
    topo.index = topo.apply(lambda r: prefix + r.bus0 + connector + r.bus1, axis=1)
    return topo
def co2_emissions_year(countries, opts, year):
"""
Calculate CO2 emissions in one specific year (e.g. 1990 or 2018).
@ -252,14 +223,21 @@ def add_lifetime_wind_solar(n, costs):
n.generators.loc[gen_i, "lifetime"] = costs.at[carrier, 'lifetime']
def create_network_topology(n, prefix, connector=" -> ", bidirectional=True):
def haversine(p):
    # Great-circle distance between the two buses of connection p, looked up
    # via their x/y coordinates on the module-level network n.
    # The 1.5 factor presumably approximates routing detours relative to the
    # direct distance — TODO confirm against project convention.
    coord0 = n.buses.loc[p.bus0, ['x', 'y']].values
    coord1 = n.buses.loc[p.bus1, ['x', 'y']].values
    return 1.5 * haversine_pts(coord0, coord1)
def create_network_topology(n, prefix, carriers=["DC"], connector=" -> ", bidirectional=True):
"""
Create a network topology like the power transmission network.
Create a network topology from transmission lines and link carrier selection.
Parameters
----------
n : pypsa.Network
prefix : str
carriers : list-like
connector : str
bidirectional : bool, default True
True: one link for each connection
@ -267,7 +245,7 @@ def create_network_topology(n, prefix, connector=" -> ", bidirectional=True):
Returns
-------
pd.DataFrame with columns bus0, bus1 and length
pd.DataFrame with columns bus0, bus1, length, underwater_fraction
"""
ln_attrs = ["bus0", "bus1", "length"]
@ -275,9 +253,13 @@ def create_network_topology(n, prefix, connector=" -> ", bidirectional=True):
candidates = pd.concat([
n.lines[ln_attrs],
n.links.loc[n.links.carrier == "DC", lk_attrs]
n.links.loc[n.links.carrier.isin(carriers), lk_attrs]
]).fillna(0)
# base network topology purely on location not carrier
candidates["bus0"] = candidates.bus0.map(n.buses.location)
candidates["bus1"] = candidates.bus1.map(n.buses.location)
positive_order = candidates.bus0 < candidates.bus1
candidates_p = candidates[positive_order]
swap_buses = {"bus0": "bus1", "bus1": "bus0"}
@ -561,17 +543,6 @@ def average_every_nhours(n, offset):
logger.info(f'Resampling the network to {offset}')
m = n.copy(with_time=False)
# TODO is this still needed?
#fix copying of network attributes
#copied from pypsa/io.py, should be in pypsa/components.py#Network.copy()
allowed_types = (float, int, bool, str) + tuple(np.typeDict.values())
attrs = dict((attr, getattr(n, attr))
for attr in dir(n)
if (not attr.startswith("__") and
isinstance(getattr(n,attr), allowed_types)))
for k,v in attrs.items():
setattr(m,k,v)
snapshot_weightings = n.snapshot_weightings.resample(offset).sum()
m.set_snapshots(snapshot_weightings.index)
m.snapshot_weightings = snapshot_weightings
@ -1033,8 +1004,8 @@ def add_electricity_grid_connection(n, costs):
n.generators.loc[gens, "capital_cost"] += costs.at['electricity grid connection', 'fixed']
def add_storage(n, costs):
print("adding electricity and hydrogen storage")
def add_storage_and_grids(n, costs):
print("adding electricity and hydrogen storage as well as hydrogen and gas grids")
nodes = pop_layout.index
@ -1106,122 +1077,128 @@ def add_storage(n, costs):
capital_cost=h2_capital_cost
)
attrs = ["bus0", "bus1", "length"]
h2_links = pd.DataFrame(columns=attrs)
candidates = pd.concat({"lines": n.lines[attrs],
"links": n.links.loc[n.links.carrier == "DC", attrs]})
for candidate in candidates.index:
buses = [candidates.at[candidate, "bus0"], candidates.at[candidate, "bus1"]]
buses.sort()
name = f"H2 pipeline {buses[0]} -> {buses[1]}"
if name not in h2_links.index:
h2_links.at[name, "bus0"] = buses[0]
h2_links.at[name, "bus1"] = buses[1]
h2_links.at[name, "length"] = candidates.at[candidate, "length"]
# TODO Add efficiency losses
n.madd("Link",
h2_links.index,
bus0=h2_links.bus0.values + " H2",
bus1=h2_links.bus1.values + " H2",
p_min_pu=-1,
p_nom_extendable=True,
length=h2_links.length.values,
capital_cost=costs.at['H2 (g) pipeline', 'fixed'] * h2_links.length.values,
carrier="H2 pipeline",
lifetime=costs.at['H2 (g) pipeline', 'lifetime']
)
if options["gas_network"]:
logger.info("Add gas network")
cols = [
"bus0",
"bus1",
"is_bothDirection",
"pipe_capacity_MW",
"id",
"length_km"
]
gas_pipes = pd.read_csv(snakemake.input.clustered_gas_network, usecols=cols)
def make_index(x):
connector = " <-> " if x.is_bothDirection else " -> "
return "Gas pipeline " + x.bus0 + connector + x.bus1
gas_pipes.index = gas_pipes.apply(make_index, axis=1)
# group parallel pipes together
strategies = {
'bus0': 'first',
'bus1': 'first',
'is_bothDirection': 'first',
"pipe_capacity_MW": 'sum',
"length_km": 'sum',
'id': ' '.join,
}
gas_pipes = gas_pipes.groupby(gas_pipes.index).agg(strategies)
gas_pipes["num_parallel"] = gas_pipes.index.value_counts()
gas_pipes["p_min_pu"] = gas_pipes.apply(lambda x: -1 if x.is_bothDirection else 0, axis=1)
fn = snakemake.input.clustered_gas_network
gas_pipes = pd.read_csv(fn, index_col=0)
if options["H2_retrofit"]:
gas_pipes["p_nom_max"] = gas_pipes.gas_pipes.pipe_capacity_MW
gas_pipes["p_nom_max"] = gas_pipes.p_nom
gas_pipes["p_nom_min"] = 0.
gas_pipes["capital_cost"] = 0.
else:
gas_pipes["p_nom_max"] = np.inf
gas_pipes["p_nom_min"] = gas_pipes.gas_pipes.pipe_capacity_MW
gas_pipes["capital_cost"] = gas_pipes.length_km * costs.at['CH4 (g) pipeline', 'fixed']
gas_pipes["p_nom_min"] = gas_pipes.p_nom
gas_pipes["capital_cost"] = gas_pipes.length * costs.at['CH4 (g) pipeline', 'fixed']
n.madd("Link",
gas_pipes.index,
bus0=gas_pipes.bus0 + " gas",
bus1=gas_pipes.bus1 + " gas",
p_min_pu=gas_pipes.p_min_pu,
p_nom=gas_pipes.pipe_capacity_MW,
p_nom=gas_pipes.p_nom,
p_nom_extendable=True,
p_nom_max=gas_pipes.p_nom_max,
p_nom_min=gas_pipes.p_nom_min,
length=gas_pipes.length_km,
length=gas_pipes.length,
capital_cost=gas_pipes.capital_cost,
type=gas_pipes.num_parallel,
tags=gas_pipes.id,
carrier="Gas pipeline",
lifetime=50
tags=gas_pipes.name,
carrier="gas pipeline",
lifetime=costs.at['CH4 (g) pipeline', 'lifetime']
)
# remove fossil generators at all connected nodes
# TODO what should be assumed here? rather located at LNG terminals?
missing = nodes.difference(pd.concat([gas_pipes.bus0, gas_pipes.bus1]).unique())
remove_i = n.generators[(n.generators.carrier=="gas")
& (~n.generators.bus.str.replace(" gas","").isin(missing))].index
# remove fossil generators where there is neither
# production, LNG terminal, nor entry-point beyond system scope
fn = snakemake.input.gas_input_nodes_simplified
gas_input_nodes = pd.read_csv(fn, index_col=0)
unique = gas_input_nodes.index.unique()
gas_i = n.generators.carrier == 'gas'
internal_i = ~n.generators.bus.map(n.buses.location).isin(unique)
remove_i = n.generators[gas_i & internal_i].index
n.generators.drop(remove_i, inplace=True)
p_nom = gas_input_nodes.sum(axis=1).rename(lambda x: x + " gas")
n.generators.loc[gas_i, "p_nom_extendable"] = False
n.generators.loc[gas_i, "p_nom"] = p_nom
# add candidates for new gas pipelines to achieve full connectivity
G = nx.Graph()
gas_buses = n.buses.loc[n.buses.carrier=='gas', 'location']
G.add_nodes_from(np.unique(gas_buses.values))
sel = gas_pipes.p_nom > 1500
attrs = ["bus0", "bus1", "length"]
G.add_weighted_edges_from(gas_pipes.loc[sel, attrs].values)
# find all complement edges
complement_edges = pd.DataFrame(complement(G).edges, columns=["bus0", "bus1"])
complement_edges["length"] = complement_edges.apply(haversine, axis=1)
# apply k_edge_augmentation weighted by length of complement edges
k_edge = options.get("gas_network_connectivity_upgrade", 3)
augmentation = k_edge_augmentation(G, k_edge, avail=complement_edges.values)
new_gas_pipes = pd.DataFrame(augmentation, columns=["bus0", "bus1"])
new_gas_pipes["length"] = new_gas_pipes.apply(haversine, axis=1)
new_gas_pipes.index = new_gas_pipes.apply(
lambda x: f"gas pipeline new {x.bus0} <-> {x.bus1}", axis=1)
n.madd("Link",
new_gas_pipes.index,
bus0=new_gas_pipes.bus0 + " gas",
bus1=new_gas_pipes.bus1 + " gas",
p_min_pu=-1, # new gas pipes are bidirectional
p_nom_extendable=True,
length=new_gas_pipes.length,
capital_cost=new_gas_pipes.length * costs.at['CH4 (g) pipeline', 'fixed'],
carrier="gas pipeline new",
lifetime=costs.at['CH4 (g) pipeline', 'lifetime']
)
# retrofitting existing CH4 pipes to H2 pipes
if options["gas_network"] and options["H2_retrofit"]:
gas_pipe_i = n.links[n.links.carrier == "Gas pipeline"].index
gas_pipe_i = n.links[n.links.carrier == "gas pipeline"].index
n.links.loc[gas_pipe_i, "p_nom_extendable"] = True
h2_pipes = gas_pipes.rename(index=lambda x:
x.replace("Gas pipeline", "H2 pipeline retrofitted"))
x.replace("gas pipeline", "H2 pipeline retrofitted"))
n.madd("Link",
h2_pipes.index,
bus0=h2_pipes.bus0 + " H2",
bus1=h2_pipes.bus1 + " H2",
p_min_pu=-1., # allow that all H2 pipelines can be used in other direction
p_nom_max=h2_pipes.pipe_capacity_MW * options["H2_retrofit_capacity_per_CH4"],
p_min_pu=-1., # allow that all H2 retrofit pipelines can be used in both directions
p_nom_max=h2_pipes.p_nom * options["H2_retrofit_capacity_per_CH4"],
p_nom_extendable=True,
length=h2_pipes.length_km,
capital_cost=costs.at['H2 (g) pipeline repurposed', 'fixed'] * h2_pipes.length_km,
type=gas_pipes.num_parallel,
tags=h2_pipes.id,
length=h2_pipes.length,
capital_cost=costs.at['H2 (g) pipeline repurposed', 'fixed'] * h2_pipes.length,
tags=h2_pipes.name,
carrier="H2 pipeline retrofitted",
lifetime=50
lifetime=costs.at['H2 (g) pipeline repurposed', 'lifetime']
)
if options.get("H2_network", True):
h2_pipes = create_network_topology(n, "H2 pipeline ", carriers=["DC", "gas pipeline"])
# TODO Add efficiency losses
n.madd("Link",
h2_pipes.index,
bus0=h2_pipes.bus0.values + " H2",
bus1=h2_pipes.bus1.values + " H2",
p_min_pu=-1,
p_nom_extendable=True,
length=h2_pipes.length.values,
capital_cost=costs.at['H2 (g) pipeline', 'fixed'] * h2_pipes.length.values,
carrier="H2 pipeline",
lifetime=costs.at['H2 (g) pipeline', 'lifetime']
)
n.add("Carrier", "battery")
@ -1963,14 +1940,6 @@ def add_industry(n, costs):
# 1e6 to convert TWh to MWh
industrial_demand = pd.read_csv(snakemake.input.industrial_demand, index_col=0) * 1e6
methane_demand = industrial_demand.loc[nodes, "methane"].div(8760).rename(index=lambda x: x + " gas for industry")
# need to aggregate methane demand if gas not nodally resolved
if not options["gas_network"]:
methane_demand = methane_demand.sum()
solid_biomass_by_country = industrial_demand["solid biomass"].groupby(pop_layout.ct).sum()
n.madd("Bus",
spatial.biomass.industry,
location=spatial.biomass.locations,
@ -2018,11 +1987,18 @@ def add_industry(n, costs):
location=spatial.gas.locations,
carrier="gas for industry")
gas_demand = industrial_demand.loc[nodes, "methane"] / 8760.
if options["gas_network"]:
spatial_gas_demand = gas_demand.rename(index=lambda x: x + " gas for industry")
else:
spatial_gas_demand = gas_demand.sum()
n.madd("Load",
spatial.gas.industry,
bus=spatial.gas.industry,
carrier="gas for industry",
p_set=methane_demand
p_set=spatial_gas_demand
)
n.madd("Link",
@ -2463,7 +2439,7 @@ if __name__ == "__main__":
add_generation(n, costs)
add_storage(n, costs)
add_storage_and_grids(n, costs)
# TODO merge with opts cost adjustment below
for o in opts:

View File

@ -0,0 +1,36 @@
"""
Retrieve gas infrastructure data from https://zenodo.org/record/4767098/files/IGGIELGN.zip
"""
import logging
from helper import progress_retrieve
import zipfile
from pathlib import Path
logger = logging.getLogger(__name__)
if __name__ == "__main__":
if 'snakemake' not in globals():
from helper import mock_snakemake
snakemake = mock_snakemake('retrieve_gas_network_data')
rootpath = '..'
else:
rootpath = '.'
url = "https://zenodo.org/record/4767098/files/IGGIELGN.zip"
# Save locations
zip_fn = Path(f"{rootpath}/IGGIELGN.zip")
to_fn = Path(f"{rootpath}/data/gas_network/scigrid-gas")
logger.info(f"Downloading databundle from '{url}'.")
progress_retrieve(url, zip_fn)
logger.info(f"Extracting databundle.")
zipfile.ZipFile(zip_fn).extractall(to_fn)
zip_fn.unlink()
logger.info(f"Gas infrastructure data available in '{to_fn}'.")

View File

@ -189,7 +189,7 @@ def add_chp_constraints(n):
def add_pipe_retrofit_constraint(n):
"""Add constraint for retrofitting existing CH4 pipelines to H2 pipelines."""
gas_pipes_i = n.links[n.links.carrier=="Gas pipeline"].index
gas_pipes_i = n.links[n.links.carrier=="gas pipeline"].index
h2_retrofitted_i = n.links[n.links.carrier=='H2 pipeline retrofitted'].index
if h2_retrofitted_i.empty or gas_pipes_i.empty: return
@ -201,7 +201,7 @@ def add_pipe_retrofit_constraint(n):
CH4_per_H2 = 1 / n.config["sector"]["H2_retrofit_capacity_per_CH4"]
lhs = linexpr(
(CH4_per_H2, link_p_nom.loc[h2_retrofitted_i].rename(index=lambda x: x.replace("H2 pipeline retrofitted", "Gas pipeline"))),
(CH4_per_H2, link_p_nom.loc[h2_retrofitted_i].rename(index=lambda x: x.replace("H2 pipeline retrofitted", "gas pipeline"))),
(1, link_p_nom.loc[gas_pipes_i])
)