merge former countries hydro profiles

Fabian Neumann 2022-07-20 15:03:48 +02:00
commit b04a55a248
31 changed files with 878 additions and 315 deletions

.gitignore

@ -19,6 +19,7 @@ gurobi.log
/data
/data/links_p_nom.csv
/cutouts
/dask-worker-space
doc/_build


@ -16,7 +16,6 @@ configfile: "config.yaml"
COSTS="data/costs.csv"
ATLITE_NPROCESSES = config['atlite'].get('nprocesses', 4)
wildcard_constraints:
weather_year="[0-9]{4}|",
simpl="[a-zA-Z0-9]*|all",
@ -51,7 +50,7 @@ if config['enable'].get('prepare_links_p_nom', False):
datafiles = ['ch_cantons.csv', 'je-e-21.03.02.xls',
'eez/World_EEZ_v8_2014.shp', 'EIA_hydro_generation_2000_2014.csv',
'eez/World_EEZ_v8_2014.shp',
'hydro_capacities.csv', 'naturalearth/ne_10m_admin_0_countries.shp',
'NUTS_2013_60M_SH/data/NUTS_RG_60M_2013.shp', 'nama_10r_3popgdp.tsv.gz',
'nama_10r_3gdp.tsv.gz', 'corine/g250_clc06_V18_5.tif']
@ -201,7 +200,9 @@ rule build_renewable_profiles:
input:
base_network="networks/base.nc",
corine="data/bundle/corine/g250_clc06_V18_5.tif",
natura="resources/natura.tiff",
natura=lambda w: ("resources/natura.tiff"
if config["renewable"][w.technology]["natura"]
else []),
gebco=lambda w: ("data/bundle/GEBCO_2014_2D.nc"
if "max_depth" in config["renewable"][w.technology].keys()
else []),
@ -224,7 +225,7 @@ rule build_renewable_profiles:
rule build_hydro_profile:
input:
country_shapes='resources/country_shapes.geojson',
eia_hydro_generation='data/bundle/EIA_hydro_generation_2000_2014.csv',
eia_hydro_generation='data/eia_hydro_annual_generation.csv',
cutout=f"cutouts/{config['renewable']['hydro']['cutout']}.nc" if "hydro" in config["renewable"] else "config['renewable']['hydro']['cutout'] not configured",
output: 'resources/profile{weather_year}_hydro.nc'
log: "logs/build_hydro_profile{weather_year}.log"
@ -243,7 +244,8 @@ rule add_electricity:
load='resources/load{weather_year}.csv',
nuts3_shapes='resources/nuts3_shapes.geojson',
**{f"profile_{tech}": "resources/profile{weather_year}_" + f"{tech}.nc"
for tech in config['renewable']}
for tech in config['renewable']},
**{f"conventional_{carrier}_{attr}": fn for carrier, d in config.get('conventional', {None: {}}).items() for attr, fn in d.items() if str(fn).startswith("data/")},
output: "networks/elec{weather_year}.nc"
log: "logs/add_electricity{weather_year}.log"
benchmark: "benchmarks/add_electricity{weather_year}"
@ -404,7 +406,7 @@ rule plot_summary:
def input_plot_p_nom_max(w):
return [("networks/elec{weather_year}_s{simpl}{maybe_cluster}.nc"
return [("results/elec{weather_year}_s{simpl}{maybe_cluster}.nc"
.format(maybe_cluster=('' if c == 'full' else ('_' + c)), **w))
for c in w.clusts.split(",")]


@ -9,8 +9,6 @@ logging:
level: INFO
format: '%(levelname)s:%(name)s:%(message)s'
summary_dir: results
scenario:
weather_year: [''] # for backwards compatibility
simpl: ['']
@ -20,10 +18,6 @@ scenario:
countries: ['AL', 'AT', 'BA', 'BE', 'BG', 'CH', 'CZ', 'DE', 'DK', 'EE', 'ES', 'FI', 'FR', 'GB', 'GR', 'HR', 'HU', 'IE', 'IT', 'LT', 'LU', 'LV', 'ME', 'MK', 'NL', 'NO', 'PL', 'PT', 'RO', 'RS', 'SE', 'SI', 'SK']
clustering:
simplify:
to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections)
snapshots:
start: "2013-01-01"
end: "2014-01-01"
@ -41,29 +35,50 @@ enable:
electricity:
voltages: [220., 300., 380.]
gaslimit: false # global gas usage limit of X MWh_th
co2limit: 7.75e+7 # 0.05 * 3.1e9*0.5
co2base: 1.487e+9
agg_p_nom_limits: data/agg_p_nom_minmax.csv
extendable_carriers:
Generator: []
StorageUnit: [] # battery, H2
Store: [battery, H2]
Link: []
operational_reserve: # like https://genxproject.github.io/GenX/dev/core/#Reserves
activate: false
epsilon_load: 0.02 # share of total load
epsilon_vres: 0.02 # share of total renewable supply
contingency: 4000 # fixed capacity in MW
max_hours:
battery: 6
H2: 168
powerplants_filter: false # use pandas query strings here, e.g. Country not in ['Germany']
custom_powerplants: false # use pandas query strings here, e.g. Country in ['Germany']
conventional_carriers: [nuclear, oil, OCGT, CCGT, coal, lignite, geothermal, biomass]
renewable_capacities_from_OPSD: [] # onwind, offwind, solar
extendable_carriers:
Generator: [solar, onwind, offwind-ac, offwind-dc, OCGT]
StorageUnit: [] # battery, H2
Store: [battery, H2]
Link: [AC, DC]
# estimate_renewable_capacities_from_capacity_stats:
# # Wind is the Fueltype in ppm.data.Capacity_stats, onwind, offwind-{ac,dc} the carrier in PyPSA-Eur
# Wind: [onwind, offwind-ac, offwind-dc]
# Solar: [solar]
# use pandas query strings here, e.g. Country not in ['Germany']
powerplants_filter: (DateOut >= 2022 or DateOut != DateOut)
# use pandas query strings here, e.g. Country in ['Germany']
custom_powerplants: false
conventional_carriers: [nuclear, oil, OCGT, CCGT, coal, lignite, geothermal, biomass]
renewable_carriers: [solar, onwind, offwind-ac, offwind-dc, hydro]
estimate_renewable_capacities:
enable: true
# Add capacities from OPSD data
from_opsd: true
# Renewable capacities are based on existing capacities reported by IRENA
year: 2020
# Artificially limit maximum capacities to factor * (IRENA capacities),
# i.e. 110% of <years>'s capacities => expansion_limit: 1.1
# false: Use estimated renewable potentials determined by the workflow
expansion_limit: false
technology_mapping:
# Wind is the Fueltype in powerplantmatching, onwind, offwind-{ac,dc} the carrier in PyPSA-Eur
Offshore: [offwind-ac, offwind-dc]
Onshore: [onwind]
PV: [solar]
atlite:
nprocesses: 4
@ -171,6 +186,10 @@ renewable:
clip_min_inflow: 1.0
norm_year: 2013
conventional:
nuclear:
p_max_pu: "data/nuclear_p_max_pu.csv" # float or file name
lines:
types:
220.: "Al/St 240/40 2-bundle 220.0"
@ -217,6 +236,25 @@ costs:
emission_prices: # in currency per tonne emission, only used with the option Ep
co2: 0.
clustering:
simplify_network:
to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections)
algorithm: kmeans # choose from: [hac, kmeans]
feature: solar+onwind-time # only for hac. choose from: [solar+onwind-time, solar+onwind-cap, solar-time, solar-cap, solar+offwind-cap] etc.
cluster_network:
algorithm: kmeans
feature: solar+onwind-time
aggregation_strategies:
generators:
p_nom_max: sum # use "min" for more conservative assumptions
p_nom_min: sum
p_min_pu: mean
marginal_cost: mean
committable: any
ramp_limit_up: max
ramp_limit_down: max
efficiency: mean
solving:
options:
formulation: kirchhoff
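
The newly added ``operational_reserve`` settings above parametrise a reserve requirement in the style of the linked GenX formulation. As a rough sketch of how the three parameters combine into a per-snapshot requirement (toy numbers; this is not the exact constraint added in ``solve_network``):

    import pandas as pd

    # Hypothetical hourly totals, for illustration only [MW].
    load = pd.Series([45_000, 52_000, 48_000])  # total load
    vres = pd.Series([12_000, 20_000, 16_000])  # variable renewable supply

    epsilon_load = 0.02  # share of total load
    epsilon_vres = 0.02  # share of total renewable supply
    contingency = 4000   # fixed capacity in MW

    # Reserve requirement per snapshot, combining the three components.
    reserve_requirement = epsilon_load * load + epsilon_vres * vres + contingency
    print(reserve_requirement)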


@ -9,7 +9,6 @@ logging:
level: INFO
format: '%(levelname)s:%(name)s:%(message)s'
summary_dir: results
scenario:
weather_year: ['']
@ -20,10 +19,6 @@ scenario:
countries: ['BE']
clustering:
simplify:
to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections)
snapshots:
start: "2013-03-01"
end: "2013-04-01"
@ -172,6 +167,25 @@ costs:
emission_prices: # in currency per tonne emission, only used with the option Ep
co2: 0.
clustering:
simplify_network:
to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections)
algorithm: kmeans # choose from: [hac, kmeans]
feature: solar+onwind-time # only for hac. choose from: [solar+onwind-time, solar+onwind-cap, solar-time, solar-cap, solar+offwind-cap] etc.
cluster_network:
algorithm: kmeans
feature: solar+onwind-time
aggregation_strategies:
generators:
p_nom_max: sum # use "min" for more conservative assumptions
p_nom_min: sum
p_min_pu: mean
marginal_cost: mean
committable: any
ramp_limit_up: max
ramp_limit_down: max
efficiency: mean
solving:
options:
formulation: kirchhoff


@ -0,0 +1,50 @@
https://www.eia.gov/international/data/world/electricity/electricity-generation?pd=2&p=000000000000000000000000000000g&u=1&f=A&v=mapbubble&a=-&i=none&vo=value&t=R&g=000000000000002&l=73-1028i008017kg6368g80a4k000e0ag00gg0004g8g0ho00g000400008&s=315532800000&e=1577836800000&ev=false&
Report generated on: 03-28-2022 11:20:48
"API","","1980","1981","1982","1983","1984","1985","1986","1987","1988","1989","1990","1991","1992","1993","1994","1995","1996","1997","1998","1999","2000","2001","2002","2003","2004","2005","2006","2007","2008","2009","2010","2011","2012","2013","2014","2015","2016","2017","2018","2019","2020"
"","hydroelectricity net generation (billion kWh)","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","",""
"INTL.33-12-EURO-BKWH.A"," Europe","458.018","464.155","459.881","473.685","481.241","476.739","459.535","491.085","534.517","465.365","474.466","475.47","509.041","526.448","531.815","543.743","529.114164","543.845616","562.441501","569.308453","591.206662","587.371195","541.542535","506.19703","544.536443","545.176179","537.335934","540.934407","567.557921","564.244482","619.96477","543.05273","600.46622","631.86431","619.59229","615.53013","629.98906","562.59258","619.31106","610.62616","670.925"
"INTL.33-12-ALB-BKWH.A"," Albania","2.919","3.018","3.093","3.167","3.241","3.315","3.365","3.979","3.713","3.846","2.82","3.483","3.187","3.281","3.733","4.162","5.669","4.978","4.872","5.231","4.548","3.519","3.477","5.117","5.411","5.319","4.951","2.76","3.759","5.201","7.49133","4.09068","4.67775","6.88941","4.67676","5.83605","7.70418","4.47975","8.46648","5.15394","5.281"
"INTL.33-12-AUT-BKWH.A"," Austria","28.501","30.008","29.893","29.577","28.384","30.288","30.496","25.401","35.151","34.641","31.179","31.112","34.483","36.336","35.349","36.696","33.874","35.744","36.792","40.292","41.418","40.05","39.825","32.883","36.394","36.31","35.48","36.732","37.969","40.487","36.466","32.511","41.862","40.138","39.001","35.255","37.954","36.462","35.73","40.43655","45.344"
"INTL.33-12-BEL-BKWH.A"," Belgium","0.274","0.377","0.325","0.331","0.348","0.282","0.339","0.425","0.354","0.3","0.263","0.226","0.338","0.252","0.342","0.335","0.237","0.30195","0.38511","0.338","0.455","0.437","0.356","0.245","0.314","0.285","0.355","0.385","0.406","0.325","0.298","0.193","0.353","0.376","0.289","0.314","0.367","0.268","0.311","0.108","1.29"
"INTL.33-12-BIH-BKWH.A"," Bosnia and Herzegovina","--","--","--","--","--","--","--","--","--","--","--","--","3.374","2.343","3.424","3.607","5.104","4.608","4.511","5.477","5.043","5.129","5.215","4.456","5.919","5.938","5.798","3.961","4.818","6.177","7.946","4.343","4.173","7.164","5.876","5.495","5.585","3.7521","6.35382","6.02019","6.1"
"INTL.33-12-BGR-BKWH.A"," Bulgaria","3.674","3.58","3.018","3.318","3.226","2.214","2.302","2.512","2.569","2.662","1.859","2.417","2.042","1.923","1.453","2.291","2.89","2.726","3.066","2.725","2.646","1.72","2.172","2.999","3.136","4.294","4.196","2.845","2.796","3.435","4.98168","2.84328","3.14622","3.99564","4.55598","5.59845","3.8412","2.79972","5.09553","3.34917","3.37"
"INTL.33-12-HRV-BKWH.A"," Croatia","--","--","--","--","--","--","--","--","--","--","--","--","4.298","4.302","4.881","5.212","7.156","5.234","5.403","6.524","5.794","6.482","5.311","4.827","6.888","6.27","5.94","4.194","5.164","6.663","9.035","4.983","4.789","8.536","8.917","6.327","6.784","5.255","7.62399","5.87268","3.4"
"INTL.33-12-CYP-BKWH.A"," Cyprus","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0"
"INTL.33-12-CZE-BKWH.A"," Czech Republic","--","--","--","--","--","--","--","--","--","--","--","--","--","1.355","1.445","1.982","1.949","1.68201","1.382","1.664","1.7404","2.033","2.467","1.369","1.999","2.356","2.525","2.068","2.004","2.405","2.775","1.95","2.107","2.704","1.909","1.779","1.983","1.852","1.615","1.98792","3.4"
"INTL.33-12-DNK-BKWH.A"," Denmark","0.03","0.031","0.028","0.036","0.028","0.027","0.029","0.029","0.032","0.027","0.027","0.026","0.028","0.027","0.033","0.03","0.019","0.019","0.02673","0.031","0.03","0.028","0.032","0.021","0.027","0.023","0.023","0.028","0.026","0.019","0.021","0.017","0.017","0.013","0.015","0.018","0.019","0.018","0.015","0.01584","0.02"
"INTL.33-12-EST-BKWH.A"," Estonia","--","--","--","--","--","--","--","--","--","--","--","--","0.001","0.001","0.003","0.002","0.002","0.003","0.004","0.004","0.005","0.007","0.006","0.013","0.022","0.022","0.014","0.021","0.028","0.032","0.027","0.03","0.042","0.026","0.027","0.027","0.035","0.026","0.015","0.01881","0.04"
"INTL.33-12-FRO-BKWH.A"," Faroe Islands","0.049","0.049","0.049","0.049","0.049","0.049","0.049","0.049","0.062","0.071","0.074","0.074","0.083","0.073","0.075","0.075","0.069564","0.075066","0.076501","0.069453","0.075262","0.075195","0.095535","0.08483","0.093443","0.097986","0.099934","0.103407","0.094921","0.091482","0.06676","0.092","0.099","0.091","0.121","0.132","0.105","0.11","0.107","0.102","0.11"
"INTL.33-12-FIN-BKWH.A"," Finland","10.115","13.518","12.958","13.445","13.115","12.211","12.266","13.658","13.229","12.9","10.75","13.065","14.956","13.341","11.669","12.796","11.742","12.11958","14.9","12.652","14.513","13.073","10.668","9.495","14.919","13.646","11.379","14.035","16.941","12.559","12.743","12.278","16.667","12.672","13.24","16.584","15.634","14.61","13.137","12.31461","15.56"
"INTL.33-12-CSK-BKWH.A"," Former Czechoslovakia","4.8","4.2","3.7","3.9","3.2","4.3","4","4.853","4.355","4.229","3.919","3.119","3.602","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--"
"INTL.33-12-SCG-BKWH.A"," Former Serbia and Montenegro","--","--","--","--","--","--","--","--","--","--","--","--","11.23","10.395","11.016","12.071","14.266","12.636","12.763","13.243","11.88","12.326","11.633","9.752","11.01","11.912","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--"
"INTL.33-12-YUG-BKWH.A"," Former Yugoslavia","27.868","25.044","23.295","21.623","25.645","24.363","27.474","25.98","25.612","23.256","19.601","18.929","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--"
"INTL.33-12-FRA-BKWH.A"," France","68.253","70.358","68.6","67.515","64.01","60.248","60.953","68.623","73.952","45.744","52.796","56.277","68.313","64.3","78.057","72.196","64.43","63.151","61.479","71.832","66.466","73.888","59.992","58.567","59.276","50.965","55.741","57.029","63.017","56.428","61.945","45.184","59.099","71.042","62.993","54.876","60.094","49.389","64.485","56.98242","64.84"
"INTL.33-12-DEU-BKWH.A"," Germany","--","--","--","--","--","--","--","--","--","--","--","14.742","17.223","17.699","19.731","21.562","21.737","17.18343","17.044","19.451","21.515","22.506","22.893","19.071","20.866","19.442","19.808","20.957","20.239","18.841","20.678","17.323","21.331","22.66","19.31","18.664","20.214","19.985","17.815","19.86039","24.75"
"INTL.33-12-DDR-BKWH.A"," Germany, East","1.658","1.718","1.748","1.683","1.748","1.758","1.767","1.726","1.719","1.551","1.389","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--"
"INTL.33-12-DEUW-BKWH.A"," Germany, West","17.125","17.889","17.694","16.713","16.434","15.354","16.526","18.36","18.128","16.482","15.769","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--"
"INTL.33-12-GIB-BKWH.A"," Gibraltar","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0"
"INTL.33-12-GRC-BKWH.A"," Greece","3.396","3.398","3.551","2.331","2.852","2.792","3.222","2.768","2.354","1.888","1.751","3.068","2.181","2.26","2.573","3.494","4.305","3.84318","3.68","4.546","3.656","2.076","2.772","4.718","4.625","4.967","5.806","2.565","3.279","5.32","7.431","3.998","4.387","6.337","4.464","5.782","5.543","3.962","5.035","3.9798","3.43"
"INTL.33-12-HUN-BKWH.A"," Hungary","0.111","0.166","0.158","0.153","0.179","0.153","0.152","0.167","0.167","0.156","0.176","0.192","0.156","0.164","0.159","0.161","0.205","0.21384","0.15345","0.179","0.176","0.184","0.192","0.169","0.203","0.2","0.184","0.208","0.211","0.226","0.184","0.216","0.206","0.208","0.294","0.227","0.253","0.214","0.216","0.21681","0.24"
"INTL.33-12-ISL-BKWH.A"," Iceland","3.053","3.085","3.407","3.588","3.738","3.667","3.846","3.918","4.169","4.217","4.162","4.162","4.267","4.421","4.47","4.635","4.724","5.15493","5.565","5.987","6.292","6.512","6.907","7.017","7.063","6.949","7.22","8.31","12.303","12.156","12.51","12.382","12.214","12.747","12.554","13.541","13.092","13.892","13.679","13.32441","12.46"
"INTL.33-12-IRL-BKWH.A"," Ireland","0.833","0.855","0.792","0.776","0.68","0.824","0.91","0.673","0.862","0.684","0.69","0.738","0.809","0.757","0.911","0.706","0.715","0.67122","0.907","0.838","0.838","0.59","0.903","0.592","0.624","0.625","0.717","0.66","0.959","0.893","0.593","0.699","0.795","0.593","0.701","0.798","0.674","0.685","0.687","0.87813","1.21"
"INTL.33-12-ITA-BKWH.A"," Italy","44.997","42.782","41.216","40.96","41.923","40.616","40.626","39.05","40.205","33.647","31.31","41.817","41.778","41.011","44.212","37.404","41.617","41.18697","40.808","44.911","43.763","46.343","39.125","33.303","41.915","35.706","36.624","32.488","41.207","48.647","50.506","45.36477","41.45625","52.24626","57.95955","45.08163","42.00768","35.83701","48.29913","45.31824","47.72"
"INTL.33-12-XKS-BKWH.A"," Kosovo","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","0.075","0.119","0.154","0.104","0.095","0.142","0.149","0.139","0.243","0.177","0.27027","0.2079","0.26"
"INTL.33-12-LVA-BKWH.A"," Latvia","--","--","--","--","--","--","--","--","--","--","--","--","2.498","2.846","3.272","2.908","1.841","2.922","2.99","2.729","2.791","2.805","2.438","2.243","3.078","3.293","2.671","2.706","3.078","3.422","3.488","2.857","3.677","2.838","1.953","1.841","2.523","4.356","2.417","2.08692","2.59"
"INTL.33-12-LTU-BKWH.A"," Lithuania","--","--","--","--","--","--","--","--","--","--","--","--","0.308","0.389","0.447","0.369","0.323","0.291","0.413","0.409","0.336","0.322","0.35","0.323","0.417","0.446193","0.393","0.417","0.398","0.42","0.535","0.475","0.419","0.516","0.395","0.346","0.45","0.597","0.427","0.34254","1.06"
"INTL.33-12-LUX-BKWH.A"," Luxembourg","0.086","0.095","0.084","0.083","0.088","0.071","0.084","0.101","0.097","0.072","0.07","0.083","0.069","0.066","0.117","0.087","0.059","0.082","0.114","0.084","0.119","0.117","0.098","0.078","0.103","0.093","0.11","0.116","0.131","0.105","0.104","0.061","0.095","0.114","0.104","0.095","0.111","0.082","0.089","0.10593","1.09"
"INTL.33-12-MLT-BKWH.A"," Malta","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0","0"
"INTL.33-12-MNE-BKWH.A"," Montenegro","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","1.733","1.271","1.524","2.05","2.723","1.192","1.462","2.479","1.734","1.476","1.825","1.014","2.09187","1.78","1.8"
"INTL.33-12-NLD-BKWH.A"," Netherlands","0","0","0","0","0","0.003","0.003","0.001","0.002","0.037","0.119","0.079","0.119","0.091","0.1","0.087","0.079","0.09108","0.111","0.089","0.141","0.116","0.109","0.071","0.094","0.087","0.105","0.106","0.101","0.097","0.105","0.057","0.104","0.114","0.112","0.093","0.1","0.061","0.072","0.07326","0.05"
"INTL.33-12-MKD-BKWH.A"," North Macedonia","--","--","--","--","--","--","--","--","--","--","--","--","0.817","0.517","0.696","0.793","0.842","0.891","1.072","1.375","1.158","0.62","0.749","1.36","1.467","1.477","1.634","1","0.832","1.257","2.407","1.419","1.031","1.568","1.195","1.846","1.878","1.099","1.773","1.15236","1.24"
"INTL.33-12-NOR-BKWH.A"," Norway","82.717","91.876","91.507","104.704","104.895","101.464","95.321","102.341","107.919","117.369","119.933","109.032","115.505","118.024","110.398","120.315","102.823","108.677","114.546","120.237","140.4","119.258","128.078","104.425","107.693","134.331","118.175","132.319","137.654","124.03","116.257","119.78","141.189","127.551","134.844","136.662","142.244","141.651","138.202","123.66288","141.69"
"INTL.33-12-POL-BKWH.A"," Poland","2.326","2.116","1.528","1.658","1.394","1.833","1.534","1.644","1.775","1.593","1.403","1.411","1.492","1.473","1.716","1.868","1.912","1.941","2.286","2.133","2.085","2.302","2.256","1.654","2.06","2.179","2.022","2.328","2.13","2.351","2.9","2.313","2.02","2.421","2.165","1.814","2.117","2.552","1.949","1.93842","2.93"
"INTL.33-12-PRT-BKWH.A"," Portugal","7.873","4.934","6.82","7.897","9.609","10.512","8.364","9.005","12.037","5.72","9.065","8.952","4.599","8.453","10.551","8.26","14.613","12.97395","12.853","7.213","11.21","13.894","7.722","15.566","9.77","4.684","10.892","9.991","6.73","8.201","15.954","11.423","5.589","13.652","15.471","8.615","15.608","5.79","12.316","8.6526","13.96"
"INTL.33-12-ROU-BKWH.A"," Romania","12.506","12.605","11.731","9.934","11.208","11.772","10.688","11.084","13.479","12.497","10.87","14.107","11.583","12.64","12.916","16.526","15.597","17.334","18.69","18.107","14.63","14.774","15.886","13.126","16.348","20.005","18.172","15.806","17.023","15.379","19.684","14.581","11.945","14.807","18.618","16.467","17.848","14.349","17.48736","15.65289","15.53"
"INTL.33-12-SRB-BKWH.A"," Serbia","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","--","10.855","9.937","9.468","10.436","11.772","8.58","9.193","10.101","10.893","9.979","10.684","9.061","10.53261","10.07028","9.66"
"INTL.33-12-SVK-BKWH.A"," Slovakia","--","--","--","--","--","--","--","--","--","--","--","--","--","3.432","4.311","4.831","4.185","4.023","4.224","4.429","4.569","4.878","5.215","3.4452","4.059","4.592","4.355","4.406","4","4.324","5.184","3.211","3.687","4.329","3.762","3.701","4.302","4.321","3.506","4.27383","4.67"
"INTL.33-12-SVN-BKWH.A"," Slovenia","--","--","--","--","--","--","--","--","--","--","--","--","3.379","2.974","3.348","3.187","3.616","3.046","3.4","3.684","3.771","3.741","3.265","2.916","4.033","3.426","3.555","3.233","3.978","4.666","4.452","3.506","3.841","4.562","6.011","3.75","4.443","3.814","4.643","4.43421","5.24"
"INTL.33-12-ESP-BKWH.A"," Spain","29.16","21.64","25.99","26.696","31.088","30.895","26.105","27.016","34.76","19.046","25.16","27.01","18.731","24.133","27.898","22.881","39.404","34.43","33.665","22.634","29.274","40.617","22.691","40.643","31.359","18.209","25.699","27.036","23.13","26.147","41.576","30.07","20.192","36.45","38.815","27.656","35.77","18.007","33.743","24.23025","33.34"
"INTL.33-12-SWE-BKWH.A"," Sweden","58.133","59.006","54.369","62.801","67.106","70.095","60.134","70.95","69.016","70.911","71.778","62.603","73.588","73.905","58.508","67.421","51.2226","68.365","74.25","70.974","77.798","78.269","65.696","53.005","59.522","72.075","61.106","65.497","68.378","65.193","66.279","66.047","78.333","60.81","63.227","74.734","61.645","64.651","61.79","64.46583","71.6"
"INTL.33-12-CHE-BKWH.A"," Switzerland","32.481","35.13","35.974","35.069","29.871","31.731","32.576","34.328","35.437","29.477","29.497","31.756","32.373","35.416","38.678","34.817","28.458","33.70257","33.136","39.604","36.466","40.895","34.862","34.471","33.411","30.914","30.649","34.898","35.676","35.366","35.704","32.069","38.218","38.08","37.659","37.879","34.281","33.754","34.637","37.6596","40.62"
"INTL.33-12-TUR-BKWH.A"," Turkey","11.159","12.308","13.81","11.13","13.19","11.822","11.637","18.314","28.447","17.61","22.917","22.456","26.302","33.611","30.28","35.186","40.07","39.41784","41.80671","34.33","30.57","23.77","33.346","34.977","45.623","39.165","43.802","35.492","32.937","35.598","51.423","51.155","56.669","58.225","39.75","65.856","66.686","57.824","59.49","87.99714","77.39"
"INTL.33-12-GBR-BKWH.A"," United Kingdom","3.921","4.369","4.543","4.548","3.992","4.08","4.767","4.13","4.915","4.732","5.119","4.534","5.329","4.237","5.043","4.79","3.359","4.127","5.067","5.283","5.035","4.015","4.74","3.195","4.795","4.873","4.547","5.026","5.094","5.178","3.566","5.655","5.286","4.667","5.832","6.246","5.342","5.836","5.189","5.89941","7.64"

data/nuclear_p_max_pu.csv

@ -0,0 +1,16 @@
country,factor
BE,0.65
BG,0.89
CZ,0.82
FI,0.92
FR,0.70
DE,0.88
HU,0.90
NL,0.86
RO,0.92
SK,0.89
SI,0.94
ES,0.89
SE,0.82
CH,0.86
GB,0.67


@ -1,3 +1,13 @@
,Unit,Values,Description
simplify,,,
simplify_network,,,
-- to_substations,bool,"{'true','false'}","Aggregates all nodes without power injection (positive or negative, i.e. demand or generation) to electrically closest ones"
-- algorithm,str,"One of {kmeans, hac}",
-- feature,str,"Str in the format carrier1+carrier2+...+carrierN-X, where CarrierI can be from {solar, onwind, offwind, ror} and X is one of {cap, time}.",
cluster_network
-- algorithm,str,"One of {kmeans, hac}",
-- feature,str,"Str in the format carrier1+carrier2+...+carrierN-X, where CarrierI can be from {solar, onwind, offwind, ror} and X is one of {cap, time}.",
aggregation_strategies,,,
-- generators,,,
-- -- {key},str,"{key} can be any attribute of the generator (str). Its value can be any strategy name that can be resolved on a pandas.Series via getattr(), for example one of {min, max, sum}.","Aggregates the attribute according to the given strategy. For example, if sum, then all values within each cluster are summed to represent the new generator."
-- buses,,,
-- -- {key},str,"{key} can be any attribute of the bus (str). Its value can be any strategy name that can be resolved on a pandas.Series via getattr(), for example one of {min, max, sum}.","Aggregates the attribute according to the given strategy. For example, if sum, then all values within each cluster are summed to represent the new bus."
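
Under the hood, the ``aggregation_strategies`` listed above translate into a pandas-style groupby aggregation over the components of each cluster. A minimal sketch with made-up generator data and cluster labels (not the workflow's actual clustering code):

    import pandas as pd

    # Toy generator table; columns mirror a few attributes from the table above.
    generators = pd.DataFrame({
        "cluster": ["c1", "c1", "c2"],
        "p_nom_max": [100.0, 50.0, 80.0],
        "marginal_cost": [40.0, 60.0, 55.0],
        "committable": [False, True, False],
    })

    # One strategy per attribute, as configured under the generators key above.
    strategies = {"p_nom_max": "sum", "marginal_cost": "mean", "committable": "any"}

    print(generators.groupby("cluster").agg(strategies))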



@ -4,7 +4,7 @@ co2limit,:math:`t_{CO_2-eq}/a`,float,Cap on total annual system carbon dioxide e
co2base,:math:`t_{CO_2-eq}/a`,float,Reference value of total annual system carbon dioxide emissions if relative emission reduction target is specified in ``{opts}`` wildcard.
agg_p_nom_limits,file,path,Reference to ``.csv`` file specifying per carrier generator nominal capacity constraints for individual countries if ``'CCL'`` is in ``{opts}`` wildcard. Defaults to ``data/agg_p_nom_minmax.csv``.
extendable_carriers,,,
-- Generator,--,"Any subset of {'OCGT','CCGT'}",Places extendable conventional power plants (OCGT and/or CCGT) where gas power plants are located today without capacity limits.
-- Generator,--,"Any extendable carrier",Defines existing or non-existing conventional and renewable power plants to be extendable during the optimization. Conventional generators can only be built/expanded where already existent today. If a listed conventional carrier is not included in the ``conventional_carriers`` list, the lower limit of the capacity expansion is set to 0.
-- StorageUnit,--,"Any subset of {'battery','H2'}",Adds extendable storage units (battery and/or hydrogen) at every node/bus after clustering without capacity limits and with zero initial capacity.
-- Store,--,"Any subset of {'battery','H2'}",Adds extendable storage units (battery and/or hydrogen) at every node/bus after clustering without capacity limits and with zero initial capacity.
-- Link,--,Any subset of {'H2 pipeline'},Adds extendable links (H2 pipelines only) at every connection where there are lines or HVDC links without capacity limits and with zero initial capacity. Hydrogen pipelines require hydrogen storage to be modelled as ``Store``.
@ -13,7 +13,7 @@ max_hours,,,
-- H2,h,float,Maximum state of charge capacity of the hydrogen storage in terms of hours at full output capacity ``p_nom``. Cf. `PyPSA documentation <https://pypsa.readthedocs.io/en/latest/components.html#storage-unit>`_.
powerplants_filter,--,"use `pandas.query <https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.query.html>`_ strings here, e.g. Country not in ['Germany']",Filter query for the default powerplant database.
custom_powerplants,--,"use `pandas.query <https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.query.html>`_ strings here, e.g. Country in ['Germany']",Filter query for the custom powerplant database.
conventional_carriers,--,"Any subset of {nuclear, oil, OCGT, CCGT, coal, lignite, geothermal, biomass}",List of conventional power plants to include in the model from ``resources/powerplants.csv``.
renewable_capacities_from_OPSD,,"[solar, onwind, offwind]",List of carriers (offwind-ac and offwind-dc are included in offwind) whose capacities 'p_nom' are aligned to the `OPSD renewable power plant list <https://data.open-power-system-data.org/renewable_power_plants/>`_
estimate_renewable_capacities_from_capacitiy_stats,,,
conventional_carriers,--,"Any subset of {nuclear, oil, OCGT, CCGT, coal, lignite, geothermal, biomass}",List of conventional power plants to include in the model from ``resources/powerplants.csv``. If an included carrier is also listed in `extendable_carriers`, the capacity is taken as a lower bound.
renewable_carriers,--,"Any subset of {solar, onwind, offwind-ac, offwind-dc, hydro}",List of renewable generators to include in the model.
estimate_renewable_capacities,,,
"-- Fueltype [ppm], e.g. Wind",,"list of fueltypes strings in PyPSA-Eur, e.g. [onwind, offwind-ac, offwind-dc]",converts ppm Fueltype to PyPSA-EUR Fueltype



@ -8,4 +8,5 @@ Trigger, Description, Definition, Status
``ATK``, "Require each node to be autarkic. Example: ``ATK`` removes all lines and links. ``ATKc`` removes all cross-border lines and links.", ``prepare_network``, In active use
``BAU``, Add a per-``carrier`` minimal overall capacity; i.e. at least ``40GW`` of ``OCGT`` in Europe; configured in ``electricity: BAU_mincapacities``, ``solve_network``: `add_opts_constraints() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/solve_network.py#L66>`__, Untested
``SAFE``, Add a capacity reserve margin of a certain fraction above the peak demand to which renewable generators and storage do *not* contribute. Ignores network., ``solve_network`` `add_opts_constraints() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/solve_network.py#L73>`__, Untested
``carrier+{c|p}factor``, "Alter the capital cost (``c``) or installable potential (``p``) of a carrier by a factor. Example: ``solar+c0.5`` reduces the capital cost of solar to 50\% of original values.", ``prepare_network``, In active use
``carrier+{c|p|m}factor``,"Alter the capital cost (``c``), installable potential (``p``) or marginal costs (``m``) of a carrier by a factor. Example: ``solar+c0.5`` reduces the capital cost of solar to 50\% of original values.", ``prepare_network``, In active use
``CH4L``,"Add an overall absolute gas limit. If configured in ``electricity: gaslimit``, it is given in MWh thermal; if a float is appended, the overall gas limit is assumed to be given in TWh thermal (e.g. ``CH4L200`` limits gas dispatch to 200 TWh thermal)", ``prepare_network``: ``add_gaslimit()``, In active use
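
The new ``carrier+{c|p|m}factor`` trigger packs a carrier name, an attribute selector and a scaling factor into one wildcard token. A small parsing sketch (the regex and the attribute mapping are illustrative assumptions, not the exact code in ``prepare_network``):

    import re

    # Assumed mapping from trigger letter to the scaled generator attribute.
    ATTR = {"c": "capital_cost", "p": "p_nom_max", "m": "marginal_cost"}

    def parse_carrier_option(opt):
        """Parse e.g. 'solar+c0.5' into ('solar', 'capital_cost', 0.5)."""
        m = re.match(r"^(?P<carrier>[\w-]+)\+(?P<which>[cpm])(?P<factor>[\d.]+)$", opt)
        if m is None:
            return None
        return m["carrier"], ATTR[m["which"]], float(m["factor"])

    print(parse_carrier_option("solar+c0.5"))  # ('solar', 'capital_cost', 0.5)
    print(parse_carrier_option("gas+m2.5"))    # ('gas', 'marginal_cost', 2.5)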


@ -1,4 +1,4 @@
,Unit,Values,Description
start,--,"str or datetime-like; e.g. YYYY-MM-DD","Left bound of date range"
end,--,"str or datetime-like; e.g. YYYY-MM-DD","Right bound of date range"
closed,--,"One of {None, left, right}","Make the time interval closed to the ``left``, ``right``, or both sides ``None``."
closed,--,"One of {None, left, right}","Make the time interval closed on the ``left``, closed on the ``right``, or open on both sides (``None``)."
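
These three options map directly onto a pandas date range. A short sketch of the effect of ``closed`` (assuming a pandas version that still accepts the ``closed`` keyword; newer releases rename it to ``inclusive``):

    import pandas as pd

    # One month of hourly snapshots; closed="left" drops the right bound,
    # so "2013-04-01 00:00" itself is not included.
    snapshots = pd.date_range("2013-03-01", "2013-04-01", freq="H", closed="left")
    print(snapshots[0], snapshots[-1], len(snapshots))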



@ -91,9 +91,6 @@ Specifies the temporal range to build an energy system model for as arguments to
:widths: 25,7,22,30
:file: configtables/electricity.csv
.. warning::
Carriers in ``conventional_carriers`` must not also be in ``extendable_carriers``.
.. _atlite_cf:
``atlite``
@ -174,7 +171,7 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia
.. literalinclude:: ../config.default.yaml
:language: yaml
:start-at: hydro:
:end-before: lines:
:end-before: conventional:
.. csv-table::
:header-rows: 1
@ -183,6 +180,17 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia
.. _lines_cf:
``conventional``
=============
Define additional generator attributes for conventional carrier types. If a scalar value is given, it is applied to all generators of that carrier. If a string starting with "data/" is given, the value is interpreted as a path to a CSV file with country-specific values. These values are then read in and applied to all generators of the given carrier in the given country. Note that the value(s) overwrite any existing values in the corresponding attribute of the ``generators`` dataframe.
.. literalinclude:: ../config.default.yaml
:language: yaml
:start-at: conventional:
:end-before: lines:
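
A condensed sketch of how such a country-level file is applied, mirroring the logic this commit adds to ``attach_conventional_generators`` in ``scripts/add_electricity.py`` (the network here is a placeholder; in the workflow it already contains generators and buses):

    import pandas as pd
    import pypsa

    n = pypsa.Network()  # placeholder; in the workflow this network has generators and buses

    # One value per country, e.g. data/nuclear_p_max_pu.csv with columns country,factor.
    values = pd.read_csv("data/nuclear_p_max_pu.csv", index_col=0).iloc[:, 0]

    # Map buses to their country's value, then update the affected generators.
    idx = n.generators.query("carrier == 'nuclear'").index
    bus_values = n.buses.country.map(values)
    n.generators["p_max_pu"].update(n.generators.loc[idx].bus.map(bus_values).dropna())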
``lines``
=============


@ -102,6 +102,8 @@ It might be the case that you can only retrieve solutions by using a commercial
conda activate pypsa-eur
conda install -c conda-forge ipopt glpk
.. warning::
On Windows, new versions of ``ipopt`` have caused problems. Consider downgrading to version 3.11.1.
.. _defaultconfig:


@ -7,6 +7,92 @@
Release Notes
##########################################
Upcoming Release
================
* Add an efficiency factor of 88.55% to offshore wind capacity factors
as a proxy for wake losses. More rigorous modelling is `planned <https://github.com/PyPSA/pypsa-eur/issues/153>`_
[`#277 <https://github.com/PyPSA/pypsa-eur/pull/277>`_].
* The default deployment density of AC- and DC-connected offshore wind capacity is reduced from 3 MW/sqkm
to a more conservative estimate of 2 MW/sqkm [`#280 <https://github.com/PyPSA/pypsa-eur/pull/280>`_].
* Following discussion in `#285 <https://github.com/PyPSA/pypsa-eur/issues/285>`_ we have disabled the
correction factor for solar PV capacity factors by default while satellite data is used.
A correction factor of 0.854337 is recommended if reanalysis data like ERA5 is used.
* Resource definitions for memory usage now follow `Snakemake standard resource definition <https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#standard-resources>`_ ``mem_mb`` rather than ``mem``.
* Network building is made deterministic by supplying a fixed random state to network clustering routines.
* New network topology extracted from the ENTSO-E interactive map.
* The unused argument ``simple_hvdc_costs`` in :mod:`add_electricity` was removed.
* Iterative solving with impedance updates is skipped if there are no expandable lines.
* Switch from Germany to Belgium for continuous integration and tutorial to save resources.
* Use updated SARAH-2 and ERA5 cutouts with slightly wider scope to east and additional variables.
* Added existing renewable capacities for all countries based on IRENA statistics (IRENASTAT) using new ``powerplantmatching`` version:
* The corresponding ``config`` entries changed, cf. ``config.default.yaml``:
* old: ``estimate_renewable_capacities_from_capacity_stats``
* new: ``estimate_renewable_capacities``
* The estimation is enabled by setting the subkey ``enable`` to ``True``.
* The reference year for the capacities can be configured (default: ``2020``).
* The list of renewables provided by the OPSD database can be used as a basis, using the tag ``from_opsd: True``. This adds the renewables from the database and fills up the missing capacities with the heuristic distribution.
* A uniform expansion limit of renewable build-up based on existing capacities can be configured using the ``expansion_limit`` option
(default: ``false``; limited to the determined renewable potentials).
* Country-level capacities are distributed proportional to the maximum annual energy yield of each bus region (see the sketch after this list).
* The config key ``renewable_capacities_from_OPSD`` is deprecated and was moved under the section ``estimate_renewable_capacities``. To enable it, set ``from_opsd`` to ``True``.
* Add operational reserve margin constraint analogous to `GenX implementation <https://genxproject.github.io/GenX/dev/core/#Reserves>`_.
Can be activated with config setting ``electricity: operational_reserve:``.
* Add function to add global constraint on use of gas in :mod:`prepare_network`. This can be activated by including the keyword ``CH4L`` in the ``{opts}`` wildcard which enforces the limit set in ``electricity: gaslimit:`` given in MWh thermal. Alternatively, it is possible to append a number in the `{opts}` wildcard, e.g. `CH4L200` which limits the gas use to 200 TWh thermal.
* A new section ``conventional`` was added to the config file. This section contains configurations for conventional carriers.
* Add configuration option to implement arbitrary generator attributes for conventional generation technologies.
* Implement country-specific Energy Availability Factors (EAFs) for nuclear power plants based on IAEA 2018-2020 reported country averages. These are specified in ``data/nuclear_p_max_pu.csv`` and translate to static ``p_max_pu`` values.
* Power plants that were shut down before 2021 are filtered out.
* ``powerplantmatching>=0.5.1`` is now required for ``IRENASTAT``.
* The inclusion of renewable carriers is now specified in the config entry ``renewable_carriers``. Previously, this was done by commenting/uncommenting sub-sections in the ``renewable`` config section.
* All carriers that should be extendable now have to be listed in the config entry ``extendable_carriers``. Before, renewable carriers were always set to be extendable. For backwards compatibility, the workflow still considers the carriers listed under the ``renewable`` key. In future versions, all of them have to be listed under ``extendable_carriers``.
* It is now possible to set conventional power plants as extendable by adding them to the list of extendable ``Generator`` carriers in the config.
* Listing conventional carriers in ``extendable_carriers`` but not in ``conventional_carriers`` sets the corresponding conventional power plants as extendable without a lower capacity bound at today's capacities.
* Now, conventional carriers have an assigned capital cost by default.
* The ``build_year`` and ``lifetime`` columns are now defined for conventional power plants.
* Fix CRS bug: change CRS 4236 to 4326.
* Update rasterio version to correctly calculate the exclusion raster.
* Remove rules to build or retrieve rasterized NATURA 2000 dataset. Renewable potential calculation now directly uses the shapefiles.
* Cache data and cutouts folders. This cache will be updated weekly.
* Add rule to automatically retrieve Natura2000 natural protection areas. Switch of file format to GPKG.
* Add option to set CO2 emission prices through `{opts}` wildcard: `Ep<number>`, e.g. `Ep180`, will set the EUR/tCO2 price.
* Add option to alter marginal costs of a carrier through `{opts}` wildcard: `<carrier>+m<factor>`, e.g. `gas+m2.5`, will multiply the default marginal cost for gas by factor 2.5.
* Clustering strategies for generators and buses have moved from distinct scripts to configurables to unify the process and make it more transparent.
* Hierarchical clustering was introduced. The distance metric is calculated from renewable potentials on hourly (feature entry ends with ``-time``) or annual (feature entry ends with ``-cap``) values.
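
As referenced in the list above, the capacity distribution heuristic behind ``estimate_renewable_capacities`` spreads each country's reported capacity over its generators in proportion to their maximum annual energy yield. A toy sketch of the idea (stand-in numbers; the actual implementation in ``add_electricity.py`` works on the PyPSA network and IRENASTAT data):

    import pandas as pd

    # Toy data for a single technology: two generators in DE, one in FR.
    p_nom_max = pd.Series({"DE0 onwind": 2000.0, "DE1 onwind": 1000.0, "FR0 onwind": 1500.0})
    mean_cf = pd.Series({"DE0 onwind": 0.25, "DE1 onwind": 0.35, "FR0 onwind": 0.30})
    country = pd.Series({"DE0 onwind": "DE", "DE1 onwind": "DE", "FR0 onwind": "FR"})

    # Reported national capacities to distribute [MW] (stand-ins for IRENA statistics).
    stats = pd.Series({"DE": 1200.0, "FR": 600.0})

    # Distribute each country's capacity proportional to the maximum annual yield.
    yield_proxy = mean_cf * p_nom_max
    p_nom = yield_proxy.groupby(country).transform(lambda s: s / s.sum()) * country.map(stats)
    print(p_nom)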
Synchronisation Release - Ukraine and Moldova (17th March 2022)
===============================================================
@ -42,39 +128,6 @@ This release is not on the ``master`` branch. It can be used with
git checkout synchronisation-release
Upcoming Release
================
* Add an efficiency factor of 88.55% to offshore wind capacity factors
as a proxy for wake losses. More rigorous modelling is `planned <https://github.com/PyPSA/pypsa-eur/issues/153>`_
[`#277 <https://github.com/PyPSA/pypsa-eur/pull/277>`_].
* The default deployment density of AC- and DC-connected offshore wind capacity is reduced from 3 MW/sqkm
to a more conservative estimate of 2 MW/sqkm [`#280 <https://github.com/PyPSA/pypsa-eur/pull/280>`_].
* Following discussion in `#285 <https://github.com/PyPSA/pypsa-eur/issues/285>`_ we have disabled the
correction factor for solar PV capacity factors by default while satellite data is used.
A correction factor of 0.854337 is recommended if reanalysis data like ERA5 is used.
* Resource definitions for memory usage now follow [Snakemake standard resource definition](https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#standard-resources) ```mem_mb`` rather than ``mem``.
* Network building is made deterministic by supplying a fixed random state to network clustering routines.
* New network topology extracted from the ENTSO-E interactive map.
* The unused argument ``simple_hvdc_costs`` in :mod:`add_electricity` was removed.
* Iterative solving with impedance updates is skipped if there are no expandable lines.
* Switch from Germany to Belgium for continuous integration and tutorial to save resources.
* Use updated SARAH-2 and ERA5 cutouts with slightly wider scope to east and additional variables.
* Fix crs bug. Change crs 4236 to 4326.
* Update rasterio version to correctly calculate exclusion raster
PyPSA-Eur 0.4.0 (22th September 2021)
=====================================


@ -47,7 +47,8 @@ The model can be adapted to only include selected countries (e.g. Belgium) inste
.. literalinclude:: ../config.tutorial.yaml
:language: yaml
:lines: 20
:start-at: countries:
:end-before: snapshots:
Likewise, the example's temporal scope can be restricted (e.g. to a single month).
@ -60,14 +61,14 @@ It is also possible to allow less or more carbon-dioxide emissions. Here, we lim
.. literalinclude:: ../config.tutorial.yaml
:language: yaml
:lines: 40,42
:lines: 35,37
PyPSA-Eur also includes a database of existing conventional powerplants.
We can select which types of powerplants we like to be included with fixed capacities:
We can select which types of powerplants we would like to include:
.. literalinclude:: ../config.tutorial.yaml
:language: yaml
:lines: 40,56
:lines: 35,51
To accurately model the temporal and spatial availability of renewables such as wind and solar energy, we rely on historical weather data.
It is advisable to adapt the required range of coordinates to the selection of countries.
@ -82,14 +83,14 @@ For example, we may want to use the ERA-5 dataset for solar and not the default
.. literalinclude:: ../config.tutorial.yaml
:language: yaml
:lines: 67,110,111
:lines: 62,105,106
Finally, it is possible to pick a solver. For instance, this tutorial uses the open-source solvers CBC and Ipopt and does not rely
on the commercial solvers Gurobi or CPLEX (for which free academic licenses are available).
.. literalinclude:: ../config.tutorial.yaml
:language: yaml
:lines: 173,183,184
:lines: 187,197,198
.. note::
@ -284,4 +285,4 @@ The solved networks can be analysed just like any other PyPSA network (e.g. in J
network = pypsa.Network("results/networks/elec_s_6_ec_lcopt_Co2L-24H.nc")
For inspiration, read the `examples section in the PyPSA documentation <https://pypsa.readthedocs.io/en/latest/examples.html>`_.
For inspiration, read the `examples section in the PyPSA documentation <https://pypsa.readthedocs.io/en/latest/examples-basic.html>`_.
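
For example, a couple of quick inspections once the solved tutorial network is loaded (these use standard PyPSA attributes; the file path is the tutorial result from above):

    import pypsa

    network = pypsa.Network("results/networks/elec_s_6_ec_lcopt_Co2L-24H.nc")

    # Optimised generation capacity per carrier [MW].
    print(network.generators.groupby("carrier").p_nom_opt.sum())

    # Objective value of the optimisation as stored in the solved network.
    print(network.objective)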


@ -10,7 +10,7 @@ dependencies:
- python>=3.8
- pip
- pypsa>=0.18.1
- pypsa>=0.19.1
- atlite>=0.2.6
- dask
@ -27,7 +27,7 @@ dependencies:
- powerplantmatching>=0.5.3
- numpy
- pandas
- geopandas
- geopandas>=0.11.0
- xarray
- netcdf4
- networkx
@ -37,7 +37,8 @@ dependencies:
- pyomo
- matplotlib
- proj
- fiona<=1.18.20 # Till issue https://github.com/Toblerity/Fiona/issues/1085 is not solved
- fiona <= 1.18.20 # Till issue https://github.com/Toblerity/Fiona/issues/1085 is not solved
- country_converter
# Keep in conda environment when calling ipython
- ipython
@ -51,8 +52,8 @@ dependencies:
- geopy
- tqdm
- pytz
- country_converter
- tabula-py
- mergedeep
- pip:
- vresutils>=0.3.1


@ -4,7 +4,9 @@
import pandas as pd
from pathlib import Path
from collections import OrderedDict
REGION_COLS = ['geometry', 'name', 'x', 'y', 'country']
def configure_logging(snakemake, skip_handlers=False):
"""
@ -210,6 +212,22 @@ def progress_retrieve(url, file):
urllib.request.urlretrieve(url, file, reporthook=dlProgress)
def get_aggregation_strategies(aggregation_strategies):
# default aggregation strategies that cannot be defined in .yaml format must be specified within
# the function, otherwise (when defaults are passed in the function's definition) they get lost
# when custom values are specified in the config.
import numpy as np
from pypsa.networkclustering import _make_consense
bus_strategies = dict(country=_make_consense("Bus", "country"))
bus_strategies.update(aggregation_strategies.get("buses", {}))
generator_strategies = {'build_year': lambda x: 0, 'lifetime': lambda x: np.inf}
generator_strategies.update(aggregation_strategies.get("generators", {}))
return bus_strategies, generator_strategies
def mock_snakemake(rulename, **wildcards):
"""
@ -231,6 +249,7 @@ def mock_snakemake(rulename, **wildcards):
import os
from pypsa.descriptors import Dict
from snakemake.script import Snakemake
from packaging.version import Version, parse
script_dir = Path(__file__).parent.resolve()
assert Path.cwd().resolve() == script_dir, \
@ -240,7 +259,8 @@ def mock_snakemake(rulename, **wildcards):
if os.path.exists(p):
snakefile = p
break
workflow = sm.Workflow(snakefile, overwrite_configfiles=[])
kwargs = dict(rerun_triggers=[]) if parse(sm.__version__) > Version("7.7.0") else {}
workflow = sm.Workflow(snakefile, overwrite_configfiles=[], **kwargs)
workflow.include(snakefile)
workflow.global_resources = {}
rule = workflow.get_rule(rulename)
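
With the patched workflow constructor, ``mock_snakemake`` can still be called from within ``scripts/`` for interactive debugging, now independent of Snakemake's newer rerun triggers. A typical call might look like this (rule and wildcard value taken from the Snakefile changes above, for illustration only):

    # Run from within the scripts/ directory, as the assertion in mock_snakemake requires.
    from _helpers import mock_snakemake

    snakemake = mock_snakemake("build_hydro_profile", weather_year="")
    print(snakemake.input.eia_hydro_generation)
    print(snakemake.output[0])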


@ -1,4 +1,4 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
# SPDX-FileCopyrightText: : 2017-2022 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: MIT
@ -24,8 +24,8 @@ Relevant Settings
conventional_carriers:
co2limit:
extendable_carriers:
include_renewable_capacities_from_OPSD:
estimate_renewable_capacities_from_capacity_stats:
estimate_renewable_capacities:
load:
scaling_factor:
@ -53,7 +53,7 @@ Inputs
:scale: 34 %
- ``data/geth2015_hydro_capacities.csv``: alternative to capacities above; not currently used!
- ``resources/opsd_load.csv`` Hourly per-country load profiles.
- ``resources/load.csv`` Hourly per-country load profiles.
- ``resources/regions_onshore.geojson``: confer :ref:`busregions`
- ``resources/nuts3_shapes.geojson``: confer :ref:`shapes`
- ``resources/powerplants.csv``: confer :ref:`powerplants`
@ -197,7 +197,7 @@ def load_powerplants(ppl_fn):
'ccgt, thermal': 'CCGT', 'hard coal': 'coal'}
return (pd.read_csv(ppl_fn, index_col=0, dtype={'bus': 'str'})
.powerplant.to_pypsa_names()
.rename(columns=str.lower).drop(columns=['efficiency'])
.rename(columns=str.lower)
.replace({'carrier': carrier_dict}))
@ -264,13 +264,14 @@ def update_transmission_costs(n, costs, length_factor=1.0):
n.links.loc[dc_b, 'capital_cost'] = costs
def attach_wind_and_solar(n, costs, input_profiles, technologies, line_length_factor=1):
def attach_wind_and_solar(n, costs, input_profiles, technologies, extendable_carriers, line_length_factor=1):
# TODO: rename tech -> carrier, technologies -> carriers
_add_missing_carriers_from_costs(n, costs, technologies)
for tech in technologies:
if tech == 'hydro': continue
if tech == 'hydro':
continue
n.add("Carrier", name=tech)
with xr.open_dataset(getattr(input_profiles, 'profile_' + tech)) as ds:
if ds.indexes['bus'].empty: continue
@ -294,7 +295,7 @@ def attach_wind_and_solar(n, costs, input_profiles, technologies, line_length_fa
n.madd("Generator", ds.indexes['bus'], ' ' + tech,
bus=ds.indexes['bus'],
carrier=tech,
p_nom_extendable=True,
p_nom_extendable=tech in extendable_carriers['Generator'],
p_nom_max=ds['p_nom_max'].to_pandas(),
weight=ds['weight'].to_pandas(),
marginal_cost=costs.at[suptech, 'marginal_cost'],
@ -303,25 +304,50 @@ def attach_wind_and_solar(n, costs, input_profiles, technologies, line_length_fa
p_max_pu=ds['profile'].transpose('time', 'bus').to_pandas())
def attach_conventional_generators(n, costs, ppl, carriers):
def attach_conventional_generators(n, costs, ppl, conventional_carriers, extendable_carriers, conventional_config, conventional_inputs):
carriers = set(conventional_carriers) | set(extendable_carriers['Generator'])
_add_missing_carriers_from_costs(n, costs, carriers)
ppl = (ppl.query('carrier in @carriers').join(costs, on='carrier')
ppl = (ppl.query('carrier in @carriers').join(costs, on='carrier', rsuffix='_r')
.rename(index=lambda s: 'C' + str(s)))
ppl["efficiency"] = ppl.efficiency.fillna(ppl.efficiency_r)
logger.info('Adding {} generators with capacities [MW] \n{}'
.format(len(ppl), ppl.groupby('carrier').p_nom.sum()))
logger.info('Adding {} generators with capacities [GW] \n{}'
.format(len(ppl), ppl.groupby('carrier').p_nom.sum().div(1e3).round(2)))
n.madd("Generator", ppl.index,
carrier=ppl.carrier,
bus=ppl.bus,
p_nom=ppl.p_nom,
p_nom_min=ppl.p_nom.where(ppl.carrier.isin(conventional_carriers), 0),
p_nom=ppl.p_nom.where(ppl.carrier.isin(conventional_carriers), 0),
p_nom_extendable=ppl.carrier.isin(extendable_carriers['Generator']),
efficiency=ppl.efficiency,
marginal_cost=ppl.marginal_cost,
capital_cost=0)
capital_cost=ppl.capital_cost,
build_year=ppl.datein.fillna(0).astype(int),
lifetime=(ppl.dateout - ppl.datein).fillna(np.inf),
)
for carrier in conventional_config:
# Generators with technology affected
idx = n.generators.query("carrier == @carrier").index
for attr in list(set(conventional_config[carrier]) & set(n.generators)):
values = conventional_config[carrier][attr]
if f"conventional_{carrier}_{attr}" in conventional_inputs:
# Values for generators of this carrier are country-specific
# First map generator buses to countries; then map countries to p_max_pu
values = pd.read_csv(values, index_col=0).iloc[:, 0]
bus_values = n.buses.country.map(values)
n.generators[attr].update(n.generators.loc[idx].bus.map(bus_values).dropna())
else:
# A single value applies to all generators of this carrier, irrespective of country
n.generators.loc[idx, attr] = values
logger.warning(f'Capital costs for conventional generators put to 0 EUR/MW.')
def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **config):
@ -425,7 +451,7 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **con
def attach_extendable_generators(n, costs, ppl, carriers):
logger.warning("The function `attach_extendable_generators` is deprecated in v0.5.0.")
_add_missing_carriers_from_costs(n, costs, carriers)
for tech in carriers:
@ -472,26 +498,18 @@ def attach_extendable_generators(n, costs, ppl, carriers):
def attach_OPSD_renewables(n, techs):
def attach_OPSD_renewables(n, tech_map):
available = ['DE', 'FR', 'PL', 'CH', 'DK', 'CZ', 'SE', 'GB']
tech_map = {'Onshore': 'onwind', 'Offshore': 'offwind', 'Solar': 'solar'}
countries = set(available) & set(n.buses.country)
tech_map = {k: v for k, v in tech_map.items() if v in techs}
tech_string = ", ".join(sum(tech_map.values(), []))
logger.info(f'Using OPSD renewable capacities for carriers {tech_string}.')
if not tech_map:
return
logger.info(f'Using OPSD renewable capacities in {", ".join(countries)} '
f'for technologies {", ".join(tech_map.values())}.')
df = pd.concat([pm.data.OPSD_VRE_country(c) for c in countries])
df = pm.data.OPSD_VRE().powerplant.convert_country_to_alpha2()
technology_b = ~df.Technology.isin(['Onshore', 'Offshore'])
df['Fueltype'] = df.Fueltype.where(technology_b, df.Technology)
df['Fueltype'] = df.Fueltype.where(technology_b, df.Technology).replace({"Solar": "PV"})
df = df.query('Fueltype in @tech_map').powerplant.convert_country_to_alpha2()
for fueltype, carrier_like in tech_map.items():
gens = n.generators[lambda df: df.carrier.str.contains(carrier_like)]
for fueltype, carriers in tech_map.items():
gens = n.generators[lambda df: df.carrier.isin(carriers)]
buses = n.buses.loc[gens.bus.unique()]
gens_per_bus = gens.groupby('bus').p_nom.count()
@ -503,38 +521,44 @@ def attach_OPSD_renewables(n, techs):
n.generators.p_nom_min.update(gens.bus.map(caps).dropna())
def estimate_renewable_capacities(n, config):
def estimate_renewable_capacities(n, tech_map):
year = config["electricity"]["estimate_renewable_capacities"]["year"]
tech_map = config["electricity"]["estimate_renewable_capacities"]["technology_mapping"]
countries = config["countries"]
expansion_limit = config["electricity"]["estimate_renewable_capacities"]["expansion_limit"]
if len(tech_map) == 0: return
if not len(countries) or not len(tech_map): return
capacities = (pm.data.Capacity_stats().powerplant.convert_country_to_alpha2()
[lambda df: df.Energy_Source_Level_2]
.set_index(['Fueltype', 'Country']).sort_index())
capacities = pm.data.IRENASTAT().powerplant.convert_country_to_alpha2()
capacities = capacities.query("Year == @year and Technology in @tech_map and Country in @countries")
capacities = capacities.groupby(["Technology", "Country"]).Capacity.sum()
countries = n.buses.country.unique()
logger.info(f"Heuristics applied to distribute renewable capacities [GW]: "
f"\n{capacities.groupby('Technology').sum().div(1e3).round(2)}")
if len(countries) == 0: return
logger.info('heuristics applied to distribute renewable capacities [MW] \n{}'
.format(capacities.query('Fueltype in @tech_map.keys() and Capacity >= 0.1')
.groupby('Country').agg({'Capacity': 'sum'})))
for ppm_technology, techs in tech_map.items():
tech_i = n.generators.query('carrier in @techs').index
stats = capacities.loc[ppm_technology].reindex(countries, fill_value=0.)
country = n.generators.bus[tech_i].map(n.buses.country)
existent = n.generators.p_nom[tech_i].groupby(country).sum()
missing = stats - existent
dist = n.generators_t.p_max_pu.mean() * n.generators.p_nom_max
for ppm_fueltype, techs in tech_map.items():
tech_capacities = capacities.loc[ppm_fueltype, 'Capacity']\
.reindex(countries, fill_value=0.)
#tech_i = n.generators.query('carrier in @techs').index
tech_i = (n.generators.query('carrier in @techs')
[n.generators.query('carrier in @techs')
.bus.map(n.buses.country).isin(countries)].index)
n.generators.loc[tech_i, 'p_nom'] = (
(n.generators_t.p_max_pu[tech_i].mean() *
n.generators.loc[tech_i, 'p_nom_max']) # maximal yearly generation
.groupby(n.generators.bus.map(n.buses.country))
.transform(lambda s: normed(s) * tech_capacities.at[s.name])
.where(lambda s: s>0.1, 0.)) # only capacities above 100kW
n.generators.loc[tech_i, 'p_nom'] += (
dist[tech_i]
.groupby(country)
.transform(lambda s: normed(s) * missing[s.name])
.where(lambda s: s>0.1, 0.) # only capacities above 100kW
)
n.generators.loc[tech_i, 'p_nom_min'] = n.generators.loc[tech_i, 'p_nom']
if expansion_limit:
assert np.isscalar(expansion_limit)
logger.info(f"Reducing capacity expansion limit to {expansion_limit*100:.2f}% of installed capacity.")
n.generators.loc[tech_i, 'p_nom_max'] = expansion_limit * n.generators.loc[tech_i, 'p_nom_min']
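A minimal pandas sketch (illustrative, not part of the diff; all values made up) of how the groupby/transform above spreads a national capacity target over buses in proportion to each generator's expected yearly energy; normed stands in for the helper of the same name in add_electricity.py.
import pandas as pd

def normed(s):
    return s / s.sum()

# expected yearly energy per generator (mean availability * p_nom_max), illustrative numbers
expected_energy = pd.Series({"DE0 onwind": 120., "DE1 onwind": 60., "FR0 onwind": 90.})
country = pd.Series({"DE0 onwind": "DE", "DE1 onwind": "DE", "FR0 onwind": "FR"})
missing = pd.Series({"DE": 3000., "FR": 1000.})  # MW still to be distributed per country

p_nom_addition = (expected_energy
                  .groupby(country)
                  .transform(lambda s: normed(s) * missing[s.name])
                  .where(lambda s: s > 0.1, 0.))  # only capacities above 100 kW

print(p_nom_addition)  # DE0 onwind 2000.0, DE1 onwind 1000.0, FR0 onwind 1000.0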
def add_nice_carrier_names(n, config):
carrier_i = n.carriers.index
@ -580,29 +604,65 @@ if __name__ == "__main__":
costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears)
ppl = load_powerplants(snakemake.input.powerplants)
if "renewable_carriers" in snakemake.config['electricity']:
renewable_carriers = set(snakemake.config['renewable'])
else:
logger.warning("Missing key `renewable_carriers` under config entry `electricity`. "
"In future versions, this will raise an error. "
"Falling back to carriers listed under `renewable`.")
renewable_carriers = snakemake.config['renewable']
extendable_carriers = snakemake.config['electricity']['extendable_carriers']
if not (set(renewable_carriers) & set(extendable_carriers['Generator'])):
logger.warning("No renewables found in config entry `extendable_carriers`. "
"In future versions, these have to be explicitely listed. "
"Falling back to all renewables.")
conventional_carriers = snakemake.config["electricity"]["conventional_carriers"]
attach_load(n, snakemake.input.regions, snakemake.input.load, snakemake.input.nuts3_shapes,
snakemake.config['countries'], snakemake.config['load']['scaling_factor'])
update_transmission_costs(n, costs, snakemake.config['lines']['length_factor'])
carriers = snakemake.config['electricity']['conventional_carriers']
attach_conventional_generators(n, costs, ppl, carriers)
conventional_inputs = {k: v for k, v in snakemake.input.items() if k.startswith("conventional_")}
attach_conventional_generators(n, costs, ppl, conventional_carriers, extendable_carriers, snakemake.config.get("conventional", {}), conventional_inputs)
carriers = snakemake.config['renewable']
attach_wind_and_solar(n, costs, snakemake.input, carriers, snakemake.config['lines']['length_factor'])
attach_wind_and_solar(n, costs, snakemake.input, renewable_carriers, extendable_carriers, snakemake.config['lines']['length_factor'])
if 'hydro' in snakemake.config['renewable']:
carriers = snakemake.config['renewable']['hydro'].pop('carriers', [])
if 'hydro' in renewable_carriers:
conf = snakemake.config['renewable']['hydro']
attach_hydro(n, costs, ppl, snakemake.input.profile_hydro, snakemake.input.hydro_capacities,
carriers, **snakemake.config['renewable']['hydro'])
conf.pop('carriers', []), **conf)
carriers = snakemake.config['electricity']['extendable_carriers']['Generator']
attach_extendable_generators(n, costs, ppl, carriers)
if "estimate_renewable_capacities" not in snakemake.config['electricity']:
logger.warning("Missing key `estimate_renewable_capacities` under config entry `electricity`. "
"In future versions, this will raise an error. "
"Falling back to whether ``estimate_renewable_capacities_from_capacity_stats`` is in the config.")
if "estimate_renewable_capacities_from_capacity_stats" in snakemake.config['electricity']:
estimate_renewable_caps = {'enable': True, **snakemake.config['electricity']["estimate_renewable_capacities_from_capacity_stats"]}
else:
estimate_renewable_caps = {'enable': False}
else:
estimate_renewable_caps = snakemake.config['electricity']["estimate_renewable_capacities"]
if "enable" not in estimate_renewable_caps:
logger.warning("Missing key `enable` under config entry `estimate_renewable_capacities`. "
"In future versions, this will raise an error. Falling back to False.")
estimate_renewable_caps = {'enable': False}
if "from_opsd" not in estimate_renewable_caps:
logger.warning("Missing key `from_opsd` under config entry `estimate_renewable_capacities`. "
"In future versions, this will raise an error. "
"Falling back to whether `renewable_capacities_from_opsd` is non-empty.")
from_opsd = bool(snakemake.config["electricity"].get("renewable_capacities_from_opsd", False))
estimate_renewable_caps['from_opsd'] = from_opsd
tech_map = snakemake.config['electricity'].get('estimate_renewable_capacities_from_capacity_stats', {})
estimate_renewable_capacities(n, tech_map)
techs = snakemake.config['electricity'].get('renewable_capacities_from_OPSD', [])
attach_OPSD_renewables(n, techs)
if estimate_renewable_caps["enable"]:
if estimate_renewable_caps["from_opsd"]:
tech_map = snakemake.config["electricity"]["estimate_renewable_capacities"]["technology_mapping"]
attach_OPSD_renewables(n, tech_map)
estimate_renewable_capacities(n, snakemake.config)
update_p_nom_max(n)
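For orientation, the fallback logic above targets a config layout roughly like the following (keys inferred from the accesses in this file; the values are placeholders only), sketched as a Python dict:
config_electricity = {
    "renewable_carriers": ["solar", "onwind", "offwind-ac", "offwind-dc", "hydro"],  # placeholder list
    "estimate_renewable_capacities": {
        "enable": True,
        "from_opsd": True,          # attach per-unit OPSD capacities before the statistics-based estimate
        "year": 2020,               # statistics year queried from IRENASTAT
        "expansion_limit": False,   # or a scalar share of installed capacity
        "technology_mapping": {     # statistics technology -> network carriers
            "Offshore": ["offwind-ac", "offwind-dc"],
            "Onshore": ["onwind"],
            "PV": ["solar"],
        },
    },
}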

View File

@ -389,7 +389,9 @@ def _set_countries_and_substations(n, config, country_shapes, offshore_shapes):
countries = config['countries']
country_shapes = gpd.read_file(country_shapes).set_index('name')['geometry']
offshore_shapes = gpd.read_file(offshore_shapes).set_index('name')['geometry']
# reindexing necessary for supporting empty geo-dataframes
offshore_shapes = gpd.read_file(offshore_shapes)
offshore_shapes = offshore_shapes.reindex(columns=['name', 'geometry']).set_index('name')['geometry']
substation_b = buses['symbol'].str.contains('substation|converter station', case=False)
def prefer_voltage(x, which):

View File

@ -42,7 +42,7 @@ Description
"""
import logging
from _helpers import configure_logging
from _helpers import configure_logging, REGION_COLS
import pypsa
import os
@ -55,13 +55,6 @@ from scipy.spatial import Voronoi
logger = logging.getLogger(__name__)
def save_to_geojson(s, fn):
if os.path.exists(fn):
os.unlink(fn)
schema = {**gpd.io.file.infer_schema(s), 'geometry': 'Unknown'}
s.to_file(fn, driver='GeoJSON', schema=schema)
def voronoi_partition_pts(points, outline):
"""
Compute the polygons of a voronoi partition of `points` within the
@ -120,7 +113,8 @@ if __name__ == "__main__":
n = pypsa.Network(snakemake.input.base_network)
country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index('name')['geometry']
offshore_shapes = gpd.read_file(snakemake.input.offshore_shapes).set_index('name')['geometry']
offshore_shapes = gpd.read_file(snakemake.input.offshore_shapes)
offshore_shapes = offshore_shapes.reindex(columns=REGION_COLS).set_index('name')['geometry']
onshore_regions = []
offshore_regions = []
@ -151,6 +145,8 @@ if __name__ == "__main__":
offshore_regions_c = offshore_regions_c.loc[offshore_regions_c.area > 1e-2]
offshore_regions.append(offshore_regions_c)
save_to_geojson(pd.concat(onshore_regions, ignore_index=True), snakemake.output.regions_onshore)
save_to_geojson(pd.concat(offshore_regions, ignore_index=True), snakemake.output.regions_offshore)
pd.concat(onshore_regions, ignore_index=True).to_file(snakemake.output.regions_onshore)
if offshore_regions:
pd.concat(offshore_regions, ignore_index=True).to_file(snakemake.output.regions_offshore)
else:
offshore_shapes.to_frame().to_file(snakemake.output.regions_offshore)
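A minimal sketch of why the reindex is needed (REGION_COLS below is a stand-in for the list imported from _helpers): reading an offshore regions file with no features can yield a GeoDataFrame without the expected columns, so a plain set_index('name')['geometry'] would raise.
import geopandas as gpd

REGION_COLS = ["geometry", "name", "x", "y", "country"]  # assumed; the real list lives in _helpers

offshore_shapes = gpd.GeoDataFrame()  # stands in for an empty regions file
offshore_shapes = offshore_shapes.reindex(columns=REGION_COLS).set_index("name")["geometry"]
print(offshore_shapes.empty)  # True, and no KeyError is raised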

View File

@ -123,7 +123,7 @@ if __name__ == "__main__":
# Determine the bounds from bus regions with a buffer of two grid cells
onshore = gpd.read_file(snakemake.input.regions_onshore)
offshore = gpd.read_file(snakemake.input.regions_offshore)
regions = onshore.append(offshore)
regions = pd.concat([onshore, offshore])
d = max(cutout_params.get('dx', 0.25), cutout_params.get('dy', 0.25))*2
cutout_params['bounds'] = regions.total_bounds + [-d, -d, d, d]
elif {'x', 'y'}.issubset(cutout_params):

View File

@ -64,7 +64,52 @@ from _helpers import configure_logging
import atlite
import geopandas as gpd
from vresutils import hydro as vhydro
import pandas as pd
import country_converter as coco
cc = coco.CountryConverter()
def get_eia_annual_hydro_generation(fn, countries):
# in billion kWh/a = TWh/a
df = pd.read_csv(fn, skiprows=2, index_col=1, na_values=[u' ','--']).iloc[1:, 1:]
df.index = df.index.str.strip()
former_countries = {
"Former Czechoslovakia": dict(
countries=["Czech Republic", "Slovakia"],
start=1980, end=1992),
"Former Serbia and Montenegro": dict(
countries=["Serbia", "Montenegro"],
start=1992, end=2005),
"Former Yugoslavia": dict(
countries=["Slovenia", "Croatia", "Bosnia and Herzegovina", "Serbia", "Montenegro", "North Macedonia"],
start=1980, end=1991),
}
for k, v in former_countries.items():
period = [str(i) for i in range(v["start"], v["end"]+1)]
ratio = df.loc[v['countries']].T.dropna().sum()
ratio /= ratio.sum()
for country in v['countries']:
df.loc[country, period] = df.loc[k, period] * ratio[country]
baltic_states = ["Latvia", "Estonia", "Lithuania"]
df.loc[baltic_states] = df.loc[baltic_states].T.fillna(df.loc[baltic_states].mean(axis=1)).T
df.loc["Germany"] = df.filter(like='Germany', axis=0).sum()
df.loc["Serbia"] += df.loc["Kosovo"].fillna(0.)
df = df.loc[~df.index.str.contains('Former')]
df.drop(["Europe", "Germany, West", "Germany, East", "Kosovo"], inplace=True)
df.index = cc.convert(df.index, to='iso2')
df.index.name = 'countries'
df = df.T[countries] * 1e6 # in MWh/a
return df
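A toy illustration (made-up numbers) of the former-country split performed above: the aggregate of e.g. Former Czechoslovakia is distributed to its successor states in proportion to their own generation in the years where per-country data exists.
import pandas as pd

df = pd.DataFrame({"1990": [30., None, None], "2000": [None, 20., 10.]},
                  index=["Former Czechoslovakia", "Czech Republic", "Slovakia"])

successors = ["Czech Republic", "Slovakia"]
ratio = df.loc[successors].T.dropna().sum()  # totals over years with complete successor data
ratio /= ratio.sum()                         # -> CZ 2/3, SK 1/3

for country in successors:
    df.loc[country, ["1990"]] = df.loc["Former Czechoslovakia", ["1990"]] * ratio[country]

print(df.loc[successors, "1990"].tolist())  # [20.0, 10.0]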
logger = logging.getLogger(__name__)
@ -82,7 +127,8 @@ if __name__ == "__main__":
.set_index('name')['geometry'].reindex(countries))
country_shapes.index.name = 'countries'
eia_stats = vhydro.get_eia_annual_hydro_generation(snakemake.input.eia_hydro_generation).reindex(columns=countries)
fn = snakemake.input.eia_hydro_generation
eia_stats = get_eia_annual_hydro_generation(fn, countries)
weather_year = snakemake.wildcards.weather_year
norm_year = snakemake.config['renewable']['hydro'].get('norm_year')

View File

@ -26,11 +26,12 @@ Relevant Settings
Inputs
------
- ``data/load_raw.csv``:
Outputs
-------
- ``resource/time_series_60min_singleindex_filtered.csv``:
- ``resources/load.csv``:
"""
@ -116,14 +117,19 @@ def nan_statistics(df):
keys=['total', 'consecutive', 'max_total_per_month'], axis=1)
def copy_timeslice(load, cntry, start, stop, delta):
def copy_timeslice(load, cntry, start, stop, delta, fn_load=None):
start = pd.Timestamp(start)
stop = pd.Timestamp(stop)
if start-delta in load.index and stop in load.index and cntry in load:
load.loc[start:stop, cntry] = load.loc[start-delta:stop-delta, cntry].values
if (start in load.index and stop in load.index):
if start-delta in load.index and stop-delta in load.index and cntry in load:
load.loc[start:stop, cntry] = load.loc[start-delta:stop-delta, cntry].values
elif fn_load is not None:
duration = pd.date_range(freq='h', start=start-delta, end=stop-delta)
load_raw = load_timeseries(fn_load, duration, [cntry], powerstatistics)
load.loc[start:stop, cntry] = load_raw.loc[start-delta:stop-delta, cntry].values
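A small self-contained sketch (illustrative data) of the first branch of copy_timeslice, i.e. filling a gap by copying the same country's values from delta earlier when the source slice is still inside the loaded data; the new second branch re-reads the raw file instead when it is not.
import pandas as pd
from pandas import Timedelta as Delta

idx = pd.date_range("2013-01-01", "2013-01-14 23:00", freq="h")
load = pd.DataFrame({"DE": range(len(idx))}, index=idx, dtype=float)

start, stop, delta = pd.Timestamp("2013-01-08"), pd.Timestamp("2013-01-08 05:00"), Delta(days=7)
load.loc[start:stop, "DE"] = load.loc[start - delta:stop - delta, "DE"].values

print(load.loc[start:stop, "DE"].tolist())  # [0.0, 1.0, 2.0, 3.0, 4.0, 5.0]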
def manual_adjustment(load, powerstatistics):
def manual_adjustment(load, fn_load, powerstatistics):
"""
Adjust gaps manually for load data from the OPSD time-series package.
@ -150,6 +156,8 @@ def manual_adjustment(load, powerstatistics):
powerstatistics: bool
Whether argument load comprises the electricity consumption data of
the ENTSOE power statistics or of the ENTSOE transparency map
fn_load: str
File name or URL location (file format .csv)
Returns
-------
@ -175,7 +183,11 @@ def manual_adjustment(load, powerstatistics):
copy_timeslice(load, 'CH', '2010-11-04 04:00', '2010-11-04 22:00', Delta(days=1))
copy_timeslice(load, 'NO', '2010-12-09 11:00', '2010-12-09 18:00', Delta(days=1))
# whole january missing
copy_timeslice(load, 'GB', '2009-12-31 23:00', '2010-01-31 23:00', Delta(days=-364))
copy_timeslice(load, 'GB', '2010-01-01 00:00', '2010-01-31 23:00', Delta(days=-365), fn_load)
# 1.1. at midnight gets special treatment
copy_timeslice(load, 'IE', '2016-01-01 00:00', '2016-01-01 01:00', Delta(days=-366), fn_load)
copy_timeslice(load, 'PT', '2016-01-01 00:00', '2016-01-01 01:00', Delta(days=-366), fn_load)
copy_timeslice(load, 'GB', '2016-01-01 00:00', '2016-01-01 01:00', Delta(days=-366), fn_load)
else:
if 'ME' in load:
@ -218,7 +230,7 @@ if __name__ == "__main__":
load = load_timeseries(snakemake.input[0], years, countries, powerstatistics)
if snakemake.config['load']['manual_adjustments']:
load = manual_adjustment(load, powerstatistics)
load = manual_adjustment(load, snakemake.input[0], powerstatistics)
logger.info(f"Linearly interpolate gaps of size {interpolate_limit} and less.")
load = load.interpolate(method='linear', limit=interpolate_limit)

View File

@ -79,6 +79,7 @@ import powerplantmatching as pm
import pandas as pd
import numpy as np
from powerplantmatching.export import map_country_bus
from scipy.spatial import cKDTree as KDTree
logger = logging.getLogger(__name__)
@ -87,13 +88,16 @@ logger = logging.getLogger(__name__)
def add_custom_powerplants(ppl, custom_powerplants, custom_ppl_query=False):
if not custom_ppl_query:
return ppl
add_ppls = pd.read_csv(custom_powerplants, index_col=0,
dtype={'bus': 'str'})
add_ppls = pd.read_csv(custom_powerplants, index_col=0, dtype={'bus': 'str'})
if isinstance(custom_ppl_query, str):
add_ppls.query(custom_ppl_query, inplace=True)
return pd.concat([ppl, add_ppls], sort=False, ignore_index=True, verify_integrity=True)
def replace_natural_gas_by_technology(df):
return df.Fueltype.where(df.Fueltype != 'Natural Gas', df.Technology)
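A toy check (not part of the diff) of replace_natural_gas_by_technology: gas plants take their Technology (CCGT/OCGT) as Fueltype, everything else keeps its Fueltype.
import pandas as pd

df = pd.DataFrame({"Fueltype": ["Natural Gas", "Natural Gas", "Hard Coal"],
                   "Technology": ["CCGT", "OCGT", "Steam Turbine"]})

print(df.Fueltype.where(df.Fueltype != "Natural Gas", df.Technology).tolist())
# ['CCGT', 'OCGT', 'Hard Coal']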
if __name__ == "__main__":
if 'snakemake' not in globals():
from _helpers import mock_snakemake
@ -103,16 +107,21 @@ if __name__ == "__main__":
n = pypsa.Network(snakemake.input.base_network)
countries = n.buses.country.unique()
ppl = (pm.powerplants(from_url=True)
.powerplant.fill_missing_decommyears()
.powerplant.fill_missing_decommissioning_years()
.powerplant.convert_country_to_alpha2()
.query('Fueltype not in ["Solar", "Wind"] and Country in @countries')
.replace({'Technology': {'Steam Turbine': 'OCGT'}})
.assign(Fueltype=lambda df: (
df.Fueltype
.where(df.Fueltype != 'Natural Gas',
df.Technology.replace('Steam Turbine',
'OCGT').fillna('OCGT')))))
.replace({'Technology': {'Steam Turbine': 'OCGT', "Combustion Engine": "OCGT"}})
.assign(Fueltype=replace_natural_gas_by_technology))
# Correct bioenergy for countries where possible
opsd = pm.data.OPSD_VRE().powerplant.convert_country_to_alpha2()
opsd = opsd.query('Country in @countries and Fueltype == "Bioenergy"')
opsd['Name'] = "Biomass"
available_countries = opsd.Country.unique()
ppl = ppl.query('not (Country in @available_countries and Fueltype == "Bioenergy")')
ppl = pd.concat([ppl, opsd])
ppl_query = snakemake.config['electricity']['powerplants_filter']
if isinstance(ppl_query, str):
@ -122,21 +131,21 @@ if __name__ == "__main__":
custom_ppl_query = snakemake.config['electricity']['custom_powerplants']
ppl = add_custom_powerplants(ppl, snakemake.input.custom_powerplants, custom_ppl_query)
cntries_without_ppl = [c for c in countries if c not in ppl.Country.unique()]
countries_wo_ppl = set(countries)-set(ppl.Country.unique())
if countries_wo_ppl:
logging.warning(f"No powerplants known in: {', '.join(countries_wo_ppl)}")
for c in countries:
substation_i = n.buses.query('substation_lv and country == @c').index
kdtree = KDTree(n.buses.loc[substation_i, ['x','y']].values)
ppl_i = ppl.query('Country == @c').index
tree_i = kdtree.query(ppl.loc[ppl_i, ['lon','lat']].values)[1]
ppl.loc[ppl_i, 'bus'] = substation_i.append(pd.Index([np.nan]))[tree_i]
if cntries_without_ppl:
logging.warning(f"No powerplants known in: {', '.join(cntries_without_ppl)}")
substations = n.buses.query('substation_lv')
ppl = map_country_bus(ppl, substations)
bus_null_b = ppl["bus"].isnull()
if bus_null_b.any():
logging.warning(f"Couldn't find close bus for {bus_null_b.sum()} powerplants")
logging.warning(f"Couldn't find close bus for {bus_null_b.sum()} powerplants. "
"Removing them from the powerplants list.")
ppl = ppl[~bus_null_b]
ppl.to_csv(snakemake.output[0])
# TODO: This has to fixed in PPM, some powerplants are still duplicated
cumcount = ppl.groupby(['bus', 'Fueltype']).cumcount() + 1
ppl.Name = ppl.Name.where(cumcount == 1, ppl.Name + " " + cumcount.astype(str))
ppl.reset_index(drop=True).to_csv(snakemake.output[0])
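A minimal example (made-up rows) of the duplicate handling above: plants that share a bus and fuel type get a numeric suffix so that names stay unique downstream.
import pandas as pd

ppl = pd.DataFrame({"Name": ["Plant A", "Plant A", "Plant B"],
                    "bus": ["1", "1", "2"],
                    "Fueltype": ["CCGT", "CCGT", "CCGT"]})

cumcount = ppl.groupby(["bus", "Fueltype"]).cumcount() + 1
ppl.Name = ppl.Name.where(cumcount == 1, ppl.Name + " " + cumcount.astype(str))
print(ppl.Name.tolist())  # ['Plant A', 'Plant A 2', 'Plant B']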

View File

@ -189,6 +189,7 @@ import logging
from pypsa.geo import haversine
from shapely.geometry import LineString
import time
from dask.distributed import Client, LocalCluster
from _helpers import configure_logging
@ -216,9 +217,15 @@ if __name__ == '__main__':
if correction_factor != 1.:
logger.info(f'correction_factor is set as {correction_factor}')
cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
client = Client(cluster, asynchronous=True)
cutout = atlite.Cutout(snakemake.input['cutout'])
regions = gpd.read_file(snakemake.input.regions).set_index('name').rename_axis('bus')
regions = gpd.read_file(snakemake.input.regions)
assert not regions.empty, (f"List of regions in {snakemake.input.regions} is empty, please "
"disable the corresponding renewable technology")
# do not pull this up; set_index does not work if the GeoDataFrame is empty
regions = regions.set_index('name').rename_axis('bus')
buses = regions.index
excluder = atlite.ExclusionContainer(crs=3035, res=100)
@ -266,7 +273,7 @@ if __name__ == '__main__':
potential = capacity_per_sqkm * availability.sum('bus') * area
func = getattr(cutout, resource.pop('method'))
resource['dask_kwargs'] = {'num_workers': nprocesses}
resource['dask_kwargs'] = {"scheduler": client}
capacity_factor = correction_factor * func(capacity_factor=True, **resource)
layout = capacity_factor * area * capacity_per_sqkm
profile, capacities = func(matrix=availability.stack(spatial=['y','x']),
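The dask setup introduced above, collected into one hedged sketch (nprocesses is a placeholder; in the workflow it comes from the atlite config): the distributed client is handed to the cutout conversion functions through dask_kwargs instead of a bare num_workers.
from dask.distributed import Client, LocalCluster

nprocesses = 4  # placeholder
cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
client = Client(cluster, asynchronous=True)

dask_kwargs = {"scheduler": client}  # forwarded via **resource to the cutout conversion

# ... run the conversions ...

client.close()
cluster.close()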

View File

@ -129,14 +129,15 @@ def eez(country_shapes, eez, country_list):
df['name'] = df['ISO_3digit'].map(lambda c: _get_country('alpha_2', alpha_3=c))
s = df.set_index('name').geometry.map(lambda s: _simplify_polys(s, filterremote=False))
s = gpd.GeoSeries({k:v for k,v in s.iteritems() if v.distance(country_shapes[k]) < 1e-3})
s = s.to_frame("geometry")
s.index.name = "name"
return s
def country_cover(country_shapes, eez_shapes=None):
shapes = list(country_shapes)
shapes = country_shapes
if eez_shapes is not None:
shapes += list(eez_shapes)
shapes = pd.concat([shapes, eez_shapes])
europe_shape = unary_union(shapes)
if isinstance(europe_shape, MultiPolygon):
@ -203,16 +204,6 @@ def nuts3(country_shapes, nuts3, nuts3pop, nuts3gdp, ch_cantons, ch_popgdp):
return df
def save_to_geojson(df, fn):
if os.path.exists(fn):
os.unlink(fn)
if not isinstance(df, gpd.GeoDataFrame):
df = gpd.GeoDataFrame(dict(geometry=df))
df = df.reset_index()
schema = {**gpd.io.file.infer_schema(df), 'geometry': 'Unknown'}
df.to_file(fn, driver='GeoJSON', schema=schema)
if __name__ == "__main__":
if 'snakemake' not in globals():
from _helpers import mock_snakemake
@ -220,15 +211,14 @@ if __name__ == "__main__":
configure_logging(snakemake)
country_shapes = countries(snakemake.input.naturalearth, snakemake.config['countries'])
save_to_geojson(country_shapes, snakemake.output.country_shapes)
country_shapes.reset_index().to_file(snakemake.output.country_shapes)
offshore_shapes = eez(country_shapes, snakemake.input.eez, snakemake.config['countries'])
save_to_geojson(offshore_shapes, snakemake.output.offshore_shapes)
offshore_shapes.reset_index().to_file(snakemake.output.offshore_shapes)
europe_shape = country_cover(country_shapes, offshore_shapes)
save_to_geojson(gpd.GeoSeries(europe_shape), snakemake.output.europe_shape)
europe_shape = gpd.GeoDataFrame(geometry=[country_cover(country_shapes, offshore_shapes.geometry)])
europe_shape.reset_index().to_file(snakemake.output.europe_shape)
nuts3_shapes = nuts3(country_shapes, snakemake.input.nuts3, snakemake.input.nuts3pop,
snakemake.input.nuts3gdp, snakemake.input.ch_cantons, snakemake.input.ch_popgdp)
save_to_geojson(nuts3_shapes, snakemake.output.nuts3_shapes)
nuts3_shapes.reset_index().to_file(snakemake.output.nuts3_shapes)

View File

@ -11,11 +11,11 @@ Relevant Settings
.. code:: yaml
focus_weights:
clustering:
cluster_network:
aggregation_strategies:
renewable: (keys)
{technology}:
potential:
focus_weights:
solving:
solver:
@ -122,7 +122,7 @@ Exemplary unsolved network clustered to 37 nodes:
"""
import logging
from _helpers import configure_logging, update_p_nom_max
from _helpers import configure_logging, update_p_nom_max, get_aggregation_strategies
import pypsa
import os
@ -138,7 +138,7 @@ import seaborn as sns
from functools import reduce
from pypsa.networkclustering import (busmap_by_kmeans, busmap_by_spectral_clustering,
_make_consense, get_clustering_from_busmap)
busmap_by_hac, _make_consense, get_clustering_from_busmap)
import warnings
warnings.filterwarnings(action='ignore', category=UserWarning)
@ -173,6 +173,42 @@ def weighting_for_country(n, x):
return (w * (100. / w.max())).clip(lower=1.).astype(int)
def get_feature_for_hac(n, buses_i=None, feature=None):
if buses_i is None:
buses_i = n.buses.index
if feature is None:
feature = "solar+onwind-time"
carriers = feature.split('-')[0].split('+')
if "offwind" in carriers:
carriers.remove("offwind")
carriers = np.append(carriers, n.generators.carrier.filter(like='offwind').unique())
if feature.split('-')[1] == 'cap':
feature_data = pd.DataFrame(index=buses_i, columns=carriers)
for carrier in carriers:
gen_i = n.generators.query("carrier == @carrier").index
attach = n.generators_t.p_max_pu[gen_i].mean().rename(index = n.generators.loc[gen_i].bus)
feature_data[carrier] = attach
if feature.split('-')[1] == 'time':
feature_data = pd.DataFrame(columns=buses_i)
for carrier in carriers:
gen_i = n.generators.query("carrier == @carrier").index
attach = n.generators_t.p_max_pu[gen_i].rename(columns = n.generators.loc[gen_i].bus)
feature_data = pd.concat([feature_data, attach], axis=0)[buses_i]
feature_data = feature_data.T
# timestamp raises error in sklearn >= v1.2:
feature_data.columns = feature_data.columns.astype(str)
feature_data = feature_data.fillna(0)
return feature_data
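A short illustration of how the feature string is interpreted above: carriers before the dash (with 'offwind' expanded to all offwind-* carriers), aggregation kind after it.
feature = "solar+onwind-time"
carriers = feature.split("-")[0].split("+")  # ['solar', 'onwind']
kind = feature.split("-")[1]                 # 'time' (per-snapshot profiles) or 'cap' (mean capacity factors)
print(carriers, kind)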
def distribute_clusters(n, n_clusters, focus_weights=None, solver_name="cbc"):
"""Determine the number of clusters per country"""
@ -221,13 +257,50 @@ def distribute_clusters(n, n_clusters, focus_weights=None, solver_name="cbc"):
return pd.Series(m.n.get_values(), index=L.index).round().astype(int)
def busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights=None, algorithm="kmeans", **algorithm_kwds):
def busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights=None, algorithm="kmeans", feature=None, **algorithm_kwds):
if algorithm == "kmeans":
algorithm_kwds.setdefault('n_init', 1000)
algorithm_kwds.setdefault('max_iter', 30000)
algorithm_kwds.setdefault('tol', 1e-6)
algorithm_kwds.setdefault('random_state', 0)
def fix_country_assignment_for_hac(n):
from scipy.sparse import csgraph
# overwrite country of nodes that are disconnected from their country-topology
for country in n.buses.country.unique():
m = n[n.buses.country == country].copy()
_, labels = csgraph.connected_components(m.adjacency_matrix(), directed=False)
component = pd.Series(labels, index=m.buses.index)
component_sizes = component.value_counts()
if len(component_sizes) > 1:
disconnected_bus = component[component == component_sizes.index[-1]].index[0]
neighbor_bus = (
n.lines.query("bus0 == @disconnected_bus or bus1 == @disconnected_bus")
.iloc[0][['bus0', 'bus1']]
)
new_country = list(set(n.buses.loc[neighbor_bus].country)-set([country]))[0]
logger.info(
f"overwriting country `{country}` of bus `{disconnected_bus}` "
f"to new country `{new_country}`, because it is disconnected "
"from its inital inter-country transmission grid."
)
n.buses.at[disconnected_bus, "country"] = new_country
return n
if algorithm == "hac":
feature = get_feature_for_hac(n, buses_i=n.buses.index, feature=feature)
n = fix_country_assignment_for_hac(n)
if (algorithm != "hac") and (feature is not None):
logger.warning(f"Keyword argument feature is only valid for algorithm `hac`. "
f"Given feature `{feature}` will be ignored.")
n.determine_network_topology()
n_clusters = distribute_clusters(n, n_clusters, focus_weights=focus_weights, solver_name=solver_name)
@ -251,37 +324,34 @@ def busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights=None, algori
return prefix + busmap_by_spectral_clustering(reduce_network(n, x), n_clusters[x.name], **algorithm_kwds)
elif algorithm == "louvain":
return prefix + busmap_by_louvain(reduce_network(n, x), n_clusters[x.name], **algorithm_kwds)
elif algorithm == "hac":
return prefix + busmap_by_hac(n, n_clusters[x.name], buses_i=x.index, feature=feature.loc[x.index])
else:
raise ValueError(f"`algorithm` must be one of 'kmeans', 'spectral' or 'louvain'. Is {algorithm}.")
raise ValueError(f"`algorithm` must be one of 'kmeans', 'hac', 'spectral' or 'louvain'. Is {algorithm}.")
return (n.buses.groupby(['country', 'sub_network'], group_keys=False)
.apply(busmap_for_country).squeeze().rename('busmap'))
def clustering_for_n_clusters(n, n_clusters, custom_busmap=False, aggregate_carriers=None,
line_length_factor=1.25, potential_mode='simple', solver_name="cbc",
algorithm="kmeans", extended_link_costs=0, focus_weights=None):
line_length_factor=1.25, aggregation_strategies=dict(), solver_name="cbc",
algorithm="hac", feature=None, extended_link_costs=0, focus_weights=None):
if potential_mode == 'simple':
p_nom_max_strategy = pd.Series.sum
elif potential_mode == 'conservative':
p_nom_max_strategy = pd.Series.min
else:
raise AttributeError(f"potential_mode should be one of 'simple' or 'conservative' but is '{potential_mode}'")
bus_strategies, generator_strategies = get_aggregation_strategies(aggregation_strategies)
if not isinstance(custom_busmap, pd.Series):
busmap = busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights, algorithm)
busmap = busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights, algorithm, feature)
else:
busmap = custom_busmap
clustering = get_clustering_from_busmap(
n, busmap,
bus_strategies=dict(country=_make_consense("Bus", "country")),
bus_strategies=bus_strategies,
aggregate_generators_weighted=True,
aggregate_generators_carriers=aggregate_carriers,
aggregate_one_ports=["Load", "StorageUnit"],
line_length_factor=line_length_factor,
generator_strategies={'p_nom_max': p_nom_max_strategy, 'p_nom_min': pd.Series.sum},
generator_strategies=generator_strategies,
scale_link_capital_costs=False)
if not n.links.empty:
@ -296,24 +366,17 @@ def clustering_for_n_clusters(n, n_clusters, custom_busmap=False, aggregate_carr
return clustering
def save_to_geojson(s, fn):
if os.path.exists(fn):
os.unlink(fn)
df = s.reset_index()
schema = {**gpd.io.file.infer_schema(df), 'geometry': 'Unknown'}
df.to_file(fn, driver='GeoJSON', schema=schema)
def cluster_regions(busmaps, input=None, output=None):
busmap = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0])
for which in ('regions_onshore', 'regions_offshore'):
regions = gpd.read_file(getattr(input, which)).set_index('name')
geom_c = regions.geometry.groupby(busmap).apply(shapely.ops.unary_union)
regions_c = gpd.GeoDataFrame(dict(geometry=geom_c))
regions = gpd.read_file(getattr(input, which))
regions = regions.reindex(columns=["name", "geometry"]).set_index('name')
regions_c = regions.dissolve(busmap)
regions_c.index.name = 'name'
save_to_geojson(regions_c, getattr(output, which))
regions_c = regions_c.reset_index()
regions_c.to_file(getattr(output, which))
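A toy example (made-up geometries) of the dissolve-by-busmap step that replaces the manual unary_union groupby: geometries of buses mapped to the same cluster are merged into one region.
import geopandas as gpd
import pandas as pd
from shapely.geometry import box

regions = gpd.GeoDataFrame({"name": ["bus0", "bus1", "bus2"],
                            "country": ["DE", "DE", "DE"],
                            "geometry": [box(0, 0, 1, 1), box(1, 0, 2, 1), box(2, 0, 3, 1)]}
                           ).set_index("name")
busmap = pd.Series({"bus0": "cluster0", "bus1": "cluster0", "bus2": "cluster1"})

regions_c = regions.dissolve(busmap)
regions_c.index.name = "name"
print(regions_c.area.tolist())  # [2.0, 1.0]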
def plot_busmap_for_n_clusters(n, n_clusters, fn=None):
@ -342,7 +405,7 @@ if __name__ == "__main__":
if snakemake.wildcards.clusters.endswith('m'):
n_clusters = int(snakemake.wildcards.clusters[:-1])
aggregate_carriers = pd.Index(n.generators.carrier.unique()).difference(renewable_carriers)
aggregate_carriers = snakemake.config["electricity"].get("conventional_carriers")
elif snakemake.wildcards.clusters == 'all':
n_clusters = len(n.buses)
aggregate_carriers = None # All
@ -368,20 +431,28 @@ if __name__ == "__main__":
"The `potential` configuration option must agree for all renewable carriers, for now!"
)
return v
potential_mode = consense(pd.Series([snakemake.config['renewable'][tech]['potential']
for tech in renewable_carriers]))
aggregation_strategies = snakemake.config["clustering"].get("aggregation_strategies", {})
# translate str entries of aggregation_strategies to pd.Series functions:
aggregation_strategies = {
p: {k: getattr(pd.Series, v) for k,v in aggregation_strategies[p].items()}
for p in aggregation_strategies.keys()
}
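A minimal sketch of what the translation above produces: config strings such as "sum" or "mean" become the corresponding pd.Series methods expected by pypsa's clustering helpers.
import pandas as pd

aggregation_strategies = {"generators": {"p_nom_max": "sum", "efficiency": "mean"}}
aggregation_strategies = {
    p: {k: getattr(pd.Series, v) for k, v in aggregation_strategies[p].items()}
    for p in aggregation_strategies
}
print(aggregation_strategies["generators"]["p_nom_max"])  # the (unbound) pd.Series.sum method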
custom_busmap = snakemake.config["enable"].get("custom_busmap", False)
if custom_busmap:
custom_busmap = pd.read_csv(snakemake.input.custom_busmap, index_col=0, squeeze=True)
custom_busmap.index = custom_busmap.index.astype(str)
logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}")
cluster_config = snakemake.config.get('clustering', {}).get('cluster_network', {})
clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap, aggregate_carriers,
line_length_factor, potential_mode,
line_length_factor, aggregation_strategies,
snakemake.config['solving']['solver']['name'],
"kmeans", hvac_overhead_cost, focus_weights)
cluster_config.get("algorithm", "hac"),
cluster_config.get("feature", "solar+onwind-time"),
hvac_overhead_cost, focus_weights)
update_p_nom_max(n)
update_p_nom_max(clustering.network)
clustering.network.export_to_netcdf(snakemake.output.network)
for attr in ('busmap', 'linemap'): #also available: linemap_positive, linemap_negative

View File

@ -77,6 +77,16 @@ def add_co2limit(n, co2limit, Nyears=1.):
constant=co2limit * Nyears)
def add_gaslimit(n, gaslimit, Nyears=1.):
sel = n.carriers.index.intersection(["OCGT", "CCGT", "CHP"])
n.carriers.loc[sel, "gas_usage"] = 1.
n.add("GlobalConstraint", "GasLimit",
carrier_attribute="gas_usage", sense="<=",
constant=gaslimit * Nyears)
def add_emission_prices(n, emission_prices={'co2': 0.}, exclude_co2=False):
if exclude_co2: emission_prices.pop('co2')
ep = (pd.Series(emission_prices).rename(lambda x: x+'_emissions') *
@ -233,8 +243,22 @@ if __name__ == "__main__":
if len(m) > 0:
co2limit = float(m[0]) * snakemake.config['electricity']['co2base']
add_co2limit(n, co2limit, Nyears)
logger.info("Setting CO2 limit according to wildcard value.")
else:
add_co2limit(n, snakemake.config['electricity']['co2limit'], Nyears)
logger.info("Setting CO2 limit according to config value.")
break
for o in opts:
if "CH4L" in o:
m = re.findall("[0-9]*\.?[0-9]+$", o)
if len(m) > 0:
limit = float(m[0]) * 1e6
add_gaslimit(n, limit, Nyears)
logger.info("Setting gas usage limit according to wildcard value.")
else:
add_gaslimit(n, snakemake.config["electricity"].get("gaslimit"), Nyears)
logger.info("Setting gas usage limit according to config value.")
break
for o in opts:
@ -243,7 +267,7 @@ if __name__ == "__main__":
if oo[0].startswith(tuple(suptechs)):
carrier = oo[0]
# handles only p_nom_max as stores and lines have no potentials
attr_lookup = {"p": "p_nom_max", "c": "capital_cost"}
attr_lookup = {"p": "p_nom_max", "c": "capital_cost", "m": "marginal_cost"}
attr = attr_lookup[oo[1][0]]
factor = float(oo[1][1:])
if carrier == "AC": # lines do not have carrier
@ -254,8 +278,16 @@ if __name__ == "__main__":
sel = c.df.carrier.str.contains(carrier)
c.df.loc[sel,attr] *= factor
if 'Ep' in opts:
add_emission_prices(n, snakemake.config['costs']['emission_prices'])
for o in opts:
if 'Ep' in o:
m = re.findall("[0-9]*\.?[0-9]+$", o)
if len(m) > 0:
logger.info("Setting emission prices according to wildcard value.")
add_emission_prices(n, dict(co2=float(m[0])))
else:
logger.info("Setting emission prices according to config value.")
add_emission_prices(n, snakemake.config['costs']['emission_prices'])
break
ll_type, factor = snakemake.wildcards.ll[0], snakemake.wildcards.ll[1:]
set_transmission_limit(n, ll_type, factor, costs, Nyears)

View File

@ -11,7 +11,7 @@ The data bundle (1.4 GB) contains common GIS datasets like NUTS3 shapes, EEZ sha
This rule downloads the data bundle from `zenodo <https://doi.org/10.5281/zenodo.3517935>`_ and extracts it in the ``data`` sub-directory, such that all files of the bundle are stored in the ``data/bundle`` subdirectory.
The :ref:`tutorial` uses a smaller `data bundle <https://zenodo.org/record/3517921/files/pypsa-eur-tutorial-data-bundle.tar.xz>`_ than required for the full model (19 MB)
The :ref:`tutorial` uses a smaller `data bundle <https://zenodo.org/record/3517921/files/pypsa-eur-tutorial-data-bundle.tar.xz>`_ than required for the full model (188 MB)
.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.3517921.svg
:target: https://doi.org/10.5281/zenodo.3517921
@ -28,7 +28,7 @@ The :ref:`tutorial` uses a smaller `data bundle <https://zenodo.org/record/35179
**Outputs**
- ``cutouts/bundle``: input data collected from various sources
- ``data/bundle``: input data collected from various sources
"""

View File

@ -13,6 +13,11 @@ Relevant Settings
.. code:: yaml
clustering:
simplify_network:
cluster_network:
aggregation_strategies:
costs:
USD2013_to_EUR2013:
discountrate:
@ -22,10 +27,6 @@ Relevant Settings
electricity:
max_hours:
renewables: (keys)
{technology}:
potential:
lines:
length_factor:
@ -83,7 +84,7 @@ The rule :mod:`simplify_network` does up to four things:
"""
import logging
from _helpers import configure_logging, update_p_nom_max
from _helpers import configure_logging, update_p_nom_max, get_aggregation_strategies
from cluster_network import clustering_for_n_clusters, cluster_regions
from add_electricity import load_costs
@ -189,7 +190,10 @@ def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus, out
def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, output, aggregate_one_ports={"Load", "StorageUnit"}):
def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, output,
aggregate_one_ports={"Load", "StorageUnit"},
aggregation_strategies=dict()):
def replace_components(n, c, df, pnl):
n.mremove(c, n.df(c).index)
@ -200,7 +204,12 @@ def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, output, a
_adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus, output)
generators, generators_pnl = aggregategenerators(n, busmap, custom_strategies={'p_nom_min': np.sum})
_, generator_strategies = get_aggregation_strategies(aggregation_strategies)
generators, generators_pnl = aggregategenerators(
n, busmap, custom_strategies=generator_strategies
)
replace_components(n, "Generator", generators, generators_pnl)
for one_port in aggregate_one_ports:
@ -214,7 +223,7 @@ def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, output, a
n.mremove(c, df.index[df.bus0.isin(buses_to_del) | df.bus1.isin(buses_to_del)])
def simplify_links(n, costs, config, output):
def simplify_links(n, costs, config, output, aggregation_strategies=dict()):
## Complex multi-node links are folded into end-points
logger.info("Simplifying connected link components")
@ -306,21 +315,23 @@ def simplify_links(n, costs, config, output):
logger.debug("Collecting all components using the busmap")
_aggregate_and_move_components(n, busmap, connection_costs_to_bus, output)
_aggregate_and_move_components(n, busmap, connection_costs_to_bus, output,
aggregation_strategies=aggregation_strategies)
return n, busmap
def remove_stubs(n, costs, config, output):
def remove_stubs(n, costs, config, output, aggregation_strategies=dict()):
logger.info("Removing stubs")
busmap = busmap_by_stubs(n) # ['country'])
connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap, costs, config)
_aggregate_and_move_components(n, busmap, connection_costs_to_bus, output)
_aggregate_and_move_components(n, busmap, connection_costs_to_bus, output,
aggregation_strategies=aggregation_strategies)
return n, busmap
def aggregate_to_substations(n, buses_i=None):
def aggregate_to_substations(n, aggregation_strategies=dict(), buses_i=None):
# can be used to aggregate a selection of buses to electrically closest neighbors
# if no buses are given, nodes that are no substations or without offshore connection are aggregated
@ -345,19 +356,20 @@ def aggregate_to_substations(n, buses_i=None):
busmap = n.buses.index.to_series()
busmap.loc[buses_i] = dist.idxmin(1)
bus_strategies, generator_strategies = get_aggregation_strategies(aggregation_strategies)
clustering = get_clustering_from_busmap(n, busmap,
bus_strategies=dict(country=_make_consense("Bus", "country")),
bus_strategies=bus_strategies,
aggregate_generators_weighted=True,
aggregate_generators_carriers=None,
aggregate_one_ports=["Load", "StorageUnit"],
line_length_factor=1.0,
generator_strategies={'p_nom_max': 'sum'},
generator_strategies=generator_strategies,
scale_link_capital_costs=False)
return clustering.network, busmap
def cluster(n, n_clusters, config):
def cluster(n, n_clusters, config, algorithm="hac", feature=None, aggregation_strategies=dict()):
logger.info(f"Clustering to {n_clusters} buses")
focus_weights = config.get('focus_weights', None)
@ -365,17 +377,11 @@ def cluster(n, n_clusters, config):
renewable_carriers = pd.Index([tech
for tech in n.generators.carrier.unique()
if tech.split('-', 2)[0] in config['renewable']])
def consense(x):
v = x.iat[0]
assert ((x == v).all() or x.isnull().all()), (
"The `potential` configuration option must agree for all renewable carriers, for now!"
)
return v
potential_mode = (consense(pd.Series([config['renewable'][tech]['potential']
for tech in renewable_carriers]))
if len(renewable_carriers) > 0 else 'conservative')
clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap=False, potential_mode=potential_mode,
clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap=False,
aggregation_strategies=aggregation_strategies,
solver_name=config['solving']['solver']['name'],
algorithm=algorithm, feature=feature,
focus_weights=focus_weights)
return clustering.network, clustering.busmap
@ -389,24 +395,50 @@ if __name__ == "__main__":
n = pypsa.Network(snakemake.input.network)
aggregation_strategies = snakemake.config["clustering"].get("aggregation_strategies", {})
# translate str entries of aggregation_strategies to pd.Series functions:
aggregation_strategies = {
p: {k: getattr(pd.Series, v) for k,v in aggregation_strategies[p].items()}
for p in aggregation_strategies.keys()
}
n, trafo_map = simplify_network_to_380(n)
Nyears = n.snapshot_weightings.objective.sum() / 8760
technology_costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears)
n, simplify_links_map = simplify_links(n, technology_costs, snakemake.config, snakemake.output)
n, simplify_links_map = simplify_links(n, technology_costs, snakemake.config, snakemake.output,
aggregation_strategies)
n, stub_map = remove_stubs(n, technology_costs, snakemake.config, snakemake.output)
n, stub_map = remove_stubs(n, technology_costs, snakemake.config, snakemake.output,
aggregation_strategies=aggregation_strategies)
busmaps = [trafo_map, simplify_links_map, stub_map]
if snakemake.config.get('clustering', {}).get('simplify', {}).get('to_substations', False):
n, substation_map = aggregate_to_substations(n)
cluster_config = snakemake.config.get('clustering', {}).get('simplify_network', {})
if cluster_config.get('to_substations', False):
n, substation_map = aggregate_to_substations(n, aggregation_strategies)
busmaps.append(substation_map)
# treatment of outliers (nodes without a profile for the considered carrier):
# all nodes that have no profile of the given carrier are aggregated to their electrically closest neighbor
if (
snakemake.config.get("clustering", {}).get("cluster_network", {}).get("algorithm", "hac") == "hac" or
cluster_config.get("algorithm", "hac") == "hac"
):
carriers = cluster_config.get("feature", "solar+onwind-time").split('-')[0].split('+')
for carrier in carriers:
buses_i = list(set(n.buses.index)-set(n.generators.query("carrier == @carrier").bus))
logger.info(f'clustering preparation (hac): aggregating {len(buses_i)} buses of type {carrier}.')
n, busmap_hac = aggregate_to_substations(n, aggregation_strategies, buses_i)
busmaps.append(busmap_hac)
if snakemake.wildcards.simpl:
n, cluster_map = cluster(n, int(snakemake.wildcards.simpl), snakemake.config)
n, cluster_map = cluster(n, int(snakemake.wildcards.simpl), snakemake.config,
cluster_config.get('algorithm', 'hac'),
cluster_config.get('feature', None),
aggregation_strategies)
busmaps.append(cluster_map)
# some entries in n.buses are not updated in previous functions, therefore can be wrong. as they are not needed

View File

@ -84,8 +84,9 @@ import pandas as pd
import re
import pypsa
from pypsa.linopf import (get_var, define_constraints, linexpr, join_exprs,
network_lopf, ilopf)
from pypsa.linopf import (get_var, define_constraints, define_variables,
linexpr, join_exprs, network_lopf, ilopf)
from pypsa.descriptors import get_switchable_as_dense as get_as_dense
from pathlib import Path
from vresutils.benchmark import memory_logger
@ -99,17 +100,19 @@ def prepare_network(n, solve_opts):
for df in (n.generators_t.p_max_pu, n.storage_units_t.inflow):
df.where(df>solve_opts['clip_p_max_pu'], other=0., inplace=True)
if solve_opts.get('load_shedding'):
n.add("Carrier", "Load")
load_shedding = solve_opts.get('load_shedding')
if load_shedding:
n.add("Carrier", "load", color="#dd2e23", nice_name="Load shedding")
buses_i = n.buses.query("carrier == 'AC'").index
if not np.isscalar(load_shedding): load_shedding = 1e2 # Eur/kWh
# intersect between macroeconomic and surveybased
# willingness to pay
# http://journal.frontiersin.org/article/10.3389/fenrg.2015.00055/full)
n.madd("Generator", buses_i, " load",
bus=buses_i,
carrier='load',
sign=1e-3, # Adjust sign to measure p and p_nom in kW instead of MW
marginal_cost=1e2, # Eur/kWh
# intersect between macroeconomic and survey-based
# willingness to pay
# http://journal.frontiersin.org/article/10.3389/fenrg.2015.00055/full
marginal_cost=load_shedding,
p_nom=1e9 # kW
)
@ -211,6 +214,75 @@ def add_SAFE_constraints(n, config):
define_constraints(n, lhs, '>=', rhs, 'Safe', 'mintotalcap')
def add_operational_reserve_margin_constraint(n, config):
reserve_config = config["electricity"]["operational_reserve"]
EPSILON_LOAD = reserve_config["epsilon_load"]
EPSILON_VRES = reserve_config["epsilon_vres"]
CONTINGENCY = reserve_config["contingency"]
# Reserve Variables
reserve = get_var(n, 'Generator', 'r')
lhs = linexpr((1, reserve)).sum(1)
# Share of extendable renewable capacities
ext_i = n.generators.query('p_nom_extendable').index
vres_i = n.generators_t.p_max_pu.columns
if not ext_i.empty and not vres_i.empty:
capacity_factor = n.generators_t.p_max_pu[vres_i.intersection(ext_i)]
renewable_capacity_variables = get_var(n, 'Generator', 'p_nom')[vres_i.intersection(ext_i)]
lhs += linexpr((-EPSILON_VRES * capacity_factor, renewable_capacity_variables)).sum(1)
# Total demand at t
demand = n.loads_t.p.sum(1)
# VRES potential of non extendable generators
capacity_factor = n.generators_t.p_max_pu[vres_i.difference(ext_i)]
renewable_capacity = n.generators.p_nom[vres_i.difference(ext_i)]
potential = (capacity_factor * renewable_capacity).sum(1)
# Right-hand-side
rhs = EPSILON_LOAD * demand + EPSILON_VRES * potential + CONTINGENCY
define_constraints(n, lhs, '>=', rhs, "Reserve margin")
def update_capacity_constraint(n):
gen_i = n.generators.index
ext_i = n.generators.query('p_nom_extendable').index
fix_i = n.generators.query('not p_nom_extendable').index
dispatch = get_var(n, 'Generator', 'p')
reserve = get_var(n, 'Generator', 'r')
capacity_fixed = n.generators.p_nom[fix_i]
p_max_pu = get_as_dense(n, 'Generator', 'p_max_pu')
lhs = linexpr((1, dispatch), (1, reserve))
if not ext_i.empty:
capacity_variable = get_var(n, 'Generator', 'p_nom')
lhs += linexpr((-p_max_pu[ext_i], capacity_variable)).reindex(columns=gen_i, fill_value='')
rhs = (p_max_pu[fix_i] * capacity_fixed).reindex(columns=gen_i, fill_value=0)
define_constraints(n, lhs, '<=', rhs, 'Generators', 'updated_capacity_constraint')
def add_operational_reserve_margin(n, sns, config):
"""
Build reserve margin constraints based on the formulation given in
https://genxproject.github.io/GenX/dev/core/#Reserves.
"""
define_variables(n, 0, np.inf, 'Generator', 'r', axes=[sns, n.generators.index])
add_operational_reserve_margin_constraint(n, config)
update_capacity_constraint(n)
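Written out as mathematics (a transcription of the code above, not a quote of the referenced GenX formulation), the two added constraints amount to
\sum_{g} r_{g,t} \;\ge\; \varepsilon_{\mathrm{load}}\, D_t \;+\; \varepsilon_{\mathrm{VRES}} \sum_{g \in \mathcal{V}} \bar{p}_{g,t}\, P^{\mathrm{nom}}_{g} \;+\; \Delta
\qquad\text{and}\qquad
p_{g,t} + r_{g,t} \;\le\; \bar{p}_{g,t}\, P^{\mathrm{nom}}_{g} \quad \forall g,
where r_{g,t} is the new reserve variable, D_t the total load at snapshot t, \bar{p}_{g,t} the per-unit availability (p_max_pu), P^{\mathrm{nom}}_{g} the generator capacity (a decision variable for extendable generators, a parameter otherwise), \mathcal{V} the set of variable renewables, \Delta the contingency, and \varepsilon_{\mathrm{load}}, \varepsilon_{\mathrm{VRES}} the config parameters epsilon_load and epsilon_vres.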
def add_battery_constraints(n):
nodes = n.buses.index[n.buses.carrier == "battery"]
if nodes.empty or ('Link', 'p_nom') not in n.variables.index:
@ -236,6 +308,9 @@ def extra_functionality(n, snapshots):
add_SAFE_constraints(n, config)
if 'CCL' in opts and n.generators.p_nom_extendable.any():
add_CCL_constraints(n, config)
reserve = config["electricity"].get("operational_reserve", {})
if reserve.get("activate"):
add_operational_reserve_margin(n, snapshots, config)
for o in opts:
if "EQ" in o:
add_EQ_constraints(n, o)

View File

@ -8,7 +8,6 @@ logging:
level: INFO
format: '%(levelname)s:%(name)s:%(message)s'
summary_dir: results
scenario:
weather_year: ['']
@ -19,10 +18,6 @@ scenario:
countries: ['BE']
clustering:
simplify:
to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections)
snapshots:
start: "2013-03-01"
end: "2013-03-08"
@ -170,6 +165,25 @@ costs:
emission_prices: # only used with the option Ep
co2: 0.
clustering:
simplify_network:
to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections)
algorithm: kmeans # choose from: [hac, kmeans]
feature: solar+onwind-time # only for hac. choose from: [solar+onwind-time, solar+onwind-cap, solar-time, solar-cap, solar+offwind-cap] etc.
cluster_network:
algorithm: kmeans
feature: solar+onwind-time
aggregation_strategies:
generators:
p_nom_max: sum # use "min" for more conservative assumptions
p_nom_min: sum
p_min_pu: mean
marginal_cost: mean
committable: any
ramp_limit_up: max
ramp_limit_down: max
efficiency: mean
solving:
options:
formulation: kirchhoff