remove snakemake dependencies in functions, pass them as kwargs instead

martacki 2021-09-14 16:34:02 +02:00
parent 2252ee2118
commit dfb929f2cf
9 changed files with 445 additions and 148 deletions
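The change follows one pattern across all nine files: functions stop reaching into the global snakemake object that Snakemake injects into rule scripts and instead receive the required file paths and config sections as (keyword) arguments; only the if __name__ == "__main__" block still touches snakemake. A minimal sketch of the pattern, based on the load_powerplants example from the diff below (the standalone path in the last comment is illustrative, not taken from the commit):

import pandas as pd

# After the refactoring, the input path is an explicit argument, so the function
# no longer needs a global snakemake object to exist when it is called.
def load_powerplants(ppl_fn):
    carrier_dict = {'ocgt': 'OCGT', 'ccgt': 'CCGT', 'bioenergy': 'biomass',
                    'ccgt, thermal': 'CCGT', 'hard coal': 'coal'}
    return (pd.read_csv(ppl_fn, index_col=0, dtype={'bus': 'str'})
            .replace({'carrier': carrier_dict}))

# Inside a Snakemake rule script, only the entry point wires up the inputs:
#     if __name__ == "__main__":
#         ppl = load_powerplants(snakemake.input.powerplants)
# Outside Snakemake (tests, notebooks) any path works, e.g.:
#     ppl = load_powerplants("resources/powerplants.csv")  # illustrative path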

config.yaml~ (new file, +317 lines)

@ -0,0 +1,317 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: CC0-1.0
version: 0.3.0
tutorial: false
logging:
level: INFO
format: '%(levelname)s:%(name)s:%(message)s'
summary_dir: results
scenario:
simpl: ['']
ll: ['copt']
clusters: [37, 128, 256, 512, 1024]
opts: [Co2L-3H]
countries: ['AL', 'AT', 'BA', 'BE', 'BG', 'CH', 'CZ', 'DE', 'DK', 'EE', 'ES', 'FI', 'FR', 'GB', 'GR', 'HR', 'HU', 'IE', 'IT', 'LT', 'LU', 'LV', 'ME', 'MK', 'NL', 'NO', 'PL', 'PT', 'RO', 'RS', 'SE', 'SI', 'SK']
snapshots:
start: "2013-01-01"
end: "2014-01-01"
closed: 'left' # end is not inclusive
enable:
prepare_links_p_nom: false
retrieve_databundle: true
build_cutout: false
retrieve_cutout: true
build_natura_raster: false
retrieve_natura_raster: true
custom_busmap: false
clustering:
algorithm:
name: kmeans #kmeans
feature: coordinates #feature not supported yet
electricity:
voltages: [220., 300., 380.]
co2limit: 7.75e+7 # 0.05 * 3.1e9*0.5
co2base: 1.487e+9
agg_p_nom_limits: data/agg_p_nom_minmax.csv
extendable_carriers:
Generator: []
StorageUnit: [] # battery, H2
Store: [battery, H2]
Link: []
max_hours:
battery: 6
H2: 168
powerplants_filter: false # use pandas query strings here, e.g. Country not in ['Germany']
custom_powerplants: false # use pandas query strings here, e.g. Country in ['Germany']
conventional_carriers: [nuclear, oil, OCGT, CCGT, coal, lignite, geothermal, biomass]
renewable_capacities_from_OPSD: [] # onwind, offwind, solar
# estimate_renewable_capacities_from_capacity_stats:
# # Wind is the Fueltype in ppm.data.Capacity_stats, onwind, offwind-{ac,dc} the carrier in PyPSA-Eur
# Wind: [onwind, offwind-ac, offwind-dc]
# Solar: [solar]
atlite:
nprocesses: 4
cutouts:
# use 'base' to determine geographical bounds and time span from config
# base:
# module: era5
europe-2013-era5:
module: era5 # in priority order
x: [-12., 35.]
y: [33., 72]
dx: 0.3
dy: 0.3
time: ['2013', '2013']
europe-2013-sarah:
module: [sarah, era5] # in priority order
x: [-12., 45.]
y: [33., 65]
dx: 0.2
dy: 0.2
time: ['2013', '2013']
sarah_interpolate: false
sarah_dir:
features: [influx, temperature]
renewable:
onwind:
cutout: europe-2013-era5
resource:
method: wind
turbine: Vestas_V112_3MW
capacity_per_sqkm: 3 # ScholzPhd Tab 4.3.1: 10MW/km^2
# correction_factor: 0.93
corine:
# Scholz, Y. (2012). Renewable energy based electricity supply at low costs:
# development of the REMix model and application for Europe. ( p.42 / p.28)
grid_codes: [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
24, 25, 26, 27, 28, 29, 31, 32]
distance: 1000
distance_grid_codes: [1, 2, 3, 4, 5, 6]
natura: true
potential: simple # or conservative
clip_p_max_pu: 1.e-2
offwind-ac:
cutout: europe-2013-era5
resource:
method: wind
turbine: NREL_ReferenceTurbine_5MW_offshore
capacity_per_sqkm: 3
# correction_factor: 0.93
corine: [44, 255]
natura: true
max_depth: 50
max_shore_distance: 30000
potential: simple # or conservative
clip_p_max_pu: 1.e-2
offwind-dc:
cutout: europe-2013-era5
resource:
method: wind
turbine: NREL_ReferenceTurbine_5MW_offshore
# ScholzPhd Tab 4.3.1: 10MW/km^2
capacity_per_sqkm: 3
# correction_factor: 0.93
corine: [44, 255]
natura: true
max_depth: 50
min_shore_distance: 30000
potential: simple # or conservative
clip_p_max_pu: 1.e-2
solar:
cutout: europe-2013-sarah
resource:
method: pv
panel: CSi
orientation:
slope: 35.
azimuth: 180.
capacity_per_sqkm: 1.7 # ScholzPhd Tab 4.3.1: 170 MW/km^2
# Determined by comparing uncorrected area-weighted full-load hours to those
# published in Supplementary Data to
# Pietzcker, Robert Carl, et al. "Using the sun to decarbonize the power
# sector: The economic potential of photovoltaics and concentrating solar
# power." Applied Energy 135 (2014): 704-720.
correction_factor: 0.854337
corine: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
14, 15, 16, 17, 18, 19, 20, 26, 31, 32]
natura: true
potential: simple # or conservative
clip_p_max_pu: 1.e-2
hydro:
cutout: europe-2013-era5
carriers: [ror, PHS, hydro]
PHS_max_hours: 6
hydro_max_hours: "energy_capacity_totals_by_country" # one of energy_capacity_totals_by_country, estimate_by_large_installations or a float
clip_min_inflow: 1.0
lines:
types:
220.: "Al/St 240/40 2-bundle 220.0"
300.: "Al/St 240/40 3-bundle 300.0"
380.: "Al/St 240/40 4-bundle 380.0"
s_max_pu: 0.7
s_nom_max: .inf
length_factor: 1.25
under_construction: 'zero' # 'zero': set capacity to zero, 'remove': remove, 'keep': with full capacity
links:
p_max_pu: 1.0
p_nom_max: .inf
include_tyndp: true
under_construction: 'zero' # 'zero': set capacity to zero, 'remove': remove, 'keep': with full capacity
transformers:
x: 0.1
s_nom: 2000.
type: ''
load:
url: https://data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv
power_statistics: True # only for files from <2019; set false in order to get ENTSOE transparency data
interpolate_limit: 3 # data gaps up until this size are interpolated linearly
time_shift_for_large_gaps: 1w # larger data gaps are filled by copying data time-shifted by this period (e.g. from the previous week)
manual_adjustments: true # false
scaling_factor: 1.0
costs:
year: 2030
discountrate: 0.07 # From a Lion Hirth paper, also reflects average of Noothout et al 2016
USD2013_to_EUR2013: 0.7532 # [EUR/USD] ECB: https://www.ecb.europa.eu/stats/exchange/eurofxref/html/eurofxref-graph-usd.en.html
marginal_cost: # EUR/MWh
solar: 0.01
onwind: 0.015
offwind: 0.015
hydro: 0.
H2: 0.
electrolysis: 0.
fuel cell: 0.
battery: 0.
battery inverter: 0.
emission_prices: # in currency per tonne emission, only used with the option Ep
co2: 0.
solving:
options:
formulation: kirchhoff
load_shedding: true
noisy_costs: true
min_iterations: 4
max_iterations: 6
clip_p_max_pu: 0.01
skip_iterations: false
track_iterations: false
#nhours: 10
solver:
name: gurobi
threads: 4
method: 2 # barrier
crossover: 0
BarConvTol: 1.e-5
FeasibilityTol: 1.e-6
AggFill: 0
PreDual: 0
GURO_PAR_BARDENSETHRESH: 200
# solver:
# name: cplex
# threads: 4
# lpmethod: 4 # barrier
# solutiontype: 2 # non basic solution, ie no crossover
# barrier_convergetol: 1.e-5
# feasopt_tolerance: 1.e-6
plotting:
map:
figsize: [7, 7]
boundaries: [-10.2, 29, 35, 72]
p_nom:
bus_size_factor: 5.e+4
linewidth_factor: 3.e+3
costs_max: 80
costs_threshold: 1
energy_max: 15000.
energy_min: -10000.
energy_threshold: 50.
vre_techs: ["onwind", "offwind-ac", "offwind-dc", "solar", "ror"]
conv_techs: ["OCGT", "CCGT", "Nuclear", "Coal"]
storage_techs: ["hydro+PHS", "battery", "H2"]
load_carriers: ["AC load"]
AC_carriers: ["AC line", "AC transformer"]
link_carriers: ["DC line", "Converter AC-DC"]
tech_colors:
"onwind" : "#235ebc"
"onshore wind" : "#235ebc"
'offwind' : "#6895dd"
'offwind-ac' : "#6895dd"
'offshore wind' : "#6895dd"
'offshore wind ac' : "#6895dd"
'offwind-dc' : "#74c6f2"
'offshore wind dc' : "#74c6f2"
"hydro" : "#08ad97"
"hydro+PHS" : "#08ad97"
"PHS" : "#08ad97"
"hydro reservoir" : "#08ad97"
'hydroelectricity' : '#08ad97'
"ror" : "#4adbc8"
"run of river" : "#4adbc8"
'solar' : "#f9d002"
'solar PV' : "#f9d002"
'solar thermal' : '#ffef60'
'biomass' : '#0c6013'
'solid biomass' : '#06540d'
'biogas' : '#23932d'
'waste' : '#68896b'
'geothermal' : '#ba91b1'
"OCGT" : "#d35050"
"gas" : "#d35050"
"natural gas" : "#d35050"
"CCGT" : "#b20101"
"nuclear" : "#ff9000"
"coal" : "#707070"
"lignite" : "#9e5a01"
"oil" : "#262626"
"H2" : "#ea048a"
"hydrogen storage" : "#ea048a"
"battery" : "#b8ea04"
"Electric load" : "#f9d002"
"electricity" : "#f9d002"
"lines" : "#70af1d"
"transmission lines" : "#70af1d"
"AC-AC" : "#70af1d"
"AC line" : "#70af1d"
"links" : "#8a1caf"
"HVDC links" : "#8a1caf"
"DC-DC" : "#8a1caf"
"DC link" : "#8a1caf"
nice_names:
OCGT: "Open-Cycle Gas"
CCGT: "Combined-Cycle Gas"
offwind-ac: "Offshore Wind (AC)"
offwind-dc: "Offshore Wind (DC)"
onwind: "Onshore Wind"
solar: "Solar"
PHS: "Pumped Hydro Storage"
hydro: "Reservoir & Dam"
battery: "Battery Storage"
H2: "Hydrogen Storage"
lines: "Transmission Lines"
ror: "Run of River"


@ -118,12 +118,7 @@ def _add_missing_carriers_from_costs(n, costs, carriers):
n.import_components_from_dataframe(emissions, 'Carrier')
def load_costs(Nyears=1., tech_costs=None, config=None, elec_config=None):
if tech_costs is None:
tech_costs = snakemake.input.tech_costs
if config is None:
config = snakemake.config['costs']
def load_costs(tech_costs, config, elec_config, Nyears=1.):
# set all asset costs and other parameters
costs = pd.read_csv(tech_costs, index_col=list(range(3))).sort_index()
@ -169,8 +164,6 @@ def load_costs(Nyears=1., tech_costs=None, config=None, elec_config=None):
marginal_cost=0.,
co2_emissions=0.))
if elec_config is None:
elec_config = snakemake.config['electricity']
max_hours = elec_config['max_hours']
costs.loc["battery"] = \
costs_for_storage(costs.loc["battery storage"], costs.loc["battery inverter"],
@ -188,9 +181,7 @@ def load_costs(Nyears=1., tech_costs=None, config=None, elec_config=None):
return costs
def load_powerplants(ppl_fn=None):
if ppl_fn is None:
ppl_fn = snakemake.input.powerplants
def load_powerplants(ppl_fn):
carrier_dict = {'ocgt': 'OCGT', 'ccgt': 'CCGT', 'bioenergy': 'biomass',
'ccgt, thermal': 'CCGT', 'hard coal': 'coal'}
return (pd.read_csv(ppl_fn, index_col=0, dtype={'bus': 'str'})
@ -199,18 +190,17 @@ def load_powerplants(ppl_fn=None):
.replace({'carrier': carrier_dict}))
def attach_load(n):
def attach_load(n, regions, load, nuts3_shapes, cntries = [], scaling = 1.):
substation_lv_i = n.buses.index[n.buses['substation_lv']]
regions = (gpd.read_file(snakemake.input.regions).set_index('name')
regions = (gpd.read_file(regions).set_index('name')
.reindex(substation_lv_i))
opsd_load = (pd.read_csv(snakemake.input.load, index_col=0, parse_dates=True)
.filter(items=snakemake.config['countries']))
opsd_load = (pd.read_csv(load, index_col=0, parse_dates=True)
.filter(items=cntries))
scaling = snakemake.config.get('load', {}).get('scaling_factor', 1.0)
logger.info(f"Load data scaled with scalling factor {scaling}.")
opsd_load *= scaling
nuts3 = gpd.read_file(snakemake.input.nuts3_shapes).set_index('index')
nuts3 = gpd.read_file(nuts3_shapes).set_index('index')
def upsample(cntry, group):
l = opsd_load[cntry]
@ -263,18 +253,20 @@ def update_transmission_costs(n, costs, length_factor=1.0, simple_hvdc_costs=Fal
n.links.loc[dc_b, 'capital_cost'] = costs
def attach_wind_and_solar(n, costs):
for tech in snakemake.config['renewable']:
def attach_wind_and_solar(n, costs, input_profiles,
technologies = ['onwind', 'offwind-ac', 'offwind-dc', 'solar'],
line_length_factor = 1.):
for tech in technologies:
if tech == 'hydro': continue
n.add("Carrier", name=tech)
with xr.open_dataset(getattr(snakemake.input, 'profile_' + tech)) as ds:
with xr.open_dataset(getattr(input_profiles, 'profile_' + tech)) as ds:
if ds.indexes['bus'].empty: continue
suptech = tech.split('-', 2)[0]
if suptech == 'offwind':
underwater_fraction = ds['underwater_fraction'].to_pandas()
connection_cost = (snakemake.config['lines']['length_factor'] *
connection_cost = (line_length_factor *
ds['average_distance'].to_pandas() *
(underwater_fraction *
costs.at[tech + '-connection-submarine', 'capital_cost'] +
@ -300,8 +292,8 @@ def attach_wind_and_solar(n, costs):
p_max_pu=ds['profile'].transpose('time', 'bus').to_pandas())
def attach_conventional_generators(n, costs, ppl):
carriers = snakemake.config['electricity']['conventional_carriers']
def attach_conventional_generators(n, costs, ppl, carriers=['nuclear', 'oil', 'OCGT', 'CCGT',
'coal', 'lignite', 'geothermal', 'biomass']):
_add_missing_carriers_from_costs(n, costs, carriers)
@ -322,10 +314,9 @@ def attach_conventional_generators(n, costs, ppl):
logger.warning('Capital costs for conventional generators set to 0 EUR/MW.')
def attach_hydro(n, costs, ppl):
if 'hydro' not in snakemake.config['renewable']: return
c = snakemake.config['renewable']['hydro']
carriers = c.get('carriers', ['ror', 'PHS', 'hydro'])
def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities,
config_hydro = {'carriers': {'ror', 'PHS', 'hydro'}}):
carriers = config_hydro.get('carriers', ['ror', 'PHS', 'hydro'])
_add_missing_carriers_from_costs(n, costs, carriers)
@ -341,11 +332,11 @@ def attach_hydro(n, costs, ppl):
if not inflow_idx.empty:
dist_key = ppl.loc[inflow_idx, 'p_nom'].groupby(country).transform(normed)
with xr.open_dataarray(snakemake.input.profile_hydro) as inflow:
with xr.open_dataarray(profile_hydro) as inflow:
inflow_countries = pd.Index(country[inflow_idx])
missing_c = (inflow_countries.unique()
.difference(inflow.indexes['countries']))
assert missing_c.empty, (f"'{snakemake.input.profile_hydro}' is missing "
assert missing_c.empty, (f"'{profile_hydro}' is missing "
f"inflow time-series for at least one country: {', '.join(missing_c)}")
inflow_t = (inflow.sel(countries=inflow_countries)
@ -370,7 +361,7 @@ def attach_hydro(n, costs, ppl):
if 'PHS' in carriers and not phs.empty:
# fill missing max hours to config value and
# assume no natural inflow due to lack of data
phs = phs.replace({'max_hours': {0: c['PHS_max_hours']}})
phs = phs.replace({'max_hours': {0: config_hydro['PHS_max_hours']}})
n.madd('StorageUnit', phs.index,
carrier='PHS',
bus=phs['bus'],
@ -382,8 +373,8 @@ def attach_hydro(n, costs, ppl):
cyclic_state_of_charge=True)
if 'hydro' in carriers and not hydro.empty:
hydro_max_hours = c.get('hydro_max_hours')
hydro_stats = pd.read_csv(snakemake.input.hydro_capacities,
hydro_max_hours = config_hydro.get('hydro_max_hours')
hydro_stats = pd.read_csv(hydro_capacities,
comment="#", na_values='-', index_col=0)
e_target = hydro_stats["E_store[TWh]"].clip(lower=0.2) * 1e6
e_installed = hydro.eval('p_nom * max_hours').groupby(hydro.country).sum()
@ -412,7 +403,7 @@ def attach_hydro(n, costs, ppl):
p_nom=hydro['p_nom'],
max_hours=hydro_max_hours,
capital_cost=(costs.at['hydro', 'capital_cost']
if c.get('hydro_capital_cost') else 0.),
if config_hydro.get('hydro_capital_cost') else 0.),
marginal_cost=costs.at['hydro', 'marginal_cost'],
p_max_pu=1., # dispatch
p_min_pu=0., # store
@ -422,8 +413,7 @@ def attach_hydro(n, costs, ppl):
inflow=inflow_t.loc[:, hydro.index])
def attach_extendable_generators(n, costs, ppl):
elec_opts = snakemake.config['electricity']
def attach_extendable_generators(n, costs, ppl, elec_opts = {'extendable_carriers': {'Generator': []}}):
carriers = pd.Index(elec_opts['extendable_carriers']['Generator'])
_add_missing_carriers_from_costs(n, costs, carriers)
@ -472,12 +462,11 @@ def attach_extendable_generators(n, costs, ppl):
def attach_OPSD_renewables(n):
def attach_OPSD_renewables(n, techs=[]):
available = ['DE', 'FR', 'PL', 'CH', 'DK', 'CZ', 'SE', 'GB']
tech_map = {'Onshore': 'onwind', 'Offshore': 'offwind', 'Solar': 'solar'}
countries = set(available) & set(n.buses.country)
techs = snakemake.config['electricity'].get('renewable_capacities_from_OPSD', [])
tech_map = {k: v for k, v in tech_map.items() if v in techs}
if not tech_map:
@ -505,10 +494,7 @@ def attach_OPSD_renewables(n):
def estimate_renewable_capacities(n, tech_map=None):
if tech_map is None:
tech_map = (snakemake.config['electricity']
.get('estimate_renewable_capacities_from_capacity_stats', {}))
def estimate_renewable_capacities(n, tech_map={}):
if len(tech_map) == 0: return
@ -540,8 +526,7 @@ def estimate_renewable_capacities(n, tech_map=None):
n.generators.loc[tech_i, 'p_nom_min'] = n.generators.loc[tech_i, 'p_nom']
def add_nice_carrier_names(n, config=None):
if config is None: config = snakemake.config
def add_nice_carrier_names(n, config):
carrier_i = n.carriers.index
nice_names = (pd.Series(config['plotting']['nice_names'])
.reindex(carrier_i).fillna(carrier_i.to_series().str.title()))
@ -563,22 +548,32 @@ if __name__ == "__main__":
n = pypsa.Network(snakemake.input.base_network)
Nyears = n.snapshot_weightings.objective.sum() / 8760.
costs = load_costs(Nyears)
ppl = load_powerplants()
costs = load_costs(tech_costs = snakemake.input.tech_costs, config = snakemake.config['costs'],
elec_config = snakemake.config['electricity'], Nyears = Nyears)
ppl = load_powerplants(snakemake.input.powerplants)
attach_load(n)
attach_load(n, regions = snakemake.input.regions, load = snakemake.input.load,
nuts3_shapes = snakemake.input.nuts3_shapes,
cntries = snakemake.config['countries'],
scaling = snakemake.config.get('load', {}).get('scaling_factor', 1.0))
update_transmission_costs(n, costs)
attach_conventional_generators(n, costs, ppl)
attach_wind_and_solar(n, costs)
attach_hydro(n, costs, ppl)
attach_extendable_generators(n, costs, ppl)
attach_conventional_generators(n, costs, ppl, carriers = snakemake.config['electricity']['conventional_carriers'])
attach_wind_and_solar(n, costs, snakemake.input, technologies = snakemake.config['renewable'],
line_length_factor = snakemake.config['lines']['length_factor'])
estimate_renewable_capacities(n)
attach_OPSD_renewables(n)
if 'hydro' in snakemake.config['renewable']:
attach_hydro(n, costs, ppl, snakemake.input.profile_hydro, snakemake.input.hydro_capacities,
config_hydro = snakemake.config['renewable']['hydro'])
attach_extendable_generators(n, costs, ppl, elec_opts = snakemake.config['electricity'])
estimate_renewable_capacities(n, tech_map = (snakemake.config['electricity']
.get('estimate_renewable_capacities_from_capacity_stats', {})))
attach_OPSD_renewables(n, techs = snakemake.config['electricity'].get('renewable_capacities_from_OPSD', []))
update_p_nom_max(n)
add_nice_carrier_names(n)
add_nice_carrier_names(n, config = snakemake.config)
n.export_to_netcdf(snakemake.output[0])
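Note that attach_wind_and_solar now takes an input_profiles object and resolves getattr(input_profiles, 'profile_' + tech) on it; inside the workflow that object is simply snakemake.input. Any object exposing the same attributes works, which is what makes the function usable without Snakemake. A sketch with types.SimpleNamespace and illustrative file paths:

from types import SimpleNamespace

# Stand-in for snakemake.input: any object with profile_<tech> attributes,
# here pointing at (illustrative) renewable profile NetCDF files.
input_profiles = SimpleNamespace(
    profile_onwind="resources/profile_onwind.nc",
    profile_solar="resources/profile_solar.nc",
)

# attach_wind_and_solar(n, costs, input_profiles,
#                       technologies=['onwind', 'solar'],
#                       line_length_factor=1.25)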


@ -64,8 +64,7 @@ idx = pd.IndexSlice
logger = logging.getLogger(__name__)
def attach_storageunits(n, costs):
elec_opts = snakemake.config['electricity']
def attach_storageunits(n, costs, elec_opts = {'extendable_carriers': {'StorageUnit': []}, 'max_hours': {'battery': 6, 'H2': 168}}):
carriers = elec_opts['extendable_carriers']['StorageUnit']
max_hours = elec_opts['max_hours']
@ -89,8 +88,7 @@ def attach_storageunits(n, costs):
cyclic_state_of_charge=True)
def attach_stores(n, costs):
elec_opts = snakemake.config['electricity']
def attach_stores(n, costs, elec_opts = {'extendable_carriers': {'Store': ['battery', 'H2']}}):
carriers = elec_opts['extendable_carriers']['Store']
_add_missing_carriers_from_costs(n, costs, carriers)
@ -156,8 +154,7 @@ def attach_stores(n, costs):
marginal_cost=costs.at["battery inverter", "marginal_cost"])
def attach_hydrogen_pipelines(n, costs):
elec_opts = snakemake.config['electricity']
def attach_hydrogen_pipelines(n, costs, elec_opts = {'extendable_carriers': {'Store': ['H2', 'battery']}}):
ext_carriers = elec_opts['extendable_carriers']
as_stores = ext_carriers.get('Store', [])
@ -198,13 +195,13 @@ if __name__ == "__main__":
n = pypsa.Network(snakemake.input.network)
Nyears = n.snapshot_weightings.objective.sum() / 8760.
costs = load_costs(Nyears, tech_costs=snakemake.input.tech_costs,
config=snakemake.config['costs'],
elec_config=snakemake.config['electricity'])
costs = load_costs(tech_costs = snakemake.input.tech_costs,
config = snakemake.config['costs'],
elec_config = snakemake.config['electricity'], Nyears = Nyears)
attach_storageunits(n, costs)
attach_stores(n, costs)
attach_hydrogen_pipelines(n, costs)
attach_storageunits(n, costs, elec_opts = snakemake.config['electricity'])
attach_stores(n, costs, elec_opts = snakemake.config['electricity'])
attach_hydrogen_pipelines(n, costs, elec_opts = snakemake.config['electricity'])
add_nice_carrier_names(n, config=snakemake.config)


@ -84,11 +84,10 @@ from scipy.spatial import cKDTree as KDTree
logger = logging.getLogger(__name__)
def add_custom_powerplants(ppl):
custom_ppl_query = snakemake.config['electricity']['custom_powerplants']
def add_custom_powerplants(ppl, custom_powerplants, custom_ppl_query=False):
if not custom_ppl_query:
return ppl
add_ppls = pd.read_csv(snakemake.input.custom_powerplants, index_col=0,
add_ppls = pd.read_csv(custom_powerplants, index_col=0,
dtype={'bus': 'str'})
if isinstance(custom_ppl_query, str):
add_ppls.query(custom_ppl_query, inplace=True)
@ -119,7 +118,9 @@ if __name__ == "__main__":
if isinstance(ppl_query, str):
ppl.query(ppl_query, inplace=True)
ppl = add_custom_powerplants(ppl) # add carriers from own powerplant files
# add carriers from own powerplant files:
ppl = add_custom_powerplants(ppl, custom_powerplants = snakemake.input.custom_powerplants,
custom_ppl_query = snakemake.config['electricity']['custom_powerplants'])
cntries_without_ppl = [c for c in countries if c not in ppl.Country.unique()]


@ -107,11 +107,10 @@ def _simplify_polys(polys, minarea=0.1, tolerance=0.01, filterremote=True):
return polys.simplify(tolerance=tolerance)
def countries():
cntries = snakemake.config['countries']
def countries(naturalearth, cntries=[]):
if 'RS' in cntries: cntries.append('KV')
df = gpd.read_file(snakemake.input.naturalearth)
df = gpd.read_file(naturalearth)
# Names are a hassle in naturalearth, try several fields
fieldnames = (df[x].where(lambda s: s!='-99') for x in ('ISO_A2', 'WB_A2', 'ADM0_A3'))
@ -124,9 +123,9 @@ def countries():
return s
def eez(country_shapes):
df = gpd.read_file(snakemake.input.eez)
df = df.loc[df['ISO_3digit'].isin([_get_country('alpha_3', alpha_2=c) for c in snakemake.config['countries']])]
def eez(country_shapes, eez, cntries=[]):
df = gpd.read_file(eez)
df = df.loc[df['ISO_3digit'].isin([_get_country('alpha_3', alpha_2=c) for c in cntries])]
df['name'] = df['ISO_3digit'].map(lambda c: _get_country('alpha_2', alpha_3=c))
s = df.set_index('name').geometry.map(lambda s: _simplify_polys(s, filterremote=False))
s = gpd.GeoSeries({k:v for k,v in s.iteritems() if v.distance(country_shapes[k]) < 1e-3})
@ -145,29 +144,29 @@ def country_cover(country_shapes, eez_shapes=None):
return Polygon(shell=europe_shape.exterior)
def nuts3(country_shapes):
df = gpd.read_file(snakemake.input.nuts3)
def nuts3(country_shapes, nuts3, nuts3pop, nuts3gdp, ch_cantons, ch_popgdp):
df = gpd.read_file(nuts3)
df = df.loc[df['STAT_LEVL_'] == 3]
df['geometry'] = df['geometry'].map(_simplify_polys)
df = df.rename(columns={'NUTS_ID': 'id'})[['id', 'geometry']].set_index('id')
pop = pd.read_table(snakemake.input.nuts3pop, na_values=[':'], delimiter=' ?\t', engine='python')
pop = pd.read_table(nuts3pop, na_values=[':'], delimiter=' ?\t', engine='python')
pop = (pop
.set_index(pd.MultiIndex.from_tuples(pop.pop('unit,geo\\time').str.split(','))).loc['THS']
.applymap(lambda x: pd.to_numeric(x, errors='coerce'))
.fillna(method='bfill', axis=1))['2014']
gdp = pd.read_table(snakemake.input.nuts3gdp, na_values=[':'], delimiter=' ?\t', engine='python')
gdp = pd.read_table(nuts3gdp, na_values=[':'], delimiter=' ?\t', engine='python')
gdp = (gdp
.set_index(pd.MultiIndex.from_tuples(gdp.pop('unit,geo\\time').str.split(','))).loc['EUR_HAB']
.applymap(lambda x: pd.to_numeric(x, errors='coerce'))
.fillna(method='bfill', axis=1))['2014']
cantons = pd.read_csv(snakemake.input.ch_cantons)
cantons = pd.read_csv(ch_cantons)
cantons = cantons.set_index(cantons['HASC'].str[3:])['NUTS']
cantons = cantons.str.pad(5, side='right', fillchar='0')
swiss = pd.read_excel(snakemake.input.ch_popgdp, skiprows=3, index_col=0)
swiss = pd.read_excel(ch_popgdp, skiprows=3, index_col=0)
swiss.columns = swiss.columns.to_series().map(cantons)
pop = pop.append(pd.to_numeric(swiss.loc['Residents in 1000', 'CH040':]))
@ -220,14 +219,16 @@ if __name__ == "__main__":
out = snakemake.output
country_shapes = countries()
country_shapes = countries(snakemake.input.naturalearth, snakemake.config['countries'])
save_to_geojson(country_shapes, out.country_shapes)
offshore_shapes = eez(country_shapes)
offshore_shapes = eez(country_shapes, snakemake.input.eez, cntries=snakemake.config['countries'])
save_to_geojson(offshore_shapes, out.offshore_shapes)
europe_shape = country_cover(country_shapes, offshore_shapes)
save_to_geojson(gpd.GeoSeries(europe_shape), out.europe_shape)
nuts3_shapes = nuts3(country_shapes)
nuts3_shapes = nuts3(country_shapes, snakemake.input.nuts3, snakemake.input.nuts3pop,
snakemake.input.nuts3gdp, snakemake.input.ch_cantons, snakemake.input.ch_popgdp)
save_to_geojson(nuts3_shapes, out.nuts3_shapes)


@ -170,12 +170,9 @@ def weighting_for_country(n, x):
return (w * (100. / w.max())).clip(lower=1.).astype(int)
def distribute_clusters(n, n_clusters, focus_weights=None, solver_name=None):
def distribute_clusters(n, n_clusters, focus_weights=None, solver_name="cbc"):
"""Determine the number of clusters per country"""
if solver_name is None:
solver_name = snakemake.config['solving']['solver']['name']
L = (n.loads_t.p_set.mean()
.groupby(n.loads.bus).sum()
.groupby([n.buses.country, n.buses.sub_network]).sum()
@ -268,12 +265,10 @@ def clustering_for_n_clusters(n, n_clusters, custom_busmap=False, aggregate_carr
else:
raise AttributeError(f"potential_mode should be one of 'simple' or 'conservative' but is '{potential_mode}'")
if custom_busmap:
busmap = pd.read_csv(snakemake.input.custom_busmap, index_col=0, squeeze=True)
busmap.index = busmap.index.astype(str)
logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}")
else:
if custom_busmap is False:
busmap = busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights, algorithm)
else:
busmap = custom_busmap
clustering = get_clustering_from_busmap(
n, busmap,
@ -306,8 +301,6 @@ def save_to_geojson(s, fn):
def cluster_regions(busmaps, input=None, output=None):
if input is None: input = snakemake.input
if output is None: output = snakemake.output
busmap = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0])
@ -358,10 +351,9 @@ if __name__ == "__main__":
else:
line_length_factor = snakemake.config['lines']['length_factor']
Nyears = n.snapshot_weightings.objective.sum()/8760
hvac_overhead_cost = (load_costs(Nyears,
tech_costs=snakemake.input.tech_costs,
config=snakemake.config['costs'],
elec_config=snakemake.config['electricity'])
hvac_overhead_cost = (load_costs(tech_costs = snakemake.input.tech_costs,
config = snakemake.config['costs'],
elec_config=snakemake.config['electricity'], Nyears = Nyears)
.at['HVAC overhead', 'capital_cost'])
def consense(x):
@ -373,6 +365,10 @@ if __name__ == "__main__":
potential_mode = consense(pd.Series([snakemake.config['renewable'][tech]['potential']
for tech in renewable_carriers]))
custom_busmap = snakemake.config["enable"].get("custom_busmap", False)
if custom_busmap:
custom_busmap = pd.read_csv(snakemake.input.custom_busmap, index_col=0, squeeze=True)
custom_busmap.index = custom_busmap.index.astype(str)
logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}")
clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap, aggregate_carriers,
line_length_factor=line_length_factor,
potential_mode=potential_mode,
@ -386,4 +382,4 @@ if __name__ == "__main__":
for attr in ('busmap', 'linemap'): #also available: linemap_positive, linemap_negative
getattr(clustering, attr).to_csv(snakemake.output[attr])
cluster_regions((clustering.busmap,))
cluster_regions((clustering.busmap,), snakemake.input, snakemake.output)
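clustering_for_n_clusters no longer reads the custom busmap file itself; the __main__ block loads it (or passes False) and the function only branches on the value. A sketch of preparing such a busmap outside the workflow (file path illustrative; the read mirrors the __main__ block above, newer pandas would use .squeeze() instead of squeeze=True):

import pandas as pd

# A custom busmap maps original bus IDs (index) to cluster labels (values).
custom_busmap = pd.read_csv("data/custom_busmap.csv", index_col=0, squeeze=True)
custom_busmap.index = custom_busmap.index.astype(str)

# clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap, aggregate_carriers,
#                                        line_length_factor=line_length_factor)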


@ -403,8 +403,8 @@ def make_summaries(networks_dict, country='all'):
n = n[n.buses.country == country]
Nyears = n.snapshot_weightings.objective.sum() / 8760.
costs = load_costs(Nyears, snakemake.input[0],
snakemake.config['costs'], snakemake.config['electricity'])
costs = load_costs(tech_costs = snakemake.input[0], config = snakemake.config['costs'],
elec_config = snakemake.config['electricity'], Nyears = Nyears)
update_transmission_costs(n, costs, simple_hvdc_costs=False)
assign_carriers(n)
@ -415,8 +415,7 @@ def make_summaries(networks_dict, country='all'):
return dfs
def to_csv(dfs):
dir = snakemake.output[0]
def to_csv(dfs, dir):
os.makedirs(dir, exist_ok=True)
for key, df in dfs.items():
df.to_csv(os.path.join(dir, f"{key}.csv"))
@ -453,4 +452,4 @@ if __name__ == "__main__":
dfs = make_summaries(networks_dict, country=snakemake.wildcards.country)
to_csv(dfs)
to_csv(dfs, snakemake.output[0])
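With the output directory passed in, the summary writer is an ordinary function. A small usage sketch (the function body is the one from the diff above; the directory name and dummy table are illustrative):

import os
import pandas as pd

def to_csv(dfs, dir):
    os.makedirs(dir, exist_ok=True)
    for key, df in dfs.items():
        df.to_csv(os.path.join(dir, f"{key}.csv"))

to_csv({"costs": pd.DataFrame({"value": [1.0]})}, "results/summaries")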


@ -70,21 +70,14 @@ idx = pd.IndexSlice
logger = logging.getLogger(__name__)
def add_co2limit(n, Nyears=1., factor=None):
if factor is not None:
annual_emissions = factor*snakemake.config['electricity']['co2base']
else:
annual_emissions = snakemake.config['electricity']['co2limit']
def add_co2limit(n, co2limit=1.487e+9, Nyears=1.):
n.add("GlobalConstraint", "CO2Limit",
carrier_attribute="co2_emissions", sense="<=",
constant=annual_emissions * Nyears)
constant=co2limit * Nyears)
def add_emission_prices(n, emission_prices=None, exclude_co2=False):
if emission_prices is None:
emission_prices = snakemake.config['costs']['emission_prices']
def add_emission_prices(n, emission_prices={'co2': 0.}, exclude_co2=False):
if exclude_co2: emission_prices.pop('co2')
ep = (pd.Series(emission_prices).rename(lambda x: x+'_emissions') *
n.carriers.filter(like='_emissions')).sum(axis=1)
@ -94,13 +87,12 @@ def add_emission_prices(n, emission_prices=None, exclude_co2=False):
n.storage_units['marginal_cost'] += su_ep
def set_line_s_max_pu(n):
s_max_pu = snakemake.config['lines']['s_max_pu']
def set_line_s_max_pu(n, s_max_pu = 0.7):
n.lines['s_max_pu'] = s_max_pu
logger.info(f"N-1 security margin of lines set to {s_max_pu}")
def set_transmission_limit(n, ll_type, factor, Nyears=1):
def set_transmission_limit(n, ll_type, factor, costs, Nyears=1):
links_dc_b = n.links.carrier == 'DC' if not n.links.empty else pd.Series()
_lines_s_nom = (np.sqrt(3) * n.lines.type.map(n.line_types.i_nom) *
@ -112,9 +104,6 @@ def set_transmission_limit(n, ll_type, factor, Nyears=1):
ref = (lines_s_nom @ n.lines[col] +
n.links.loc[links_dc_b, "p_nom"] @ n.links.loc[links_dc_b, col])
costs = load_costs(Nyears, snakemake.input.tech_costs,
snakemake.config['costs'],
snakemake.config['electricity'])
update_transmission_costs(n, costs, simple_hvdc_costs=False)
if factor == 'opt' or float(factor) > 1.0:
@ -151,7 +140,7 @@ def average_every_nhours(n, offset):
return m
def apply_time_segmentation(n, segments):
def apply_time_segmentation(n, segments, solver_name="cplex"):
logger.info(f"Aggregating time series to {segments} segments.")
try:
import tsam.timeseriesaggregation as tsam
@ -170,8 +159,6 @@ def apply_time_segmentation(n, segments):
raw = pd.concat([p_max_pu, load, inflow], axis=1, sort=False)
solver_name = snakemake.config["solving"]["solver"]["name"]
agg = tsam.TimeSeriesAggregation(raw, hoursPerPeriod=len(raw),
noTypicalPeriods=1, noSegments=int(segments),
segmentation=True, solver=solver_name)
@ -208,9 +195,7 @@ def enforce_autarky(n, only_crossborder=False):
n.mremove("Line", lines_rm)
n.mremove("Link", links_rm)
def set_line_nom_max(n):
s_nom_max_set = snakemake.config["lines"].get("s_nom_max,", np.inf)
p_nom_max_set = snakemake.config["links"].get("p_nom_max", np.inf)
def set_line_nom_max(n, s_nom_max_set=np.inf, p_nom_max_set=np.inf):
n.lines.s_nom_max.clip(upper=s_nom_max_set, inplace=True)
n.links.p_nom_max.clip(upper=p_nom_max_set, inplace=True)
@ -225,8 +210,11 @@ if __name__ == "__main__":
n = pypsa.Network(snakemake.input[0])
Nyears = n.snapshot_weightings.objective.sum() / 8760.
costs = load_costs(tech_costs = snakemake.input.tech_costs,
config = snakemake.config['costs'],
elec_config = snakemake.config['electricity'], Nyears = Nyears)
set_line_s_max_pu(n)
set_line_s_max_pu(n, s_max_pu=snakemake.config['lines']['s_max_pu'])
for o in opts:
m = re.match(r'^\d+h$', o, re.IGNORECASE)
@ -237,16 +225,17 @@ if __name__ == "__main__":
for o in opts:
m = re.match(r'^\d+seg$', o, re.IGNORECASE)
if m is not None:
n = apply_time_segmentation(n, m.group(0)[:-3])
n = apply_time_segmentation(n, m.group(0)[:-3], solver_name=snakemake.config["solving"]["solver"]["name"])
break
for o in opts:
if "Co2L" in o:
m = re.findall("[0-9]*\.?[0-9]+$", o)
if len(m) > 0:
add_co2limit(n, Nyears, float(m[0]))
co2limit = float(m[0]) * snakemake.config['electricity']['co2base']
add_co2limit(n, co2limit, Nyears)
else:
add_co2limit(n, Nyears)
add_co2limit(n, snakemake.config['electricity']['co2limit'], Nyears)
break
for o in opts:
@ -267,12 +256,13 @@ if __name__ == "__main__":
c.df.loc[sel,attr] *= factor
if 'Ep' in opts:
add_emission_prices(n)
add_emission_prices(n, emission_prices=snakemake.config['costs']['emission_prices'])
ll_type, factor = snakemake.wildcards.ll[0], snakemake.wildcards.ll[1:]
set_transmission_limit(n, ll_type, factor, Nyears)
set_transmission_limit(n, ll_type, factor, costs, Nyears)
set_line_nom_max(n)
set_line_nom_max(n, s_nom_max_set=snakemake.config["lines"].get("s_nom_max", np.inf),
p_nom_max_set=snakemake.config["links"].get("p_nom_max", np.inf))
if "ATK" in opts:
enforce_autarky(n)
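add_co2limit now receives the absolute emission cap; translating the Co2L wildcard into that cap happens in __main__. A small standalone check of that translation, using the co2base and co2limit values from the config above (the helper name is hypothetical, the regex is the one used in __main__):

import re

CO2BASE = 1.487e+9   # config['electricity']['co2base']
CO2LIMIT = 7.75e+7   # config['electricity']['co2limit']

def co2limit_from_opt(o, co2base=CO2BASE, co2limit=CO2LIMIT):
    # 'Co2L0.05' -> 0.05 * co2base; a bare 'Co2L' falls back to co2limit
    m = re.findall(r"[0-9]*\.?[0-9]+$", o)
    return float(m[0]) * co2base if m else co2limit

print(co2limit_from_opt("Co2L0.05"))   # 74350000.0 tCO2 per year
print(co2limit_from_opt("Co2L"))       # 77500000.0 tCO2 per year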


@ -138,13 +138,9 @@ def simplify_network_to_380(n):
return n, trafo_map
def _prepare_connection_costs_per_link(n):
def _prepare_connection_costs_per_link(n, costs):
if n.links.empty: return {}
Nyears = n.snapshot_weightings.objective.sum() / 8760
costs = load_costs(Nyears, snakemake.input.tech_costs,
snakemake.config['costs'], snakemake.config['electricity'])
connection_costs_per_link = {}
for tech in snakemake.config['renewable']:
@ -158,9 +154,9 @@ def _prepare_connection_costs_per_link(n):
return connection_costs_per_link
def _compute_connection_costs_to_bus(n, busmap, connection_costs_per_link=None, buses=None):
def _compute_connection_costs_to_bus(n, busmap, costs, connection_costs_per_link=None, buses=None):
if connection_costs_per_link is None:
connection_costs_per_link = _prepare_connection_costs_per_link(n)
connection_costs_per_link = _prepare_connection_costs_per_link(n, costs)
if buses is None:
buses = busmap.index[busmap.index != busmap.values]
@ -217,7 +213,7 @@ def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate
n.mremove(c, df.index[df.bus0.isin(buses_to_del) | df.bus1.isin(buses_to_del)])
def simplify_links(n):
def simplify_links(n, costs):
## Complex multi-node links are folded into end-points
logger.info("Simplifying connected link components")
@ -264,7 +260,7 @@ def simplify_links(n):
busmap = n.buses.index.to_series()
connection_costs_per_link = _prepare_connection_costs_per_link(n)
connection_costs_per_link = _prepare_connection_costs_per_link(n, costs)
connection_costs_to_bus = pd.DataFrame(0., index=n.buses.index, columns=list(connection_costs_per_link))
for lbl in labels.value_counts().loc[lambda s: s > 2].index:
@ -278,7 +274,7 @@ def simplify_links(n):
m = sp.spatial.distance_matrix(n.buses.loc[b, ['x', 'y']],
n.buses.loc[buses[1:-1], ['x', 'y']])
busmap.loc[buses] = b[np.r_[0, m.argmin(axis=0), 1]]
connection_costs_to_bus.loc[buses] += _compute_connection_costs_to_bus(n, busmap, connection_costs_per_link, buses)
connection_costs_to_bus.loc[buses] += _compute_connection_costs_to_bus(n, busmap, costs, connection_costs_per_link, buses)
all_links = [i for _, i in sum(links, [])]
@ -312,12 +308,12 @@ def simplify_links(n):
_aggregate_and_move_components(n, busmap, connection_costs_to_bus)
return n, busmap
def remove_stubs(n):
def remove_stubs(n, costs):
logger.info("Removing stubs")
busmap = busmap_by_stubs(n) # ['country'])
connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap)
connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap, costs)
_aggregate_and_move_components(n, busmap, connection_costs_to_bus)
@ -394,9 +390,14 @@ if __name__ == "__main__":
n, trafo_map = simplify_network_to_380(n)
n, simplify_links_map = simplify_links(n)
Nyears = n.snapshot_weightings.objective.sum() / 8760
technology_costs = load_costs(tech_costs = snakemake.input.tech_costs,
config = snakemake.config['costs'],
elec_config = snakemake.config['electricity'], Nyears = Nyears)
n, stub_map = remove_stubs(n)
n, simplify_links_map = simplify_links(n, technology_costs)
n, stub_map = remove_stubs(n, technology_costs)
busmaps = [trafo_map, simplify_links_map, stub_map]