use snakemake keywords directly without extracting them beforehand

martacki 2022-01-24 19:48:26 +01:00
parent c45691d305
commit 6cdf3a2879
16 changed files with 152 additions and 186 deletions
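In short, the commit deletes the retrieve_snakemake_keys helper from scripts/_helpers.py and has every script read the Snakemake keywords (snakemake.input, snakemake.config, snakemake.wildcards, snakemake.log, snakemake.output) directly. A minimal before/after sketch of the pattern, with names taken from the add_electricity hunk below:

    # before: unpack the snakemake object once, then work with loose variables
    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
    n = pypsa.Network(paths.base_network)
    n.export_to_netcdf(out[0])

    # after: use the snakemake keywords directly
    n = pypsa.Network(snakemake.input.base_network)
    n.export_to_netcdf(snakemake.output[0])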

scripts/_helpers.py

@@ -95,10 +95,6 @@ def pdbcast(v, h):
     return pd.DataFrame(v.values.reshape((-1, 1)) * h.values,
                         index=v.index, columns=h.index)
 
-def retrieve_snakemake_keys(snakemake):
-    return (snakemake.input, snakemake.config, snakemake.wildcards,
-            snakemake.log, snakemake.output)
-
 
 def load_network_for_plots(fn, tech_costs, config, combine_hydro_ps=True):
     import pypsa

scripts/add_electricity.py

@@ -84,7 +84,7 @@ It further adds extendable ``generators`` with **zero** capacity for
 """
 
 import logging
-from _helpers import configure_logging, retrieve_snakemake_keys, update_p_nom_max
+from _helpers import configure_logging, update_p_nom_max
 
 import pypsa
 import pandas as pd
@@ -544,40 +544,38 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('add_electricity')
     configure_logging(snakemake)
 
-    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
-
-    n = pypsa.Network(paths.base_network)
+    n = pypsa.Network(snakemake.input.base_network)
     Nyears = n.snapshot_weightings.objective.sum() / 8760.
 
-    costs = load_costs(paths.tech_costs, config['costs'], config['electricity'], Nyears)
-    ppl = load_powerplants(paths.powerplants)
+    costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears)
+    ppl = load_powerplants(snakemake.input.powerplants)
 
-    attach_load(n, paths.regions, paths.load, paths.nuts3_shapes, config['countries'],
-                config['load']['scaling_factor'])
+    attach_load(n, snakemake.input.regions, snakemake.input.load, snakemake.input.nuts3_shapes,
+                snakemake.config['countries'], snakemake.config['load']['scaling_factor'])
 
-    update_transmission_costs(n, costs, config['lines']['length_factor'])
+    update_transmission_costs(n, costs, snakemake.config['lines']['length_factor'])
 
-    carriers = config['electricity']['conventional_carriers']
+    carriers = snakemake.config['electricity']['conventional_carriers']
     attach_conventional_generators(n, costs, ppl, carriers)
 
-    carriers = config['renewable']
-    attach_wind_and_solar(n, costs, paths, carriers, config['lines']['length_factor'])
+    carriers = snakemake.config['renewable']
+    attach_wind_and_solar(n, costs, snakemake.input, carriers, snakemake.config['lines']['length_factor'])
 
-    if 'hydro' in config['renewable']:
-        carriers = config['renewable']['hydro'].pop('carriers', [])
-        attach_hydro(n, costs, ppl, paths.profile_hydro, paths.hydro_capacities,
-                     carriers, **config['renewable']['hydro'])
+    if 'hydro' in snakemake.config['renewable']:
+        carriers = snakemake.config['renewable']['hydro'].pop('carriers', [])
+        attach_hydro(n, costs, ppl, snakemake.input.profile_hydro, snakemake.input.hydro_capacities,
+                     carriers, **snakemake.config['renewable']['hydro'])
 
-    carriers = config['electricity']['extendable_carriers']['Generator']
+    carriers = snakemake.config['electricity']['extendable_carriers']['Generator']
     attach_extendable_generators(n, costs, ppl, carriers)
 
-    tech_map = config['electricity'].get('estimate_renewable_capacities_from_capacity_stats', {})
+    tech_map = snakemake.config['electricity'].get('estimate_renewable_capacities_from_capacity_stats', {})
     estimate_renewable_capacities(n, tech_map)
-    techs = config['electricity'].get('renewable_capacities_from_OPSD', [])
+    techs = snakemake.config['electricity'].get('renewable_capacities_from_OPSD', [])
     attach_OPSD_renewables(n, techs)
 
     update_p_nom_max(n)
 
-    add_nice_carrier_names(n, config)
+    add_nice_carrier_names(n, snakemake.config)
 
-    n.export_to_netcdf(out[0])
+    n.export_to_netcdf(snakemake.output[0])

scripts/add_extra_components.py

@@ -50,7 +50,7 @@ The rule :mod:`add_extra_components` attaches additional extendable components t
 - ``Stores`` of carrier 'H2' and/or 'battery' in combination with ``Links``. If this option is chosen, the script adds extra buses with corresponding carrier where energy ``Stores`` are attached and which are connected to the corresponding power buses via two links, one each for charging and discharging. This leads to three investment variables for the energy capacity, charging and discharging capacity of the storage unit.
 """
 import logging
-from _helpers import configure_logging, retrieve_snakemake_keys
+from _helpers import configure_logging
 
 import pypsa
 import pandas as pd
@@ -193,18 +193,16 @@ if __name__ == "__main__":
                                   simpl='', clusters=5)
     configure_logging(snakemake)
 
-    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
-
-    n = pypsa.Network(paths.network)
-    elec_config = config['electricity']
+    n = pypsa.Network(snakemake.input.network)
+    elec_config = snakemake.config['electricity']
 
     Nyears = n.snapshot_weightings.objective.sum() / 8760.
-    costs = load_costs(paths.tech_costs, config['costs'], elec_config, Nyears)
+    costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], elec_config, Nyears)
 
     attach_storageunits(n, costs, elec_config)
     attach_stores(n, costs, elec_config)
     attach_hydrogen_pipelines(n, costs, elec_config)
 
-    add_nice_carrier_names(n, config)
+    add_nice_carrier_names(n, snakemake.config)
 
-    n.export_to_netcdf(out[0])
+    n.export_to_netcdf(snakemake.output[0])

scripts/base_network.py

@@ -63,7 +63,7 @@ Description
 """
 
 import logging
-from _helpers import configure_logging, retrieve_snakemake_keys
+from _helpers import configure_logging
 
 import pypsa
 import yaml
@@ -588,10 +588,8 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('base_network')
     configure_logging(snakemake)
 
-    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
-
-    n = base_network(paths.eg_buses, paths.eg_converters, paths.eg_transformers, paths.eg_lines, paths.eg_links,
-                     paths.links_p_nom, paths.links_tyndp, paths.europe_shape, paths.country_shapes, paths.offshore_shapes,
-                     paths.parameter_corrections, config)
+    n = base_network(snakemake.input.eg_buses, snakemake.input.eg_converters, snakemake.input.eg_transformers, snakemake.input.eg_lines, snakemake.input.eg_links,
+                     snakemake.input.links_p_nom, snakemake.input.links_tyndp, snakemake.input.europe_shape, snakemake.input.country_shapes, snakemake.input.offshore_shapes,
+                     snakemake.input.parameter_corrections, snakemake.config)
 
-    n.export_to_netcdf(out[0])
+    n.export_to_netcdf(snakemake.output[0])

scripts/build_bus_regions.py

@@ -42,7 +42,7 @@ Description
 """
 
 import logging
-from _helpers import configure_logging, retrieve_snakemake_keys
+from _helpers import configure_logging
 
 import pypsa
 import os
@@ -67,14 +67,12 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('build_bus_regions')
     configure_logging(snakemake)
 
-    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
-
-    countries = config['countries']
+    countries = snakemake.config['countries']
 
-    n = pypsa.Network(paths.base_network)
+    n = pypsa.Network(snakemake.input.base_network)
 
-    country_shapes = gpd.read_file(paths.country_shapes).set_index('name')['geometry']
-    offshore_shapes = gpd.read_file(paths.offshore_shapes).set_index('name')['geometry']
+    country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index('name')['geometry']
+    offshore_shapes = gpd.read_file(snakemake.input.offshore_shapes).set_index('name')['geometry']
 
     onshore_regions = []
     offshore_regions = []
@@ -105,6 +103,6 @@ if __name__ == "__main__":
         offshore_regions_c = offshore_regions_c.loc[offshore_regions_c.area > 1e-2]
         offshore_regions.append(offshore_regions_c)
 
-    save_to_geojson(pd.concat(onshore_regions, ignore_index=True), out.regions_onshore)
+    save_to_geojson(pd.concat(onshore_regions, ignore_index=True), snakemake.output.regions_onshore)
 
-    save_to_geojson(pd.concat(offshore_regions, ignore_index=True), out.regions_offshore)
+    save_to_geojson(pd.concat(offshore_regions, ignore_index=True), snakemake.output.regions_offshore)

scripts/build_cutout.py

@@ -95,7 +95,7 @@ import logging
 import atlite
 import geopandas as gpd
 import pandas as pd
 
-from _helpers import configure_logging, retrieve_snakemake_keys
+from _helpers import configure_logging
 
 logger = logging.getLogger(__name__)
@@ -106,18 +106,16 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('build_cutout', cutout='europe-2013-era5')
     configure_logging(snakemake)
 
-    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
-
-    cutout_params = config['atlite']['cutouts'][wildcards.cutout]
+    cutout_params = snakemake.config['atlite']['cutouts'][snakemake.wildcards.cutout]
 
-    snapshots = pd.date_range(freq='h', **config['snapshots'])
+    snapshots = pd.date_range(freq='h', **snakemake.config['snapshots'])
     time = [snapshots[0], snapshots[-1]]
     cutout_params['time'] = slice(*cutout_params.get('time', time))
 
     if {'x', 'y', 'bounds'}.isdisjoint(cutout_params):
         # Determine the bounds from bus regions with a buffer of two grid cells
-        onshore = gpd.read_file(paths.regions_onshore)
-        offshore = gpd.read_file(paths.regions_offshore)
+        onshore = gpd.read_file(snakemake.input.regions_onshore)
+        offshore = gpd.read_file(snakemake.input.regions_offshore)
         regions = onshore.append(offshore)
         d = max(cutout_params.get('dx', 0.25), cutout_params.get('dy', 0.25))*2
         cutout_params['bounds'] = regions.total_bounds + [-d, -d, d, d]
@@ -128,5 +126,5 @@ if __name__ == "__main__":
     logging.info(f"Preparing cutout with parameters {cutout_params}.")
     features = cutout_params.pop('features', None)
-    cutout = atlite.Cutout(out[0], **cutout_params)
+    cutout = atlite.Cutout(snakemake.output[0], **cutout_params)
     cutout.prepare(features=features)

scripts/build_hydro_profile.py

@@ -60,7 +60,7 @@ Description
 """
 
 import logging
-from _helpers import configure_logging, retrieve_snakemake_keys
+from _helpers import configure_logging
 
 import atlite
 import geopandas as gpd
@@ -74,18 +74,16 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('build_hydro_profile')
     configure_logging(snakemake)
 
-    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
-
-    config_hydro = config['renewable']['hydro']
-    cutout = atlite.Cutout(paths.cutout)
+    config_hydro = snakemake.config['renewable']['hydro']
+    cutout = atlite.Cutout(snakemake.input.cutout)
 
-    countries = config['countries']
-    country_shapes = (gpd.read_file(paths.country_shapes)
+    countries = snakemake.config['countries']
+    country_shapes = (gpd.read_file(snakemake.input.country_shapes)
                       .set_index('name')['geometry'].reindex(countries))
     country_shapes.index.name = 'countries'
 
     eia_stats = vhydro.get_eia_annual_hydro_generation(
-        paths.eia_hydro_generation).reindex(columns=countries)
+        snakemake.input.eia_hydro_generation).reindex(columns=countries)
     inflow = cutout.runoff(shapes=country_shapes,
                            smooth=True,
                            lower_threshold_quantile=True,
@@ -94,4 +92,4 @@ if __name__ == "__main__":
     if 'clip_min_inflow' in config_hydro:
         inflow = inflow.where(inflow > config_hydro['clip_min_inflow'], 0)
 
-    inflow.to_netcdf(out[0])
+    inflow.to_netcdf(snakemake.output[0])

scripts/build_load_data.py

@@ -37,7 +37,7 @@ Outputs
 import logging
 logger = logging.getLogger(__name__)
 
-from _helpers import configure_logging, retrieve_snakemake_keys
+from _helpers import configure_logging
 
 import pandas as pd
 import numpy as np
@@ -196,18 +196,16 @@ if __name__ == "__main__":
 
     configure_logging(snakemake)
 
-    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
-
-    powerstatistics = config['load']['power_statistics']
-    interpolate_limit = config['load']['interpolate_limit']
-    countries = config['countries']
-    snapshots = pd.date_range(freq='h', **config['snapshots'])
+    powerstatistics = snakemake.config['load']['power_statistics']
+    interpolate_limit = snakemake.config['load']['interpolate_limit']
+    countries = snakemake.config['countries']
+    snapshots = pd.date_range(freq='h', **snakemake.config['snapshots'])
     years = slice(snapshots[0], snapshots[-1])
-    time_shift = config['load']['time_shift_for_large_gaps']
+    time_shift = snakemake.config['load']['time_shift_for_large_gaps']
 
-    load = load_timeseries(paths[0], years, countries, powerstatistics)
+    load = load_timeseries(snakemake.input[0], years, countries, powerstatistics)
 
-    if config['load']['manual_adjustments']:
+    if snakemake.config['load']['manual_adjustments']:
         load = manual_adjustment(load, powerstatistics)
 
     logger.info(f"Linearly interpolate gaps of size {interpolate_limit} and less.")
@@ -222,5 +220,5 @@ if __name__ == "__main__":
                     '`time_shift_for_large_gaps` or modify the `manual_adjustment` function '
                     'for implementing the needed load data modifications.')
 
-    load.to_csv(out[0])
+    load.to_csv(snakemake.output[0])

scripts/build_powerplants.py

@@ -72,7 +72,7 @@ The configuration options ``electricity: powerplants_filter`` and ``electricity:
 """
 
 import logging
-from _helpers import configure_logging, retrieve_snakemake_keys
+from _helpers import configure_logging
 
 import pypsa
 import powerplantmatching as pm
@@ -100,9 +100,7 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('build_powerplants')
     configure_logging(snakemake)
 
-    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
-
-    n = pypsa.Network(paths.base_network)
+    n = pypsa.Network(snakemake.input.base_network)
     countries = n.buses.country.unique()
 
     ppl = (pm.powerplants(from_url=True)
@@ -116,13 +114,13 @@ if __name__ == "__main__":
                                        df.Technology.replace('Steam Turbine',
                                                              'OCGT').fillna('OCGT')))))
 
-    ppl_query = config['electricity']['powerplants_filter']
+    ppl_query = snakemake.config['electricity']['powerplants_filter']
     if isinstance(ppl_query, str):
         ppl.query(ppl_query, inplace=True)
 
     # add carriers from own powerplant files:
-    custom_ppl_query = config['electricity']['custom_powerplants']
-    ppl = add_custom_powerplants(ppl, paths.custom_powerplants, custom_ppl_query)
+    custom_ppl_query = snakemake.config['electricity']['custom_powerplants']
+    ppl = add_custom_powerplants(ppl, snakemake.input.custom_powerplants, custom_ppl_query)
 
     cntries_without_ppl = [c for c in countries if c not in ppl.Country.unique()]
@@ -141,4 +139,4 @@ if __name__ == "__main__":
     if bus_null_b.any():
         logging.warning(f"Couldn't find close bus for {bus_null_b.sum()} powerplants")
 
-    ppl.to_csv(out[0])
+    ppl.to_csv(snakemake.output[0])

scripts/build_renewable_profiles.py

@@ -190,7 +190,7 @@ from pypsa.geo import haversine
 from shapely.geometry import LineString
 import time
 
-from _helpers import configure_logging, retrieve_snakemake_keys
+from _helpers import configure_logging
 
 logger = logging.getLogger(__name__)
@@ -202,55 +202,53 @@ if __name__ == '__main__':
     configure_logging(snakemake)
     pgb.streams.wrap_stderr()
 
-    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
-
-    nprocesses = config['atlite'].get('nprocesses')
-    noprogress = not config['atlite'].get('show_progress', True)
-    config = config['renewable'][wildcards.technology]
+    nprocesses = snakemake.config['atlite'].get('nprocesses')
+    noprogress = not snakemake.config['atlite'].get('show_progress', True)
+    config = snakemake.config['renewable'][snakemake.wildcards.technology]
     resource = config['resource'] # pv panel config / wind turbine config
-    correction_factor = config.get('correction_factor', 1.)
+    correction_factor = snakemake.config.get('correction_factor', 1.)
     capacity_per_sqkm = config['capacity_per_sqkm']
-    p_nom_max_meth = config.get('potential', 'conservative')
+    p_nom_max_meth = snakemake.config.get('potential', 'conservative')
 
     if isinstance(config.get("corine", {}), list):
-        config['corine'] = {'grid_codes': config['corine']}
+        snakemake.config['corine'] = {'grid_codes': config['corine']}
 
     if correction_factor != 1.:
         logger.info(f'correction_factor is set as {correction_factor}')
 
-    cutout = atlite.Cutout(paths['cutout'])
-    regions = gpd.read_file(paths.regions).set_index('name').rename_axis('bus')
+    cutout = atlite.Cutout(snakemake.input['cutout'])
+    regions = gpd.read_file(snakemake.input.regions).set_index('name').rename_axis('bus')
     buses = regions.index
 
     excluder = atlite.ExclusionContainer(crs=3035, res=100)
 
     if config['natura']:
-        excluder.add_raster(paths.natura, nodata=0, allow_no_overlap=True)
+        excluder.add_raster(snakemake.input.natura, nodata=0, allow_no_overlap=True)
 
-    corine = config.get("corine", {})
+    corine = snakemake.config.get("corine", {})
     if "grid_codes" in corine:
         codes = corine["grid_codes"]
-        excluder.add_raster(paths.corine, codes=codes, invert=True, crs=3035)
+        excluder.add_raster(snakemake.input.corine, codes=codes, invert=True, crs=3035)
     if corine.get("distance", 0.) > 0.:
         codes = corine["distance_grid_codes"]
         buffer = corine["distance"]
-        excluder.add_raster(paths.corine, codes=codes, buffer=buffer, crs=3035)
+        excluder.add_raster(snakemake.input.corine, codes=codes, buffer=buffer, crs=3035)
 
     if "max_depth" in config:
         # lambda not supported for atlite + multiprocessing
         # use named function np.greater with partially frozen argument instead
        # and exclude areas where: -max_depth > grid cell depth
         func = functools.partial(np.greater,-config['max_depth'])
-        excluder.add_raster(paths.gebco, codes=func, crs=4236, nodata=-1000)
+        excluder.add_raster(snakemake.input.gebco, codes=func, crs=4236, nodata=-1000)
 
     if 'min_shore_distance' in config:
         buffer = config['min_shore_distance']
-        excluder.add_geometry(paths.country_shapes, buffer=buffer)
+        excluder.add_geometry(snakemake.input.country_shapes, buffer=buffer)
 
     if 'max_shore_distance' in config:
         buffer = config['max_shore_distance']
-        excluder.add_geometry(paths.country_shapes, buffer=buffer, invert=True)
+        excluder.add_geometry(snakemake.input.country_shapes, buffer=buffer, invert=True)
 
     kwargs = dict(nprocesses=nprocesses, disable_progressbar=noprogress)
     if noprogress:
@@ -315,9 +313,9 @@ if __name__ == '__main__':
                     average_distance.rename('average_distance')])
 
-    if wildcards.technology.startswith("offwind"):
+    if snakemake.wildcards.technology.startswith("offwind"):
         logger.info('Calculate underwater fraction of connections.')
-        offshore_shape = gpd.read_file(paths['offshore_shapes']).unary_union
+        offshore_shape = gpd.read_file(snakemake.input['offshore_shapes']).unary_union
         underwater_fraction = []
         for bus in buses:
             p = centre_of_mass.sel(bus=bus).data
@@ -328,11 +326,11 @@ if __name__ == '__main__':
         ds['underwater_fraction'] = xr.DataArray(underwater_fraction, [buses])
 
     # select only buses with some capacity and minimal capacity factor
-    ds = ds.sel(bus=((ds['profile'].mean('time') > config.get('min_p_max_pu', 0.)) &
-                     (ds['p_nom_max'] > config.get('min_p_nom_max', 0.))))
+    ds = ds.sel(bus=((ds['profile'].mean('time') > snakemake.config.get('min_p_max_pu', 0.)) &
+                     (ds['p_nom_max'] > snakemake.config.get('min_p_nom_max', 0.))))
 
-    if 'clip_p_max_pu' in config:
-        min_p_max_pu = config['clip_p_max_pu']
+    if 'clip_p_max_pu' in snakemake.config:
+        min_p_max_pu = snakemake.config['clip_p_max_pu']
         ds['profile'] = ds['profile'].where(ds['profile'] >= min_p_max_pu, 0)
 
-    ds.to_netcdf(out.profile)
+    ds.to_netcdf(snakemake.output.profile)
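One subtlety in this file: after the rewrite, config is rebound to the per-technology sub-dictionary while snakemake.config remains the full configuration, so lookups against the two names address different levels of the config tree. A short illustration using lines from the hunk above:

    config = snakemake.config['renewable'][snakemake.wildcards.technology]
    resource = config['resource']                                       # per-technology key
    correction_factor = snakemake.config.get('correction_factor', 1.)  # top-level lookup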

scripts/build_shapes.py

@@ -68,7 +68,7 @@ Description
 """
 
 import logging
-from _helpers import configure_logging, retrieve_snakemake_keys
+from _helpers import configure_logging
 
 import os
 import numpy as np
@@ -217,18 +217,16 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('build_shapes')
     configure_logging(snakemake)
 
-    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
-
-    country_shapes = countries(paths.naturalearth, config['countries'])
-    save_to_geojson(country_shapes, out.country_shapes)
+    country_shapes = countries(snakemake.input.naturalearth, snakemake.config['countries'])
+    save_to_geojson(country_shapes, snakemake.output.country_shapes)
 
-    offshore_shapes = eez(country_shapes, paths.eez, config['countries'])
-    save_to_geojson(offshore_shapes, out.offshore_shapes)
+    offshore_shapes = eez(country_shapes, snakemake.input.eez, snakemake.config['countries'])
+    save_to_geojson(offshore_shapes, snakemake.output.offshore_shapes)
 
     europe_shape = country_cover(country_shapes, offshore_shapes)
-    save_to_geojson(gpd.GeoSeries(europe_shape), out.europe_shape)
+    save_to_geojson(gpd.GeoSeries(europe_shape), snakemake.output.europe_shape)
 
-    nuts3_shapes = nuts3(country_shapes, paths.nuts3, paths.nuts3pop,
-                         paths.nuts3gdp, paths.ch_cantons, paths.ch_popgdp)
+    nuts3_shapes = nuts3(country_shapes, snakemake.input.nuts3, snakemake.input.nuts3pop,
+                         snakemake.input.nuts3gdp, snakemake.input.ch_cantons, snakemake.input.ch_popgdp)
 
-    save_to_geojson(nuts3_shapes, out.nuts3_shapes)
+    save_to_geojson(nuts3_shapes, snakemake.output.nuts3_shapes)

scripts/cluster_network.py

@@ -122,7 +122,7 @@ Exemplary unsolved network clustered to 37 nodes:
 """
 
 import logging
-from _helpers import configure_logging, retrieve_snakemake_keys, update_p_nom_max
+from _helpers import configure_logging, update_p_nom_max
 
 import pypsa
 import os
@@ -331,21 +331,19 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('cluster_network', network='elec', simpl='', clusters='5')
     configure_logging(snakemake)
 
-    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
-
-    n = pypsa.Network(paths.network)
+    n = pypsa.Network(snakemake.input.network)
 
-    focus_weights = config.get('focus_weights', None)
+    focus_weights = snakemake.config.get('focus_weights', None)
 
     renewable_carriers = pd.Index([tech
                                    for tech in n.generators.carrier.unique()
-                                   if tech in config['renewable']])
+                                   if tech in snakemake.config['renewable']])
 
-    if wildcards.clusters.endswith('m'):
-        n_clusters = int(wildcards.clusters[:-1])
+    if snakemake.wildcards.clusters.endswith('m'):
+        n_clusters = int(snakemake.wildcards.clusters[:-1])
         aggregate_carriers = pd.Index(n.generators.carrier.unique()).difference(renewable_carriers)
     else:
-        n_clusters = int(wildcards.clusters)
+        n_clusters = int(snakemake.wildcards.clusters)
         aggregate_carriers = None # All
 
     if n_clusters == len(n.buses):
@@ -354,11 +352,10 @@ if __name__ == "__main__":
         linemap = n.lines.index.to_series()
         clustering = pypsa.networkclustering.Clustering(n, busmap, linemap, linemap, pd.Series(dtype='O'))
     else:
-        line_length_factor = config['lines']['length_factor']
+        line_length_factor = snakemake.config['lines']['length_factor']
         Nyears = n.snapshot_weightings.objective.sum()/8760
-        hvac_overhead_cost = (load_costs(tech_costs = paths.tech_costs,
-                                         config = config['costs'],
-                                         elec_config=config['electricity'], Nyears = Nyears)
+        hvac_overhead_cost = (load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears)
                               .at['HVAC overhead', 'capital_cost'])
 
         def consense(x):
@@ -367,22 +364,23 @@ if __name__ == "__main__":
                 "The `potential` configuration option must agree for all renewable carriers, for now!"
             )
             return v
-        potential_mode = consense(pd.Series([config['renewable'][tech]['potential']
+        potential_mode = consense(pd.Series([snakemake.config['renewable'][tech]['potential']
                                              for tech in renewable_carriers]))
-        custom_busmap = config["enable"].get("custom_busmap", False)
+        custom_busmap = snakemake.config["enable"].get("custom_busmap", False)
         if custom_busmap:
-            custom_busmap = pd.read_csv(paths.custom_busmap, index_col=0, squeeze=True)
+            custom_busmap = pd.read_csv(snakemake.input.custom_busmap, index_col=0, squeeze=True)
             custom_busmap.index = custom_busmap.index.astype(str)
-            logger.info(f"Imported custom busmap from {paths.custom_busmap}")
+            logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}")
 
         clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap, aggregate_carriers,
-                                               line_length_factor, potential_mode, config['solving']['solver']['name'],
+                                               line_length_factor, potential_mode,
+                                               snakemake.config['solving']['solver']['name'],
                                                "kmeans", hvac_overhead_cost, focus_weights)
 
     update_p_nom_max(n)
 
-    clustering.network.export_to_netcdf(out.network)
+    clustering.network.export_to_netcdf(snakemake.output.network)
     for attr in ('busmap', 'linemap'): #also available: linemap_positive, linemap_negative
-        getattr(clustering, attr).to_csv(out[attr])
+        getattr(clustering, attr).to_csv(snakemake.output[attr])
 
-    cluster_regions((clustering.busmap,), paths, out)
+    cluster_regions((clustering.busmap,), snakemake.input, snakemake.output)

scripts/prepare_network.py

@@ -56,7 +56,7 @@ Description
 """
 
 import logging
-from _helpers import configure_logging, retrieve_snakemake_keys
+from _helpers import configure_logging
 
 import re
 import pypsa
@@ -70,7 +70,7 @@ idx = pd.IndexSlice
 
 logger = logging.getLogger(__name__)
 
-def add_co2limit(n, co2limit=1.487e+9, Nyears=1.):
+def add_co2limit(n, co2limit, Nyears=1.):
 
     n.add("GlobalConstraint", "CO2Limit",
           carrier_attribute="co2_emissions", sense="<=",
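Note that this hunk also drops the hard-coded default co2limit=1.487e+9 from add_co2limit, so callers must now pass the limit explicitly; the Co2L handling further down does exactly that, e.g.:

    add_co2limit(n, snakemake.config['electricity']['co2limit'], Nyears)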
@@ -206,15 +206,13 @@ if __name__ == "__main__":
                                   clusters='40', ll='v0.3', opts='Co2L-24H')
     configure_logging(snakemake)
 
-    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
-
-    opts = wildcards.opts.split('-')
+    opts = snakemake.wildcards.opts.split('-')
 
-    n = pypsa.Network(paths[0])
+    n = pypsa.Network(snakemake.input[0])
     Nyears = n.snapshot_weightings.objective.sum() / 8760.
-    costs = load_costs(paths.tech_costs, config['costs'], config['electricity'], Nyears)
+    costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears)
 
-    set_line_s_max_pu(n, config['lines']['s_max_pu'])
+    set_line_s_max_pu(n, snakemake.config['lines']['s_max_pu'])
 
     for o in opts:
         m = re.match(r'^\d+h$', o, re.IGNORECASE)
@@ -225,7 +223,7 @@ if __name__ == "__main__":
 
     for o in opts:
         m = re.match(r'^\d+seg$', o, re.IGNORECASE)
         if m is not None:
-            solver_name = config["solving"]["solver"]["name"]
+            solver_name = snakemake.config["solving"]["solver"]["name"]
             n = apply_time_segmentation(n, m.group(0)[:-3], solver_name)
             break
@@ -233,10 +231,10 @@ if __name__ == "__main__":
         if "Co2L" in o:
             m = re.findall("[0-9]*\.?[0-9]+$", o)
             if len(m) > 0:
-                co2limit = float(m[0]) * config['electricity']['co2base']
+                co2limit = float(m[0]) * snakemake.config['electricity']['co2base']
                 add_co2limit(n, co2limit, Nyears)
             else:
-                add_co2limit(n, config['electricity']['co2limit'], Nyears)
+                add_co2limit(n, snakemake.config['electricity']['co2limit'], Nyears)
             break
 
     for o in opts:
@@ -257,17 +255,17 @@ if __name__ == "__main__":
                 c.df.loc[sel,attr] *= factor
 
     if 'Ep' in opts:
-        add_emission_prices(n, config['costs']['emission_prices'])
+        add_emission_prices(n, snakemake.config['costs']['emission_prices'])
 
-    ll_type, factor = wildcards.ll[0], wildcards.ll[1:]
+    ll_type, factor = snakemake.wildcards.ll[0], snakemake.wildcards.ll[1:]
     set_transmission_limit(n, ll_type, factor, costs, Nyears)
 
-    set_line_nom_max(n, s_nom_max_set=config["lines"].get("s_nom_max,", np.inf),
-                     p_nom_max_set=config["links"].get("p_nom_max,", np.inf))
+    set_line_nom_max(n, s_nom_max_set=snakemake.config["lines"].get("s_nom_max,", np.inf),
+                     p_nom_max_set=snakemake.config["links"].get("p_nom_max,", np.inf))
 
     if "ATK" in opts:
         enforce_autarky(n)
     elif "ATKc" in opts:
         enforce_autarky(n, only_crossborder=True)
 
-    n.export_to_netcdf(out[0])
+    n.export_to_netcdf(snakemake.output[0])

scripts/retrieve_databundle.py

@@ -33,7 +33,7 @@ The :ref:`tutorial` uses a smaller `data bundle <https://zenodo.org/record/35179
 """
 
 import logging
-from _helpers import progress_retrieve, retrieve_snakemake_keys, configure_logging
+from _helpers import progress_retrieve, configure_logging
 
 import tarfile
 from pathlib import Path
@@ -50,9 +50,7 @@ if __name__ == "__main__":
         rootpath = '.'
     configure_logging(snakemake) # TODO Make logging compatible with progressbar (see PR #102)
 
-    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
-
-    if config['tutorial']:
+    if snakemake.config['tutorial']:
         url = "https://zenodo.org/record/3517921/files/pypsa-eur-tutorial-data-bundle.tar.xz"
     else:
         url = "https://zenodo.org/record/3517935/files/pypsa-eur-data-bundle.tar.xz"

scripts/solve_network.py

@@ -77,7 +77,7 @@ Details (and errors made through this heuristic) are discussed in the paper
 """
 
 import logging
-from _helpers import configure_logging, retrieve_snakemake_keys
+from _helpers import configure_logging
 
 import numpy as np
 import pandas as pd
@@ -273,20 +273,18 @@ if __name__ == "__main__":
                                   clusters='5', ll='copt', opts='Co2L-BAU-CCL-24H')
     configure_logging(snakemake)
 
-    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
-
-    tmpdir = config['solving'].get('tmpdir')
+    tmpdir = snakemake.config['solving'].get('tmpdir')
     if tmpdir is not None:
         Path(tmpdir).mkdir(parents=True, exist_ok=True)
-    opts = wildcards.opts.split('-')
-    solve_opts = config['solving']['options']
+    opts = snakemake.wildcards.opts.split('-')
+    solve_opts = snakemake.config['solving']['options']
 
-    fn = getattr(logs, 'memory', None)
+    fn = getattr(snakemake.log, 'memory', None)
     with memory_logger(filename=fn, interval=30.) as mem:
-        n = pypsa.Network(paths[0])
+        n = pypsa.Network(snakemake.input[0])
         n = prepare_network(n, solve_opts)
-        n = solve_network(n, config, opts, solver_dir=tmpdir,
-                          solver_logfile=logs.solver)
-        n.export_to_netcdf(out[0])
+        n = solve_network(n, snakemake.config, opts, solver_dir=tmpdir,
+                          solver_logfile=snakemake.log.solver)
+        n.export_to_netcdf(snakemake.output[0])
 
     logger.info("Maximum memory usage: {}".format(mem.mem_usage))

scripts/solve_operations_network.py

@@ -46,7 +46,7 @@ Description
 """
 
 import logging
-from _helpers import configure_logging, retrieve_snakemake_keys
+from _helpers import configure_logging
 
 import pypsa
 import numpy as np
@@ -100,25 +100,23 @@ if __name__ == "__main__":
                                   simpl='', clusters='5', ll='copt', opts='Co2L-BAU-24H')
     configure_logging(snakemake)
 
-    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
-
-    tmpdir = config['solving'].get('tmpdir')
+    tmpdir = snakemake.config['solving'].get('tmpdir')
     if tmpdir is not None:
         Path(tmpdir).mkdir(parents=True, exist_ok=True)
 
-    n = pypsa.Network(paths.unprepared)
-    n_optim = pypsa.Network(paths.optimized)
+    n = pypsa.Network(snakemake.input.unprepared)
+    n_optim = pypsa.Network(snakemake.input.optimized)
     n = set_parameters_from_optimized(n, n_optim)
     del n_optim
 
-    opts = wildcards.opts.split('-')
-    config['solving']['options']['skip_iterations'] = False
+    opts = snakemake.wildcards.opts.split('-')
+    snakemake.config['solving']['options']['skip_iterations'] = False
 
-    fn = getattr(logs, 'memory', None)
+    fn = getattr(snakemake.log, 'memory', None)
    with memory_logger(filename=fn, interval=30.) as mem:
-        n = prepare_network(n, config['solving']['options'])
-        n = solve_network(n, config, opts, solver_dir=tmpdir,
-                          solver_logfile=logs.solver)
-        n.export_to_netcdf(out[0])
+        n = prepare_network(n, snakemake.config['solving']['options'])
+        n = solve_network(n, snakemake.config, opts, solver_dir=tmpdir,
+                          solver_logfile=snakemake.log.solver)
+        n.export_to_netcdf(snakemake.output[0])
 
     logger.info("Maximum memory usage: {}".format(mem.mem_usage))
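For context, every __main__ block touched here keeps the same standalone-execution pattern around mock_snakemake. A rough sketch of that pattern (the 'snakemake' not in globals() guard is an assumption based on the surrounding PyPSA-Eur scripts, not shown in this diff):

    if __name__ == "__main__":
        if 'snakemake' not in globals():
            # assumed guard: only build a mock snakemake object when the
            # script is not being executed by Snakemake itself
            from _helpers import mock_snakemake
            snakemake = mock_snakemake('add_electricity')
        configure_logging(snakemake)
        # from here on, the script reads snakemake.input / .config /
        # .wildcards / .log / .output directly, as in the hunks above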