diff --git a/scripts/_helpers.py b/scripts/_helpers.py
index f1e5e887..a44a8133 100644
--- a/scripts/_helpers.py
+++ b/scripts/_helpers.py
@@ -95,6 +95,10 @@ def pdbcast(v, h):
     return pd.DataFrame(v.values.reshape((-1, 1)) * h.values,
                         index=v.index, columns=h.index)
 
+def retrieve_snakemake_keys(snakemake):
+    return (snakemake.input, snakemake.config, snakemake.wildcards,
+            snakemake.log, snakemake.output)
+
 
 def load_network_for_plots(fn, tech_costs, config, combine_hydro_ps=True):
     import pypsa
diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py
index 42d4f5cc..c4a883f5 100755
--- a/scripts/add_electricity.py
+++ b/scripts/add_electricity.py
@@ -84,7 +84,7 @@ It further adds extendable ``generators`` with **zero** capacity for
 """
 
 import logging
-from _helpers import configure_logging, update_p_nom_max
+from _helpers import configure_logging, retrieve_snakemake_keys, update_p_nom_max
 
 import pypsa
 import pandas as pd
@@ -546,8 +546,7 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('add_electricity')
     configure_logging(snakemake)
 
-    config = snakemake.config
-    paths = snakemake.input
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
     n = pypsa.Network(paths.base_network)
     Nyears = n.snapshot_weightings.objective.sum() / 8760.
@@ -583,4 +582,4 @@ if __name__ == "__main__":
 
     add_nice_carrier_names(n, config)
 
-    n.export_to_netcdf(snakemake.output[0])
+    n.export_to_netcdf(out[0])
diff --git a/scripts/add_extra_components.py b/scripts/add_extra_components.py
index db764d4f..35947aee 100644
--- a/scripts/add_extra_components.py
+++ b/scripts/add_extra_components.py
@@ -50,7 +50,7 @@ The rule :mod:`add_extra_components` attaches additional extendable components t
 - ``Stores`` of carrier 'H2' and/or 'battery' in combination with ``Links``. If this option is chosen, the script adds extra buses with corresponding carrier where energy ``Stores`` are attached and which are connected to the corresponding power buses via two links, one each for charging and discharging. This leads to three investment variables for the energy capacity, charging and discharging capacity of the storage unit.
 """
 import logging
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 import pypsa
 import pandas as pd
@@ -192,18 +192,19 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('add_extra_components', network='elec',
                                    simpl='', clusters=5)
     configure_logging(snakemake)
-    paths = snakemake.input
+
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
     n = pypsa.Network(paths.network)
-    elec_config = snakemake.config['electricity']
+    elec_config = config['electricity']
 
     Nyears = n.snapshot_weightings.objective.sum() / 8760.
-    costs = load_costs(paths.tech_costs, snakemake.config['costs'], elec_config, Nyears=Nyears)
+    costs = load_costs(paths.tech_costs, config['costs'], elec_config, Nyears=Nyears)
 
     attach_storageunits(n, costs, elec_config)
     attach_stores(n, costs, elec_config)
     attach_hydrogen_pipelines(n, costs, elec_config)
 
-    add_nice_carrier_names(n, snakemake.config)
+    add_nice_carrier_names(n, config)
 
-    n.export_to_netcdf(snakemake.output[0])
+    n.export_to_netcdf(out[0])
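Every call site in the scripts below unpacks the helper added to scripts/_helpers.py above in the same fixed order: input, config, wildcards, log, output. A minimal sketch of that contract, with a SimpleNamespace standing in for the object Snakemake normally injects (illustration only, not part of the patch):

    from types import SimpleNamespace

    def retrieve_snakemake_keys(snakemake):
        # same attribute order as the helper in scripts/_helpers.py
        return (snakemake.input, snakemake.config, snakemake.wildcards,
                snakemake.log, snakemake.output)

    # stand-in for the injected snakemake object (hypothetical values)
    snakemake = SimpleNamespace(input=['networks/base.nc'], config={'countries': ['DE']},
                                wildcards={'clusters': '5'}, log=[], output=['networks/elec.nc'])

    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
    assert out == ['networks/elec.nc'] and config['countries'] == ['DE']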
diff --git a/scripts/base_network.py b/scripts/base_network.py
index baa12092..41699f04 100644
--- a/scripts/base_network.py
+++ b/scripts/base_network.py
@@ -63,7 +63,7 @@ Description
 """
 
 import logging
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 import pypsa
 import yaml
@@ -588,11 +588,10 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('base_network')
     configure_logging(snakemake)
 
-    paths = snakemake.input
-    config = snakemake.config
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
     n = base_network(paths.eg_buses, paths.eg_converters, paths.eg_transformers, paths.eg_lines, paths.eg_links,
                      paths.links_p_nom, paths.links_tyndp, paths.europe_shape, paths.country_shapes, paths.offshore_shapes,
                      paths.parameter_corrections, config)
 
-    n.export_to_netcdf(snakemake.output[0])
+    n.export_to_netcdf(out[0])
diff --git a/scripts/build_bus_regions.py b/scripts/build_bus_regions.py
index d91d0575..78e2070d 100644
--- a/scripts/build_bus_regions.py
+++ b/scripts/build_bus_regions.py
@@ -42,7 +42,7 @@ Description
 """
 
 import logging
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 import pypsa
 import os
@@ -67,12 +67,14 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('build_bus_regions')
     configure_logging(snakemake)
 
-    countries = snakemake.config['countries']
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
-    n = pypsa.Network(snakemake.input.base_network)
+    countries = config['countries']
 
-    country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index('name')['geometry']
-    offshore_shapes = gpd.read_file(snakemake.input.offshore_shapes).set_index('name')['geometry']
+    n = pypsa.Network(paths.base_network)
+
+    country_shapes = gpd.read_file(paths.country_shapes).set_index('name')['geometry']
+    offshore_shapes = gpd.read_file(paths.offshore_shapes).set_index('name')['geometry']
 
     onshore_regions = []
     offshore_regions = []
@@ -103,6 +105,6 @@ if __name__ == "__main__":
         offshore_regions_c = offshore_regions_c.loc[offshore_regions_c.area > 1e-2]
         offshore_regions.append(offshore_regions_c)
 
-    save_to_geojson(pd.concat(onshore_regions, ignore_index=True), snakemake.output.regions_onshore)
+    save_to_geojson(pd.concat(onshore_regions, ignore_index=True), out.regions_onshore)
 
-    save_to_geojson(pd.concat(offshore_regions, ignore_index=True), snakemake.output.regions_offshore)
+    save_to_geojson(pd.concat(offshore_regions, ignore_index=True), out.regions_offshore)
diff --git a/scripts/build_cutout.py b/scripts/build_cutout.py
index 78eafac6..4b3e2bdc 100644
--- a/scripts/build_cutout.py
+++ b/scripts/build_cutout.py
@@ -95,7 +95,7 @@ import logging
 import atlite
 import geopandas as gpd
 import pandas as pd
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 logger = logging.getLogger(__name__)
 
@@ -106,16 +106,18 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('build_cutout', cutout='europe-2013-era5')
     configure_logging(snakemake)
 
-    cutout_params = snakemake.config['atlite']['cutouts'][snakemake.wildcards.cutout]
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
-    snapshots = pd.date_range(freq='h', **snakemake.config['snapshots'])
+    cutout_params = config['atlite']['cutouts'][wildcards.cutout]
+
+    snapshots = pd.date_range(freq='h', **config['snapshots'])
     time = [snapshots[0], snapshots[-1]]
     cutout_params['time'] = slice(*cutout_params.get('time', time))
 
     if {'x', 'y', 'bounds'}.isdisjoint(cutout_params):
         # Determine the bounds from bus regions with a buffer of two grid cells
-        onshore = gpd.read_file(snakemake.input.regions_onshore)
-        offshore = gpd.read_file(snakemake.input.regions_offshore)
+        onshore = gpd.read_file(paths.regions_onshore)
+        offshore = gpd.read_file(paths.regions_offshore)
         regions = onshore.append(offshore)
         d = max(cutout_params.get('dx', 0.25), cutout_params.get('dy', 0.25))*2
         cutout_params['bounds'] = regions.total_bounds + [-d, -d, d, d]
@@ -126,5 +128,5 @@ if __name__ == "__main__":
 
     logging.info(f"Preparing cutout with parameters {cutout_params}.")
     features = cutout_params.pop('features', None)
-    cutout = atlite.Cutout(snakemake.output[0], **cutout_params)
+    cutout = atlite.Cutout(out[0], **cutout_params)
     cutout.prepare(features=features)
diff --git a/scripts/build_hydro_profile.py b/scripts/build_hydro_profile.py
index 6ac59262..563c8ecb 100644
--- a/scripts/build_hydro_profile.py
+++ b/scripts/build_hydro_profile.py
@@ -60,7 +60,7 @@ Description
 """
 
 import logging
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 import atlite
 import geopandas as gpd
@@ -74,22 +74,24 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('build_hydro_profile')
     configure_logging(snakemake)
 
-    config = snakemake.config['renewable']['hydro']
-    cutout = atlite.Cutout(snakemake.input.cutout)
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
-    countries = snakemake.config['countries']
-    country_shapes = (gpd.read_file(snakemake.input.country_shapes)
+    config_hydro = config['renewable']['hydro']
+    cutout = atlite.Cutout(paths.cutout)
+
+    countries = config['countries']
+    country_shapes = (gpd.read_file(paths.country_shapes)
                       .set_index('name')['geometry'].reindex(countries))
     country_shapes.index.name = 'countries'
     eia_stats = vhydro.get_eia_annual_hydro_generation(
-        snakemake.input.eia_hydro_generation).reindex(columns=countries)
+        paths.eia_hydro_generation).reindex(columns=countries)
 
     inflow = cutout.runoff(shapes=country_shapes,
                            smooth=True,
                            lower_threshold_quantile=True,
                            normalize_using_yearly=eia_stats)
 
-    if 'clip_min_inflow' in config:
-        inflow = inflow.where(inflow > config['clip_min_inflow'], 0)
+    if 'clip_min_inflow' in config_hydro:
+        inflow = inflow.where(inflow > config_hydro['clip_min_inflow'], 0)
 
-    inflow.to_netcdf(snakemake.output[0])
+    inflow.to_netcdf(out[0])
diff --git a/scripts/build_load_data.py b/scripts/build_load_data.py
index f71be6ea..0f9124ea 100755
--- a/scripts/build_load_data.py
+++ b/scripts/build_load_data.py
@@ -37,7 +37,7 @@ Outputs
 import logging
 logger = logging.getLogger(__name__)
 
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 import pandas as pd
 import numpy as np
@@ -196,7 +196,8 @@ if __name__ == "__main__":
 
     configure_logging(snakemake)
 
-    config = snakemake.config
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
+
     powerstatistics = config['load']['power_statistics']
     interpolate_limit = config['load']['interpolate_limit']
     countries = config['countries']
@@ -204,7 +205,7 @@ if __name__ == "__main__":
     years = slice(snapshots[0], snapshots[-1])
     time_shift = config['load']['time_shift_for_large_gaps']
 
-    load = load_timeseries(snakemake.input[0], years, countries, powerstatistics)
+    load = load_timeseries(paths[0], years, countries, powerstatistics)
 
     if config['load']['manual_adjustments']:
         load = manual_adjustment(load, powerstatistics)
@@ -221,5 +222,5 @@ if __name__ == "__main__":
 
             '`time_shift_for_large_gaps` or modify the `manual_adjustment` function '
             'for implementing the needed load data modifications.')
 
-    load.to_csv(snakemake.output[0])
+    load.to_csv(out[0])
diff --git a/scripts/build_natura_raster.py b/scripts/build_natura_raster.py
index f7a923d6..71d2c45e 100644
--- a/scripts/build_natura_raster.py
+++ b/scripts/build_natura_raster.py
@@ -40,7 +40,7 @@ Description
 """
 
 import logging
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 import atlite
 import geopandas as gpd
@@ -73,18 +73,19 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('build_natura_raster')
     configure_logging(snakemake)
 
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
-    cutouts = snakemake.input.cutouts
+    cutouts = paths.cutouts
     xs, Xs, ys, Ys = zip(*(determine_cutout_xXyY(cutout) for cutout in cutouts))
     bounds = transform_bounds(4326, 3035, min(xs), min(ys), max(Xs), max(Ys))
     transform, out_shape = get_transform_and_shape(bounds, res=100)
 
     # adjusted boundaries
-    shapes = gpd.read_file(snakemake.input.natura).to_crs(3035)
+    shapes = gpd.read_file(paths.natura).to_crs(3035)
     raster = ~geometry_mask(shapes.geometry, out_shape[::-1], transform)
     raster = raster.astype(rio.uint8)
 
-    with rio.open(snakemake.output[0], 'w', driver='GTiff', dtype=rio.uint8,
+    with rio.open(out[0], 'w', driver='GTiff', dtype=rio.uint8,
                   count=1, transform=transform, crs=3035, compress='lzw',
                   width=raster.shape[1], height=raster.shape[0]) as dst:
         dst.write(raster, indexes=1)
diff --git a/scripts/build_powerplants.py b/scripts/build_powerplants.py
index be57baa8..4b9d13a1 100755
--- a/scripts/build_powerplants.py
+++ b/scripts/build_powerplants.py
@@ -72,7 +72,7 @@ The configuration options ``electricity: powerplants_filter`` and ``electricity:
 """
 
 import logging
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 import pypsa
 import powerplantmatching as pm
@@ -99,7 +99,8 @@ if __name__ == "__main__":
         from _helpers import mock_snakemake
         snakemake = mock_snakemake('build_powerplants')
     configure_logging(snakemake)
-    paths = snakemake.input
+
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
     n = pypsa.Network(paths.base_network)
     countries = n.buses.country.unique()
@@ -115,12 +116,12 @@ if __name__ == "__main__":
                            df.Technology.replace('Steam Turbine',
                                                  'OCGT').fillna('OCGT')))))
 
-    ppl_query = snakemake.config['electricity']['powerplants_filter']
+    ppl_query = config['electricity']['powerplants_filter']
     if isinstance(ppl_query, str):
         ppl.query(ppl_query, inplace=True)
 
     # add carriers from own powerplant files:
-    custom_ppl_query = snakemake.config['electricity']['custom_powerplants']
+    custom_ppl_query = config['electricity']['custom_powerplants']
     ppl = add_custom_powerplants(ppl, paths.custom_powerplants, custom_ppl_query)
 
     cntries_without_ppl = [c for c in countries if c not in ppl.Country.unique()]
@@ -140,4 +141,4 @@ if __name__ == "__main__":
     if bus_null_b.any():
         logging.warning(f"Couldn't find close bus for {bus_null_b.sum()} powerplants")
 
-    ppl.to_csv(snakemake.output[0])
+    ppl.to_csv(out[0])
diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py
index 9ce83de3..944d6f39 100644
--- a/scripts/build_renewable_profiles.py
+++ b/scripts/build_renewable_profiles.py
@@ -190,7 +190,7 @@ from pypsa.geo import haversine
 from shapely.geometry import LineString
 import time
 
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 logger = logging.getLogger(__name__)
 
@@ -201,10 +201,12 @@ if __name__ == '__main__':
         snakemake = mock_snakemake('build_renewable_profiles', technology='solar')
     configure_logging(snakemake)
     pgb.streams.wrap_stderr()
-    paths = snakemake.input
-    nprocesses = snakemake.config['atlite'].get('nprocesses')
-    noprogress = not snakemake.config['atlite'].get('show_progress', True)
-    config = snakemake.config['renewable'][snakemake.wildcards.technology]
+
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
+
+    nprocesses = config['atlite'].get('nprocesses')
+    noprogress = not config['atlite'].get('show_progress', True)
+    config = config['renewable'][wildcards.technology]
     resource = config['resource'] # pv panel config / wind turbine config
     correction_factor = config.get('correction_factor', 1.)
     capacity_per_sqkm = config['capacity_per_sqkm']
@@ -313,7 +315,7 @@ if __name__ == '__main__':
 
                             average_distance.rename('average_distance')])
 
-    if snakemake.wildcards.technology.startswith("offwind"):
+    if wildcards.technology.startswith("offwind"):
         logger.info('Calculate underwater fraction of connections.')
         offshore_shape = gpd.read_file(paths['offshore_shapes']).unary_union
         underwater_fraction = []
@@ -333,4 +335,4 @@ if __name__ == '__main__':
         min_p_max_pu = config['clip_p_max_pu']
         ds['profile'] = ds['profile'].where(ds['profile'] >= min_p_max_pu, 0)
 
-    ds.to_netcdf(snakemake.output.profile)
+    ds.to_netcdf(out.profile)
diff --git a/scripts/build_shapes.py b/scripts/build_shapes.py
index cca941e6..b4686ac3 100644
--- a/scripts/build_shapes.py
+++ b/scripts/build_shapes.py
@@ -68,7 +68,7 @@ Description
 """
 
 import logging
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 import os
 import numpy as np
@@ -217,13 +217,12 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('build_shapes')
     configure_logging(snakemake)
 
-    paths = snakemake.input
-    out = snakemake.output
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
-    country_shapes = countries(paths.naturalearth, snakemake.config['countries'])
+    country_shapes = countries(paths.naturalearth, config['countries'])
     save_to_geojson(country_shapes, out.country_shapes)
 
-    offshore_shapes = eez(country_shapes, paths.eez, snakemake.config['countries'])
+    offshore_shapes = eez(country_shapes, paths.eez, config['countries'])
     save_to_geojson(offshore_shapes, out.offshore_shapes)
 
     europe_shape = country_cover(country_shapes, offshore_shapes)
diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py
index 51556b27..554109e3 100644
--- a/scripts/cluster_network.py
+++ b/scripts/cluster_network.py
@@ -122,7 +122,7 @@ Exemplary unsolved network clustered to 37 nodes:
 """
 
 import logging
-from _helpers import configure_logging, update_p_nom_max
+from _helpers import configure_logging, retrieve_snakemake_keys, update_p_nom_max
 
 import pypsa
 import os
@@ -306,7 +306,7 @@ def cluster_regions(busmaps, input=None, output=None):
 
     for which in ('regions_onshore', 'regions_offshore'):
         regions = gpd.read_file(getattr(input, which)).set_index('name')
-        geom_c = regions.geometry.groupby(busmap).apply(shapely.ops.cascaded_union)
+        geom_c = regions.geometry.groupby(busmap).apply(shapely.ops.unary_union)
         regions_c = gpd.GeoDataFrame(dict(geometry=geom_c))
         regions_c.index.name = 'name'
         save_to_geojson(regions_c, getattr(output, which))
@@ -328,19 +328,21 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('cluster_network', network='elec', simpl='', clusters='5')
     configure_logging(snakemake)
 
-    n = pypsa.Network(snakemake.input.network)
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
-    focus_weights = snakemake.config.get('focus_weights', None)
+    n = pypsa.Network(paths.network)
+
+    focus_weights = config.get('focus_weights', None)
 
     renewable_carriers = pd.Index([tech
                                    for tech in n.generators.carrier.unique()
-                                   if tech in snakemake.config['renewable']])
+                                   if tech in config['renewable']])
 
-    if snakemake.wildcards.clusters.endswith('m'):
-        n_clusters = int(snakemake.wildcards.clusters[:-1])
+    if wildcards.clusters.endswith('m'):
+        n_clusters = int(wildcards.clusters[:-1])
         aggregate_carriers = pd.Index(n.generators.carrier.unique()).difference(renewable_carriers)
     else:
-        n_clusters = int(snakemake.wildcards.clusters)
+        n_clusters = int(wildcards.clusters)
         aggregate_carriers = None # All
 
     if n_clusters == len(n.buses):
@@ -349,11 +351,11 @@ if __name__ == "__main__":
         linemap = n.lines.index.to_series()
         clustering = pypsa.networkclustering.Clustering(n, busmap, linemap, linemap, pd.Series(dtype='O'))
     else:
-        line_length_factor = snakemake.config['lines']['length_factor']
+        line_length_factor = config['lines']['length_factor']
         Nyears = n.snapshot_weightings.objective.sum()/8760
-        hvac_overhead_cost = (load_costs(tech_costs = snakemake.input.tech_costs,
-                                        config = snakemake.config['costs'],
-                                        elec_config=snakemake.config['electricity'], Nyears = Nyears)
+        hvac_overhead_cost = (load_costs(tech_costs = paths.tech_costs,
+                                        config = config['costs'],
+                                        elec_config=config['electricity'], Nyears = Nyears)
                               .at['HVAC overhead', 'capital_cost'])
 
         def consense(x):
@@ -362,24 +364,24 @@ if __name__ == "__main__":
                 "The `potential` configuration option must agree for all renewable carriers, for now!"
             )
             return v
-        potential_mode = consense(pd.Series([snakemake.config['renewable'][tech]['potential']
+        potential_mode = consense(pd.Series([config['renewable'][tech]['potential']
                                              for tech in renewable_carriers]))
-        custom_busmap = snakemake.config["enable"].get("custom_busmap", False)
+        custom_busmap = config["enable"].get("custom_busmap", False)
         if custom_busmap:
-            custom_busmap = pd.read_csv(snakemake.input.custom_busmap, index_col=0, squeeze=True)
+            custom_busmap = pd.read_csv(paths.custom_busmap, index_col=0, squeeze=True)
             custom_busmap.index = custom_busmap.index.astype(str)
-            logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}")
+            logger.info(f"Imported custom busmap from {paths.custom_busmap}")
         clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap, aggregate_carriers,
                                                line_length_factor=line_length_factor,
                                                potential_mode=potential_mode,
-                                               solver_name=snakemake.config['solving']['solver']['name'],
+                                               solver_name=config['solving']['solver']['name'],
                                                extended_link_costs=hvac_overhead_cost,
                                                focus_weights=focus_weights)
 
     update_p_nom_max(n)
 
-    clustering.network.export_to_netcdf(snakemake.output.network)
+    clustering.network.export_to_netcdf(out.network)
     for attr in ('busmap', 'linemap'): #also available: linemap_positive, linemap_negative
-        getattr(clustering, attr).to_csv(snakemake.output[attr])
+        getattr(clustering, attr).to_csv(out[attr])
 
-    cluster_regions((clustering.busmap,), snakemake.input, snakemake.output)
+    cluster_regions((clustering.busmap,), paths, out)
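Besides the key unpacking, cluster_network.py swaps shapely.ops.cascaded_union for shapely.ops.unary_union in cluster_regions; unary_union is the name Shapely recommends in place of the deprecated cascaded_union and accepts any iterable of geometries, so the grouped apply call keeps working unchanged. A small self-contained example of what that call does for one cluster group (illustrative geometries, not project data):

    from shapely.geometry import Polygon
    from shapely.ops import unary_union

    # two overlapping bus regions that fall into the same cluster
    region_a = Polygon([(0, 0), (2, 0), (2, 2), (0, 2)])
    region_b = Polygon([(1, 1), (3, 1), (3, 3), (1, 3)])

    merged = unary_union([region_a, region_b])  # one merged region per cluster
    print(merged.area)  # 7.0 -- the union counts the overlap only once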
diff --git a/scripts/plot_network.py b/scripts/plot_network.py
index 456bf50f..645c8c39 100755
--- a/scripts/plot_network.py
+++ b/scripts/plot_network.py
@@ -20,8 +20,8 @@ Description
 """
 
 import logging
-from _helpers import (load_network_for_plots, aggregate_p, aggregate_costs,
-                      configure_logging)
+from _helpers import (retrieve_snakemake_keys, load_network_for_plots,
+                      aggregate_p, aggregate_costs, configure_logging)
 
 import pandas as pd
 import numpy as np
@@ -259,18 +259,19 @@ if __name__ == "__main__":
 
     set_plot_style()
 
-    opts = snakemake.config['plotting']
-    map_figsize = opts['map']['figsize']
-    map_boundaries = opts['map']['boundaries']
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
-    n = load_network_for_plots(snakemake.input.network, snakemake.input.tech_costs, snakemake.config)
+    map_figsize = config['map']['figsize']
+    map_boundaries = config['map']['boundaries']
 
-    scenario_opts = snakemake.wildcards.opts.split('-')
+    n = load_network_for_plots(paths.network, paths.tech_costs, config)
+
+    scenario_opts = wildcards.opts.split('-')
 
     fig, ax = plt.subplots(figsize=map_figsize, subplot_kw={"projection": ccrs.PlateCarree()})
-    plot_map(n, ax, snakemake.wildcards.attr, opts)
+    plot_map(n, ax, wildcards.attr, config)
 
-    fig.savefig(snakemake.output.only_map, dpi=150, bbox_inches='tight')
+    fig.savefig(out.only_map, dpi=150, bbox_inches='tight')
 
     ax1 = fig.add_axes([-0.115, 0.625, 0.2, 0.2])
     plot_total_energy_pie(n, ax1)
@@ -278,12 +279,12 @@ if __name__ == "__main__":
     ax2 = fig.add_axes([-0.075, 0.1, 0.1, 0.45])
     plot_total_cost_bar(n, ax2)
 
-    ll = snakemake.wildcards.ll
+    ll = wildcards.ll
     ll_type = ll[0]
     ll_factor = ll[1:]
     lbl = dict(c='line cost', v='line volume')[ll_type]
     amnt = '{ll} x today\'s'.format(ll=ll_factor) if ll_factor != 'opt' else 'optimal'
     fig.suptitle('Expansion to {amount} {label} at {clusters} clusters'
-                 .format(amount=amnt, label=lbl, clusters=snakemake.wildcards.clusters))
+                 .format(amount=amnt, label=lbl, clusters=wildcards.clusters))
 
-    fig.savefig(snakemake.output.ext, transparent=True, bbox_inches='tight')
+    fig.savefig(out.ext, transparent=True, bbox_inches='tight')
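For readers unfamiliar with the ll wildcard parsed in the figure title above: its first character selects a transmission cost ('c') or volume ('v') limit and the remainder is the expansion factor relative to today's grid, or 'opt'. A standalone sketch of the same title logic with hypothetical wildcard values (illustration only, not part of the patch):

    def describe_ll(ll):
        # mirrors the parsing in plot_network.py: one type character plus a factor
        ll_type, ll_factor = ll[0], ll[1:]
        lbl = dict(c='line cost', v='line volume')[ll_type]
        amnt = "{} x today's".format(ll_factor) if ll_factor != 'opt' else 'optimal'
        return f"Expansion to {amnt} {lbl}"

    print(describe_ll('v1.25'))  # Expansion to 1.25 x today's line volume
    print(describe_ll('copt'))   # Expansion to optimal line cost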
diff --git a/scripts/plot_p_nom_max.py b/scripts/plot_p_nom_max.py
index e79ad274..540608f9 100644
--- a/scripts/plot_p_nom_max.py
+++ b/scripts/plot_p_nom_max.py
@@ -53,11 +53,13 @@ if __name__ == "__main__":
                                    clusts= '5,full', country= 'all')
     configure_logging(snakemake)
 
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
+
     plot_kwds = dict(drawstyle="steps-post")
 
-    clusters = snakemake.wildcards.clusts.split(',')
-    techs = snakemake.wildcards.techs.split(',')
-    country = snakemake.wildcards.country
+    clusters = wildcards.clusts.split(',')
+    techs = wildcards.techs.split(',')
+    country = wildcards.country
     if country == 'all':
         country = None
     else:
@@ -66,7 +68,7 @@ if __name__ == "__main__":
     fig, axes = plt.subplots(1, len(techs))
 
     for j, cluster in enumerate(clusters):
-        net = pypsa.Network(snakemake.input[j])
+        net = pypsa.Network(paths[j])
 
         for i, tech in enumerate(techs):
             cum_p_nom_max(net, tech, country).plot(x="p_max_pu", y="cum_p_nom_max",
@@ -79,4 +81,4 @@ if __name__ == "__main__":
 
     plt.legend(title="Cluster level")
 
-    fig.savefig(snakemake.output[0], transparent=True, bbox_inches='tight')
+    fig.savefig(out[0], transparent=True, bbox_inches='tight')
diff --git a/scripts/prepare_links_p_nom.py b/scripts/prepare_links_p_nom.py
index b83089d6..6bd4bca4 100644
--- a/scripts/prepare_links_p_nom.py
+++ b/scripts/prepare_links_p_nom.py
@@ -37,7 +37,7 @@ Description
 """
 
 import logging
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 import pandas as pd
 
@@ -63,6 +63,8 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('prepare_links_p_nom', simpl='', network='elec')
     configure_logging(snakemake)
 
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
+
     links_p_nom = pd.read_html('https://en.wikipedia.org/wiki/List_of_HVDC_projects', header=0, match="SwePol")[0]
 
     mw = "Power (MW)"
@@ -74,4 +76,4 @@ if __name__ == "__main__":
     links_p_nom['x1'], links_p_nom['y1'] = extract_coordinates(links_p_nom['Converterstation 1'])
     links_p_nom['x2'], links_p_nom['y2'] = extract_coordinates(links_p_nom['Converterstation 2'])
 
-    links_p_nom.dropna(subset=['x1', 'y1', 'x2', 'y2']).to_csv(snakemake.output[0], index=False)
+    links_p_nom.dropna(subset=['x1', 'y1', 'x2', 'y2']).to_csv(out[0], index=False)
diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py
index 3eb244cf..19a395ea 100755
--- a/scripts/prepare_network.py
+++ b/scripts/prepare_network.py
@@ -56,7 +56,7 @@ Description
 """
 
 import logging
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 import re
 import pypsa
@@ -206,15 +206,17 @@ if __name__ == "__main__":
                                   clusters='40', ll='v0.3', opts='Co2L-24H')
     configure_logging(snakemake)
 
-    opts = snakemake.wildcards.opts.split('-')
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
-    n = pypsa.Network(snakemake.input[0])
+    opts = wildcards.opts.split('-')
+
+    n = pypsa.Network(paths[0])
     Nyears = n.snapshot_weightings.objective.sum() / 8760.
-    costs = load_costs(tech_costs = snakemake.input.tech_costs,
-                       config = snakemake.config['costs'],
-                       elec_config = snakemake.config['electricity'], Nyears = Nyears)
+    costs = load_costs(tech_costs = paths.tech_costs,
+                       config = config['costs'],
+                       elec_config = config['electricity'], Nyears = Nyears)
 
-    set_line_s_max_pu(n, s_max_pu=snakemake.config['lines']['s_max_pu'])
+    set_line_s_max_pu(n, s_max_pu=config['lines']['s_max_pu'])
 
     for o in opts:
         m = re.match(r'^\d+h$', o, re.IGNORECASE)
@@ -225,7 +227,7 @@ if __name__ == "__main__":
     for o in opts:
         m = re.match(r'^\d+seg$', o, re.IGNORECASE)
         if m is not None:
-            solver_name = snakemake.config["solving"]["solver"]["name"]
+            solver_name = config["solving"]["solver"]["name"]
             n = apply_time_segmentation(n, m.group(0)[:-3], solver_name=solver_name)
             break
 
@@ -233,10 +235,10 @@ if __name__ == "__main__":
         if "Co2L" in o:
             m = re.findall("[0-9]*\.?[0-9]+$", o)
             if len(m) > 0:
-                co2limit = float(m[0]) * snakemake.config['electricity']['co2base']
+                co2limit = float(m[0]) * config['electricity']['co2base']
                 add_co2limit(n, co2limit, Nyears)
             else:
-                add_co2limit(n, snakemake.config['electricity']['co2limit'], Nyears)
+                add_co2limit(n, config['electricity']['co2limit'], Nyears)
             break
 
     for o in opts:
@@ -257,17 +259,17 @@ if __name__ == "__main__":
             c.df.loc[sel,attr] *= factor
 
     if 'Ep' in opts:
-        add_emission_prices(n, emission_prices=snakemake.config['costs']['emission_prices'])
+        add_emission_prices(n, config['costs']['emission_prices'])
 
-    ll_type, factor = snakemake.wildcards.ll[0], snakemake.wildcards.ll[1:]
+    ll_type, factor = wildcards.ll[0], wildcards.ll[1:]
     set_transmission_limit(n, ll_type, factor, costs, Nyears)
 
-    set_line_nom_max(n, s_nom_max_set=snakemake.config["lines"].get("s_nom_max,", np.inf),
-                     p_nom_max_set=snakemake.config["links"].get("p_nom_max,", np.inf))
+    set_line_nom_max(n, s_nom_max_set=config["lines"].get("s_nom_max,", np.inf),
+                     p_nom_max_set=config["links"].get("p_nom_max,", np.inf))
 
     if "ATK" in opts:
         enforce_autarky(n)
     elif "ATKc" in opts:
         enforce_autarky(n, only_crossborder=True)
 
-    n.export_to_netcdf(snakemake.output[0])
+    n.export_to_netcdf(out[0])
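The opts wildcard handled in prepare_network.py above is a dash-separated list of scenario switches: a trailing <n>h token triggers temporal averaging, <n>seg time segmentation, Co2L (optionally followed by a fraction) a CO2 cap, Ep emission prices, and ATK/ATKc autarky. A standalone sketch of how one such wildcard value decomposes, reusing the regular expressions from the script (example value only):

    import re

    opts = 'Co2L0.05-24H'.split('-')  # example wildcard value

    for o in opts:
        if re.match(r'^\d+h$', o, re.IGNORECASE):
            print(f"average snapshots down to a {o} resolution")
        elif "Co2L" in o:
            m = re.findall(r'[0-9]*\.?[0-9]+$', o)
            # with a trailing number: cap = fraction * co2base; otherwise the configured co2limit applies
            print(f"apply CO2 limit, fraction of co2base: {m[0] if m else 'use co2limit'}")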
diff --git a/scripts/retrieve_databundle.py b/scripts/retrieve_databundle.py
index 86869879..c5a31f81 100644
--- a/scripts/retrieve_databundle.py
+++ b/scripts/retrieve_databundle.py
@@ -33,7 +33,7 @@ The :ref:`tutorial` uses a smaller `data bundle