move snakemake keys (input, output, config, ...) to own variables
parent fccb59098d
commit deac9f32e7
@@ -95,6 +95,10 @@ def pdbcast(v, h):
     return pd.DataFrame(v.values.reshape((-1, 1)) * h.values,
                         index=v.index, columns=h.index)
 
+def retrieve_snakemake_keys(snakemake):
+    return (snakemake.input, snakemake.config, snakemake.wildcards,
+            snakemake.log, snakemake.output)
+
 
 def load_network_for_plots(fn, tech_costs, config, combine_hydro_ps=True):
     import pypsa
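Aside (not part of the commit): the new helper only reads five attributes off the snakemake object, so any object exposing input, config, wildcards, log and output can stand in for it, e.g. for a quick interactive check. A minimal sketch; the file names and config values below are made up.

    from types import SimpleNamespace

    def retrieve_snakemake_keys(snakemake):
        return (snakemake.input, snakemake.config, snakemake.wildcards,
                snakemake.log, snakemake.output)

    # stand-in for the real snakemake object (illustrative values only)
    fake = SimpleNamespace(
        input=SimpleNamespace(base_network="networks/base.nc"),
        config={"countries": ["DE", "FR"]},
        wildcards=SimpleNamespace(simpl=""),
        log=SimpleNamespace(solver="logs/solver.log"),
        output=["networks/elec.nc"],
    )

    paths, config, wildcards, logs, out = retrieve_snakemake_keys(fake)
    assert paths.base_network == "networks/base.nc"
    assert out[0] == "networks/elec.nc"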
@@ -84,7 +84,7 @@ It further adds extendable ``generators`` with **zero** capacity for
 """
 
 import logging
-from _helpers import configure_logging, update_p_nom_max
+from _helpers import configure_logging, retrieve_snakemake_keys, update_p_nom_max
 
 import pypsa
 import pandas as pd

@@ -546,8 +546,7 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('add_electricity')
     configure_logging(snakemake)
 
-    config = snakemake.config
-    paths = snakemake.input
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
     n = pypsa.Network(paths.base_network)
     Nyears = n.snapshot_weightings.objective.sum() / 8760.

@@ -583,4 +582,4 @@ if __name__ == "__main__":
 
     add_nice_carrier_names(n, config)
 
-    n.export_to_netcdf(snakemake.output[0])
+    n.export_to_netcdf(out[0])
@@ -50,7 +50,7 @@ The rule :mod:`add_extra_components` attaches additional extendable components t
 - ``Stores`` of carrier 'H2' and/or 'battery' in combination with ``Links``. If this option is chosen, the script adds extra buses with corresponding carrier where energy ``Stores`` are attached and which are connected to the corresponding power buses via two links, one each for charging and discharging. This leads to three investment variables for the energy capacity, charging and discharging capacity of the storage unit.
 """
 import logging
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 import pypsa
 import pandas as pd
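The bus/store/link layout described in the docstring excerpt above can be sketched with plain PyPSA calls roughly as follows. This is an illustration only, with made-up bus names and efficiencies and no costs; the actual implementation lives in attach_stores in this script.

    import pypsa

    n = pypsa.Network()
    n.add("Bus", "node", carrier="AC")      # existing power bus
    n.add("Bus", "node H2", carrier="H2")   # extra bus for the storage carrier

    # investment variable 1: energy capacity of the store
    n.add("Store", "node H2 store", bus="node H2", carrier="H2", e_nom_extendable=True)

    # investment variables 2 and 3: charging and discharging capacity via two links
    n.add("Link", "node H2 charger", bus0="node", bus1="node H2",
          p_nom_extendable=True, efficiency=0.8)
    n.add("Link", "node H2 discharger", bus0="node H2", bus1="node",
          p_nom_extendable=True, efficiency=0.58)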
@@ -192,18 +192,19 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('add_extra_components', network='elec',
                                    simpl='', clusters=5)
     configure_logging(snakemake)
-    paths = snakemake.input
+
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
     n = pypsa.Network(paths.network)
-    elec_config = snakemake.config['electricity']
+    elec_config = config['electricity']
 
     Nyears = n.snapshot_weightings.objective.sum() / 8760.
-    costs = load_costs(paths.tech_costs, snakemake.config['costs'], elec_config, Nyears=Nyears)
+    costs = load_costs(paths.tech_costs, config['costs'], elec_config, Nyears=Nyears)
 
     attach_storageunits(n, costs, elec_config)
     attach_stores(n, costs, elec_config)
     attach_hydrogen_pipelines(n, costs, elec_config)
 
-    add_nice_carrier_names(n, snakemake.config)
+    add_nice_carrier_names(n, config)
 
-    n.export_to_netcdf(snakemake.output[0])
+    n.export_to_netcdf(out[0])
@@ -63,7 +63,7 @@ Description
 """
 
 import logging
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 import pypsa
 import yaml

@@ -588,11 +588,10 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('base_network')
     configure_logging(snakemake)
 
-    paths = snakemake.input
-    config = snakemake.config
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
     n = base_network(paths.eg_buses, paths.eg_converters, paths.eg_transformers, paths.eg_lines, paths.eg_links,
                      paths.links_p_nom, paths.links_tyndp, paths.europe_shape, paths.country_shapes, paths.offshore_shapes,
                      paths.parameter_corrections, config)
 
-    n.export_to_netcdf(snakemake.output[0])
+    n.export_to_netcdf(out[0])
@@ -42,7 +42,7 @@ Description
 """
 
 import logging
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 import pypsa
 import os

@@ -67,12 +67,14 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('build_bus_regions')
     configure_logging(snakemake)
 
-    countries = snakemake.config['countries']
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
-    n = pypsa.Network(snakemake.input.base_network)
+    countries = config['countries']
 
-    country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index('name')['geometry']
-    offshore_shapes = gpd.read_file(snakemake.input.offshore_shapes).set_index('name')['geometry']
+    n = pypsa.Network(paths.base_network)
+
+    country_shapes = gpd.read_file(paths.country_shapes).set_index('name')['geometry']
+    offshore_shapes = gpd.read_file(paths.offshore_shapes).set_index('name')['geometry']
 
     onshore_regions = []
     offshore_regions = []

@@ -103,6 +105,6 @@ if __name__ == "__main__":
             offshore_regions_c = offshore_regions_c.loc[offshore_regions_c.area > 1e-2]
             offshore_regions.append(offshore_regions_c)
 
-    save_to_geojson(pd.concat(onshore_regions, ignore_index=True), snakemake.output.regions_onshore)
+    save_to_geojson(pd.concat(onshore_regions, ignore_index=True), out.regions_onshore)
 
-    save_to_geojson(pd.concat(offshore_regions, ignore_index=True), snakemake.output.regions_offshore)
+    save_to_geojson(pd.concat(offshore_regions, ignore_index=True), out.regions_offshore)
@@ -95,7 +95,7 @@ import logging
 import atlite
 import geopandas as gpd
 import pandas as pd
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 
 logger = logging.getLogger(__name__)

@@ -106,16 +106,18 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('build_cutout', cutout='europe-2013-era5')
     configure_logging(snakemake)
 
-    cutout_params = snakemake.config['atlite']['cutouts'][snakemake.wildcards.cutout]
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
-    snapshots = pd.date_range(freq='h', **snakemake.config['snapshots'])
+    cutout_params = config['atlite']['cutouts'][wildcards.cutout]
+
+    snapshots = pd.date_range(freq='h', **config['snapshots'])
     time = [snapshots[0], snapshots[-1]]
     cutout_params['time'] = slice(*cutout_params.get('time', time))
 
     if {'x', 'y', 'bounds'}.isdisjoint(cutout_params):
         # Determine the bounds from bus regions with a buffer of two grid cells
-        onshore = gpd.read_file(snakemake.input.regions_onshore)
-        offshore = gpd.read_file(snakemake.input.regions_offshore)
+        onshore = gpd.read_file(paths.regions_onshore)
+        offshore = gpd.read_file(paths.regions_offshore)
         regions = onshore.append(offshore)
         d = max(cutout_params.get('dx', 0.25), cutout_params.get('dy', 0.25))*2
         cutout_params['bounds'] = regions.total_bounds + [-d, -d, d, d]

@@ -126,5 +128,5 @@ if __name__ == "__main__":
 
     logging.info(f"Preparing cutout with parameters {cutout_params}.")
     features = cutout_params.pop('features', None)
-    cutout = atlite.Cutout(snakemake.output[0], **cutout_params)
+    cutout = atlite.Cutout(out[0], **cutout_params)
     cutout.prepare(features=features)
@@ -60,7 +60,7 @@ Description
 """
 
 import logging
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 import atlite
 import geopandas as gpd

@@ -74,22 +74,24 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('build_hydro_profile')
     configure_logging(snakemake)
 
-    config = snakemake.config['renewable']['hydro']
-    cutout = atlite.Cutout(snakemake.input.cutout)
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
-    countries = snakemake.config['countries']
-    country_shapes = (gpd.read_file(snakemake.input.country_shapes)
+    config_hydro = config['renewable']['hydro']
+    cutout = atlite.Cutout(paths.cutout)
+
+    countries = config['countries']
+    country_shapes = (gpd.read_file(paths.country_shapes)
                       .set_index('name')['geometry'].reindex(countries))
     country_shapes.index.name = 'countries'
 
     eia_stats = vhydro.get_eia_annual_hydro_generation(
-        snakemake.input.eia_hydro_generation).reindex(columns=countries)
+        paths.eia_hydro_generation).reindex(columns=countries)
     inflow = cutout.runoff(shapes=country_shapes,
                            smooth=True,
                            lower_threshold_quantile=True,
                            normalize_using_yearly=eia_stats)
 
-    if 'clip_min_inflow' in config:
-        inflow = inflow.where(inflow > config['clip_min_inflow'], 0)
+    if 'clip_min_inflow' in config_hydro:
+        inflow = inflow.where(inflow > config_hydro['clip_min_inflow'], 0)
 
-    inflow.to_netcdf(snakemake.output[0])
+    inflow.to_netcdf(out[0])
@@ -37,7 +37,7 @@ Outputs
 
 import logging
 logger = logging.getLogger(__name__)
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 import pandas as pd
 import numpy as np

@@ -196,7 +196,8 @@ if __name__ == "__main__":
 
     configure_logging(snakemake)
 
-    config = snakemake.config
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
     powerstatistics = config['load']['power_statistics']
     interpolate_limit = config['load']['interpolate_limit']
     countries = config['countries']

@@ -204,7 +205,7 @@ if __name__ == "__main__":
     years = slice(snapshots[0], snapshots[-1])
     time_shift = config['load']['time_shift_for_large_gaps']
 
-    load = load_timeseries(snakemake.input[0], years, countries, powerstatistics)
+    load = load_timeseries(paths[0], years, countries, powerstatistics)
 
     if config['load']['manual_adjustments']:
         load = manual_adjustment(load, powerstatistics)

@@ -221,5 +222,5 @@ if __name__ == "__main__":
                        '`time_shift_for_large_gaps` or modify the `manual_adjustment` function '
                        'for implementing the needed load data modifications.')
 
-    load.to_csv(snakemake.output[0])
+    load.to_csv(out[0])
@@ -40,7 +40,7 @@ Description
 """
 
 import logging
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 import atlite
 import geopandas as gpd

@@ -73,18 +73,19 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('build_natura_raster')
     configure_logging(snakemake)
 
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
+
-    cutouts = snakemake.input.cutouts
+    cutouts = paths.cutouts
     xs, Xs, ys, Ys = zip(*(determine_cutout_xXyY(cutout) for cutout in cutouts))
     bounds = transform_bounds(4326, 3035, min(xs), min(ys), max(Xs), max(Ys))
     transform, out_shape = get_transform_and_shape(bounds, res=100)
 
     # adjusted boundaries
-    shapes = gpd.read_file(snakemake.input.natura).to_crs(3035)
+    shapes = gpd.read_file(paths.natura).to_crs(3035)
     raster = ~geometry_mask(shapes.geometry, out_shape[::-1], transform)
     raster = raster.astype(rio.uint8)
 
-    with rio.open(snakemake.output[0], 'w', driver='GTiff', dtype=rio.uint8,
+    with rio.open(out[0], 'w', driver='GTiff', dtype=rio.uint8,
                   count=1, transform=transform, crs=3035, compress='lzw',
                   width=raster.shape[1], height=raster.shape[0]) as dst:
         dst.write(raster, indexes=1)
@@ -72,7 +72,7 @@ The configuration options ``electricity: powerplants_filter`` and ``electricity:
 """
 
 import logging
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 import pypsa
 import powerplantmatching as pm

@@ -99,7 +99,8 @@ if __name__ == "__main__":
         from _helpers import mock_snakemake
         snakemake = mock_snakemake('build_powerplants')
     configure_logging(snakemake)
-    paths = snakemake.input
+
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
     n = pypsa.Network(paths.base_network)
     countries = n.buses.country.unique()

@@ -115,12 +116,12 @@ if __name__ == "__main__":
                            df.Technology.replace('Steam Turbine',
                                                  'OCGT').fillna('OCGT')))))
 
-    ppl_query = snakemake.config['electricity']['powerplants_filter']
+    ppl_query = config['electricity']['powerplants_filter']
     if isinstance(ppl_query, str):
         ppl.query(ppl_query, inplace=True)
 
     # add carriers from own powerplant files:
-    custom_ppl_query = snakemake.config['electricity']['custom_powerplants']
+    custom_ppl_query = config['electricity']['custom_powerplants']
     ppl = add_custom_powerplants(ppl, paths.custom_powerplants, custom_ppl_query)
 
     cntries_without_ppl = [c for c in countries if c not in ppl.Country.unique()]

@@ -140,4 +141,4 @@ if __name__ == "__main__":
     if bus_null_b.any():
         logging.warning(f"Couldn't find close bus for {bus_null_b.sum()} powerplants")
 
-    ppl.to_csv(snakemake.output[0])
+    ppl.to_csv(out[0])
@@ -190,7 +190,7 @@ from pypsa.geo import haversine
 from shapely.geometry import LineString
 import time
 
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 logger = logging.getLogger(__name__)
 

@@ -201,10 +201,12 @@ if __name__ == '__main__':
         snakemake = mock_snakemake('build_renewable_profiles', technology='solar')
     configure_logging(snakemake)
     pgb.streams.wrap_stderr()
-    paths = snakemake.input
-    nprocesses = snakemake.config['atlite'].get('nprocesses')
-    noprogress = not snakemake.config['atlite'].get('show_progress', True)
-    config = snakemake.config['renewable'][snakemake.wildcards.technology]
+
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
+
+    nprocesses = config['atlite'].get('nprocesses')
+    noprogress = not config['atlite'].get('show_progress', True)
+    config = config['renewable'][wildcards.technology]
     resource = config['resource'] # pv panel config / wind turbine config
     correction_factor = config.get('correction_factor', 1.)
     capacity_per_sqkm = config['capacity_per_sqkm']

@@ -313,7 +315,7 @@ if __name__ == '__main__':
                             average_distance.rename('average_distance')])
 
 
-    if snakemake.wildcards.technology.startswith("offwind"):
+    if wildcards.technology.startswith("offwind"):
         logger.info('Calculate underwater fraction of connections.')
         offshore_shape = gpd.read_file(paths['offshore_shapes']).unary_union
         underwater_fraction = []

@@ -333,4 +335,4 @@ if __name__ == '__main__':
         min_p_max_pu = config['clip_p_max_pu']
         ds['profile'] = ds['profile'].where(ds['profile'] >= min_p_max_pu, 0)
 
-    ds.to_netcdf(snakemake.output.profile)
+    ds.to_netcdf(out.profile)
@@ -68,7 +68,7 @@ Description
 """
 
 import logging
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 import os
 import numpy as np

@@ -217,13 +217,12 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('build_shapes')
     configure_logging(snakemake)
 
-    paths = snakemake.input
-    out = snakemake.output
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
-    country_shapes = countries(paths.naturalearth, snakemake.config['countries'])
+    country_shapes = countries(paths.naturalearth, config['countries'])
     save_to_geojson(country_shapes, out.country_shapes)
 
-    offshore_shapes = eez(country_shapes, paths.eez, snakemake.config['countries'])
+    offshore_shapes = eez(country_shapes, paths.eez, config['countries'])
     save_to_geojson(offshore_shapes, out.offshore_shapes)
 
     europe_shape = country_cover(country_shapes, offshore_shapes)
@@ -122,7 +122,7 @@ Exemplary unsolved network clustered to 37 nodes:
 """
 
 import logging
-from _helpers import configure_logging, update_p_nom_max
+from _helpers import configure_logging, retrieve_snakemake_keys, update_p_nom_max
 
 import pypsa
 import os

@@ -306,7 +306,7 @@ def cluster_regions(busmaps, input=None, output=None):
 
     for which in ('regions_onshore', 'regions_offshore'):
         regions = gpd.read_file(getattr(input, which)).set_index('name')
-        geom_c = regions.geometry.groupby(busmap).apply(shapely.ops.cascaded_union)
+        geom_c = regions.geometry.groupby(busmap).apply(shapely.ops.unary_union)
         regions_c = gpd.GeoDataFrame(dict(geometry=geom_c))
         regions_c.index.name = 'name'
         save_to_geojson(regions_c, getattr(output, which))

@@ -328,19 +328,21 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('cluster_network', network='elec', simpl='', clusters='5')
     configure_logging(snakemake)
 
-    n = pypsa.Network(snakemake.input.network)
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
-    focus_weights = snakemake.config.get('focus_weights', None)
+    n = pypsa.Network(paths.network)
+
+    focus_weights = config.get('focus_weights', None)
 
     renewable_carriers = pd.Index([tech
                                    for tech in n.generators.carrier.unique()
-                                   if tech in snakemake.config['renewable']])
+                                   if tech in config['renewable']])
 
-    if snakemake.wildcards.clusters.endswith('m'):
-        n_clusters = int(snakemake.wildcards.clusters[:-1])
+    if wildcards.clusters.endswith('m'):
+        n_clusters = int(wildcards.clusters[:-1])
         aggregate_carriers = pd.Index(n.generators.carrier.unique()).difference(renewable_carriers)
     else:
-        n_clusters = int(snakemake.wildcards.clusters)
+        n_clusters = int(wildcards.clusters)
         aggregate_carriers = None # All
 
     if n_clusters == len(n.buses):

@@ -349,11 +351,11 @@ if __name__ == "__main__":
         linemap = n.lines.index.to_series()
         clustering = pypsa.networkclustering.Clustering(n, busmap, linemap, linemap, pd.Series(dtype='O'))
     else:
-        line_length_factor = snakemake.config['lines']['length_factor']
+        line_length_factor = config['lines']['length_factor']
         Nyears = n.snapshot_weightings.objective.sum()/8760
-        hvac_overhead_cost = (load_costs(tech_costs = snakemake.input.tech_costs,
-                                         config = snakemake.config['costs'],
-                                         elec_config=snakemake.config['electricity'], Nyears = Nyears)
+        hvac_overhead_cost = (load_costs(tech_costs = paths.tech_costs,
+                                         config = config['costs'],
+                                         elec_config=config['electricity'], Nyears = Nyears)
                               .at['HVAC overhead', 'capital_cost'])
 
         def consense(x):

@@ -362,24 +364,24 @@ if __name__ == "__main__":
                 "The `potential` configuration option must agree for all renewable carriers, for now!"
             )
             return v
-        potential_mode = consense(pd.Series([snakemake.config['renewable'][tech]['potential']
+        potential_mode = consense(pd.Series([config['renewable'][tech]['potential']
                                              for tech in renewable_carriers]))
-        custom_busmap = snakemake.config["enable"].get("custom_busmap", False)
+        custom_busmap = config["enable"].get("custom_busmap", False)
         if custom_busmap:
-            custom_busmap = pd.read_csv(snakemake.input.custom_busmap, index_col=0, squeeze=True)
+            custom_busmap = pd.read_csv(paths.custom_busmap, index_col=0, squeeze=True)
             custom_busmap.index = custom_busmap.index.astype(str)
-            logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}")
+            logger.info(f"Imported custom busmap from {paths.custom_busmap}")
         clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap, aggregate_carriers,
                                                line_length_factor=line_length_factor,
                                                potential_mode=potential_mode,
-                                               solver_name=snakemake.config['solving']['solver']['name'],
+                                               solver_name=config['solving']['solver']['name'],
                                                extended_link_costs=hvac_overhead_cost,
                                                focus_weights=focus_weights)
 
     update_p_nom_max(n)
 
-    clustering.network.export_to_netcdf(snakemake.output.network)
+    clustering.network.export_to_netcdf(out.network)
     for attr in ('busmap', 'linemap'): #also available: linemap_positive, linemap_negative
-        getattr(clustering, attr).to_csv(snakemake.output[attr])
+        getattr(clustering, attr).to_csv(out[attr])
 
-    cluster_regions((clustering.busmap,), snakemake.input, snakemake.output)
+    cluster_regions((clustering.busmap,), paths, out)
@@ -20,8 +20,8 @@ Description
 """
 
 import logging
-from _helpers import (load_network_for_plots, aggregate_p, aggregate_costs,
-                      configure_logging)
+from _helpers import (retrieve_snakemake_keys, load_network_for_plots,
+                      aggregate_p, aggregate_costs, configure_logging)
 
 import pandas as pd
 import numpy as np

@@ -259,18 +259,19 @@ if __name__ == "__main__":
 
     set_plot_style()
 
-    opts = snakemake.config['plotting']
-    map_figsize = opts['map']['figsize']
-    map_boundaries = opts['map']['boundaries']
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
-    n = load_network_for_plots(snakemake.input.network, snakemake.input.tech_costs, snakemake.config)
+    map_figsize = config['map']['figsize']
+    map_boundaries = config['map']['boundaries']
 
-    scenario_opts = snakemake.wildcards.opts.split('-')
+    n = load_network_for_plots(paths.network, paths.tech_costs, config)
+
+    scenario_opts = wildcards.opts.split('-')
 
     fig, ax = plt.subplots(figsize=map_figsize, subplot_kw={"projection": ccrs.PlateCarree()})
-    plot_map(n, ax, snakemake.wildcards.attr, opts)
+    plot_map(n, ax, wildcards.attr, config)
 
-    fig.savefig(snakemake.output.only_map, dpi=150, bbox_inches='tight')
+    fig.savefig(out.only_map, dpi=150, bbox_inches='tight')
 
     ax1 = fig.add_axes([-0.115, 0.625, 0.2, 0.2])
     plot_total_energy_pie(n, ax1)

@@ -278,12 +279,12 @@ if __name__ == "__main__":
     ax2 = fig.add_axes([-0.075, 0.1, 0.1, 0.45])
     plot_total_cost_bar(n, ax2)
 
-    ll = snakemake.wildcards.ll
+    ll = wildcards.ll
     ll_type = ll[0]
     ll_factor = ll[1:]
     lbl = dict(c='line cost', v='line volume')[ll_type]
     amnt = '{ll} x today\'s'.format(ll=ll_factor) if ll_factor != 'opt' else 'optimal'
     fig.suptitle('Expansion to {amount} {label} at {clusters} clusters'
                  .format(amount=amnt, label=lbl, clusters=wildcards.clusters))
 
-    fig.savefig(snakemake.output.ext, transparent=True, bbox_inches='tight')
+    fig.savefig(out.ext, transparent=True, bbox_inches='tight')
@@ -53,11 +53,13 @@ if __name__ == "__main__":
                                    clusts= '5,full', country= 'all')
     configure_logging(snakemake)
 
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
+
     plot_kwds = dict(drawstyle="steps-post")
 
-    clusters = snakemake.wildcards.clusts.split(',')
-    techs = snakemake.wildcards.techs.split(',')
-    country = snakemake.wildcards.country
+    clusters = wildcards.clusts.split(',')
+    techs = wildcards.techs.split(',')
+    country = wildcards.country
     if country == 'all':
         country = None
     else:

@@ -66,7 +68,7 @@ if __name__ == "__main__":
     fig, axes = plt.subplots(1, len(techs))
 
     for j, cluster in enumerate(clusters):
-        net = pypsa.Network(snakemake.input[j])
+        net = pypsa.Network(paths[j])
 
         for i, tech in enumerate(techs):
             cum_p_nom_max(net, tech, country).plot(x="p_max_pu", y="cum_p_nom_max",

@@ -79,4 +81,4 @@ if __name__ == "__main__":
 
     plt.legend(title="Cluster level")
 
-    fig.savefig(snakemake.output[0], transparent=True, bbox_inches='tight')
+    fig.savefig(out[0], transparent=True, bbox_inches='tight')
@@ -37,7 +37,7 @@ Description
 """
 
 import logging
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 import pandas as pd
 

@@ -63,6 +63,8 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('prepare_links_p_nom', simpl='', network='elec')
     configure_logging(snakemake)
 
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
+
     links_p_nom = pd.read_html('https://en.wikipedia.org/wiki/List_of_HVDC_projects', header=0, match="SwePol")[0]
 
     mw = "Power (MW)"

@@ -74,4 +76,4 @@ if __name__ == "__main__":
     links_p_nom['x1'], links_p_nom['y1'] = extract_coordinates(links_p_nom['Converterstation 1'])
     links_p_nom['x2'], links_p_nom['y2'] = extract_coordinates(links_p_nom['Converterstation 2'])
 
-    links_p_nom.dropna(subset=['x1', 'y1', 'x2', 'y2']).to_csv(snakemake.output[0], index=False)
+    links_p_nom.dropna(subset=['x1', 'y1', 'x2', 'y2']).to_csv(out[0], index=False)
@@ -56,7 +56,7 @@ Description
 """
 
 import logging
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 import re
 import pypsa

@@ -206,15 +206,17 @@ if __name__ == "__main__":
                                   clusters='40', ll='v0.3', opts='Co2L-24H')
     configure_logging(snakemake)
 
-    opts = snakemake.wildcards.opts.split('-')
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
 
-    n = pypsa.Network(snakemake.input[0])
+    opts = wildcards.opts.split('-')
+
+    n = pypsa.Network(paths[0])
     Nyears = n.snapshot_weightings.objective.sum() / 8760.
-    costs = load_costs(tech_costs = snakemake.input.tech_costs,
-                       config = snakemake.config['costs'],
-                       elec_config = snakemake.config['electricity'], Nyears = Nyears)
+    costs = load_costs(tech_costs = paths.tech_costs,
+                       config = config['costs'],
+                       elec_config = config['electricity'], Nyears = Nyears)
 
-    set_line_s_max_pu(n, s_max_pu=snakemake.config['lines']['s_max_pu'])
+    set_line_s_max_pu(n, s_max_pu=config['lines']['s_max_pu'])
 
     for o in opts:
         m = re.match(r'^\d+h$', o, re.IGNORECASE)

@@ -225,7 +227,7 @@ if __name__ == "__main__":
     for o in opts:
         m = re.match(r'^\d+seg$', o, re.IGNORECASE)
         if m is not None:
-            solver_name = snakemake.config["solving"]["solver"]["name"]
+            solver_name = config["solving"]["solver"]["name"]
             n = apply_time_segmentation(n, m.group(0)[:-3], solver_name=solver_name)
             break

@@ -233,10 +235,10 @@ if __name__ == "__main__":
         if "Co2L" in o:
            m = re.findall("[0-9]*\.?[0-9]+$", o)
            if len(m) > 0:
-               co2limit = float(m[0]) * snakemake.config['electricity']['co2base']
+               co2limit = float(m[0]) * config['electricity']['co2base']
                add_co2limit(n, co2limit, Nyears)
            else:
-               add_co2limit(n, snakemake.config['electricity']['co2limit'], Nyears)
+               add_co2limit(n, config['electricity']['co2limit'], Nyears)
            break
 
     for o in opts:

@@ -257,17 +259,17 @@ if __name__ == "__main__":
                 c.df.loc[sel,attr] *= factor
 
     if 'Ep' in opts:
-        add_emission_prices(n, emission_prices=snakemake.config['costs']['emission_prices'])
+        add_emission_prices(n, config['costs']['emission_prices'])
 
-    ll_type, factor = snakemake.wildcards.ll[0], snakemake.wildcards.ll[1:]
+    ll_type, factor = wildcards.ll[0], wildcards.ll[1:]
     set_transmission_limit(n, ll_type, factor, costs, Nyears)
 
-    set_line_nom_max(n, s_nom_max_set=snakemake.config["lines"].get("s_nom_max,", np.inf),
-                     p_nom_max_set=snakemake.config["links"].get("p_nom_max,", np.inf))
+    set_line_nom_max(n, s_nom_max_set=config["lines"].get("s_nom_max,", np.inf),
+                     p_nom_max_set=config["links"].get("p_nom_max,", np.inf))
 
     if "ATK" in opts:
         enforce_autarky(n)
     elif "ATKc" in opts:
         enforce_autarky(n, only_crossborder=True)
 
-    n.export_to_netcdf(snakemake.output[0])
+    n.export_to_netcdf(out[0])
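For context on the wildcard handling in the hunks above: the opts wildcard is a dash-separated option string, and each token is matched with small regular expressions exactly as shown. A standalone sketch with an assumed option string (not taken from the repository):

    import re

    opts = "Co2L0.05-3H".split('-')   # e.g. what wildcards.opts.split('-') yields

    for o in opts:
        if re.match(r'^\d+h$', o, re.IGNORECASE):
            print(f"resample the network to a {o} resolution")
        if "Co2L" in o:
            m = re.findall(r"[0-9]*\.?[0-9]+$", o)
            if m:
                # fraction of config['electricity']['co2base'], as in the hunk above
                print(f"cap CO2 at {float(m[0])} of the base emissions")
            else:
                print("cap CO2 at config['electricity']['co2limit']")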
@@ -33,7 +33,7 @@ The :ref:`tutorial` uses a smaller `data bundle <https://zenodo.org/record/35179
 """
 
 import logging
-from _helpers import progress_retrieve, configure_logging
+from _helpers import progress_retrieve, retrieve_snakemake_keys, configure_logging
 
 import tarfile
 from pathlib import Path

@@ -50,7 +50,9 @@ if __name__ == "__main__":
         rootpath = '.'
     configure_logging(snakemake) # TODO Make logging compatible with progressbar (see PR #102)
 
-    if snakemake.config['tutorial']:
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
+
+    if config['tutorial']:
         url = "https://zenodo.org/record/3517921/files/pypsa-eur-tutorial-data-bundle.tar.xz"
     else:
         url = "https://zenodo.org/record/3517935/files/pypsa-eur-data-bundle.tar.xz"
@@ -83,7 +83,7 @@ The rule :mod:`simplify_network` does up to four things:
 """
 
 import logging
-from _helpers import configure_logging, update_p_nom_max
+from _helpers import configure_logging, retrieve_snakemake_keys, update_p_nom_max
 
 from cluster_network import clustering_for_n_clusters, cluster_regions
 from add_electricity import load_costs

@@ -386,14 +386,16 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('simplify_network', simpl='', network='elec')
     configure_logging(snakemake)
 
-    n = pypsa.Network(snakemake.input.network)
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
+
+    n = pypsa.Network(paths.network)
 
     n, trafo_map = simplify_network_to_380(n)
 
     Nyears = n.snapshot_weightings.objective.sum() / 8760
-    technology_costs = load_costs(tech_costs = snakemake.input.tech_costs,
-                                  config = snakemake.config['costs'],
-                                  elec_config = snakemake.config['electricity'], Nyears = Nyears)
+    technology_costs = load_costs(tech_costs = paths.tech_costs,
+                                  config = config['costs'],
+                                  elec_config = config['electricity'], Nyears = Nyears)
 
     n, simplify_links_map = simplify_links(n, technology_costs)
 

@@ -401,12 +403,12 @@ if __name__ == "__main__":
 
     busmaps = [trafo_map, simplify_links_map, stub_map]
 
-    if snakemake.config.get('clustering', {}).get('simplify', {}).get('to_substations', False):
+    if config.get('clustering', {}).get('simplify', {}).get('to_substations', False):
         n, substation_map = aggregate_to_substations(n)
         busmaps.append(substation_map)
 
-    if snakemake.wildcards.simpl:
-        n, cluster_map = cluster(n, int(snakemake.wildcards.simpl))
+    if wildcards.simpl:
+        n, cluster_map = cluster(n, int(wildcards.simpl))
         busmaps.append(cluster_map)
 
     # some entries in n.buses are not updated in previous functions, therefore can be wrong. as they are not needed

@@ -416,9 +418,9 @@ if __name__ == "__main__":
 
     update_p_nom_max(n)
 
-    n.export_to_netcdf(snakemake.output.network)
+    n.export_to_netcdf(out.network)
 
     busmap_s = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0])
-    busmap_s.to_csv(snakemake.output.busmap)
+    busmap_s.to_csv(out.busmap)
 
-    cluster_regions(busmaps, snakemake.input, snakemake.output)
+    cluster_regions(busmaps, paths, out)
@@ -77,7 +77,7 @@ Details (and errors made through this heuristic) are discussed in the paper
 """
 
 import logging
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 import numpy as np
 import pandas as pd

@@ -273,19 +273,21 @@ if __name__ == "__main__":
                                   clusters='5', ll='copt', opts='Co2L-BAU-CCL-24H')
     configure_logging(snakemake)
 
-    tmpdir = snakemake.config['solving'].get('tmpdir')
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
+
+    tmpdir = config['solving'].get('tmpdir')
     if tmpdir is not None:
         Path(tmpdir).mkdir(parents=True, exist_ok=True)
-    opts = snakemake.wildcards.opts.split('-')
-    solve_opts = snakemake.config['solving']['options']
+    opts = wildcards.opts.split('-')
+    solve_opts = config['solving']['options']
 
-    fn = getattr(snakemake.log, 'memory', None)
+    fn = getattr(logs, 'memory', None)
     with memory_logger(filename=fn, interval=30.) as mem:
-        n = pypsa.Network(snakemake.input[0])
+        n = pypsa.Network(paths[0])
         n = prepare_network(n, solve_opts)
-        n = solve_network(n, config=snakemake.config, opts=opts,
+        n = solve_network(n, config=config, opts=opts,
                           solver_dir=tmpdir,
-                          solver_logfile=snakemake.log.solver)
-        n.export_to_netcdf(snakemake.output[0])
+                          solver_logfile=logs.solver)
+        n.export_to_netcdf(out[0])
 
     logger.info("Maximum memory usage: {}".format(mem.mem_usage))
@@ -46,7 +46,7 @@ Description
 """
 
 import logging
-from _helpers import configure_logging
+from _helpers import configure_logging, retrieve_snakemake_keys
 
 import pypsa
 import numpy as np

@@ -100,25 +100,26 @@ if __name__ == "__main__":
                                   simpl='', clusters='5', ll='copt', opts='Co2L-BAU-24H')
     configure_logging(snakemake)
 
-    tmpdir = snakemake.config['solving'].get('tmpdir')
+    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
+
+    tmpdir = config['solving'].get('tmpdir')
     if tmpdir is not None:
         Path(tmpdir).mkdir(parents=True, exist_ok=True)
 
-    n = pypsa.Network(snakemake.input.unprepared)
-    n_optim = pypsa.Network(snakemake.input.optimized)
+    n = pypsa.Network(paths.unprepared)
+    n_optim = pypsa.Network(paths.optimized)
     n = set_parameters_from_optimized(n, n_optim)
     del n_optim
 
-    config = snakemake.config
-    opts = snakemake.wildcards.opts.split('-')
+    opts = wildcards.opts.split('-')
     config['solving']['options']['skip_iterations'] = False
 
-    fn = getattr(snakemake.log, 'memory', None)
+    fn = getattr(logs, 'memory', None)
    with memory_logger(filename=fn, interval=30.) as mem:
-        n = prepare_network(n, solve_opts=snakemake.config['solving']['options'])
+        n = prepare_network(n, solve_opts=config['solving']['options'])
         n = solve_network(n, config=config, opts=opts,
                           solver_dir=tmpdir,
-                          solver_logfile=snakemake.log.solver)
-        n.export_to_netcdf(snakemake.output[0])
+                          solver_logfile=logs.solver)
+        n.export_to_netcdf(out[0])
 
     logger.info("Maximum memory usage: {}".format(mem.mem_usage))