From dfb929f2cf87786349a30d5273b68f5f92d3958c Mon Sep 17 00:00:00 2001 From: martacki Date: Tue, 14 Sep 2021 16:34:02 +0200 Subject: [PATCH 01/26] remove snakemake dependencies in functions, use as kwarg instead --- config.yaml~ | 317 ++++++++++++++++++++++++++++++++ scripts/add_electricity.py | 101 +++++----- scripts/add_extra_components.py | 21 +-- scripts/build_powerplants.py | 9 +- scripts/build_shapes.py | 31 ++-- scripts/cluster_network.py | 28 ++- scripts/make_summary.py | 9 +- scripts/prepare_network.py | 48 ++--- scripts/simplify_network.py | 29 +-- 9 files changed, 445 insertions(+), 148 deletions(-) create mode 100644 config.yaml~ diff --git a/config.yaml~ b/config.yaml~ new file mode 100644 index 00000000..91f645f8 --- /dev/null +++ b/config.yaml~ @@ -0,0 +1,317 @@ +# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors +# +# SPDX-License-Identifier: CC0-1.0 + +version: 0.3.0 +tutorial: false + +logging: + level: INFO + format: '%(levelname)s:%(name)s:%(message)s' + +summary_dir: results + +scenario: + simpl: [''] + ll: ['copt'] + clusters: [37, 128, 256, 512, 1024] + opts: [Co2L-3H] + +countries: ['AL', 'AT', 'BA', 'BE', 'BG', 'CH', 'CZ', 'DE', 'DK', 'EE', 'ES', 'FI', 'FR', 'GB', 'GR', 'HR', 'HU', 'IE', 'IT', 'LT', 'LU', 'LV', 'ME', 'MK', 'NL', 'NO', 'PL', 'PT', 'RO', 'RS', 'SE', 'SI', 'SK'] + +snapshots: + start: "2013-01-01" + end: "2014-01-01" + closed: 'left' # end is not inclusive + +enable: + prepare_links_p_nom: false + retrieve_databundle: true + build_cutout: false + retrieve_cutout: true + build_natura_raster: false + retrieve_natura_raster: true + custom_busmap: false + +clustering: + algorithm: + name: kmeans #kmeans + feature: coordinates #feature not supported yet + +electricity: + voltages: [220., 300., 380.] 
+ co2limit: 7.75e+7 # 0.05 * 3.1e9*0.5 + co2base: 1.487e+9 + agg_p_nom_limits: data/agg_p_nom_minmax.csv + + extendable_carriers: + Generator: [] + StorageUnit: [] # battery, H2 + Store: [battery, H2] + Link: [] + + max_hours: + battery: 6 + H2: 168 + + powerplants_filter: false # use pandas query strings here, e.g. Country not in ['Germany'] + custom_powerplants: false # use pandas query strings here, e.g. Country in ['Germany'] + conventional_carriers: [nuclear, oil, OCGT, CCGT, coal, lignite, geothermal, biomass] + renewable_capacities_from_OPSD: [] # onwind, offwind, solar + + # estimate_renewable_capacities_from_capacity_stats: + # # Wind is the Fueltype in ppm.data.Capacity_stats, onwind, offwind-{ac,dc} the carrier in PyPSA-Eur + # Wind: [onwind, offwind-ac, offwind-dc] + # Solar: [solar] + +atlite: + nprocesses: 4 + cutouts: + # use 'base' to determine geographical bounds and time span from config + # base: + # module: era5 + europe-2013-era5: + module: era5 # in priority order + x: [-12., 35.] + y: [33., 72] + dx: 0.3 + dy: 0.3 + time: ['2013', '2013'] + europe-2013-sarah: + module: [sarah, era5] # in priority order + x: [-12., 45.] + y: [33., 65] + dx: 0.2 + dy: 0.2 + time: ['2013', '2013'] + sarah_interpolate: false + sarah_dir: + features: [influx, temperature] + + +renewable: + onwind: + cutout: europe-2013-era5 + resource: + method: wind + turbine: Vestas_V112_3MW + capacity_per_sqkm: 3 # ScholzPhd Tab 4.3.1: 10MW/km^2 + # correction_factor: 0.93 + corine: + # Scholz, Y. (2012). Renewable energy based electricity supply at low costs: + # development of the REMix model and application for Europe. 
( p.42 / p.28) + grid_codes: [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, + 24, 25, 26, 27, 28, 29, 31, 32] + distance: 1000 + distance_grid_codes: [1, 2, 3, 4, 5, 6] + natura: true + potential: simple # or conservative + clip_p_max_pu: 1.e-2 + offwind-ac: + cutout: europe-2013-era5 + resource: + method: wind + turbine: NREL_ReferenceTurbine_5MW_offshore + capacity_per_sqkm: 3 + # correction_factor: 0.93 + corine: [44, 255] + natura: true + max_depth: 50 + max_shore_distance: 30000 + potential: simple # or conservative + clip_p_max_pu: 1.e-2 + offwind-dc: + cutout: europe-2013-era5 + resource: + method: wind + turbine: NREL_ReferenceTurbine_5MW_offshore + # ScholzPhd Tab 4.3.1: 10MW/km^2 + capacity_per_sqkm: 3 + # correction_factor: 0.93 + corine: [44, 255] + natura: true + max_depth: 50 + min_shore_distance: 30000 + potential: simple # or conservative + clip_p_max_pu: 1.e-2 + solar: + cutout: europe-2013-sarah + resource: + method: pv + panel: CSi + orientation: + slope: 35. + azimuth: 180. + capacity_per_sqkm: 1.7 # ScholzPhd Tab 4.3.1: 170 MW/km^2 + # Determined by comparing uncorrected area-weighted full-load hours to those + # published in Supplementary Data to + # Pietzcker, Robert Carl, et al. "Using the sun to decarbonize the power + # sector: The economic potential of photovoltaics and concentrating solar + # power." Applied Energy 135 (2014): 704-720. 
+ correction_factor: 0.854337 + corine: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, + 14, 15, 16, 17, 18, 19, 20, 26, 31, 32] + natura: true + potential: simple # or conservative + clip_p_max_pu: 1.e-2 + hydro: + cutout: europe-2013-era5 + carriers: [ror, PHS, hydro] + PHS_max_hours: 6 + hydro_max_hours: "energy_capacity_totals_by_country" # one of energy_capacity_totals_by_country, estimate_by_large_installations or a float + clip_min_inflow: 1.0 + +lines: + types: + 220.: "Al/St 240/40 2-bundle 220.0" + 300.: "Al/St 240/40 3-bundle 300.0" + 380.: "Al/St 240/40 4-bundle 380.0" + s_max_pu: 0.7 + s_nom_max: .inf + length_factor: 1.25 + under_construction: 'zero' # 'zero': set capacity to zero, 'remove': remove, 'keep': with full capacity + +links: + p_max_pu: 1.0 + p_nom_max: .inf + include_tyndp: true + under_construction: 'zero' # 'zero': set capacity to zero, 'remove': remove, 'keep': with full capacity + +transformers: + x: 0.1 + s_nom: 2000. + type: '' + +load: + url: https://data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv + power_statistics: True # only for files from <2019; set false in order to get ENTSOE transparency data + interpolate_limit: 3 # data gaps up until this size are interpolated linearly + time_shift_for_large_gaps: 1w # data gaps up until this size are copied by copying from + manual_adjustments: true # false + scaling_factor: 1.0 + +costs: + year: 2030 + discountrate: 0.07 # From a Lion Hirth paper, also reflects average of Noothout et al 2016 + USD2013_to_EUR2013: 0.7532 # [EUR/USD] ECB: https://www.ecb.europa.eu/stats/exchange/eurofxref/html/eurofxref-graph-usd.en.html + marginal_cost: # EUR/MWh + solar: 0.01 + onwind: 0.015 + offwind: 0.015 + hydro: 0. + H2: 0. + electrolysis: 0. + fuel cell: 0. + battery: 0. + battery inverter: 0. + emission_prices: # in currency per tonne emission, only used with the option Ep + co2: 0. 
+ +solving: + options: + formulation: kirchhoff + load_shedding: true + noisy_costs: true + min_iterations: 4 + max_iterations: 6 + clip_p_max_pu: 0.01 + skip_iterations: false + track_iterations: false + #nhours: 10 + solver: + name: gurobi + threads: 4 + method: 2 # barrier + crossover: 0 + BarConvTol: 1.e-5 + FeasibilityTol: 1.e-6 + AggFill: 0 + PreDual: 0 + GURO_PAR_BARDENSETHRESH: 200 + # solver: + # name: cplex + # threads: 4 + # lpmethod: 4 # barrier + # solutiontype: 2 # non basic solution, ie no crossover + # barrier_convergetol: 1.e-5 + # feasopt_tolerance: 1.e-6 + +plotting: + map: + figsize: [7, 7] + boundaries: [-10.2, 29, 35, 72] + p_nom: + bus_size_factor: 5.e+4 + linewidth_factor: 3.e+3 + + costs_max: 80 + costs_threshold: 1 + + energy_max: 15000. + energy_min: -10000. + energy_threshold: 50. + + vre_techs: ["onwind", "offwind-ac", "offwind-dc", "solar", "ror"] + conv_techs: ["OCGT", "CCGT", "Nuclear", "Coal"] + storage_techs: ["hydro+PHS", "battery", "H2"] + load_carriers: ["AC load"] + AC_carriers: ["AC line", "AC transformer"] + link_carriers: ["DC line", "Converter AC-DC"] + tech_colors: + "onwind" : "#235ebc" + "onshore wind" : "#235ebc" + 'offwind' : "#6895dd" + 'offwind-ac' : "#6895dd" + 'offshore wind' : "#6895dd" + 'offshore wind ac' : "#6895dd" + 'offwind-dc' : "#74c6f2" + 'offshore wind dc' : "#74c6f2" + "hydro" : "#08ad97" + "hydro+PHS" : "#08ad97" + "PHS" : "#08ad97" + "hydro reservoir" : "#08ad97" + 'hydroelectricity' : '#08ad97' + "ror" : "#4adbc8" + "run of river" : "#4adbc8" + 'solar' : "#f9d002" + 'solar PV' : "#f9d002" + 'solar thermal' : '#ffef60' + 'biomass' : '#0c6013' + 'solid biomass' : '#06540d' + 'biogas' : '#23932d' + 'waste' : '#68896b' + 'geothermal' : '#ba91b1' + "OCGT" : "#d35050" + "gas" : "#d35050" + "natural gas" : "#d35050" + "CCGT" : "#b20101" + "nuclear" : "#ff9000" + "coal" : "#707070" + "lignite" : "#9e5a01" + "oil" : "#262626" + "H2" : "#ea048a" + "hydrogen storage" : "#ea048a" + "battery" : "#b8ea04" + 
"Electric load" : "#f9d002" + "electricity" : "#f9d002" + "lines" : "#70af1d" + "transmission lines" : "#70af1d" + "AC-AC" : "#70af1d" + "AC line" : "#70af1d" + "links" : "#8a1caf" + "HVDC links" : "#8a1caf" + "DC-DC" : "#8a1caf" + "DC link" : "#8a1caf" + nice_names: + OCGT: "Open-Cycle Gas" + CCGT: "Combined-Cycle Gas" + offwind-ac: "Offshore Wind (AC)" + offwind-dc: "Offshore Wind (DC)" + onwind: "Onshore Wind" + solar: "Solar" + PHS: "Pumped Hydro Storage" + hydro: "Reservoir & Dam" + battery: "Battery Storage" + H2: "Hydrogen Storage" + lines: "Transmission Lines" + ror: "Run of River" diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index 8f721652..813df498 100755 --- a/scripts/add_electricity.py +++ b/scripts/add_electricity.py @@ -118,12 +118,7 @@ def _add_missing_carriers_from_costs(n, costs, carriers): n.import_components_from_dataframe(emissions, 'Carrier') -def load_costs(Nyears=1., tech_costs=None, config=None, elec_config=None): - if tech_costs is None: - tech_costs = snakemake.input.tech_costs - - if config is None: - config = snakemake.config['costs'] +def load_costs(tech_costs, config, elec_config, Nyears=1.): # set all asset costs and other parameters costs = pd.read_csv(tech_costs, index_col=list(range(3))).sort_index() @@ -169,8 +164,6 @@ def load_costs(Nyears=1., tech_costs=None, config=None, elec_config=None): marginal_cost=0., co2_emissions=0.)) - if elec_config is None: - elec_config = snakemake.config['electricity'] max_hours = elec_config['max_hours'] costs.loc["battery"] = \ costs_for_storage(costs.loc["battery storage"], costs.loc["battery inverter"], @@ -188,9 +181,7 @@ def load_costs(Nyears=1., tech_costs=None, config=None, elec_config=None): return costs -def load_powerplants(ppl_fn=None): - if ppl_fn is None: - ppl_fn = snakemake.input.powerplants +def load_powerplants(ppl_fn): carrier_dict = {'ocgt': 'OCGT', 'ccgt': 'CCGT', 'bioenergy': 'biomass', 'ccgt, thermal': 'CCGT', 'hard coal': 'coal'} return 
(pd.read_csv(ppl_fn, index_col=0, dtype={'bus': 'str'}) @@ -199,18 +190,17 @@ def load_powerplants(ppl_fn=None): .replace({'carrier': carrier_dict})) -def attach_load(n): +def attach_load(n, regions, load, nuts3_shapes, cntries = [], scaling = 1.): substation_lv_i = n.buses.index[n.buses['substation_lv']] - regions = (gpd.read_file(snakemake.input.regions).set_index('name') + regions = (gpd.read_file(regions).set_index('name') .reindex(substation_lv_i)) - opsd_load = (pd.read_csv(snakemake.input.load, index_col=0, parse_dates=True) - .filter(items=snakemake.config['countries'])) + opsd_load = (pd.read_csv(load, index_col=0, parse_dates=True) + .filter(items=cntries)) - scaling = snakemake.config.get('load', {}).get('scaling_factor', 1.0) logger.info(f"Load data scaled with scalling factor {scaling}.") opsd_load *= scaling - nuts3 = gpd.read_file(snakemake.input.nuts3_shapes).set_index('index') + nuts3 = gpd.read_file(nuts3_shapes).set_index('index') def upsample(cntry, group): l = opsd_load[cntry] @@ -263,18 +253,20 @@ def update_transmission_costs(n, costs, length_factor=1.0, simple_hvdc_costs=Fal n.links.loc[dc_b, 'capital_cost'] = costs -def attach_wind_and_solar(n, costs): - for tech in snakemake.config['renewable']: +def attach_wind_and_solar(n, costs, input_profiles, + technologies = ['onwind', 'offwind-ac', 'offwind-dc', 'solar'], + line_length_factor = 1.): + for tech in technologies: if tech == 'hydro': continue n.add("Carrier", name=tech) - with xr.open_dataset(getattr(snakemake.input, 'profile_' + tech)) as ds: + with xr.open_dataset(getattr(input_profiles, 'profile_' + tech)) as ds: if ds.indexes['bus'].empty: continue suptech = tech.split('-', 2)[0] if suptech == 'offwind': underwater_fraction = ds['underwater_fraction'].to_pandas() - connection_cost = (snakemake.config['lines']['length_factor'] * + connection_cost = (line_length_factor * ds['average_distance'].to_pandas() * (underwater_fraction * costs.at[tech + '-connection-submarine', 
'capital_cost'] + @@ -300,8 +292,8 @@ def attach_wind_and_solar(n, costs): p_max_pu=ds['profile'].transpose('time', 'bus').to_pandas()) -def attach_conventional_generators(n, costs, ppl): - carriers = snakemake.config['electricity']['conventional_carriers'] +def attach_conventional_generators(n, costs, ppl, carriers=['nuclear', 'oil', 'OCGT', 'CCGT', + 'coal', 'lignite', 'geothermal', 'biomass']): _add_missing_carriers_from_costs(n, costs, carriers) @@ -322,10 +314,9 @@ def attach_conventional_generators(n, costs, ppl): logger.warning(f'Capital costs for conventional generators put to 0 EUR/MW.') -def attach_hydro(n, costs, ppl): - if 'hydro' not in snakemake.config['renewable']: return - c = snakemake.config['renewable']['hydro'] - carriers = c.get('carriers', ['ror', 'PHS', 'hydro']) +def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, + config_hydro = {'carriers': {'ror', 'PHS', 'hydro'}}): + carriers = config_hydro.get('carriers', ['ror', 'PHS', 'hydro']) _add_missing_carriers_from_costs(n, costs, carriers) @@ -341,11 +332,11 @@ def attach_hydro(n, costs, ppl): if not inflow_idx.empty: dist_key = ppl.loc[inflow_idx, 'p_nom'].groupby(country).transform(normed) - with xr.open_dataarray(snakemake.input.profile_hydro) as inflow: + with xr.open_dataarray(profile_hydro) as inflow: inflow_countries = pd.Index(country[inflow_idx]) missing_c = (inflow_countries.unique() .difference(inflow.indexes['countries'])) - assert missing_c.empty, (f"'{snakemake.input.profile_hydro}' is missing " + assert missing_c.empty, (f"'{profile_hydro}' is missing " f"inflow time-series for at least one country: {', '.join(missing_c)}") inflow_t = (inflow.sel(countries=inflow_countries) @@ -370,7 +361,7 @@ def attach_hydro(n, costs, ppl): if 'PHS' in carriers and not phs.empty: # fill missing max hours to config value and # assume no natural inflow due to lack of data - phs = phs.replace({'max_hours': {0: c['PHS_max_hours']}}) + phs = phs.replace({'max_hours': {0: 
config_hydro['PHS_max_hours']}}) n.madd('StorageUnit', phs.index, carrier='PHS', bus=phs['bus'], @@ -382,8 +373,8 @@ def attach_hydro(n, costs, ppl): cyclic_state_of_charge=True) if 'hydro' in carriers and not hydro.empty: - hydro_max_hours = c.get('hydro_max_hours') - hydro_stats = pd.read_csv(snakemake.input.hydro_capacities, + hydro_max_hours = config_hydro.get('hydro_max_hours') + hydro_stats = pd.read_csv(hydro_capacities, comment="#", na_values='-', index_col=0) e_target = hydro_stats["E_store[TWh]"].clip(lower=0.2) * 1e6 e_installed = hydro.eval('p_nom * max_hours').groupby(hydro.country).sum() @@ -412,7 +403,7 @@ def attach_hydro(n, costs, ppl): p_nom=hydro['p_nom'], max_hours=hydro_max_hours, capital_cost=(costs.at['hydro', 'capital_cost'] - if c.get('hydro_capital_cost') else 0.), + if config_hydro.get('hydro_capital_cost') else 0.), marginal_cost=costs.at['hydro', 'marginal_cost'], p_max_pu=1., # dispatch p_min_pu=0., # store @@ -422,8 +413,7 @@ def attach_hydro(n, costs, ppl): inflow=inflow_t.loc[:, hydro.index]) -def attach_extendable_generators(n, costs, ppl): - elec_opts = snakemake.config['electricity'] +def attach_extendable_generators(n, costs, ppl, elec_opts = {'extendable_carriers': {'Generator': []}}): carriers = pd.Index(elec_opts['extendable_carriers']['Generator']) _add_missing_carriers_from_costs(n, costs, carriers) @@ -472,12 +462,11 @@ def attach_extendable_generators(n, costs, ppl): -def attach_OPSD_renewables(n): +def attach_OPSD_renewables(n, techs=[]): available = ['DE', 'FR', 'PL', 'CH', 'DK', 'CZ', 'SE', 'GB'] tech_map = {'Onshore': 'onwind', 'Offshore': 'offwind', 'Solar': 'solar'} countries = set(available) & set(n.buses.country) - techs = snakemake.config['electricity'].get('renewable_capacities_from_OPSD', []) tech_map = {k: v for k, v in tech_map.items() if v in techs} if not tech_map: @@ -505,10 +494,7 @@ def attach_OPSD_renewables(n): -def estimate_renewable_capacities(n, tech_map=None): - if tech_map is None: - tech_map = 
(snakemake.config['electricity'] - .get('estimate_renewable_capacities_from_capacity_stats', {})) +def estimate_renewable_capacities(n, tech_map={}): if len(tech_map) == 0: return @@ -540,8 +526,7 @@ def estimate_renewable_capacities(n, tech_map=None): n.generators.loc[tech_i, 'p_nom_min'] = n.generators.loc[tech_i, 'p_nom'] -def add_nice_carrier_names(n, config=None): - if config is None: config = snakemake.config +def add_nice_carrier_names(n, config): carrier_i = n.carriers.index nice_names = (pd.Series(config['plotting']['nice_names']) .reindex(carrier_i).fillna(carrier_i.to_series().str.title())) @@ -563,22 +548,32 @@ if __name__ == "__main__": n = pypsa.Network(snakemake.input.base_network) Nyears = n.snapshot_weightings.objective.sum() / 8760. - costs = load_costs(Nyears) - ppl = load_powerplants() + costs = load_costs(tech_costs = snakemake.input.tech_costs, config = snakemake.config['costs'], + elec_config = snakemake.config['electricity'], Nyears = Nyears) + ppl = load_powerplants(snakemake.input.powerplants) - attach_load(n) + attach_load(n, regions = snakemake.input.regions, load = snakemake.input.load, + nuts3_shapes = snakemake.input.nuts3_shapes, + cntries = snakemake.config['countries'], + scaling = snakemake.config.get('load', {}).get('scaling_factor', 1.0)) update_transmission_costs(n, costs) - attach_conventional_generators(n, costs, ppl) - attach_wind_and_solar(n, costs) - attach_hydro(n, costs, ppl) - attach_extendable_generators(n, costs, ppl) + attach_conventional_generators(n, costs, ppl, carriers = snakemake.config['electricity']['conventional_carriers']) + attach_wind_and_solar(n, costs, snakemake.input, technologies = snakemake.config['renewable'], + line_length_factor = snakemake.config['lines']['length_factor']) - estimate_renewable_capacities(n) - attach_OPSD_renewables(n) + if 'hydro' in snakemake.config['renewable']: + attach_hydro(n, costs, ppl, snakemake.input.profile_hydro, snakemake.input.hydro_capacities, + config_hydro = 
snakemake.config['renewable']['hydro']) + + attach_extendable_generators(n, costs, ppl, elec_opts = snakemake.config['electricity']) + + estimate_renewable_capacities(n, tech_map = (snakemake.config['electricity'] + .get('estimate_renewable_capacities_from_capacity_stats', {}))) + attach_OPSD_renewables(n, techs = snakemake.config['electricity'].get('renewable_capacities_from_OPSD', [])) update_p_nom_max(n) - add_nice_carrier_names(n) + add_nice_carrier_names(n, config = snakemake.config) n.export_to_netcdf(snakemake.output[0]) diff --git a/scripts/add_extra_components.py b/scripts/add_extra_components.py index ae581382..846fb120 100644 --- a/scripts/add_extra_components.py +++ b/scripts/add_extra_components.py @@ -64,8 +64,7 @@ idx = pd.IndexSlice logger = logging.getLogger(__name__) -def attach_storageunits(n, costs): - elec_opts = snakemake.config['electricity'] +def attach_storageunits(n, costs, elec_opts = {'extendable_carriers': {'StorageUnit': []}, 'max_hours': {'battery': 6, 'H2': 168}}): carriers = elec_opts['extendable_carriers']['StorageUnit'] max_hours = elec_opts['max_hours'] @@ -89,8 +88,7 @@ def attach_storageunits(n, costs): cyclic_state_of_charge=True) -def attach_stores(n, costs): - elec_opts = snakemake.config['electricity'] +def attach_stores(n, costs, elec_opts = {'extendable_carriers': {'Store': ['battery', 'H2']}}): carriers = elec_opts['extendable_carriers']['Store'] _add_missing_carriers_from_costs(n, costs, carriers) @@ -156,8 +154,7 @@ def attach_stores(n, costs): marginal_cost=costs.at["battery inverter", "marginal_cost"]) -def attach_hydrogen_pipelines(n, costs): - elec_opts = snakemake.config['electricity'] +def attach_hydrogen_pipelines(n, costs, elec_opts = {'extendable_carriers': {'Store': ['H2', 'battery']}}): ext_carriers = elec_opts['extendable_carriers'] as_stores = ext_carriers.get('Store', []) @@ -198,13 +195,13 @@ if __name__ == "__main__": n = pypsa.Network(snakemake.input.network) Nyears = 
n.snapshot_weightings.objective.sum() / 8760. - costs = load_costs(Nyears, tech_costs=snakemake.input.tech_costs, - config=snakemake.config['costs'], - elec_config=snakemake.config['electricity']) + costs = load_costs(tech_costs = snakemake.input.tech_costs, + config = snakemake.config['costs'], + elec_config = snakemake.config['electricity'], Nyears = Nyears) - attach_storageunits(n, costs) - attach_stores(n, costs) - attach_hydrogen_pipelines(n, costs) + attach_storageunits(n, costs, elec_opts = snakemake.config['electricity']) + attach_stores(n, costs, elec_opts = snakemake.config['electricity']) + attach_hydrogen_pipelines(n, costs, elec_opts = snakemake.config['electricity']) add_nice_carrier_names(n, config=snakemake.config) diff --git a/scripts/build_powerplants.py b/scripts/build_powerplants.py index 8b329469..e87637f9 100755 --- a/scripts/build_powerplants.py +++ b/scripts/build_powerplants.py @@ -84,11 +84,10 @@ from scipy.spatial import cKDTree as KDTree logger = logging.getLogger(__name__) -def add_custom_powerplants(ppl): - custom_ppl_query = snakemake.config['electricity']['custom_powerplants'] +def add_custom_powerplants(ppl, custom_powerplants, custom_ppl_query=False): if not custom_ppl_query: return ppl - add_ppls = pd.read_csv(snakemake.input.custom_powerplants, index_col=0, + add_ppls = pd.read_csv(custom_powerplants, index_col=0, dtype={'bus': 'str'}) if isinstance(custom_ppl_query, str): add_ppls.query(custom_ppl_query, inplace=True) @@ -119,7 +118,9 @@ if __name__ == "__main__": if isinstance(ppl_query, str): ppl.query(ppl_query, inplace=True) - ppl = add_custom_powerplants(ppl) # add carriers from own powerplant files + # add carriers from own powerplant files: + ppl = add_custom_powerplants(ppl, custom_powerplants = snakemake.input.custom_powerplants, + custom_ppl_query = snakemake.config['electricity']['custom_powerplants']) cntries_without_ppl = [c for c in countries if c not in ppl.Country.unique()] diff --git a/scripts/build_shapes.py 
b/scripts/build_shapes.py index 59603f96..7edf439a 100644 --- a/scripts/build_shapes.py +++ b/scripts/build_shapes.py @@ -107,11 +107,10 @@ def _simplify_polys(polys, minarea=0.1, tolerance=0.01, filterremote=True): return polys.simplify(tolerance=tolerance) -def countries(): - cntries = snakemake.config['countries'] +def countries(naturalearth, cntries=[]): if 'RS' in cntries: cntries.append('KV') - df = gpd.read_file(snakemake.input.naturalearth) + df = gpd.read_file(naturalearth) # Names are a hassle in naturalearth, try several fields fieldnames = (df[x].where(lambda s: s!='-99') for x in ('ISO_A2', 'WB_A2', 'ADM0_A3')) @@ -124,9 +123,9 @@ def countries(): return s -def eez(country_shapes): - df = gpd.read_file(snakemake.input.eez) - df = df.loc[df['ISO_3digit'].isin([_get_country('alpha_3', alpha_2=c) for c in snakemake.config['countries']])] +def eez(country_shapes, eez, cntries=[]): + df = gpd.read_file(eez) + df = df.loc[df['ISO_3digit'].isin([_get_country('alpha_3', alpha_2=c) for c in cntries])] df['name'] = df['ISO_3digit'].map(lambda c: _get_country('alpha_2', alpha_3=c)) s = df.set_index('name').geometry.map(lambda s: _simplify_polys(s, filterremote=False)) s = gpd.GeoSeries({k:v for k,v in s.iteritems() if v.distance(country_shapes[k]) < 1e-3}) @@ -145,29 +144,29 @@ def country_cover(country_shapes, eez_shapes=None): return Polygon(shell=europe_shape.exterior) -def nuts3(country_shapes): - df = gpd.read_file(snakemake.input.nuts3) +def nuts3(country_shapes, nuts3, nuts3pop, nuts3gdp, ch_cantons, ch_popgdp): + df = gpd.read_file(nuts3) df = df.loc[df['STAT_LEVL_'] == 3] df['geometry'] = df['geometry'].map(_simplify_polys) df = df.rename(columns={'NUTS_ID': 'id'})[['id', 'geometry']].set_index('id') - pop = pd.read_table(snakemake.input.nuts3pop, na_values=[':'], delimiter=' ?\t', engine='python') + pop = pd.read_table(nuts3pop, na_values=[':'], delimiter=' ?\t', engine='python') pop = (pop 
.set_index(pd.MultiIndex.from_tuples(pop.pop('unit,geo\\time').str.split(','))).loc['THS'] .applymap(lambda x: pd.to_numeric(x, errors='coerce')) .fillna(method='bfill', axis=1))['2014'] - gdp = pd.read_table(snakemake.input.nuts3gdp, na_values=[':'], delimiter=' ?\t', engine='python') + gdp = pd.read_table(nuts3gdp, na_values=[':'], delimiter=' ?\t', engine='python') gdp = (gdp .set_index(pd.MultiIndex.from_tuples(gdp.pop('unit,geo\\time').str.split(','))).loc['EUR_HAB'] .applymap(lambda x: pd.to_numeric(x, errors='coerce')) .fillna(method='bfill', axis=1))['2014'] - cantons = pd.read_csv(snakemake.input.ch_cantons) + cantons = pd.read_csv(ch_cantons) cantons = cantons.set_index(cantons['HASC'].str[3:])['NUTS'] cantons = cantons.str.pad(5, side='right', fillchar='0') - swiss = pd.read_excel(snakemake.input.ch_popgdp, skiprows=3, index_col=0) + swiss = pd.read_excel(ch_popgdp, skiprows=3, index_col=0) swiss.columns = swiss.columns.to_series().map(cantons) pop = pop.append(pd.to_numeric(swiss.loc['Residents in 1000', 'CH040':])) @@ -220,14 +219,16 @@ if __name__ == "__main__": out = snakemake.output - country_shapes = countries() + country_shapes = countries(snakemake.input.naturalearth, snakemake.config['countries']) save_to_geojson(country_shapes, out.country_shapes) - offshore_shapes = eez(country_shapes) + offshore_shapes = eez(country_shapes, snakemake.input.eez, cntries=snakemake.config['countries']) save_to_geojson(offshore_shapes, out.offshore_shapes) europe_shape = country_cover(country_shapes, offshore_shapes) save_to_geojson(gpd.GeoSeries(europe_shape), out.europe_shape) - nuts3_shapes = nuts3(country_shapes) + nuts3_shapes = nuts3(country_shapes, snakemake.input.nuts3, snakemake.input.nuts3pop, + snakemake.input.nuts3gdp, snakemake.input.ch_cantons, snakemake.input.ch_popgdp) + save_to_geojson(nuts3_shapes, out.nuts3_shapes) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index d74745d0..4784bb32 100644 --- 
a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -170,12 +170,9 @@ def weighting_for_country(n, x): return (w * (100. / w.max())).clip(lower=1.).astype(int) -def distribute_clusters(n, n_clusters, focus_weights=None, solver_name=None): +def distribute_clusters(n, n_clusters, focus_weights=None, solver_name="cbc"): """Determine the number of clusters per country""" - if solver_name is None: - solver_name = snakemake.config['solving']['solver']['name'] - L = (n.loads_t.p_set.mean() .groupby(n.loads.bus).sum() .groupby([n.buses.country, n.buses.sub_network]).sum() @@ -268,12 +265,10 @@ def clustering_for_n_clusters(n, n_clusters, custom_busmap=False, aggregate_carr else: raise AttributeError(f"potential_mode should be one of 'simple' or 'conservative' but is '{potential_mode}'") - if custom_busmap: - busmap = pd.read_csv(snakemake.input.custom_busmap, index_col=0, squeeze=True) - busmap.index = busmap.index.astype(str) - logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}") - else: + if custom_busmap is False: busmap = busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights, algorithm) + else: + busmap = custom_busmap clustering = get_clustering_from_busmap( n, busmap, @@ -306,8 +301,6 @@ def save_to_geojson(s, fn): def cluster_regions(busmaps, input=None, output=None): - if input is None: input = snakemake.input - if output is None: output = snakemake.output busmap = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0]) @@ -358,10 +351,9 @@ if __name__ == "__main__": else: line_length_factor = snakemake.config['lines']['length_factor'] Nyears = n.snapshot_weightings.objective.sum()/8760 - hvac_overhead_cost = (load_costs(Nyears, - tech_costs=snakemake.input.tech_costs, - config=snakemake.config['costs'], - elec_config=snakemake.config['electricity']) + hvac_overhead_cost = (load_costs(tech_costs = snakemake.input.tech_costs, + config = snakemake.config['costs'], + elec_config=snakemake.config['electricity'], Nyears = 
Nyears) .at['HVAC overhead', 'capital_cost']) def consense(x): @@ -373,6 +365,10 @@ if __name__ == "__main__": potential_mode = consense(pd.Series([snakemake.config['renewable'][tech]['potential'] for tech in renewable_carriers])) custom_busmap = snakemake.config["enable"].get("custom_busmap", False) + if custom_busmap: + custom_busmap = pd.read_csv(snakemake.input.custom_busmap, index_col=0, squeeze=True) + custom_busmap.index = custom_busmap.index.astype(str) + logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}") clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap, aggregate_carriers, line_length_factor=line_length_factor, potential_mode=potential_mode, @@ -386,4 +382,4 @@ if __name__ == "__main__": for attr in ('busmap', 'linemap'): #also available: linemap_positive, linemap_negative getattr(clustering, attr).to_csv(snakemake.output[attr]) - cluster_regions((clustering.busmap,)) + cluster_regions((clustering.busmap,), snakemake.input, snakemake.output) diff --git a/scripts/make_summary.py b/scripts/make_summary.py index 53482c48..a0778e73 100644 --- a/scripts/make_summary.py +++ b/scripts/make_summary.py @@ -403,8 +403,8 @@ def make_summaries(networks_dict, country='all'): n = n[n.buses.country == country] Nyears = n.snapshot_weightings.objective.sum() / 8760. 
- costs = load_costs(Nyears, snakemake.input[0], - snakemake.config['costs'], snakemake.config['electricity']) + costs = load_costs(tech_costs = snakemake.input[0], config = snakemake.config['costs'], + elec_config = snakemake.config['electricity'], Nyears = Nyears) update_transmission_costs(n, costs, simple_hvdc_costs=False) assign_carriers(n) @@ -415,8 +415,7 @@ def make_summaries(networks_dict, country='all'): return dfs -def to_csv(dfs): - dir = snakemake.output[0] +def to_csv(dfs, dir): os.makedirs(dir, exist_ok=True) for key, df in dfs.items(): df.to_csv(os.path.join(dir, f"{key}.csv")) @@ -453,4 +452,4 @@ if __name__ == "__main__": dfs = make_summaries(networks_dict, country=snakemake.wildcards.country) - to_csv(dfs) + to_csv(dfs, snakemake.output[0]) diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index 86afef2f..90b3a0df 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -70,21 +70,14 @@ idx = pd.IndexSlice logger = logging.getLogger(__name__) -def add_co2limit(n, Nyears=1., factor=None): - - if factor is not None: - annual_emissions = factor*snakemake.config['electricity']['co2base'] - else: - annual_emissions = snakemake.config['electricity']['co2limit'] +def add_co2limit(n, co2limit=1.487e+9, Nyears=1.): n.add("GlobalConstraint", "CO2Limit", carrier_attribute="co2_emissions", sense="<=", - constant=annual_emissions * Nyears) + constant=co2limit * Nyears) -def add_emission_prices(n, emission_prices=None, exclude_co2=False): - if emission_prices is None: - emission_prices = snakemake.config['costs']['emission_prices'] +def add_emission_prices(n, emission_prices={'co2': 0.}, exclude_co2=False): if exclude_co2: emission_prices.pop('co2') ep = (pd.Series(emission_prices).rename(lambda x: x+'_emissions') * n.carriers.filter(like='_emissions')).sum(axis=1) @@ -94,13 +87,12 @@ def add_emission_prices(n, emission_prices=None, exclude_co2=False): n.storage_units['marginal_cost'] += su_ep -def set_line_s_max_pu(n): - 
s_max_pu = snakemake.config['lines']['s_max_pu'] +def set_line_s_max_pu(n, s_max_pu = 0.7): n.lines['s_max_pu'] = s_max_pu logger.info(f"N-1 security margin of lines set to {s_max_pu}") -def set_transmission_limit(n, ll_type, factor, Nyears=1): +def set_transmission_limit(n, ll_type, factor, costs, Nyears=1): links_dc_b = n.links.carrier == 'DC' if not n.links.empty else pd.Series() _lines_s_nom = (np.sqrt(3) * n.lines.type.map(n.line_types.i_nom) * @@ -112,9 +104,6 @@ def set_transmission_limit(n, ll_type, factor, Nyears=1): ref = (lines_s_nom @ n.lines[col] + n.links.loc[links_dc_b, "p_nom"] @ n.links.loc[links_dc_b, col]) - costs = load_costs(Nyears, snakemake.input.tech_costs, - snakemake.config['costs'], - snakemake.config['electricity']) update_transmission_costs(n, costs, simple_hvdc_costs=False) if factor == 'opt' or float(factor) > 1.0: @@ -151,7 +140,7 @@ def average_every_nhours(n, offset): return m -def apply_time_segmentation(n, segments): +def apply_time_segmentation(n, segments, solver_name="cplex"): logger.info(f"Aggregating time series to {segments} segments.") try: import tsam.timeseriesaggregation as tsam @@ -170,8 +159,6 @@ def apply_time_segmentation(n, segments): raw = pd.concat([p_max_pu, load, inflow], axis=1, sort=False) - solver_name = snakemake.config["solving"]["solver"]["name"] - agg = tsam.TimeSeriesAggregation(raw, hoursPerPeriod=len(raw), noTypicalPeriods=1, noSegments=int(segments), segmentation=True, solver=solver_name) @@ -208,9 +195,7 @@ def enforce_autarky(n, only_crossborder=False): n.mremove("Line", lines_rm) n.mremove("Link", links_rm) -def set_line_nom_max(n): - s_nom_max_set = snakemake.config["lines"].get("s_nom_max,", np.inf) - p_nom_max_set = snakemake.config["links"].get("p_nom_max", np.inf) +def set_line_nom_max(n, s_nom_max_set=np.inf, p_nom_max_set=np.inf): n.lines.s_nom_max.clip(upper=s_nom_max_set, inplace=True) n.links.p_nom_max.clip(upper=p_nom_max_set, inplace=True) @@ -225,8 +210,11 @@ if __name__ == 
"__main__": n = pypsa.Network(snakemake.input[0]) Nyears = n.snapshot_weightings.objective.sum() / 8760. + costs = load_costs(tech_costs = snakemake.input.tech_costs, + config = snakemake.config['costs'], + elec_config = snakemake.config['electricity'], Nyears = Nyears) - set_line_s_max_pu(n) + set_line_s_max_pu(n, s_max_pu=snakemake.config['lines']['s_max_pu']) for o in opts: m = re.match(r'^\d+h$', o, re.IGNORECASE) @@ -237,16 +225,17 @@ if __name__ == "__main__": for o in opts: m = re.match(r'^\d+seg$', o, re.IGNORECASE) if m is not None: - n = apply_time_segmentation(n, m.group(0)[:-3]) + n = apply_time_segmentation(n, m.group(0)[:-3], solver_name=snakemake.config["solving"]["solver"]["name"]) break for o in opts: if "Co2L" in o: m = re.findall("[0-9]*\.?[0-9]+$", o) if len(m) > 0: - add_co2limit(n, Nyears, float(m[0])) + co2limit=float(m[0])*snakemake.config['electricity']['co2base'] + add_co2limit(n, Nyears, co2limit) else: - add_co2limit(n, Nyears) + add_co2limit(n, Nyears, snakemake.config['electricity']['co2limit']) break for o in opts: @@ -267,12 +256,13 @@ if __name__ == "__main__": c.df.loc[sel,attr] *= factor if 'Ep' in opts: - add_emission_prices(n) + add_emission_prices(n, emission_prices=snakemake.config['costs']['emission_prices']) ll_type, factor = snakemake.wildcards.ll[0], snakemake.wildcards.ll[1:] - set_transmission_limit(n, ll_type, factor, Nyears) + set_transmission_limit(n, ll_type, factor, costs, Nyears) - set_line_nom_max(n) + set_line_nom_max(n, s_nom_max_set=snakemake.config["lines"].get("s_nom_max,", np.inf), + p_nom_max_set=snakemake.config["links"].get("p_nom_max,", np.inf)) if "ATK" in opts: enforce_autarky(n) diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index 48f0ebe6..384025b8 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -138,13 +138,9 @@ def simplify_network_to_380(n): return n, trafo_map -def _prepare_connection_costs_per_link(n): +def 
_prepare_connection_costs_per_link(n, costs): if n.links.empty: return {} - Nyears = n.snapshot_weightings.objective.sum() / 8760 - costs = load_costs(Nyears, snakemake.input.tech_costs, - snakemake.config['costs'], snakemake.config['electricity']) - connection_costs_per_link = {} for tech in snakemake.config['renewable']: @@ -158,9 +154,9 @@ def _prepare_connection_costs_per_link(n): return connection_costs_per_link -def _compute_connection_costs_to_bus(n, busmap, connection_costs_per_link=None, buses=None): +def _compute_connection_costs_to_bus(n, busmap, costs, connection_costs_per_link=None, buses=None): if connection_costs_per_link is None: - connection_costs_per_link = _prepare_connection_costs_per_link(n) + connection_costs_per_link = _prepare_connection_costs_per_link(n, costs) if buses is None: buses = busmap.index[busmap.index != busmap.values] @@ -217,7 +213,7 @@ def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate n.mremove(c, df.index[df.bus0.isin(buses_to_del) | df.bus1.isin(buses_to_del)]) -def simplify_links(n): +def simplify_links(n, costs): ## Complex multi-node links are folded into end-points logger.info("Simplifying connected link components") @@ -264,7 +260,7 @@ def simplify_links(n): busmap = n.buses.index.to_series() - connection_costs_per_link = _prepare_connection_costs_per_link(n) + connection_costs_per_link = _prepare_connection_costs_per_link(n, costs) connection_costs_to_bus = pd.DataFrame(0., index=n.buses.index, columns=list(connection_costs_per_link)) for lbl in labels.value_counts().loc[lambda s: s > 2].index: @@ -278,7 +274,7 @@ def simplify_links(n): m = sp.spatial.distance_matrix(n.buses.loc[b, ['x', 'y']], n.buses.loc[buses[1:-1], ['x', 'y']]) busmap.loc[buses] = b[np.r_[0, m.argmin(axis=0), 1]] - connection_costs_to_bus.loc[buses] += _compute_connection_costs_to_bus(n, busmap, connection_costs_per_link, buses) + connection_costs_to_bus.loc[buses] += _compute_connection_costs_to_bus(n, busmap, costs, 
connection_costs_per_link, buses) all_links = [i for _, i in sum(links, [])] @@ -312,12 +308,12 @@ def simplify_links(n): _aggregate_and_move_components(n, busmap, connection_costs_to_bus) return n, busmap -def remove_stubs(n): +def remove_stubs(n, costs): logger.info("Removing stubs") busmap = busmap_by_stubs(n) # ['country']) - connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap) + connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap, costs) _aggregate_and_move_components(n, busmap, connection_costs_to_bus) @@ -394,9 +390,14 @@ if __name__ == "__main__": n, trafo_map = simplify_network_to_380(n) - n, simplify_links_map = simplify_links(n) + Nyears = n.snapshot_weightings.objective.sum() / 8760 + technology_costs = load_costs(tech_costs = snakemake.input.tech_costs, + config = snakemake.config['costs'], + elec_config = snakemake.config['electricity'], Nyears = Nyears) - n, stub_map = remove_stubs(n) + n, simplify_links_map = simplify_links(n, technology_costs) + + n, stub_map = remove_stubs(n, technology_costs) busmaps = [trafo_map, simplify_links_map, stub_map] From d551a3b5f1d2c748ffda74744495fb2cb1a0eae8 Mon Sep 17 00:00:00 2001 From: martacki Date: Tue, 14 Sep 2021 16:36:13 +0200 Subject: [PATCH 02/26] delete config.yaml --- config.yaml~ | 317 --------------------------------------------------- 1 file changed, 317 deletions(-) delete mode 100644 config.yaml~ diff --git a/config.yaml~ b/config.yaml~ deleted file mode 100644 index 91f645f8..00000000 --- a/config.yaml~ +++ /dev/null @@ -1,317 +0,0 @@ -# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors -# -# SPDX-License-Identifier: CC0-1.0 - -version: 0.3.0 -tutorial: false - -logging: - level: INFO - format: '%(levelname)s:%(name)s:%(message)s' - -summary_dir: results - -scenario: - simpl: [''] - ll: ['copt'] - clusters: [37, 128, 256, 512, 1024] - opts: [Co2L-3H] - -countries: ['AL', 'AT', 'BA', 'BE', 'BG', 'CH', 'CZ', 'DE', 'DK', 'EE', 'ES', 'FI', 'FR', 'GB', 'GR', 
'HR', 'HU', 'IE', 'IT', 'LT', 'LU', 'LV', 'ME', 'MK', 'NL', 'NO', 'PL', 'PT', 'RO', 'RS', 'SE', 'SI', 'SK'] - -snapshots: - start: "2013-01-01" - end: "2014-01-01" - closed: 'left' # end is not inclusive - -enable: - prepare_links_p_nom: false - retrieve_databundle: true - build_cutout: false - retrieve_cutout: true - build_natura_raster: false - retrieve_natura_raster: true - custom_busmap: false - -clustering: - algorithm: - name: kmeans #kmeans - feature: coordinates #feature not supported yet - -electricity: - voltages: [220., 300., 380.] - co2limit: 7.75e+7 # 0.05 * 3.1e9*0.5 - co2base: 1.487e+9 - agg_p_nom_limits: data/agg_p_nom_minmax.csv - - extendable_carriers: - Generator: [] - StorageUnit: [] # battery, H2 - Store: [battery, H2] - Link: [] - - max_hours: - battery: 6 - H2: 168 - - powerplants_filter: false # use pandas query strings here, e.g. Country not in ['Germany'] - custom_powerplants: false # use pandas query strings here, e.g. Country in ['Germany'] - conventional_carriers: [nuclear, oil, OCGT, CCGT, coal, lignite, geothermal, biomass] - renewable_capacities_from_OPSD: [] # onwind, offwind, solar - - # estimate_renewable_capacities_from_capacity_stats: - # # Wind is the Fueltype in ppm.data.Capacity_stats, onwind, offwind-{ac,dc} the carrier in PyPSA-Eur - # Wind: [onwind, offwind-ac, offwind-dc] - # Solar: [solar] - -atlite: - nprocesses: 4 - cutouts: - # use 'base' to determine geographical bounds and time span from config - # base: - # module: era5 - europe-2013-era5: - module: era5 # in priority order - x: [-12., 35.] - y: [33., 72] - dx: 0.3 - dy: 0.3 - time: ['2013', '2013'] - europe-2013-sarah: - module: [sarah, era5] # in priority order - x: [-12., 45.] 
- y: [33., 65] - dx: 0.2 - dy: 0.2 - time: ['2013', '2013'] - sarah_interpolate: false - sarah_dir: - features: [influx, temperature] - - -renewable: - onwind: - cutout: europe-2013-era5 - resource: - method: wind - turbine: Vestas_V112_3MW - capacity_per_sqkm: 3 # ScholzPhd Tab 4.3.1: 10MW/km^2 - # correction_factor: 0.93 - corine: - # Scholz, Y. (2012). Renewable energy based electricity supply at low costs: - # development of the REMix model and application for Europe. ( p.42 / p.28) - grid_codes: [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, - 24, 25, 26, 27, 28, 29, 31, 32] - distance: 1000 - distance_grid_codes: [1, 2, 3, 4, 5, 6] - natura: true - potential: simple # or conservative - clip_p_max_pu: 1.e-2 - offwind-ac: - cutout: europe-2013-era5 - resource: - method: wind - turbine: NREL_ReferenceTurbine_5MW_offshore - capacity_per_sqkm: 3 - # correction_factor: 0.93 - corine: [44, 255] - natura: true - max_depth: 50 - max_shore_distance: 30000 - potential: simple # or conservative - clip_p_max_pu: 1.e-2 - offwind-dc: - cutout: europe-2013-era5 - resource: - method: wind - turbine: NREL_ReferenceTurbine_5MW_offshore - # ScholzPhd Tab 4.3.1: 10MW/km^2 - capacity_per_sqkm: 3 - # correction_factor: 0.93 - corine: [44, 255] - natura: true - max_depth: 50 - min_shore_distance: 30000 - potential: simple # or conservative - clip_p_max_pu: 1.e-2 - solar: - cutout: europe-2013-sarah - resource: - method: pv - panel: CSi - orientation: - slope: 35. - azimuth: 180. - capacity_per_sqkm: 1.7 # ScholzPhd Tab 4.3.1: 170 MW/km^2 - # Determined by comparing uncorrected area-weighted full-load hours to those - # published in Supplementary Data to - # Pietzcker, Robert Carl, et al. "Using the sun to decarbonize the power - # sector: The economic potential of photovoltaics and concentrating solar - # power." Applied Energy 135 (2014): 704-720. 
- correction_factor: 0.854337 - corine: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, - 14, 15, 16, 17, 18, 19, 20, 26, 31, 32] - natura: true - potential: simple # or conservative - clip_p_max_pu: 1.e-2 - hydro: - cutout: europe-2013-era5 - carriers: [ror, PHS, hydro] - PHS_max_hours: 6 - hydro_max_hours: "energy_capacity_totals_by_country" # one of energy_capacity_totals_by_country, estimate_by_large_installations or a float - clip_min_inflow: 1.0 - -lines: - types: - 220.: "Al/St 240/40 2-bundle 220.0" - 300.: "Al/St 240/40 3-bundle 300.0" - 380.: "Al/St 240/40 4-bundle 380.0" - s_max_pu: 0.7 - s_nom_max: .inf - length_factor: 1.25 - under_construction: 'zero' # 'zero': set capacity to zero, 'remove': remove, 'keep': with full capacity - -links: - p_max_pu: 1.0 - p_nom_max: .inf - include_tyndp: true - under_construction: 'zero' # 'zero': set capacity to zero, 'remove': remove, 'keep': with full capacity - -transformers: - x: 0.1 - s_nom: 2000. - type: '' - -load: - url: https://data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv - power_statistics: True # only for files from <2019; set false in order to get ENTSOE transparency data - interpolate_limit: 3 # data gaps up until this size are interpolated linearly - time_shift_for_large_gaps: 1w # data gaps up until this size are copied by copying from - manual_adjustments: true # false - scaling_factor: 1.0 - -costs: - year: 2030 - discountrate: 0.07 # From a Lion Hirth paper, also reflects average of Noothout et al 2016 - USD2013_to_EUR2013: 0.7532 # [EUR/USD] ECB: https://www.ecb.europa.eu/stats/exchange/eurofxref/html/eurofxref-graph-usd.en.html - marginal_cost: # EUR/MWh - solar: 0.01 - onwind: 0.015 - offwind: 0.015 - hydro: 0. - H2: 0. - electrolysis: 0. - fuel cell: 0. - battery: 0. - battery inverter: 0. - emission_prices: # in currency per tonne emission, only used with the option Ep - co2: 0. 
- -solving: - options: - formulation: kirchhoff - load_shedding: true - noisy_costs: true - min_iterations: 4 - max_iterations: 6 - clip_p_max_pu: 0.01 - skip_iterations: false - track_iterations: false - #nhours: 10 - solver: - name: gurobi - threads: 4 - method: 2 # barrier - crossover: 0 - BarConvTol: 1.e-5 - FeasibilityTol: 1.e-6 - AggFill: 0 - PreDual: 0 - GURO_PAR_BARDENSETHRESH: 200 - # solver: - # name: cplex - # threads: 4 - # lpmethod: 4 # barrier - # solutiontype: 2 # non basic solution, ie no crossover - # barrier_convergetol: 1.e-5 - # feasopt_tolerance: 1.e-6 - -plotting: - map: - figsize: [7, 7] - boundaries: [-10.2, 29, 35, 72] - p_nom: - bus_size_factor: 5.e+4 - linewidth_factor: 3.e+3 - - costs_max: 80 - costs_threshold: 1 - - energy_max: 15000. - energy_min: -10000. - energy_threshold: 50. - - vre_techs: ["onwind", "offwind-ac", "offwind-dc", "solar", "ror"] - conv_techs: ["OCGT", "CCGT", "Nuclear", "Coal"] - storage_techs: ["hydro+PHS", "battery", "H2"] - load_carriers: ["AC load"] - AC_carriers: ["AC line", "AC transformer"] - link_carriers: ["DC line", "Converter AC-DC"] - tech_colors: - "onwind" : "#235ebc" - "onshore wind" : "#235ebc" - 'offwind' : "#6895dd" - 'offwind-ac' : "#6895dd" - 'offshore wind' : "#6895dd" - 'offshore wind ac' : "#6895dd" - 'offwind-dc' : "#74c6f2" - 'offshore wind dc' : "#74c6f2" - "hydro" : "#08ad97" - "hydro+PHS" : "#08ad97" - "PHS" : "#08ad97" - "hydro reservoir" : "#08ad97" - 'hydroelectricity' : '#08ad97' - "ror" : "#4adbc8" - "run of river" : "#4adbc8" - 'solar' : "#f9d002" - 'solar PV' : "#f9d002" - 'solar thermal' : '#ffef60' - 'biomass' : '#0c6013' - 'solid biomass' : '#06540d' - 'biogas' : '#23932d' - 'waste' : '#68896b' - 'geothermal' : '#ba91b1' - "OCGT" : "#d35050" - "gas" : "#d35050" - "natural gas" : "#d35050" - "CCGT" : "#b20101" - "nuclear" : "#ff9000" - "coal" : "#707070" - "lignite" : "#9e5a01" - "oil" : "#262626" - "H2" : "#ea048a" - "hydrogen storage" : "#ea048a" - "battery" : "#b8ea04" - 
"Electric load" : "#f9d002" - "electricity" : "#f9d002" - "lines" : "#70af1d" - "transmission lines" : "#70af1d" - "AC-AC" : "#70af1d" - "AC line" : "#70af1d" - "links" : "#8a1caf" - "HVDC links" : "#8a1caf" - "DC-DC" : "#8a1caf" - "DC link" : "#8a1caf" - nice_names: - OCGT: "Open-Cycle Gas" - CCGT: "Combined-Cycle Gas" - offwind-ac: "Offshore Wind (AC)" - offwind-dc: "Offshore Wind (DC)" - onwind: "Onshore Wind" - solar: "Solar" - PHS: "Pumped Hydro Storage" - hydro: "Reservoir & Dam" - battery: "Battery Storage" - H2: "Hydrogen Storage" - lines: "Transmission Lines" - ror: "Run of River" From f1243c3e0cc5fc1f4d6b5de03f6489f6109c9769 Mon Sep 17 00:00:00 2001 From: Max Parzen Date: Wed, 24 Nov 2021 14:16:24 +0100 Subject: [PATCH 03/26] Fix distribute clustering with cbc/glpk/ipopt Assume you have 10 nodes that need to be distributed between 2 countries. What can happen with some of the open source solvers is that one country gets assigned to 9.01 (float) nodes, and the other one to 0.99. Now using .astype(int) would lead to a node distribution of 0 and 9, as the `astype(int)` function round down by default (0.99 -> 0). This assigned zero value breaks the code in case open source solvers are used. Gurobi somehow does deal with it. 
--- scripts/cluster_network.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 980b73b0..1a976cd1 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -218,7 +218,7 @@ def distribute_clusters(n, n_clusters, focus_weights=None, solver_name=None): results = opt.solve(m) assert results['Solver'][0]['Status'] == 'ok', f"Solver returned non-optimally: {results}" - return pd.Series(m.n.get_values(), index=L.index).astype(int) + return pd.Series(m.n.get_values(), index=L.index).round().astype(int) def busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights=None, algorithm="kmeans", **algorithm_kwds): From 66f2d36f0df1545f97f67359493b9debcd9e9924 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 11 Jan 2022 09:38:34 +0100 Subject: [PATCH 04/26] add_electricity: revise code and make it leaner --- scripts/add_electricity.py | 95 +++++++++++++++++++++----------------- 1 file changed, 52 insertions(+), 43 deletions(-) diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index 2a2c26d9..e8498789 100755 --- a/scripts/add_electricity.py +++ b/scripts/add_electricity.py @@ -190,12 +190,13 @@ def load_powerplants(ppl_fn): .replace({'carrier': carrier_dict})) -def attach_load(n, regions, load, nuts3_shapes, cntries = [], scaling = 1.): +def attach_load(n, regions, load, nuts3_shapes, countries, scaling=1.): + substation_lv_i = n.buses.index[n.buses['substation_lv']] regions = (gpd.read_file(regions).set_index('name') .reindex(substation_lv_i)) opsd_load = (pd.read_csv(load, index_col=0, parse_dates=True) - .filter(items=cntries)) + .filter(items=countries)) logger.info(f"Load data scaled with scalling factor {scaling}.") opsd_load *= scaling @@ -229,6 +230,9 @@ def attach_load(n, regions, load, nuts3_shapes, cntries = [], scaling = 1.): def update_transmission_costs(n, costs, length_factor=1.0, simple_hvdc_costs=False): + # TODO: line length factor of lines 
is applied to lines and links. + # Separate the function to distinguish. + n.lines['capital_cost'] = (n.lines['length'] * length_factor * costs.at['HVAC overhead', 'capital_cost']) @@ -253,9 +257,9 @@ def update_transmission_costs(n, costs, length_factor=1.0, simple_hvdc_costs=Fal n.links.loc[dc_b, 'capital_cost'] = costs -def attach_wind_and_solar(n, costs, input_profiles, - technologies = ['onwind', 'offwind-ac', 'offwind-dc', 'solar'], - line_length_factor = 1.): +def attach_wind_and_solar(n, costs, input_profiles, technologies, line_length_factor=1): + # TODO: rename tech -> carrier, technologies -> carriers + for tech in technologies: if tech == 'hydro': continue @@ -292,8 +296,7 @@ def attach_wind_and_solar(n, costs, input_profiles, p_max_pu=ds['profile'].transpose('time', 'bus').to_pandas()) -def attach_conventional_generators(n, costs, ppl, carriers=['nuclear', 'oil', 'OCGT', 'CCGT', - 'coal', 'lignite', 'geothermal', 'biomass']): +def attach_conventional_generators(n, costs, ppl, carriers): _add_missing_carriers_from_costs(n, costs, carriers) @@ -314,9 +317,7 @@ def attach_conventional_generators(n, costs, ppl, carriers=['nuclear', 'oil', 'O logger.warning(f'Capital costs for conventional generators put to 0 EUR/MW.') -def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, - config_hydro = {'carriers': {'ror', 'PHS', 'hydro'}}): - carriers = config_hydro.get('carriers', ['ror', 'PHS', 'hydro']) +def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **config): _add_missing_carriers_from_costs(n, costs, carriers) @@ -361,7 +362,8 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, if 'PHS' in carriers and not phs.empty: # fill missing max hours to config value and # assume no natural inflow due to lack of data - phs = phs.replace({'max_hours': {0: config_hydro['PHS_max_hours']}}) + max_hours = config.get('PHS_max_hours', 6) + phs = phs.replace({'max_hours': {0: max_hours}}) n.madd('StorageUnit', phs.index, 
carrier='PHS', bus=phs['bus'], @@ -373,7 +375,10 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, cyclic_state_of_charge=True) if 'hydro' in carriers and not hydro.empty: - hydro_max_hours = config_hydro.get('hydro_max_hours') + hydro_max_hours = config.get('hydro_max_hours') + + assert hydro_max_hours is not None, "No path for hydro capacities given." + hydro_stats = pd.read_csv(hydro_capacities, comment="#", na_values='-', index_col=0) e_target = hydro_stats["E_store[TWh]"].clip(lower=0.2) * 1e6 @@ -402,8 +407,7 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, bus=hydro['bus'], p_nom=hydro['p_nom'], max_hours=hydro_max_hours, - capital_cost=(costs.at['hydro', 'capital_cost'] - if config_hydro.get('hydro_capital_cost') else 0.), + capital_cost=costs.at['hydro', 'capital_cost'], marginal_cost=costs.at['hydro', 'marginal_cost'], p_max_pu=1., # dispatch p_min_pu=0., # store @@ -413,8 +417,7 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, inflow=inflow_t.loc[:, hydro.index]) -def attach_extendable_generators(n, costs, ppl, elec_opts = {'extendable_carriers': {'Generator': []}}): - carriers = pd.Index(elec_opts['extendable_carriers']['Generator']) +def attach_extendable_generators(n, costs, ppl, carriers): _add_missing_carriers_from_costs(n, costs, carriers) @@ -462,7 +465,7 @@ def attach_extendable_generators(n, costs, ppl, elec_opts = {'extendable_carrier -def attach_OPSD_renewables(n, techs=[]): +def attach_OPSD_renewables(n, techs): available = ['DE', 'FR', 'PL', 'CH', 'DK', 'CZ', 'SE', 'GB'] tech_map = {'Onshore': 'onwind', 'Offshore': 'offwind', 'Solar': 'solar'} @@ -494,7 +497,7 @@ def attach_OPSD_renewables(n, techs=[]): -def estimate_renewable_capacities(n, tech_map={}): +def estimate_renewable_capacities(n, tech_map): if len(tech_map) == 0: return @@ -526,16 +529,15 @@ def estimate_renewable_capacities(n, tech_map={}): n.generators.loc[tech_i, 'p_nom_min'] = n.generators.loc[tech_i, 'p_nom'] -def 
add_nice_carrier_names(n, config): +def add_nice_carrier_names(n, nice_names, tech_colors): carrier_i = n.carriers.index - nice_names = (pd.Series(config['plotting']['nice_names']) + nice_names = (pd.Series(nice_names) .reindex(carrier_i).fillna(carrier_i.to_series().str.title())) n.carriers['nice_name'] = nice_names - colors = pd.Series(config['plotting']['tech_colors']).reindex(carrier_i) + colors = pd.Series(tech_colors).reindex(carrier_i) if colors.isna().any(): missing_i = list(colors.index[colors.isna()]) - logger.warning(f'tech_colors for carriers {missing_i} not defined ' - 'in config.') + logger.warning(f'tech_colors for carriers {missing_i} not defined.') n.carriers['color'] = colors @@ -545,35 +547,42 @@ if __name__ == "__main__": snakemake = mock_snakemake('add_electricity') configure_logging(snakemake) - n = pypsa.Network(snakemake.input.base_network) + config = snakemake.config + paths = snakemake.input + + n = pypsa.Network(paths.base_network) Nyears = n.snapshot_weightings.objective.sum() / 8760. 
- costs = load_costs(tech_costs = snakemake.input.tech_costs, config = snakemake.config['costs'], - elec_config = snakemake.config['electricity'], Nyears = Nyears) - ppl = load_powerplants(snakemake.input.powerplants) + costs = load_costs(paths.tech_costs, config['costs'], config['electricity'], Nyears=Nyears) + ppl = load_powerplants(paths.powerplants) - attach_load(n, regions = snakemake.input.regions, load = snakemake.input.load, - nuts3_shapes = snakemake.input.nuts3_shapes, - cntries = snakemake.config['countries'], - scaling = snakemake.config.get('load', {}).get('scaling_factor', 1.0)) + attach_load(n, paths.regions, paths.load, paths.nuts3_shapes, config['countries'], + scaling=config['load']['scaling_factor']) - update_transmission_costs(n, costs) + update_transmission_costs(n, costs, config['lines']['length_factor']) - attach_conventional_generators(n, costs, ppl, carriers = snakemake.config['electricity']['conventional_carriers']) - attach_wind_and_solar(n, costs, snakemake.input, technologies = snakemake.config['renewable'], - line_length_factor = snakemake.config['lines']['length_factor']) + carriers = config['electricity']['conventional_carriers'] + attach_conventional_generators(n, costs, ppl, carriers) - if 'hydro' in snakemake.config['renewable']: - attach_hydro(n, costs, ppl, snakemake.input.profile_hydro, snakemake.input.hydro_capacities, - config_hydro = snakemake.config['renewable']['hydro']) + carriers = config['renewable'] + attach_wind_and_solar(n, costs, paths, carriers, config['lines']['length_factor']) - attach_extendable_generators(n, costs, ppl, elec_opts = snakemake.config['electricity']) + if 'hydro' in config['renewable']: + carriers = config['renewable']['hydro'].pop('carriers', []) + attach_hydro(n, costs, ppl, paths.profile_hydro, paths.hydro_capacities, + carriers, **config['renewable']['hydro']) + + carriers = config['electricity']['extendable_carriers']['Generator'] + attach_extendable_generators(n, costs, ppl, carriers) + + 
tech_map = config['electricity'].get('estimate_renewable_capacities_from_capacity_stats', {}) + estimate_renewable_capacities(n, tech_map) + techs = config['electricity'].get('renewable_capacities_from_OPSD', []) + attach_OPSD_renewables(n, techs) - estimate_renewable_capacities(n, tech_map = (snakemake.config['electricity'] - .get('estimate_renewable_capacities_from_capacity_stats', {}))) - attach_OPSD_renewables(n, techs = snakemake.config['electricity'].get('renewable_capacities_from_OPSD', [])) update_p_nom_max(n) - add_nice_carrier_names(n, config = snakemake.config) + plot_config = config['plotting'] + add_nice_carrier_names(n, plot_config['nice_names'], plot_config['tech_colors']) n.export_to_netcdf(snakemake.output[0]) From 2c318a247ecbeaaf86993db7873819507c86add2 Mon Sep 17 00:00:00 2001 From: Fabian Hofmann Date: Tue, 11 Jan 2022 09:55:22 +0100 Subject: [PATCH 05/26] Update scripts/prepare_network.py Co-authored-by: Fabian Neumann --- scripts/prepare_network.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index 9d53625c..03187284 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -225,7 +225,8 @@ if __name__ == "__main__": for o in opts: m = re.match(r'^\d+seg$', o, re.IGNORECASE) if m is not None: - n = apply_time_segmentation(n, m.group(0)[:-3], solver_name=snakemake.config["solving"]["solver"]["name"]) + solver_name = snakemake.config["solving"]["solver"]["name"] + n = apply_time_segmentation(n, m.group(0)[:-3], solver_name=solver_name) break for o in opts: From acd7122aad6b3a944f436931a98dbb0b2e2d3010 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 11 Jan 2022 09:58:59 +0100 Subject: [PATCH 06/26] add_electricity: revert changes in add_nice_carrier_names --- scripts/add_electricity.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index e8498789..42d4f5cc 100755 
--- a/scripts/add_electricity.py +++ b/scripts/add_electricity.py @@ -529,18 +529,17 @@ def estimate_renewable_capacities(n, tech_map): n.generators.loc[tech_i, 'p_nom_min'] = n.generators.loc[tech_i, 'p_nom'] -def add_nice_carrier_names(n, nice_names, tech_colors): +def add_nice_carrier_names(n, config): carrier_i = n.carriers.index - nice_names = (pd.Series(nice_names) + nice_names = (pd.Series(config['plotting']['nice_names']) .reindex(carrier_i).fillna(carrier_i.to_series().str.title())) n.carriers['nice_name'] = nice_names - colors = pd.Series(tech_colors).reindex(carrier_i) + colors = pd.Series(config['plotting']['tech_colors']).reindex(carrier_i) if colors.isna().any(): missing_i = list(colors.index[colors.isna()]) - logger.warning(f'tech_colors for carriers {missing_i} not defined.') + logger.warning(f'tech_colors for carriers {missing_i} not defined in config.') n.carriers['color'] = colors - if __name__ == "__main__": if 'snakemake' not in globals(): from _helpers import mock_snakemake @@ -582,7 +581,6 @@ if __name__ == "__main__": update_p_nom_max(n) - plot_config = config['plotting'] - add_nice_carrier_names(n, plot_config['nice_names'], plot_config['tech_colors']) + add_nice_carrier_names(n, config) n.export_to_netcdf(snakemake.output[0]) From 01e93545f694d450839cc6ac0a423f6acb239de0 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 11 Jan 2022 09:59:34 +0100 Subject: [PATCH 07/26] add_extra_components: revise changes --- scripts/add_extra_components.py | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/scripts/add_extra_components.py b/scripts/add_extra_components.py index 9a2d6033..db764d4f 100644 --- a/scripts/add_extra_components.py +++ b/scripts/add_extra_components.py @@ -64,7 +64,7 @@ idx = pd.IndexSlice logger = logging.getLogger(__name__) -def attach_storageunits(n, costs, elec_opts = {'extendable_carriers': {'StorageUnit': []}, 'max_hours': {'battery': 6, 'H2': 168}}): +def attach_storageunits(n, costs, 
elec_opts): carriers = elec_opts['extendable_carriers']['StorageUnit'] max_hours = elec_opts['max_hours'] @@ -88,7 +88,7 @@ def attach_storageunits(n, costs, elec_opts = {'extendable_carriers': {'StorageU cyclic_state_of_charge=True) -def attach_stores(n, costs, elec_opts = {'extendable_carriers': {'Store': ['battery', 'H2']}}): +def attach_stores(n, costs, elec_opts): carriers = elec_opts['extendable_carriers']['Store'] _add_missing_carriers_from_costs(n, costs, carriers) @@ -154,7 +154,7 @@ def attach_stores(n, costs, elec_opts = {'extendable_carriers': {'Store': ['batt marginal_cost=costs.at["battery inverter", "marginal_cost"]) -def attach_hydrogen_pipelines(n, costs, elec_opts = {'extendable_carriers': {'Store': ['H2', 'battery']}}): +def attach_hydrogen_pipelines(n, costs, elec_opts): ext_carriers = elec_opts['extendable_carriers'] as_stores = ext_carriers.get('Store', []) @@ -192,17 +192,18 @@ if __name__ == "__main__": snakemake = mock_snakemake('add_extra_components', network='elec', simpl='', clusters=5) configure_logging(snakemake) + paths = snakemake.input - n = pypsa.Network(snakemake.input.network) + n = pypsa.Network(paths.network) + elec_config = snakemake.config['electricity'] + Nyears = n.snapshot_weightings.objective.sum() / 8760. 
- costs = load_costs(tech_costs = snakemake.input.tech_costs, - config = snakemake.config['costs'], - elec_config = snakemake.config['electricity'], Nyears = Nyears) + costs = load_costs(paths.tech_costs, snakemake.config['costs'], elec_config, Nyears=Nyears) - attach_storageunits(n, costs, elec_opts = snakemake.config['electricity']) - attach_stores(n, costs, elec_opts = snakemake.config['electricity']) - attach_hydrogen_pipelines(n, costs, elec_opts = snakemake.config['electricity']) + attach_storageunits(n, costs, elec_config) + attach_stores(n, costs, elec_config) + attach_hydrogen_pipelines(n, costs, elec_config) - add_nice_carrier_names(n, config=snakemake.config) + add_nice_carrier_names(n, snakemake.config) n.export_to_netcdf(snakemake.output[0]) From 39a6753ece2ff6e9e30f70edb8a6f8053a7721fb Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 11 Jan 2022 09:59:58 +0100 Subject: [PATCH 08/26] build_powerplants: revise changes --- scripts/build_powerplants.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/scripts/build_powerplants.py b/scripts/build_powerplants.py index 39af2385..be57baa8 100755 --- a/scripts/build_powerplants.py +++ b/scripts/build_powerplants.py @@ -99,8 +99,9 @@ if __name__ == "__main__": from _helpers import mock_snakemake snakemake = mock_snakemake('build_powerplants') configure_logging(snakemake) + paths = snakemake.input - n = pypsa.Network(snakemake.input.base_network) + n = pypsa.Network(paths.base_network) countries = n.buses.country.unique() ppl = (pm.powerplants(from_url=True) @@ -119,8 +120,8 @@ if __name__ == "__main__": ppl.query(ppl_query, inplace=True) # add carriers from own powerplant files: - ppl = add_custom_powerplants(ppl, custom_powerplants = snakemake.input.custom_powerplants, - custom_ppl_query = snakemake.config['electricity']['custom_powerplants']) + custom_ppl_query = snakemake.config['electricity']['custom_powerplants'] + ppl = add_custom_powerplants(ppl, paths.custom_powerplants, 
custom_ppl_query) cntries_without_ppl = [c for c in countries if c not in ppl.Country.unique()] From c7a443c9695c4c2ddb96f9cacf05fb62a3c46874 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 11 Jan 2022 10:23:22 +0100 Subject: [PATCH 09/26] build_shapes: revise changes --- scripts/build_shapes.py | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/scripts/build_shapes.py b/scripts/build_shapes.py index b7264470..12b1d015 100644 --- a/scripts/build_shapes.py +++ b/scripts/build_shapes.py @@ -107,8 +107,8 @@ def _simplify_polys(polys, minarea=0.1, tolerance=0.01, filterremote=True): return polys.simplify(tolerance=tolerance) -def countries(naturalearth, cntries=[]): - if 'RS' in cntries: cntries.append('KV') +def countries(naturalearth, country_list): + if 'RS' in country_list: country_list.append('KV') df = gpd.read_file(naturalearth) @@ -116,16 +116,16 @@ def countries(naturalearth, cntries=[]): fieldnames = (df[x].where(lambda s: s!='-99') for x in ('ISO_A2', 'WB_A2', 'ADM0_A3')) df['name'] = reduce(lambda x,y: x.fillna(y), fieldnames, next(fieldnames)).str[0:2] - df = df.loc[df.name.isin(cntries) & ((df['scalerank'] == 0) | (df['scalerank'] == 5))] + df = df.loc[df.name.isin(country_list) & ((df['scalerank'] == 0) | (df['scalerank'] == 5))] s = df.set_index('name')['geometry'].map(_simplify_polys) - if 'RS' in cntries: s['RS'] = s['RS'].union(s.pop('KV')) + if 'RS' in country_list: s['RS'] = s['RS'].union(s.pop('KV')) return s -def eez(country_shapes, eez, cntries=[]): +def eez(country_shapes, eez, countries): df = gpd.read_file(eez) - df = df.loc[df['ISO_3digit'].isin([_get_country('alpha_3', alpha_2=c) for c in cntries])] + df = df.loc[df['ISO_3digit'].isin([_get_country('alpha_3', alpha_2=c) for c in countries])] df['name'] = df['ISO_3digit'].map(lambda c: _get_country('alpha_2', alpha_3=c)) s = df.set_index('name').geometry.map(lambda s: _simplify_polys(s, filterremote=False)) s = gpd.GeoSeries({k:v for k,v in 
s.iteritems() if v.distance(country_shapes[k]) < 1e-3}) @@ -217,18 +217,19 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_shapes') configure_logging(snakemake) + paths = snakemake.input out = snakemake.output - country_shapes = countries(snakemake.input.naturalearth, snakemake.config['countries']) + country_shapes = countries(paths.naturalearth, snakemake.config['countries']) save_to_geojson(country_shapes, out.country_shapes) - offshore_shapes = eez(country_shapes, snakemake.input.eez, cntries=snakemake.config['countries']) + offshore_shapes = eez(country_shapes, paths.eez, snakemake.config['countries']) save_to_geojson(offshore_shapes, out.offshore_shapes) europe_shape = country_cover(country_shapes, offshore_shapes) save_to_geojson(gpd.GeoSeries(europe_shape), out.europe_shape) - nuts3_shapes = nuts3(country_shapes, snakemake.input.nuts3, snakemake.input.nuts3pop, - snakemake.input.nuts3gdp, snakemake.input.ch_cantons, snakemake.input.ch_popgdp) + nuts3_shapes = nuts3(country_shapes, paths.nuts3, paths.nuts3pop, + paths.nuts3gdp, paths.ch_cantons, paths.ch_popgdp) save_to_geojson(nuts3_shapes, out.nuts3_shapes) From ed7fd27adc82db6b5466b830a1e989cd15dc6413 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 11 Jan 2022 10:24:45 +0100 Subject: [PATCH 10/26] build_shapes: use country_list as argument --- scripts/build_shapes.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/build_shapes.py b/scripts/build_shapes.py index 12b1d015..cca941e6 100644 --- a/scripts/build_shapes.py +++ b/scripts/build_shapes.py @@ -123,9 +123,9 @@ def countries(naturalearth, country_list): return s -def eez(country_shapes, eez, countries): +def eez(country_shapes, eez, country_list): df = gpd.read_file(eez) - df = df.loc[df['ISO_3digit'].isin([_get_country('alpha_3', alpha_2=c) for c in countries])] + df = df.loc[df['ISO_3digit'].isin([_get_country('alpha_3', alpha_2=c) for c in country_list])] df['name'] = df['ISO_3digit'].map(lambda c: 
_get_country('alpha_2', alpha_3=c)) s = df.set_index('name').geometry.map(lambda s: _simplify_polys(s, filterremote=False)) s = gpd.GeoSeries({k:v for k,v in s.iteritems() if v.distance(country_shapes[k]) < 1e-3}) From 94364cbeebbf84b6407056cd00330f2e84e0989d Mon Sep 17 00:00:00 2001 From: Fabian Hofmann Date: Tue, 11 Jan 2022 10:31:49 +0100 Subject: [PATCH 11/26] Update scripts/cluster_network.py Co-authored-by: Fabian Neumann --- scripts/cluster_network.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 30a8770d..51556b27 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -265,7 +265,7 @@ def clustering_for_n_clusters(n, n_clusters, custom_busmap=False, aggregate_carr else: raise AttributeError(f"potential_mode should be one of 'simple' or 'conservative' but is '{potential_mode}'") - if custom_busmap is False: + if not custom_busmap: busmap = busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights, algorithm) else: busmap = custom_busmap From e1aae5a98ebbbe073c78ad801dd23e702dab56f6 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 13 Jan 2022 15:36:13 +0100 Subject: [PATCH 12/26] minor adjustment to memory requirements of add_electricity, cluster_network --- Snakefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Snakefile b/Snakefile index cb50e3bf..ce79a421 100644 --- a/Snakefile +++ b/Snakefile @@ -232,7 +232,7 @@ rule add_electricity: log: "logs/add_electricity.log" benchmark: "benchmarks/add_electricity" threads: 1 - resources: mem=3000 + resources: mem=5000 script: "scripts/add_electricity.py" @@ -273,7 +273,7 @@ rule cluster_network: log: "logs/cluster_network/elec_s{simpl}_{clusters}.log" benchmark: "benchmarks/cluster_network/elec_s{simpl}_{clusters}" threads: 1 - resources: mem=3000 + resources: mem=6000 script: "scripts/cluster_network.py" From a747c88d08a57bddd72a523bf91fb2bd6ec72b24 Mon Sep 17 00:00:00 2001 
From: Fabian Neumann Date: Thu, 13 Jan 2022 15:42:48 +0100 Subject: [PATCH 13/26] disable solar PV CF correction factor for default satellite data (closes #285) --- config.default.yaml | 5 +++-- config.tutorial.yaml | 5 +++-- doc/release_notes.rst | 4 ++++ 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/config.default.yaml b/config.default.yaml index f70e7c2c..7a443a03 100755 --- a/config.default.yaml +++ b/config.default.yaml @@ -148,12 +148,13 @@ renewable: slope: 35. azimuth: 180. capacity_per_sqkm: 1.7 # ScholzPhd Tab 4.3.1: 170 MW/km^2 - # Determined by comparing uncorrected area-weighted full-load hours to those + # Correction factor determined by comparing uncorrected area-weighted full-load hours to those # published in Supplementary Data to # Pietzcker, Robert Carl, et al. "Using the sun to decarbonize the power # sector: The economic potential of photovoltaics and concentrating solar # power." Applied Energy 135 (2014): 704-720. - correction_factor: 0.854337 + # This correction factor of 0.854337 may be in order if using reanalysis data. + # correction_factor: 0.854337 corine: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 26, 31, 32] natura: true diff --git a/config.tutorial.yaml b/config.tutorial.yaml index 26ead242..ea624727 100755 --- a/config.tutorial.yaml +++ b/config.tutorial.yaml @@ -116,12 +116,13 @@ renewable: slope: 35. azimuth: 180. capacity_per_sqkm: 1.7 # ScholzPhd Tab 4.3.1: 170 MW/km^2 - # Determined by comparing uncorrected area-weighted full-load hours to those + # Correction factor determined by comparing uncorrected area-weighted full-load hours to those # published in Supplementary Data to # Pietzcker, Robert Carl, et al. "Using the sun to decarbonize the power # sector: The economic potential of photovoltaics and concentrating solar # power." Applied Energy 135 (2014): 704-720. - correction_factor: 0.854337 + # This correction factor of 0.854337 may be in order if using reanalysis data. 
+ # correction_factor: 0.854337 corine: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 26, 31, 32] natura: true diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 0423a581..c379cf5c 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -18,6 +18,10 @@ Upcoming Release * The default deployment density of AC- and DC-connected offshore wind capacity is reduced from 3 MW/sqkm to a more conservative estimate of 2 MW/sqkm [`#280 `_]. +* Following discussion in `#285 `_ we have disabled the + correction factor for solar PV capacity factors by default while satellite data is used. + A correction factor of 0.854337 is recommended if reanalysis data like ERA5 is used. + PyPSA-Eur 0.4.0 (22th September 2021) ===================================== From 505f093141063dbf30cb8ce850ad38214006ff57 Mon Sep 17 00:00:00 2001 From: martacki Date: Thu, 13 Jan 2022 18:00:22 +0100 Subject: [PATCH 14/26] fix ordering of kwargs in add_co2limit --- scripts/prepare_network.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index 03187284..3eb244cf 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -233,10 +233,10 @@ if __name__ == "__main__": if "Co2L" in o: m = re.findall("[0-9]*\.?[0-9]+$", o) if len(m) > 0: - co2limit=float(m[0])*snakemake.config['electricity']['co2base'] - add_co2limit(n, Nyears, co2limit) + co2limit = float(m[0]) * snakemake.config['electricity']['co2base'] + add_co2limit(n, co2limit, Nyears) else: - add_co2limit(n, Nyears, snakemake.config['electricity']['co2limit']) + add_co2limit(n, snakemake.config['electricity']['co2limit'], Nyears) break for o in opts: From b660277e37aa69e91187c12e7167cb306627bf48 Mon Sep 17 00:00:00 2001 From: euronion <42553970+euronion@users.noreply.github.com> Date: Thu, 13 Jan 2022 18:25:12 +0100 Subject: [PATCH 15/26] Merge pull request #303 from PyPSA/misc/improve-ci-speed [DNMY] Improve 
CI performance. --- .github/workflows/ci.yaml | 93 +++++++++++++++++++++++++++------------ envs/environment.yaml | 2 - 2 files changed, 64 insertions(+), 31 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index b0699d74..c753deab 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -4,6 +4,10 @@ name: CI +# Caching method based on and described by: +# epassaro (2021): https://dev.to/epassaro/caching-anaconda-environments-in-github-actions-5hde +# and code in GitHub repo: https://github.com/epassaro/cache-conda-envs + on: push: branches: @@ -14,42 +18,73 @@ on: schedule: - cron: "0 5 * * TUE" +env: + CACHE_NUMBER: 1 # Change this value to manually reset the environment cache + jobs: build: - runs-on: ${{ matrix.os }} strategy: - max-parallel: 5 matrix: - os: - - ubuntu-latest - - macos-latest - - windows-latest + include: + # Matrix required to handle caching with Mambaforge + - os: ubuntu-latest + label: ubuntu-latest + prefix: /usr/share/miniconda3/envs/pypsa-eur + - os: macos-latest + label: macos-latest + prefix: /Users/runner/miniconda3/envs/pypsa-eur + + - os: windows-latest + label: windows-latest + prefix: C:\Miniconda3\envs\pypsa-eur + + name: ${{ matrix.label }} + + runs-on: ${{ matrix.os }} + defaults: run: shell: bash -l {0} - - steps: - - - uses: actions/checkout@v2 - - - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v2.1.1 - with: # checks out environment 'test' by default - mamba-version: "*" - channels: conda-forge,defaults - channel-priority: true - - - name: Install dependencies - run: | - echo -ne "url: ${CDSAPI_URL}\nkey: ${CDSAPI_TOKEN}\n" > ~/.cdsapirc - echo -e " - glpk\n - ipopt<3.13.3" >> envs/environment.yaml - mamba env update -f envs/environment.yaml --name test - - name: Test snakemake workflow - run: | - conda list - cp test/config.test1.yaml config.yaml - snakemake --cores all solve_all_networks - rm -rf resources/*.nc resources/*.geojson resources/*.h5 networks 
results + steps: + - uses: actions/checkout@v2 + + - name: Setup secrets + run: | + echo -ne "url: ${CDSAPI_URL}\nkey: ${CDSAPI_TOKEN}\n" > ~/.cdsapirc + + - name: Add solver to environment + run: | + echo -e " - glpk\n - ipopt<3.13.3" >> envs/environment.yaml + + - name: Setup Mambaforge + uses: conda-incubator/setup-miniconda@v2 + with: + miniforge-variant: Mambaforge + miniforge-version: latest + activate-environment: pypsa-eur + use-mamba: true + + - name: Set cache date + run: echo "DATE=$(date +'%Y%m%d')" >> $GITHUB_ENV + + - name: Create environment cache + uses: actions/cache@v2 + id: cache + with: + path: ${{ matrix.prefix }} + key: ${{ matrix.label }}-conda-${{ hashFiles('envs/environment.yaml') }}-${{ env.DATE }}-${{ env.CACHE_NUMBER }} + + - name: Update environment due to outdated or unavailable cache + run: mamba env update -n pypsa-eur -f envs/environment.yaml + if: steps.cache.outputs.cache-hit != 'true' + + - name: Test snakemake workflow + run: | + conda activate pypsa-eur + conda list + cp test/config.test1.yaml config.yaml + snakemake --cores all solve_all_networks + rm -rf resources/*.nc resources/*.geojson resources/*.h5 networks results diff --git a/envs/environment.yaml b/envs/environment.yaml index 29d743ac..b6958d85 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -6,11 +6,9 @@ name: pypsa-eur channels: - conda-forge - bioconda - - http://conda.anaconda.org/gurobi dependencies: - python>=3.8 - pip - - mamba # esp for windows build - pypsa>=0.18 - atlite>=0.2.5 From 9b7bb27da1b13fc7c74707ba491675244d2807c6 Mon Sep 17 00:00:00 2001 From: martacki Date: Thu, 13 Jan 2022 18:54:27 +0100 Subject: [PATCH 16/26] snakemake dependencies in base_network --- scripts/base_network.py | 120 +++++++++++++++++++++------------------- 1 file changed, 64 insertions(+), 56 deletions(-) diff --git a/scripts/base_network.py b/scripts/base_network.py index 514e4dc3..baa12092 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ 
-112,8 +112,8 @@ def _find_closest_links(links, new_links, distance_upper_bound=1.5): .sort_index()['i'] -def _load_buses_from_eg(): - buses = (pd.read_csv(snakemake.input.eg_buses, quotechar="'", +def _load_buses_from_eg(eg_buses, europe_shape, config_elec): + buses = (pd.read_csv(eg_buses, quotechar="'", true_values=['t'], false_values=['f'], dtype=dict(bus_id="str")) .set_index("bus_id") @@ -124,18 +124,18 @@ def _load_buses_from_eg(): buses['under_construction'] = buses['under_construction'].fillna(False).astype(bool) # remove all buses outside of all countries including exclusive economic zones (offshore) - europe_shape = gpd.read_file(snakemake.input.europe_shape).loc[0, 'geometry'] + europe_shape = gpd.read_file(europe_shape).loc[0, 'geometry'] europe_shape_prepped = shapely.prepared.prep(europe_shape) buses_in_europe_b = buses[['x', 'y']].apply(lambda p: europe_shape_prepped.contains(Point(p)), axis=1) - buses_with_v_nom_to_keep_b = buses.v_nom.isin(snakemake.config['electricity']['voltages']) | buses.v_nom.isnull() - logger.info("Removing buses with voltages {}".format(pd.Index(buses.v_nom.unique()).dropna().difference(snakemake.config['electricity']['voltages']))) + buses_with_v_nom_to_keep_b = buses.v_nom.isin(config_elec['voltages']) | buses.v_nom.isnull() + logger.info("Removing buses with voltages {}".format(pd.Index(buses.v_nom.unique()).dropna().difference(config_elec['voltages']))) return pd.DataFrame(buses.loc[buses_in_europe_b & buses_with_v_nom_to_keep_b]) -def _load_transformers_from_eg(buses): - transformers = (pd.read_csv(snakemake.input.eg_transformers, quotechar="'", +def _load_transformers_from_eg(buses, eg_transformers): + transformers = (pd.read_csv(eg_transformers, quotechar="'", true_values=['t'], false_values=['f'], dtype=dict(transformer_id='str', bus0='str', bus1='str')) .set_index('transformer_id')) @@ -145,8 +145,8 @@ def _load_transformers_from_eg(buses): return transformers -def _load_converters_from_eg(buses): - converters = 
(pd.read_csv(snakemake.input.eg_converters, quotechar="'", +def _load_converters_from_eg(buses, eg_converters): + converters = (pd.read_csv(eg_converters, quotechar="'", true_values=['t'], false_values=['f'], dtype=dict(converter_id='str', bus0='str', bus1='str')) .set_index('converter_id')) @@ -158,8 +158,8 @@ def _load_converters_from_eg(buses): return converters -def _load_links_from_eg(buses): - links = (pd.read_csv(snakemake.input.eg_links, quotechar="'", true_values=['t'], false_values=['f'], +def _load_links_from_eg(buses, eg_links): + links = (pd.read_csv(eg_links, quotechar="'", true_values=['t'], false_values=['f'], dtype=dict(link_id='str', bus0='str', bus1='str', under_construction="bool")) .set_index('link_id')) @@ -176,11 +176,11 @@ def _load_links_from_eg(buses): return links -def _add_links_from_tyndp(buses, links): - links_tyndp = pd.read_csv(snakemake.input.links_tyndp) +def _add_links_from_tyndp(buses, links, links_tyndp, europe_shape): + links_tyndp = pd.read_csv(links_tyndp) # remove all links from list which lie outside all of the desired countries - europe_shape = gpd.read_file(snakemake.input.europe_shape).loc[0, 'geometry'] + europe_shape = gpd.read_file(europe_shape).loc[0, 'geometry'] europe_shape_prepped = shapely.prepared.prep(europe_shape) x1y1_in_europe_b = links_tyndp[['x1', 'y1']].apply(lambda p: europe_shape_prepped.contains(Point(p)), axis=1) x2y2_in_europe_b = links_tyndp[['x2', 'y2']].apply(lambda p: europe_shape_prepped.contains(Point(p)), axis=1) @@ -248,8 +248,8 @@ def _add_links_from_tyndp(buses, links): return buses, links.append(links_tyndp, sort=True) -def _load_lines_from_eg(buses): - lines = (pd.read_csv(snakemake.input.eg_lines, quotechar="'", true_values=['t'], false_values=['f'], +def _load_lines_from_eg(buses, eg_lines): + lines = (pd.read_csv(eg_lines, quotechar="'", true_values=['t'], false_values=['f'], dtype=dict(line_id='str', bus0='str', bus1='str', underground="bool", under_construction="bool")) 
.set_index('line_id') @@ -262,8 +262,8 @@ def _load_lines_from_eg(buses): return lines -def _apply_parameter_corrections(n): - with open(snakemake.input.parameter_corrections) as f: +def _apply_parameter_corrections(n, parameter_corrections): + with open(parameter_corrections) as f: corrections = yaml.safe_load(f) if corrections is None: return @@ -285,14 +285,14 @@ def _apply_parameter_corrections(n): df.loc[inds, attr] = r[inds].astype(df[attr].dtype) -def _set_electrical_parameters_lines(lines): - v_noms = snakemake.config['electricity']['voltages'] - linetypes = snakemake.config['lines']['types'] +def _set_electrical_parameters_lines(lines, config): + v_noms = config['electricity']['voltages'] + linetypes = config['lines']['types'] for v_nom in v_noms: lines.loc[lines["v_nom"] == v_nom, 'type'] = linetypes[v_nom] - lines['s_max_pu'] = snakemake.config['lines']['s_max_pu'] + lines['s_max_pu'] = config['lines']['s_max_pu'] return lines @@ -304,14 +304,14 @@ def _set_lines_s_nom_from_linetypes(n): ) -def _set_electrical_parameters_links(links): +def _set_electrical_parameters_links(links, config, links_p_nom): if links.empty: return links - p_max_pu = snakemake.config['links'].get('p_max_pu', 1.) + p_max_pu = config['links'].get('p_max_pu', 1.) links['p_max_pu'] = p_max_pu links['p_min_pu'] = -p_max_pu - links_p_nom = pd.read_csv(snakemake.input.links_p_nom) + links_p_nom = pd.read_csv(links_p_nom) # filter links that are not in operation anymore removed_b = links_p_nom.Remarks.str.contains('Shut down|Replaced', na=False) @@ -331,8 +331,8 @@ def _set_electrical_parameters_links(links): return links -def _set_electrical_parameters_converters(converters): - p_max_pu = snakemake.config['links'].get('p_max_pu', 1.) +def _set_electrical_parameters_converters(converters, config): + p_max_pu = config['links'].get('p_max_pu', 1.) 
converters['p_max_pu'] = p_max_pu converters['p_min_pu'] = -p_max_pu @@ -345,8 +345,8 @@ def _set_electrical_parameters_converters(converters): return converters -def _set_electrical_parameters_transformers(transformers): - config = snakemake.config['transformers'] +def _set_electrical_parameters_transformers(transformers, config): + config = config['transformers'] ## Add transformer parameters transformers["x"] = config.get('x', 0.1) @@ -373,7 +373,7 @@ def _remove_unconnected_components(network): return network[component == component_sizes.index[0]] -def _set_countries_and_substations(n): +def _set_countries_and_substations(n, config, country_shapes, offshore_shapes): buses = n.buses @@ -386,9 +386,9 @@ def _set_countries_and_substations(n): index=buses.index ) - countries = snakemake.config['countries'] - country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index('name')['geometry'] - offshore_shapes = gpd.read_file(snakemake.input.offshore_shapes).set_index('name')['geometry'] + countries = config['countries'] + country_shapes = gpd.read_file(country_shapes).set_index('name')['geometry'] + offshore_shapes = gpd.read_file(offshore_shapes).set_index('name')['geometry'] substation_b = buses['symbol'].str.contains('substation|converter station', case=False) def prefer_voltage(x, which): @@ -498,19 +498,19 @@ def _replace_b2b_converter_at_country_border_by_link(n): .format(i, b0, line, linkcntry.at[i], buscntry.at[b1])) -def _set_links_underwater_fraction(n): +def _set_links_underwater_fraction(n, offshore_shapes): if n.links.empty: return if not hasattr(n.links, 'geometry'): n.links['underwater_fraction'] = 0. 
else: - offshore_shape = gpd.read_file(snakemake.input.offshore_shapes).unary_union + offshore_shape = gpd.read_file(offshore_shapes).unary_union links = gpd.GeoSeries(n.links.geometry.dropna().map(shapely.wkt.loads)) n.links['underwater_fraction'] = links.intersection(offshore_shape).length / links.length -def _adjust_capacities_of_under_construction_branches(n): - lines_mode = snakemake.config['lines'].get('under_construction', 'undef') +def _adjust_capacities_of_under_construction_branches(n, config): + lines_mode = config['lines'].get('under_construction', 'undef') if lines_mode == 'zero': n.lines.loc[n.lines.under_construction, 'num_parallel'] = 0. n.lines.loc[n.lines.under_construction, 's_nom'] = 0. @@ -519,7 +519,7 @@ def _adjust_capacities_of_under_construction_branches(n): elif lines_mode != 'keep': logger.warning("Unrecognized configuration for `lines: under_construction` = `{}`. Keeping under construction lines.") - links_mode = snakemake.config['links'].get('under_construction', 'undef') + links_mode = config['links'].get('under_construction', 'undef') if links_mode == 'zero': n.links.loc[n.links.under_construction, "p_nom"] = 0. 
elif links_mode == 'remove': @@ -534,27 +534,30 @@ def _adjust_capacities_of_under_construction_branches(n): return n -def base_network(): - buses = _load_buses_from_eg() +def base_network(eg_buses, eg_converters, eg_transformers, eg_lines, eg_links, + links_p_nom, links_tyndp, europe_shape, country_shapes, offshore_shapes, + parameter_corrections, config): - links = _load_links_from_eg(buses) - if snakemake.config['links'].get('include_tyndp'): - buses, links = _add_links_from_tyndp(buses, links) + buses = _load_buses_from_eg(eg_buses, europe_shape, config['electricity']) - converters = _load_converters_from_eg(buses) + links = _load_links_from_eg(buses, eg_links) + if config['links'].get('include_tyndp'): + buses, links = _add_links_from_tyndp(buses, links, links_tyndp, europe_shape) - lines = _load_lines_from_eg(buses) - transformers = _load_transformers_from_eg(buses) + converters = _load_converters_from_eg(buses, eg_converters) - lines = _set_electrical_parameters_lines(lines) - transformers = _set_electrical_parameters_transformers(transformers) - links = _set_electrical_parameters_links(links) - converters = _set_electrical_parameters_converters(converters) + lines = _load_lines_from_eg(buses, eg_lines) + transformers = _load_transformers_from_eg(buses, eg_transformers) + + lines = _set_electrical_parameters_lines(lines, config) + transformers = _set_electrical_parameters_transformers(transformers, config) + links = _set_electrical_parameters_links(links, config, links_p_nom) + converters = _set_electrical_parameters_converters(converters, config) n = pypsa.Network() n.name = 'PyPSA-Eur' - n.set_snapshots(pd.date_range(freq='h', **snakemake.config['snapshots'])) + n.set_snapshots(pd.date_range(freq='h', **config['snapshots'])) n.snapshot_weightings[:] *= 8760. 
/ n.snapshot_weightings.sum() n.import_components_from_dataframe(buses, "Bus") @@ -565,17 +568,17 @@ def base_network(): _set_lines_s_nom_from_linetypes(n) - _apply_parameter_corrections(n) + _apply_parameter_corrections(n, parameter_corrections) n = _remove_unconnected_components(n) - _set_countries_and_substations(n) + _set_countries_and_substations(n, config, country_shapes, offshore_shapes) - _set_links_underwater_fraction(n) + _set_links_underwater_fraction(n, offshore_shapes) _replace_b2b_converter_at_country_border_by_link(n) - n = _adjust_capacities_of_under_construction_branches(n) + n = _adjust_capacities_of_under_construction_branches(n, config) return n @@ -585,6 +588,11 @@ if __name__ == "__main__": snakemake = mock_snakemake('base_network') configure_logging(snakemake) - n = base_network() + paths = snakemake.input + config = snakemake.config + + n = base_network(paths.eg_buses, paths.eg_converters, paths.eg_transformers, paths.eg_lines, paths.eg_links, + paths.links_p_nom, paths.links_tyndp, paths.europe_shape, paths.country_shapes, paths.offshore_shapes, + paths.parameter_corrections, config) n.export_to_netcdf(snakemake.output[0]) From 72e277a007c9421a5b48078942867e73b48ed481 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Fri, 14 Jan 2022 08:43:21 +0100 Subject: [PATCH 17/26] update environment and address deprecations (#291) * update environment and address deprecations * check pandas<1.3 * limit snakemake due to ascii encoding error, address review comments * remove version restriction on snakemake --- README.md | 2 +- envs/environment.yaml | 8 ++++---- scripts/add_electricity.py | 2 -- scripts/base_network.py | 2 +- scripts/build_shapes.py | 6 +++--- scripts/cluster_network.py | 5 ++++- 6 files changed, 13 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index 15f979a7..8f569f2e 100644 --- a/README.md +++ b/README.md @@ -58,7 +58,7 @@ The dataset consists of: - Electrical demand time series from the [OPSD 
project](https://open-power-system-data.org/). - Renewable time series based on ERA5 and SARAH, assembled using the [atlite tool](https://github.com/FRESNA/atlite). -- Geographical potentials for wind and solar generators based on land use (CORINE) and excluding nature reserves (Natura2000) are computed with the [vresutils library](https://github.com/FRESNA/vresutils) and the [glaes library](https://github.com/FZJ-IEK3-VSA/glaes). +- Geographical potentials for wind and solar generators based on land use (CORINE) and excluding nature reserves (Natura2000) are computed with the [atlite library](https://github.com/PyPSA/atlite). Already-built versions of the model can be found in the accompanying [Zenodo repository](https://doi.org/10.5281/zenodo.3601881). diff --git a/envs/environment.yaml b/envs/environment.yaml index b6958d85..4b7b0ec5 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -10,9 +10,9 @@ dependencies: - python>=3.8 - pip - - pypsa>=0.18 + - pypsa>=0.18.1 - atlite>=0.2.5 - - dask<=2021.3.1 # until https://github.com/dask/dask/issues/7583 is solved + - dask # Dependencies of the workflow itself - xlrd @@ -36,7 +36,7 @@ dependencies: - progressbar2 - pyomo - matplotlib - - proj<8 + - proj # Keep in conda environment when calling ipython - ipython @@ -54,5 +54,5 @@ dependencies: - tabula-py - pip: - - vresutils==0.3.1 + - vresutils>=0.3.1 - tsam>=1.1.0 diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index 08a32a26..9e64ad29 100755 --- a/scripts/add_electricity.py +++ b/scripts/add_electricity.py @@ -95,7 +95,6 @@ import powerplantmatching as pm from powerplantmatching.export import map_country_bus from vresutils.costdata import annuity -from vresutils.load import timeseries_opsd from vresutils import transfer as vtransfer idx = pd.IndexSlice @@ -227,7 +226,6 @@ def attach_load(n): # relative factors 0.6 and 0.4 have been determined from a linear # regression on the country to continent load data - # (refer to 
vresutils.load._upsampling_weights) factors = normed(0.6 * normed(gdp_n) + 0.4 * normed(pop_n)) return pd.DataFrame(factors.values * l.values[:,np.newaxis], index=l.index, columns=factors.index) diff --git a/scripts/base_network.py b/scripts/base_network.py index 514e4dc3..1f2b9241 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -97,7 +97,7 @@ def _get_country(df): def _find_closest_links(links, new_links, distance_upper_bound=1.5): - treecoords = np.asarray([np.asarray(shapely.wkt.loads(s))[[0, -1]].flatten() + treecoords = np.asarray([np.asarray(shapely.wkt.loads(s).coords)[[0, -1]].flatten() for s in links.geometry]) querycoords = np.vstack([new_links[['x1', 'y1', 'x2', 'y2']], new_links[['x2', 'y2', 'x1', 'y1']]]) diff --git a/scripts/build_shapes.py b/scripts/build_shapes.py index 5814085b..366cb820 100644 --- a/scripts/build_shapes.py +++ b/scripts/build_shapes.py @@ -79,7 +79,7 @@ from itertools import takewhile import pandas as pd import geopandas as gpd from shapely.geometry import MultiPolygon, Polygon -from shapely.ops import cascaded_union +from shapely.ops import unary_union import pycountry as pyc logger = logging.getLogger(__name__) @@ -95,7 +95,7 @@ def _get_country(target, **keys): def _simplify_polys(polys, minarea=0.1, tolerance=0.01, filterremote=True): if isinstance(polys, MultiPolygon): - polys = sorted(polys, key=attrgetter('area'), reverse=True) + polys = sorted(polys.geoms, key=attrgetter('area'), reverse=True) mainpoly = polys[0] mainlength = np.sqrt(mainpoly.area/(2.*np.pi)) if mainpoly.area > minarea: @@ -139,7 +139,7 @@ def country_cover(country_shapes, eez_shapes=None): if eez_shapes is not None: shapes += list(eez_shapes) - europe_shape = cascaded_union(shapes) + europe_shape = unary_union(shapes) if isinstance(europe_shape, MultiPolygon): europe_shape = max(europe_shape, key=attrgetter('area')) return Polygon(shell=europe_shape.exterior) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 
1a976cd1..4b9db466 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -140,6 +140,9 @@ from functools import reduce from pypsa.networkclustering import (busmap_by_kmeans, busmap_by_spectral_clustering, _make_consense, get_clustering_from_busmap) +import warnings +warnings.filterwarnings(action='ignore', category=UserWarning) + from add_electricity import load_costs idx = pd.IndexSlice @@ -313,7 +316,7 @@ def cluster_regions(busmaps, input=None, output=None): for which in ('regions_onshore', 'regions_offshore'): regions = gpd.read_file(getattr(input, which)).set_index('name') - geom_c = regions.geometry.groupby(busmap).apply(shapely.ops.cascaded_union) + geom_c = regions.geometry.groupby(busmap).apply(shapely.ops.unary_union) regions_c = gpd.GeoDataFrame(dict(geometry=geom_c)) regions_c.index.name = 'name' save_to_geojson(regions_c, getattr(output, which)) From deac9f32e7280ab4f30ea80322796e9fd8861c1b Mon Sep 17 00:00:00 2001 From: martacki Date: Fri, 14 Jan 2022 11:05:15 +0100 Subject: [PATCH 18/26] move snakemake keys (input, output, config, ...) 
to own variables --- scripts/_helpers.py | 4 +++ scripts/add_electricity.py | 7 +++-- scripts/add_extra_components.py | 13 ++++----- scripts/base_network.py | 7 +++-- scripts/build_bus_regions.py | 16 ++++++----- scripts/build_cutout.py | 14 +++++----- scripts/build_hydro_profile.py | 20 +++++++------- scripts/build_load_data.py | 9 ++++--- scripts/build_natura_raster.py | 9 ++++--- scripts/build_powerplants.py | 11 ++++---- scripts/build_renewable_profiles.py | 16 ++++++----- scripts/build_shapes.py | 9 +++---- scripts/cluster_network.py | 42 +++++++++++++++-------------- scripts/plot_network.py | 25 ++++++++--------- scripts/plot_p_nom_max.py | 12 +++++---- scripts/prepare_links_p_nom.py | 6 +++-- scripts/prepare_network.py | 32 +++++++++++----------- scripts/retrieve_databundle.py | 6 +++-- scripts/simplify_network.py | 24 +++++++++-------- scripts/solve_network.py | 20 +++++++------- scripts/solve_operations_network.py | 21 ++++++++------- 21 files changed, 176 insertions(+), 147 deletions(-) diff --git a/scripts/_helpers.py b/scripts/_helpers.py index f1e5e887..a44a8133 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -95,6 +95,10 @@ def pdbcast(v, h): return pd.DataFrame(v.values.reshape((-1, 1)) * h.values, index=v.index, columns=h.index) +def retrieve_snakemake_keys(snakemake): + return (snakemake.input, snakemake.config, snakemake.wildcards, + snakemake.log, snakemake.output) + def load_network_for_plots(fn, tech_costs, config, combine_hydro_ps=True): import pypsa diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index 42d4f5cc..c4a883f5 100755 --- a/scripts/add_electricity.py +++ b/scripts/add_electricity.py @@ -84,7 +84,7 @@ It further adds extendable ``generators`` with **zero** capacity for """ import logging -from _helpers import configure_logging, update_p_nom_max +from _helpers import configure_logging, retrieve_snakemake_keys, update_p_nom_max import pypsa import pandas as pd @@ -546,8 +546,7 @@ if __name__ == 
"__main__": snakemake = mock_snakemake('add_electricity') configure_logging(snakemake) - config = snakemake.config - paths = snakemake.input + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) n = pypsa.Network(paths.base_network) Nyears = n.snapshot_weightings.objective.sum() / 8760. @@ -583,4 +582,4 @@ if __name__ == "__main__": add_nice_carrier_names(n, config) - n.export_to_netcdf(snakemake.output[0]) + n.export_to_netcdf(out[0]) diff --git a/scripts/add_extra_components.py b/scripts/add_extra_components.py index db764d4f..35947aee 100644 --- a/scripts/add_extra_components.py +++ b/scripts/add_extra_components.py @@ -50,7 +50,7 @@ The rule :mod:`add_extra_components` attaches additional extendable components t - ``Stores`` of carrier 'H2' and/or 'battery' in combination with ``Links``. If this option is chosen, the script adds extra buses with corresponding carrier where energy ``Stores`` are attached and which are connected to the corresponding power buses via two links, one each for charging and discharging. This leads to three investment variables for the energy capacity, charging and discharging capacity of the storage unit. """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import pypsa import pandas as pd @@ -192,18 +192,19 @@ if __name__ == "__main__": snakemake = mock_snakemake('add_extra_components', network='elec', simpl='', clusters=5) configure_logging(snakemake) - paths = snakemake.input + + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) n = pypsa.Network(paths.network) - elec_config = snakemake.config['electricity'] + elec_config = config['electricity'] Nyears = n.snapshot_weightings.objective.sum() / 8760. 
- costs = load_costs(paths.tech_costs, snakemake.config['costs'], elec_config, Nyears=Nyears) + costs = load_costs(paths.tech_costs, config['costs'], elec_config, Nyears=Nyears) attach_storageunits(n, costs, elec_config) attach_stores(n, costs, elec_config) attach_hydrogen_pipelines(n, costs, elec_config) - add_nice_carrier_names(n, snakemake.config) + add_nice_carrier_names(n, config) - n.export_to_netcdf(snakemake.output[0]) + n.export_to_netcdf(out[0]) diff --git a/scripts/base_network.py b/scripts/base_network.py index baa12092..41699f04 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -63,7 +63,7 @@ Description """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import pypsa import yaml @@ -588,11 +588,10 @@ if __name__ == "__main__": snakemake = mock_snakemake('base_network') configure_logging(snakemake) - paths = snakemake.input - config = snakemake.config + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) n = base_network(paths.eg_buses, paths.eg_converters, paths.eg_transformers, paths.eg_lines, paths.eg_links, paths.links_p_nom, paths.links_tyndp, paths.europe_shape, paths.country_shapes, paths.offshore_shapes, paths.parameter_corrections, config) - n.export_to_netcdf(snakemake.output[0]) + n.export_to_netcdf(out[0]) diff --git a/scripts/build_bus_regions.py b/scripts/build_bus_regions.py index d91d0575..78e2070d 100644 --- a/scripts/build_bus_regions.py +++ b/scripts/build_bus_regions.py @@ -42,7 +42,7 @@ Description """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import pypsa import os @@ -67,12 +67,14 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_bus_regions') configure_logging(snakemake) - countries = snakemake.config['countries'] + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - n = 
pypsa.Network(snakemake.input.base_network) + countries = config['countries'] - country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index('name')['geometry'] - offshore_shapes = gpd.read_file(snakemake.input.offshore_shapes).set_index('name')['geometry'] + n = pypsa.Network(paths.base_network) + + country_shapes = gpd.read_file(paths.country_shapes).set_index('name')['geometry'] + offshore_shapes = gpd.read_file(paths.offshore_shapes).set_index('name')['geometry'] onshore_regions = [] offshore_regions = [] @@ -103,6 +105,6 @@ if __name__ == "__main__": offshore_regions_c = offshore_regions_c.loc[offshore_regions_c.area > 1e-2] offshore_regions.append(offshore_regions_c) - save_to_geojson(pd.concat(onshore_regions, ignore_index=True), snakemake.output.regions_onshore) + save_to_geojson(pd.concat(onshore_regions, ignore_index=True), out.regions_onshore) - save_to_geojson(pd.concat(offshore_regions, ignore_index=True), snakemake.output.regions_offshore) + save_to_geojson(pd.concat(offshore_regions, ignore_index=True), out.regions_offshore) diff --git a/scripts/build_cutout.py b/scripts/build_cutout.py index 78eafac6..4b3e2bdc 100644 --- a/scripts/build_cutout.py +++ b/scripts/build_cutout.py @@ -95,7 +95,7 @@ import logging import atlite import geopandas as gpd import pandas as pd -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys logger = logging.getLogger(__name__) @@ -106,16 +106,18 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_cutout', cutout='europe-2013-era5') configure_logging(snakemake) - cutout_params = snakemake.config['atlite']['cutouts'][snakemake.wildcards.cutout] + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - snapshots = pd.date_range(freq='h', **snakemake.config['snapshots']) + cutout_params = config['atlite']['cutouts'][wildcards.cutout] + + snapshots = pd.date_range(freq='h', **config['snapshots']) time = [snapshots[0], snapshots[-1]] 
cutout_params['time'] = slice(*cutout_params.get('time', time)) if {'x', 'y', 'bounds'}.isdisjoint(cutout_params): # Determine the bounds from bus regions with a buffer of two grid cells - onshore = gpd.read_file(snakemake.input.regions_onshore) - offshore = gpd.read_file(snakemake.input.regions_offshore) + onshore = gpd.read_file(paths.regions_onshore) + offshore = gpd.read_file(paths.regions_offshore) regions = onshore.append(offshore) d = max(cutout_params.get('dx', 0.25), cutout_params.get('dy', 0.25))*2 cutout_params['bounds'] = regions.total_bounds + [-d, -d, d, d] @@ -126,5 +128,5 @@ if __name__ == "__main__": logging.info(f"Preparing cutout with parameters {cutout_params}.") features = cutout_params.pop('features', None) - cutout = atlite.Cutout(snakemake.output[0], **cutout_params) + cutout = atlite.Cutout(out[0], **cutout_params) cutout.prepare(features=features) diff --git a/scripts/build_hydro_profile.py b/scripts/build_hydro_profile.py index 6ac59262..563c8ecb 100644 --- a/scripts/build_hydro_profile.py +++ b/scripts/build_hydro_profile.py @@ -60,7 +60,7 @@ Description """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import atlite import geopandas as gpd @@ -74,22 +74,24 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_hydro_profile') configure_logging(snakemake) - config = snakemake.config['renewable']['hydro'] - cutout = atlite.Cutout(snakemake.input.cutout) + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - countries = snakemake.config['countries'] - country_shapes = (gpd.read_file(snakemake.input.country_shapes) + config_hydro = config['renewable']['hydro'] + cutout = atlite.Cutout(paths.cutout) + + countries = config['countries'] + country_shapes = (gpd.read_file(paths.country_shapes) .set_index('name')['geometry'].reindex(countries)) country_shapes.index.name = 'countries' eia_stats = vhydro.get_eia_annual_hydro_generation( - 
snakemake.input.eia_hydro_generation).reindex(columns=countries) + paths.eia_hydro_generation).reindex(columns=countries) inflow = cutout.runoff(shapes=country_shapes, smooth=True, lower_threshold_quantile=True, normalize_using_yearly=eia_stats) - if 'clip_min_inflow' in config: - inflow = inflow.where(inflow > config['clip_min_inflow'], 0) + if 'clip_min_inflow' in config_hydro: + inflow = inflow.where(inflow > config_hydro['clip_min_inflow'], 0) - inflow.to_netcdf(snakemake.output[0]) + inflow.to_netcdf(out[0]) diff --git a/scripts/build_load_data.py b/scripts/build_load_data.py index f71be6ea..0f9124ea 100755 --- a/scripts/build_load_data.py +++ b/scripts/build_load_data.py @@ -37,7 +37,7 @@ Outputs import logging logger = logging.getLogger(__name__) -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import pandas as pd import numpy as np @@ -196,7 +196,8 @@ if __name__ == "__main__": configure_logging(snakemake) - config = snakemake.config + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + powerstatistics = config['load']['power_statistics'] interpolate_limit = config['load']['interpolate_limit'] countries = config['countries'] @@ -204,7 +205,7 @@ if __name__ == "__main__": years = slice(snapshots[0], snapshots[-1]) time_shift = config['load']['time_shift_for_large_gaps'] - load = load_timeseries(snakemake.input[0], years, countries, powerstatistics) + load = load_timeseries(paths[0], years, countries, powerstatistics) if config['load']['manual_adjustments']: load = manual_adjustment(load, powerstatistics) @@ -221,5 +222,5 @@ if __name__ == "__main__": '`time_shift_for_large_gaps` or modify the `manual_adjustment` function ' 'for implementing the needed load data modifications.') - load.to_csv(snakemake.output[0]) + load.to_csv(out[0]) diff --git a/scripts/build_natura_raster.py b/scripts/build_natura_raster.py index f7a923d6..71d2c45e 100644 --- a/scripts/build_natura_raster.py +++ 
b/scripts/build_natura_raster.py @@ -40,7 +40,7 @@ Description """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import atlite import geopandas as gpd @@ -73,18 +73,19 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_natura_raster') configure_logging(snakemake) + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - cutouts = snakemake.input.cutouts + cutouts = paths.cutouts xs, Xs, ys, Ys = zip(*(determine_cutout_xXyY(cutout) for cutout in cutouts)) bounds = transform_bounds(4326, 3035, min(xs), min(ys), max(Xs), max(Ys)) transform, out_shape = get_transform_and_shape(bounds, res=100) # adjusted boundaries - shapes = gpd.read_file(snakemake.input.natura).to_crs(3035) + shapes = gpd.read_file(paths.natura).to_crs(3035) raster = ~geometry_mask(shapes.geometry, out_shape[::-1], transform) raster = raster.astype(rio.uint8) - with rio.open(snakemake.output[0], 'w', driver='GTiff', dtype=rio.uint8, + with rio.open(out[0], 'w', driver='GTiff', dtype=rio.uint8, count=1, transform=transform, crs=3035, compress='lzw', width=raster.shape[1], height=raster.shape[0]) as dst: dst.write(raster, indexes=1) diff --git a/scripts/build_powerplants.py b/scripts/build_powerplants.py index be57baa8..4b9d13a1 100755 --- a/scripts/build_powerplants.py +++ b/scripts/build_powerplants.py @@ -72,7 +72,7 @@ The configuration options ``electricity: powerplants_filter`` and ``electricity: """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import pypsa import powerplantmatching as pm @@ -99,7 +99,8 @@ if __name__ == "__main__": from _helpers import mock_snakemake snakemake = mock_snakemake('build_powerplants') configure_logging(snakemake) - paths = snakemake.input + + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) n = pypsa.Network(paths.base_network) countries = n.buses.country.unique() 
@@ -115,12 +116,12 @@ if __name__ == "__main__": df.Technology.replace('Steam Turbine', 'OCGT').fillna('OCGT'))))) - ppl_query = snakemake.config['electricity']['powerplants_filter'] + ppl_query = config['electricity']['powerplants_filter'] if isinstance(ppl_query, str): ppl.query(ppl_query, inplace=True) # add carriers from own powerplant files: - custom_ppl_query = snakemake.config['electricity']['custom_powerplants'] + custom_ppl_query = config['electricity']['custom_powerplants'] ppl = add_custom_powerplants(ppl, paths.custom_powerplants, custom_ppl_query) cntries_without_ppl = [c for c in countries if c not in ppl.Country.unique()] @@ -140,4 +141,4 @@ if __name__ == "__main__": if bus_null_b.any(): logging.warning(f"Couldn't find close bus for {bus_null_b.sum()} powerplants") - ppl.to_csv(snakemake.output[0]) + ppl.to_csv(out[0]) diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index 9ce83de3..944d6f39 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -190,7 +190,7 @@ from pypsa.geo import haversine from shapely.geometry import LineString import time -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys logger = logging.getLogger(__name__) @@ -201,10 +201,12 @@ if __name__ == '__main__': snakemake = mock_snakemake('build_renewable_profiles', technology='solar') configure_logging(snakemake) pgb.streams.wrap_stderr() - paths = snakemake.input - nprocesses = snakemake.config['atlite'].get('nprocesses') - noprogress = not snakemake.config['atlite'].get('show_progress', True) - config = snakemake.config['renewable'][snakemake.wildcards.technology] + + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + + nprocesses = config['atlite'].get('nprocesses') + noprogress = not config['atlite'].get('show_progress', True) + config = config['renewable'][wildcards.technology] resource = config['resource'] # pv panel config 
/ wind turbine config correction_factor = config.get('correction_factor', 1.) capacity_per_sqkm = config['capacity_per_sqkm'] @@ -313,7 +315,7 @@ if __name__ == '__main__': average_distance.rename('average_distance')]) - if snakemake.wildcards.technology.startswith("offwind"): + if wildcards.technology.startswith("offwind"): logger.info('Calculate underwater fraction of connections.') offshore_shape = gpd.read_file(paths['offshore_shapes']).unary_union underwater_fraction = [] @@ -333,4 +335,4 @@ if __name__ == '__main__': min_p_max_pu = config['clip_p_max_pu'] ds['profile'] = ds['profile'].where(ds['profile'] >= min_p_max_pu, 0) - ds.to_netcdf(snakemake.output.profile) + ds.to_netcdf(out.profile) diff --git a/scripts/build_shapes.py b/scripts/build_shapes.py index cca941e6..b4686ac3 100644 --- a/scripts/build_shapes.py +++ b/scripts/build_shapes.py @@ -68,7 +68,7 @@ Description """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import os import numpy as np @@ -217,13 +217,12 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_shapes') configure_logging(snakemake) - paths = snakemake.input - out = snakemake.output + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - country_shapes = countries(paths.naturalearth, snakemake.config['countries']) + country_shapes = countries(paths.naturalearth, config['countries']) save_to_geojson(country_shapes, out.country_shapes) - offshore_shapes = eez(country_shapes, paths.eez, snakemake.config['countries']) + offshore_shapes = eez(country_shapes, paths.eez, config['countries']) save_to_geojson(offshore_shapes, out.offshore_shapes) europe_shape = country_cover(country_shapes, offshore_shapes) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 51556b27..554109e3 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -122,7 +122,7 @@ Exemplary unsolved network clustered to 37 
nodes: """ import logging -from _helpers import configure_logging, update_p_nom_max +from _helpers import configure_logging, retrieve_snakemake_keys, update_p_nom_max import pypsa import os @@ -306,7 +306,7 @@ def cluster_regions(busmaps, input=None, output=None): for which in ('regions_onshore', 'regions_offshore'): regions = gpd.read_file(getattr(input, which)).set_index('name') - geom_c = regions.geometry.groupby(busmap).apply(shapely.ops.cascaded_union) + geom_c = regions.geometry.groupby(busmap).apply(shapely.ops.unary_union) regions_c = gpd.GeoDataFrame(dict(geometry=geom_c)) regions_c.index.name = 'name' save_to_geojson(regions_c, getattr(output, which)) @@ -328,19 +328,21 @@ if __name__ == "__main__": snakemake = mock_snakemake('cluster_network', network='elec', simpl='', clusters='5') configure_logging(snakemake) - n = pypsa.Network(snakemake.input.network) + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - focus_weights = snakemake.config.get('focus_weights', None) + n = pypsa.Network(paths.network) + + focus_weights = config.get('focus_weights', None) renewable_carriers = pd.Index([tech for tech in n.generators.carrier.unique() - if tech in snakemake.config['renewable']]) + if tech in config['renewable']]) - if snakemake.wildcards.clusters.endswith('m'): - n_clusters = int(snakemake.wildcards.clusters[:-1]) + if wildcards.clusters.endswith('m'): + n_clusters = int(wildcards.clusters[:-1]) aggregate_carriers = pd.Index(n.generators.carrier.unique()).difference(renewable_carriers) else: - n_clusters = int(snakemake.wildcards.clusters) + n_clusters = int(wildcards.clusters) aggregate_carriers = None # All if n_clusters == len(n.buses): @@ -349,11 +351,11 @@ if __name__ == "__main__": linemap = n.lines.index.to_series() clustering = pypsa.networkclustering.Clustering(n, busmap, linemap, linemap, pd.Series(dtype='O')) else: - line_length_factor = snakemake.config['lines']['length_factor'] + line_length_factor = 
config['lines']['length_factor'] Nyears = n.snapshot_weightings.objective.sum()/8760 - hvac_overhead_cost = (load_costs(tech_costs = snakemake.input.tech_costs, - config = snakemake.config['costs'], - elec_config=snakemake.config['electricity'], Nyears = Nyears) + hvac_overhead_cost = (load_costs(tech_costs = paths.tech_costs, + config = config['costs'], + elec_config=config['electricity'], Nyears = Nyears) .at['HVAC overhead', 'capital_cost']) def consense(x): @@ -362,24 +364,24 @@ if __name__ == "__main__": "The `potential` configuration option must agree for all renewable carriers, for now!" ) return v - potential_mode = consense(pd.Series([snakemake.config['renewable'][tech]['potential'] + potential_mode = consense(pd.Series([config['renewable'][tech]['potential'] for tech in renewable_carriers])) - custom_busmap = snakemake.config["enable"].get("custom_busmap", False) + custom_busmap = config["enable"].get("custom_busmap", False) if custom_busmap: - custom_busmap = pd.read_csv(snakemake.input.custom_busmap, index_col=0, squeeze=True) + custom_busmap = pd.read_csv(paths.custom_busmap, index_col=0, squeeze=True) custom_busmap.index = custom_busmap.index.astype(str) - logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}") + logger.info(f"Imported custom busmap from {paths.custom_busmap}") clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap, aggregate_carriers, line_length_factor=line_length_factor, potential_mode=potential_mode, - solver_name=snakemake.config['solving']['solver']['name'], + solver_name=config['solving']['solver']['name'], extended_link_costs=hvac_overhead_cost, focus_weights=focus_weights) update_p_nom_max(n) - clustering.network.export_to_netcdf(snakemake.output.network) + clustering.network.export_to_netcdf(out.network) for attr in ('busmap', 'linemap'): #also available: linemap_positive, linemap_negative - getattr(clustering, attr).to_csv(snakemake.output[attr]) + getattr(clustering, attr).to_csv(out[attr]) 
- cluster_regions((clustering.busmap,), snakemake.input, snakemake.output) + cluster_regions((clustering.busmap,), paths, out) diff --git a/scripts/plot_network.py b/scripts/plot_network.py index 456bf50f..645c8c39 100755 --- a/scripts/plot_network.py +++ b/scripts/plot_network.py @@ -20,8 +20,8 @@ Description """ import logging -from _helpers import (load_network_for_plots, aggregate_p, aggregate_costs, - configure_logging) +from _helpers import (retrieve_snakemake_keys, load_network_for_plots, + aggregate_p, aggregate_costs, configure_logging) import pandas as pd import numpy as np @@ -259,18 +259,19 @@ if __name__ == "__main__": set_plot_style() - opts = snakemake.config['plotting'] - map_figsize = opts['map']['figsize'] - map_boundaries = opts['map']['boundaries'] + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - n = load_network_for_plots(snakemake.input.network, snakemake.input.tech_costs, snakemake.config) + map_figsize = config['map']['figsize'] + map_boundaries = config['map']['boundaries'] - scenario_opts = snakemake.wildcards.opts.split('-') + n = load_network_for_plots(paths.network, paths.tech_costs, config) + + scenario_opts = wildcards.opts.split('-') fig, ax = plt.subplots(figsize=map_figsize, subplot_kw={"projection": ccrs.PlateCarree()}) - plot_map(n, ax, snakemake.wildcards.attr, opts) + plot_map(n, ax, wildcards.attr, config) - fig.savefig(snakemake.output.only_map, dpi=150, bbox_inches='tight') + fig.savefig(out.only_map, dpi=150, bbox_inches='tight') ax1 = fig.add_axes([-0.115, 0.625, 0.2, 0.2]) plot_total_energy_pie(n, ax1) @@ -278,12 +279,12 @@ if __name__ == "__main__": ax2 = fig.add_axes([-0.075, 0.1, 0.1, 0.45]) plot_total_cost_bar(n, ax2) - ll = snakemake.wildcards.ll + ll = wildcards.ll ll_type = ll[0] ll_factor = ll[1:] lbl = dict(c='line cost', v='line volume')[ll_type] amnt = '{ll} x today\'s'.format(ll=ll_factor) if ll_factor != 'opt' else 'optimal' fig.suptitle('Expansion to {amount} {label} at {clusters} 
clusters' - .format(amount=amnt, label=lbl, clusters=snakemake.wildcards.clusters)) + .format(amount=amnt, label=lbl, clusters=wildcards.clusters)) - fig.savefig(snakemake.output.ext, transparent=True, bbox_inches='tight') + fig.savefig(out.ext, transparent=True, bbox_inches='tight') diff --git a/scripts/plot_p_nom_max.py b/scripts/plot_p_nom_max.py index e79ad274..540608f9 100644 --- a/scripts/plot_p_nom_max.py +++ b/scripts/plot_p_nom_max.py @@ -53,11 +53,13 @@ if __name__ == "__main__": clusts= '5,full', country= 'all') configure_logging(snakemake) + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + plot_kwds = dict(drawstyle="steps-post") - clusters = snakemake.wildcards.clusts.split(',') - techs = snakemake.wildcards.techs.split(',') - country = snakemake.wildcards.country + clusters = wildcards.clusts.split(',') + techs = wildcards.techs.split(',') + country = wildcards.country if country == 'all': country = None else: @@ -66,7 +68,7 @@ if __name__ == "__main__": fig, axes = plt.subplots(1, len(techs)) for j, cluster in enumerate(clusters): - net = pypsa.Network(snakemake.input[j]) + net = pypsa.Network(paths[j]) for i, tech in enumerate(techs): cum_p_nom_max(net, tech, country).plot(x="p_max_pu", y="cum_p_nom_max", @@ -79,4 +81,4 @@ if __name__ == "__main__": plt.legend(title="Cluster level") - fig.savefig(snakemake.output[0], transparent=True, bbox_inches='tight') + fig.savefig(out[0], transparent=True, bbox_inches='tight') diff --git a/scripts/prepare_links_p_nom.py b/scripts/prepare_links_p_nom.py index b83089d6..6bd4bca4 100644 --- a/scripts/prepare_links_p_nom.py +++ b/scripts/prepare_links_p_nom.py @@ -37,7 +37,7 @@ Description """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import pandas as pd @@ -63,6 +63,8 @@ if __name__ == "__main__": snakemake = mock_snakemake('prepare_links_p_nom', simpl='', network='elec') configure_logging(snakemake) + paths, 
config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + links_p_nom = pd.read_html('https://en.wikipedia.org/wiki/List_of_HVDC_projects', header=0, match="SwePol")[0] mw = "Power (MW)" @@ -74,4 +76,4 @@ if __name__ == "__main__": links_p_nom['x1'], links_p_nom['y1'] = extract_coordinates(links_p_nom['Converterstation 1']) links_p_nom['x2'], links_p_nom['y2'] = extract_coordinates(links_p_nom['Converterstation 2']) - links_p_nom.dropna(subset=['x1', 'y1', 'x2', 'y2']).to_csv(snakemake.output[0], index=False) + links_p_nom.dropna(subset=['x1', 'y1', 'x2', 'y2']).to_csv(out[0], index=False) diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index 3eb244cf..19a395ea 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -56,7 +56,7 @@ Description """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import re import pypsa @@ -206,15 +206,17 @@ if __name__ == "__main__": clusters='40', ll='v0.3', opts='Co2L-24H') configure_logging(snakemake) - opts = snakemake.wildcards.opts.split('-') + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - n = pypsa.Network(snakemake.input[0]) + opts = wildcards.opts.split('-') + + n = pypsa.Network(paths[0]) Nyears = n.snapshot_weightings.objective.sum() / 8760. 
- costs = load_costs(tech_costs = snakemake.input.tech_costs, - config = snakemake.config['costs'], - elec_config = snakemake.config['electricity'], Nyears = Nyears) + costs = load_costs(tech_costs = paths.tech_costs, + config = config['costs'], + elec_config = config['electricity'], Nyears = Nyears) - set_line_s_max_pu(n, s_max_pu=snakemake.config['lines']['s_max_pu']) + set_line_s_max_pu(n, s_max_pu=config['lines']['s_max_pu']) for o in opts: m = re.match(r'^\d+h$', o, re.IGNORECASE) @@ -225,7 +227,7 @@ if __name__ == "__main__": for o in opts: m = re.match(r'^\d+seg$', o, re.IGNORECASE) if m is not None: - solver_name = snakemake.config["solving"]["solver"]["name"] + solver_name = config["solving"]["solver"]["name"] n = apply_time_segmentation(n, m.group(0)[:-3], solver_name=solver_name) break @@ -233,10 +235,10 @@ if __name__ == "__main__": if "Co2L" in o: m = re.findall("[0-9]*\.?[0-9]+$", o) if len(m) > 0: - co2limit = float(m[0]) * snakemake.config['electricity']['co2base'] + co2limit = float(m[0]) * config['electricity']['co2base'] add_co2limit(n, co2limit, Nyears) else: - add_co2limit(n, snakemake.config['electricity']['co2limit'], Nyears) + add_co2limit(n, config['electricity']['co2limit'], Nyears) break for o in opts: @@ -257,17 +259,17 @@ if __name__ == "__main__": c.df.loc[sel,attr] *= factor if 'Ep' in opts: - add_emission_prices(n, emission_prices=snakemake.config['costs']['emission_prices']) + add_emission_prices(n, config['costs']['emission_prices']) - ll_type, factor = snakemake.wildcards.ll[0], snakemake.wildcards.ll[1:] + ll_type, factor = wildcards.ll[0], wildcards.ll[1:] set_transmission_limit(n, ll_type, factor, costs, Nyears) - set_line_nom_max(n, s_nom_max_set=snakemake.config["lines"].get("s_nom_max,", np.inf), - p_nom_max_set=snakemake.config["links"].get("p_nom_max,", np.inf)) + set_line_nom_max(n, s_nom_max_set=config["lines"].get("s_nom_max,", np.inf), + p_nom_max_set=config["links"].get("p_nom_max,", np.inf)) if "ATK" in opts: 
enforce_autarky(n) elif "ATKc" in opts: enforce_autarky(n, only_crossborder=True) - n.export_to_netcdf(snakemake.output[0]) + n.export_to_netcdf(out[0]) diff --git a/scripts/retrieve_databundle.py b/scripts/retrieve_databundle.py index 86869879..c5a31f81 100644 --- a/scripts/retrieve_databundle.py +++ b/scripts/retrieve_databundle.py @@ -33,7 +33,7 @@ The :ref:`tutorial` uses a smaller `data bundle Date: Fri, 14 Jan 2022 11:29:01 +0100 Subject: [PATCH 19/26] make_summary: remove snakemake dependencies --- scripts/make_summary.py | 32 +++++++++++++++++--------------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/scripts/make_summary.py b/scripts/make_summary.py index 24c5e87c..a283fd20 100644 --- a/scripts/make_summary.py +++ b/scripts/make_summary.py @@ -54,7 +54,7 @@ Replacing '/summaries/' with '/plots/' creates nice colored maps of the results. """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import os import pypsa @@ -378,7 +378,7 @@ outputs = ["costs", ] -def make_summaries(networks_dict, country='all'): +def make_summaries(networks_dict, paths, config, country='all'): columns = pd.MultiIndex.from_tuples(networks_dict.keys(),names=["simpl","clusters","ll","opts"]) @@ -403,8 +403,8 @@ def make_summaries(networks_dict, country='all'): n = n[n.buses.country == country] Nyears = n.snapshot_weightings.objective.sum() / 8760. 
- costs = load_costs(tech_costs = snakemake.input[0], config = snakemake.config['costs'], - elec_config = snakemake.config['electricity'], Nyears = Nyears) + costs = load_costs(tech_costs = paths[0], config = config['costs'], + elec_config = config['electricity'], Nyears = Nyears) update_transmission_costs(n, costs, simple_hvdc_costs=False) assign_carriers(n) @@ -431,25 +431,27 @@ if __name__ == "__main__": network_dir = os.path.join('results', 'networks') configure_logging(snakemake) - def expand_from_wildcard(key): - w = getattr(snakemake.wildcards, key) - return snakemake.config["scenario"][key] if w == "all" else [w] + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - if snakemake.wildcards.ll.endswith("all"): - ll = snakemake.config["scenario"]["ll"] - if len(snakemake.wildcards.ll) == 4: - ll = [l for l in ll if l[0] == snakemake.wildcards.ll[0]] + def expand_from_wildcard(key, config): + w = getattr(wildcards, key) + return config["scenario"][key] if w == "all" else [w] + + if wildcards.ll.endswith("all"): + ll = config["scenario"]["ll"] + if len(wildcards.ll) == 4: + ll = [l for l in ll if l[0] == wildcards.ll[0]] else: - ll = [snakemake.wildcards.ll] + ll = [wildcards.ll] networks_dict = {(simpl,clusters,l,opts) : os.path.join(network_dir, f'elec_s{simpl}_' f'{clusters}_ec_l{l}_{opts}.nc') - for simpl in expand_from_wildcard("simpl") + for simpl in expand_from_wildcard("simpl", config) for clusters in expand_from_wildcard("clusters", config) for l in ll for opts in expand_from_wildcard("opts", config)} - dfs = make_summaries(networks_dict, country=snakemake.wildcards.country) + dfs = make_summaries(networks_dict, paths, config, country=wildcards.country) - to_csv(dfs, snakemake.output[0]) + to_csv(dfs, out[0]) From f28a088ea3b1f49214e82dde5ccddcfb93c2d0db Mon Sep 17 00:00:00 2001 From: martacki Date: Fri, 14 Jan 2022 11:30:15 +0100 Subject: [PATCH 20/26] arguments in function calls instead of kwarg-style --- scripts/add_electricity.py | 4 ++-- 
scripts/add_extra_components.py | 2 +- scripts/cluster_network.py | 8 +++----- scripts/prepare_network.py | 10 ++++------ scripts/solve_network.py | 3 +-- scripts/solve_operations_network.py | 5 ++--- 6 files changed, 13 insertions(+), 19 deletions(-) diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index cbefba2f..fcddea8c 100755 --- a/scripts/add_electricity.py +++ b/scripts/add_electricity.py @@ -549,11 +549,11 @@ if __name__ == "__main__": n = pypsa.Network(paths.base_network) Nyears = n.snapshot_weightings.objective.sum() / 8760. - costs = load_costs(paths.tech_costs, config['costs'], config['electricity'], Nyears=Nyears) + costs = load_costs(paths.tech_costs, config['costs'], config['electricity'], Nyears) ppl = load_powerplants(paths.powerplants) attach_load(n, paths.regions, paths.load, paths.nuts3_shapes, config['countries'], - scaling=config['load']['scaling_factor']) + config['load']['scaling_factor']) update_transmission_costs(n, costs, config['lines']['length_factor']) diff --git a/scripts/add_extra_components.py b/scripts/add_extra_components.py index 35947aee..0531c9fa 100644 --- a/scripts/add_extra_components.py +++ b/scripts/add_extra_components.py @@ -199,7 +199,7 @@ if __name__ == "__main__": elec_config = config['electricity'] Nyears = n.snapshot_weightings.objective.sum() / 8760. 
- costs = load_costs(paths.tech_costs, config['costs'], elec_config, Nyears=Nyears) + costs = load_costs(paths.tech_costs, config['costs'], elec_config, Nyears) attach_storageunits(n, costs, elec_config) attach_stores(n, costs, elec_config) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 71dd1746..041fb259 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -374,12 +374,10 @@ if __name__ == "__main__": custom_busmap = pd.read_csv(paths.custom_busmap, index_col=0, squeeze=True) custom_busmap.index = custom_busmap.index.astype(str) logger.info(f"Imported custom busmap from {paths.custom_busmap}") + clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap, aggregate_carriers, - line_length_factor=line_length_factor, - potential_mode=potential_mode, - solver_name=config['solving']['solver']['name'], - extended_link_costs=hvac_overhead_cost, - focus_weights=focus_weights) + line_length_factor, potential_mode, config['solving']['solver']['name'], + "kmeans", hvac_overhead_cost, focus_weights) update_p_nom_max(n) diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index 19a395ea..e0b488f5 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -140,7 +140,7 @@ def average_every_nhours(n, offset): return m -def apply_time_segmentation(n, segments, solver_name="cplex"): +def apply_time_segmentation(n, segments, solver_name="cbc"): logger.info(f"Aggregating time series to {segments} segments.") try: import tsam.timeseriesaggregation as tsam @@ -212,11 +212,9 @@ if __name__ == "__main__": n = pypsa.Network(paths[0]) Nyears = n.snapshot_weightings.objective.sum() / 8760. 
- costs = load_costs(tech_costs = paths.tech_costs, - config = config['costs'], - elec_config = config['electricity'], Nyears = Nyears) + costs = load_costs(paths.tech_costs, config['costs'], config['electricity'], Nyears) - set_line_s_max_pu(n, s_max_pu=config['lines']['s_max_pu']) + set_line_s_max_pu(n, config['lines']['s_max_pu']) for o in opts: m = re.match(r'^\d+h$', o, re.IGNORECASE) @@ -228,7 +226,7 @@ if __name__ == "__main__": m = re.match(r'^\d+seg$', o, re.IGNORECASE) if m is not None: solver_name = config["solving"]["solver"]["name"] - n = apply_time_segmentation(n, m.group(0)[:-3], solver_name=solver_name) + n = apply_time_segmentation(n, m.group(0)[:-3], solver_name) break for o in opts: diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 1aaf4970..6f2124da 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -285,8 +285,7 @@ if __name__ == "__main__": with memory_logger(filename=fn, interval=30.) as mem: n = pypsa.Network(paths[0]) n = prepare_network(n, solve_opts) - n = solve_network(n, config=config, opts=opts, - solver_dir=tmpdir, + n = solve_network(n, config, opts, solver_dir=tmpdir, solver_logfile=logs.solver) n.export_to_netcdf(out[0]) diff --git a/scripts/solve_operations_network.py b/scripts/solve_operations_network.py index baea5a5c..6490ce51 100644 --- a/scripts/solve_operations_network.py +++ b/scripts/solve_operations_network.py @@ -116,9 +116,8 @@ if __name__ == "__main__": fn = getattr(logs, 'memory', None) with memory_logger(filename=fn, interval=30.) 
as mem: - n = prepare_network(n, solve_opts=config['solving']['options']) - n = solve_network(n, config=config, opts=opts, - solver_dir=tmpdir, + n = prepare_network(n, config['solving']['options']) + n = solve_network(n, config, opts, solver_dir=tmpdir, solver_logfile=logs.solver) n.export_to_netcdf(out[0]) From 0da77a7600ef53ef2b519ddafa41904505e5f56f Mon Sep 17 00:00:00 2001 From: martacki Date: Fri, 14 Jan 2022 13:44:33 +0100 Subject: [PATCH 21/26] remove snakemake dependencies in plot_summary --- scripts/plot_summary.py | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/scripts/plot_summary.py b/scripts/plot_summary.py index a34611de..48f064b0 100644 --- a/scripts/plot_summary.py +++ b/scripts/plot_summary.py @@ -21,7 +21,7 @@ Description import os import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import pandas as pd import matplotlib.pyplot as plt @@ -55,7 +55,7 @@ def rename_techs(label): preferred_order = pd.Index(["transmission lines","hydroelectricity","hydro reservoir","run of river","pumped hydro storage","onshore wind","offshore wind ac", "offshore wind dc","solar PV","solar thermal","OCGT","hydrogen storage","battery storage"]) -def plot_costs(infn, fn=None): +def plot_costs(infn, config, fn=None): ## For now ignore the simpl header cost_df = pd.read_csv(infn,index_col=list(range(3)),header=[1,2,3]) @@ -67,7 +67,7 @@ def plot_costs(infn, fn=None): df = df.groupby(df.index.map(rename_techs)).sum() - to_drop = df.index[df.max(axis=1) < snakemake.config['plotting']['costs_threshold']] + to_drop = df.index[df.max(axis=1) < config['plotting']['costs_threshold']] print("dropping") @@ -84,7 +84,7 @@ def plot_costs(infn, fn=None): fig, ax = plt.subplots() fig.set_size_inches((12,8)) - df.loc[new_index,new_columns].T.plot(kind="bar",ax=ax,stacked=True,color=[snakemake.config['plotting']['tech_colors'][i] for i in new_index]) + 
df.loc[new_index,new_columns].T.plot(kind="bar",ax=ax,stacked=True,color=[config['plotting']['tech_colors'][i] for i in new_index]) handles,labels = ax.get_legend_handles_labels() @@ -92,7 +92,7 @@ def plot_costs(infn, fn=None): handles.reverse() labels.reverse() - ax.set_ylim([0,snakemake.config['plotting']['costs_max']]) + ax.set_ylim([0,config['plotting']['costs_max']]) ax.set_ylabel("System Cost [EUR billion per year]") @@ -109,7 +109,7 @@ def plot_costs(infn, fn=None): fig.savefig(fn, transparent=True) -def plot_energy(infn, fn=None): +def plot_energy(infn, config, fn=None): energy_df = pd.read_csv(infn, index_col=list(range(2)),header=[1,2,3]) @@ -120,7 +120,7 @@ def plot_energy(infn, fn=None): df = df.groupby(df.index.map(rename_techs)).sum() - to_drop = df.index[df.abs().max(axis=1) < snakemake.config['plotting']['energy_threshold']] + to_drop = df.index[df.abs().max(axis=1) < config['plotting']['energy_threshold']] print("dropping") @@ -137,7 +137,7 @@ def plot_energy(infn, fn=None): fig, ax = plt.subplots() fig.set_size_inches((12,8)) - df.loc[new_index,new_columns].T.plot(kind="bar",ax=ax,stacked=True,color=[snakemake.config['plotting']['tech_colors'][i] for i in new_index]) + df.loc[new_index,new_columns].T.plot(kind="bar",ax=ax,stacked=True,color=[config['plotting']['tech_colors'][i] for i in new_index]) handles,labels = ax.get_legend_handles_labels() @@ -145,7 +145,7 @@ def plot_energy(infn, fn=None): handles.reverse() labels.reverse() - ax.set_ylim([snakemake.config['plotting']['energy_min'],snakemake.config['plotting']['energy_max']]) + ax.set_ylim([config['plotting']['energy_min'], config['plotting']['energy_max']]) ax.set_ylabel("Energy [TWh/a]") @@ -170,10 +170,12 @@ if __name__ == "__main__": attr='', ext='png', country='all') configure_logging(snakemake) - summary = snakemake.wildcards.summary + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + + summary = wildcards.summary try: func = globals()[f"plot_{summary}"] 
except KeyError: raise RuntimeError(f"plotting function for {summary} has not been defined") - func(os.path.join(snakemake.input[0], f"{summary}.csv"), snakemake.output[0]) + func(os.path.join(paths[0], f"{summary}.csv"), config, out[0]) From dc83fd8e0912f6a27e67d64ea8d1cce5e339344f Mon Sep 17 00:00:00 2001 From: martacki Date: Fri, 14 Jan 2022 15:13:44 +0100 Subject: [PATCH 22/26] fix small bugs --- scripts/make_summary.py | 7 +++---- scripts/plot_p_nom_max.py | 2 +- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/scripts/make_summary.py b/scripts/make_summary.py index a283fd20..3f8ee728 100644 --- a/scripts/make_summary.py +++ b/scripts/make_summary.py @@ -403,8 +403,7 @@ def make_summaries(networks_dict, paths, config, country='all'): n = n[n.buses.country == country] Nyears = n.snapshot_weightings.objective.sum() / 8760. - costs = load_costs(tech_costs = paths[0], config = config['costs'], - elec_config = config['electricity'], Nyears) + costs = load_costs(paths[0], config['costs'], config['electricity'], Nyears) update_transmission_costs(n, costs, simple_hvdc_costs=False) assign_carriers(n) @@ -448,9 +447,9 @@ if __name__ == "__main__": os.path.join(network_dir, f'elec_s{simpl}_' f'{clusters}_ec_l{l}_{opts}.nc') for simpl in expand_from_wildcard("simpl", config) - for clusters in expand_from_wildcard("clusters") + for clusters in expand_from_wildcard("clusters", config) for l in ll - for opts in expand_from_wildcard("opts")} + for opts in expand_from_wildcard("opts", config)} dfs = make_summaries(networks_dict, paths, config, country=wildcards.country) diff --git a/scripts/plot_p_nom_max.py b/scripts/plot_p_nom_max.py index 540608f9..ea66d612 100644 --- a/scripts/plot_p_nom_max.py +++ b/scripts/plot_p_nom_max.py @@ -19,7 +19,7 @@ Description """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import pypsa import pandas as pd From 9f0515105b41414df7feb8052077f3cc18a3031d Mon 
Sep 17 00:00:00 2001 From: Qui-Rin <94053589+Qui-Rin@users.noreply.github.com> Date: Fri, 14 Jan 2022 19:02:44 +0100 Subject: [PATCH 23/26] build_load_data: Removed underscore in pattern When using the transparency option the pattern used as a filter is created with a double underscore -> removed underscore in '_transparency' --- scripts/build_load_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/build_load_data.py b/scripts/build_load_data.py index f71be6ea..840cb6c7 100755 --- a/scripts/build_load_data.py +++ b/scripts/build_load_data.py @@ -70,7 +70,7 @@ def load_timeseries(fn, years, countries, powerstatistics=True): """ logger.info(f"Retrieving load data from '{fn}'.") - pattern = 'power_statistics' if powerstatistics else '_transparency' + pattern = 'power_statistics' if powerstatistics else 'transparency' pattern = f'_load_actual_entsoe_{pattern}' rename = lambda s: s[:-len(pattern)] date_parser = lambda x: dateutil.parser.parse(x, ignoretz=True) From dbf0f65ab53f2e3b100336ea2c273915ee9361dd Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 24 Jan 2022 11:16:23 +0100 Subject: [PATCH 24/26] Update config.default.yaml --- config.default.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/config.default.yaml b/config.default.yaml index 7a443a03..d2bf6159 100755 --- a/config.default.yaml +++ b/config.default.yaml @@ -154,6 +154,7 @@ renewable: # sector: The economic potential of photovoltaics and concentrating solar # power." Applied Energy 135 (2014): 704-720. # This correction factor of 0.854337 may be in order if using reanalysis data. 
+ # for discussion refer to https://github.com/PyPSA/pypsa-eur/pull/304 # correction_factor: 0.854337 corine: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 26, 31, 32] From c4be81eb5ecbd3421504f4281f629fddc7f83cc4 Mon Sep 17 00:00:00 2001 From: martacki Date: Mon, 24 Jan 2022 19:13:48 +0100 Subject: [PATCH 25/26] simplify_network: remove snakemake dependencies --- scripts/simplify_network.py | 71 ++++++++++++++++++------------------- 1 file changed, 34 insertions(+), 37 deletions(-) diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index 1de180d0..70f27bf2 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -83,7 +83,7 @@ The rule :mod:`simplify_network` does up to four things: """ import logging -from _helpers import configure_logging, retrieve_snakemake_keys, update_p_nom_max +from _helpers import configure_logging, update_p_nom_max from cluster_network import clustering_for_n_clusters, cluster_regions from add_electricity import load_costs @@ -138,15 +138,15 @@ def simplify_network_to_380(n): return n, trafo_map -def _prepare_connection_costs_per_link(n, costs): +def _prepare_connection_costs_per_link(n, costs, config): if n.links.empty: return {} connection_costs_per_link = {} - for tech in snakemake.config['renewable']: + for tech in config['renewable']: if tech.startswith('offwind'): connection_costs_per_link[tech] = ( - n.links.length * snakemake.config['lines']['length_factor'] * + n.links.length * config['lines']['length_factor'] * (n.links.underwater_fraction * costs.at[tech + '-connection-submarine', 'capital_cost'] + (1. 
- n.links.underwater_fraction) * costs.at[tech + '-connection-underground', 'capital_cost']) ) @@ -154,9 +154,9 @@ def _prepare_connection_costs_per_link(n, costs): return connection_costs_per_link -def _compute_connection_costs_to_bus(n, busmap, costs, connection_costs_per_link=None, buses=None): +def _compute_connection_costs_to_bus(n, busmap, costs, config, connection_costs_per_link=None, buses=None): if connection_costs_per_link is None: - connection_costs_per_link = _prepare_connection_costs_per_link(n, costs) + connection_costs_per_link = _prepare_connection_costs_per_link(n, costs, config) if buses is None: buses = busmap.index[busmap.index != busmap.values] @@ -174,7 +174,7 @@ def _compute_connection_costs_to_bus(n, busmap, costs, connection_costs_per_link return connection_costs_to_bus -def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus): +def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus, output): connection_costs = {} for tech in connection_costs_to_bus: tech_b = n.generators.carrier == tech @@ -184,11 +184,11 @@ def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus): logger.info("Displacing {} generator(s) and adding connection costs to capital_costs: {} " .format(tech, ", ".join("{:.0f} Eur/MW/a for `{}`".format(d, b) for b, d in costs.iteritems()))) connection_costs[tech] = costs - pd.DataFrame(connection_costs).to_csv(snakemake.output.connection_costs) + pd.DataFrame(connection_costs).to_csv(output.connection_costs) -def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate_one_ports={"Load", "StorageUnit"}): +def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, output, aggregate_one_ports={"Load", "StorageUnit"}): def replace_components(n, c, df, pnl): n.mremove(c, n.df(c).index) @@ -197,7 +197,7 @@ def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate if not df.empty: import_series_from_dataframe(n, df, c, 
attr) - _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus) + _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus, output) generators, generators_pnl = aggregategenerators(n, busmap, custom_strategies={'p_nom_min': np.sum}) replace_components(n, "Generator", generators, generators_pnl) @@ -213,7 +213,7 @@ def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate n.mremove(c, df.index[df.bus0.isin(buses_to_del) | df.bus1.isin(buses_to_del)]) -def simplify_links(n, costs): +def simplify_links(n, costs, config, output): ## Complex multi-node links are folded into end-points logger.info("Simplifying connected link components") @@ -260,7 +260,7 @@ def simplify_links(n, costs): busmap = n.buses.index.to_series() - connection_costs_per_link = _prepare_connection_costs_per_link(n, costs) + connection_costs_per_link = _prepare_connection_costs_per_link(n, costs, config) connection_costs_to_bus = pd.DataFrame(0., index=n.buses.index, columns=list(connection_costs_per_link)) for lbl in labels.value_counts().loc[lambda s: s > 2].index: @@ -274,11 +274,11 @@ def simplify_links(n, costs): m = sp.spatial.distance_matrix(n.buses.loc[b, ['x', 'y']], n.buses.loc[buses[1:-1], ['x', 'y']]) busmap.loc[buses] = b[np.r_[0, m.argmin(axis=0), 1]] - connection_costs_to_bus.loc[buses] += _compute_connection_costs_to_bus(n, busmap, costs, connection_costs_per_link, buses) + connection_costs_to_bus.loc[buses] += _compute_connection_costs_to_bus(n, busmap, costs, config, connection_costs_per_link, buses) all_links = [i for _, i in sum(links, [])] - p_max_pu = snakemake.config['links'].get('p_max_pu', 1.) + p_max_pu = config['links'].get('p_max_pu', 1.) 
lengths = n.links.loc[all_links, 'length'] name = lengths.idxmax() + '+{}'.format(len(links) - 1) params = dict( @@ -305,17 +305,17 @@ def simplify_links(n, costs): logger.debug("Collecting all components using the busmap") - _aggregate_and_move_components(n, busmap, connection_costs_to_bus) + _aggregate_and_move_components(n, busmap, connection_costs_to_bus, output) return n, busmap -def remove_stubs(n, costs): +def remove_stubs(n, costs, config, output): logger.info("Removing stubs") busmap = busmap_by_stubs(n) # ['country']) - connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap, costs) + connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap, costs, config) - _aggregate_and_move_components(n, busmap, connection_costs_to_bus) + _aggregate_and_move_components(n, busmap, connection_costs_to_bus, output) return n, busmap @@ -356,25 +356,25 @@ def aggregate_to_substations(n, buses_i=None): return clustering.network, busmap -def cluster(n, n_clusters): +def cluster(n, n_clusters, config): logger.info(f"Clustering to {n_clusters} buses") - focus_weights = snakemake.config.get('focus_weights', None) + focus_weights = config.get('focus_weights', None) renewable_carriers = pd.Index([tech for tech in n.generators.carrier.unique() - if tech.split('-', 2)[0] in snakemake.config['renewable']]) + if tech.split('-', 2)[0] in config['renewable']]) def consense(x): v = x.iat[0] assert ((x == v).all() or x.isnull().all()), ( "The `potential` configuration option must agree for all renewable carriers, for now!" 
) return v - potential_mode = (consense(pd.Series([snakemake.config['renewable'][tech]['potential'] + potential_mode = (consense(pd.Series([config['renewable'][tech]['potential'] for tech in renewable_carriers])) if len(renewable_carriers) > 0 else 'conservative') clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap=False, potential_mode=potential_mode, - solver_name=snakemake.config['solving']['solver']['name'], + solver_name=config['solving']['solver']['name'], focus_weights=focus_weights) return clustering.network, clustering.busmap @@ -386,29 +386,26 @@ if __name__ == "__main__": snakemake = mock_snakemake('simplify_network', simpl='', network='elec') configure_logging(snakemake) - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - - n = pypsa.Network(paths.network) + n = pypsa.Network(snakemake.input.network) n, trafo_map = simplify_network_to_380(n) Nyears = n.snapshot_weightings.objective.sum() / 8760 - technology_costs = load_costs(tech_costs = paths.tech_costs, - config = config['costs'], - elec_config = config['electricity'], Nyears = Nyears) - n, simplify_links_map = simplify_links(n, technology_costs) + technology_costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears) - n, stub_map = remove_stubs(n, technology_costs) + n, simplify_links_map = simplify_links(n, technology_costs, snakemake.config, snakemake.output) + + n, stub_map = remove_stubs(n, technology_costs, snakemake.config, snakemake.output) busmaps = [trafo_map, simplify_links_map, stub_map] - if config.get('clustering', {}).get('simplify', {}).get('to_substations', False): + if snakemake.config.get('clustering', {}).get('simplify', {}).get('to_substations', False): n, substation_map = aggregate_to_substations(n) busmaps.append(substation_map) - if wildcards.simpl: - n, cluster_map = cluster(n, int(wildcards.simpl)) + if snakemake.wildcards.simpl: + n, cluster_map = cluster(n, 
int(snakemake.wildcards.simpl), snakemake.config) busmaps.append(cluster_map) # some entries in n.buses are not updated in previous functions, therefore can be wrong. as they are not needed @@ -418,9 +415,9 @@ if __name__ == "__main__": update_p_nom_max(n) - n.export_to_netcdf(out.network) + n.export_to_netcdf(snakemake.output.network) busmap_s = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0]) - busmap_s.to_csv(out.busmap) + busmap_s.to_csv(snakemake.output.busmap) - cluster_regions(busmaps, paths, out) + cluster_regions(busmaps, snakemake.input, snakemake.output) From 6cdf3a287994201371fd9f7115ca28af587add2b Mon Sep 17 00:00:00 2001 From: martacki Date: Mon, 24 Jan 2022 19:48:26 +0100 Subject: [PATCH 26/26] use snakemake keywords directly without extracting them beforehand --- scripts/_helpers.py | 4 --- scripts/add_electricity.py | 40 ++++++++++++------------ scripts/add_extra_components.py | 14 ++++----- scripts/base_network.py | 12 +++----- scripts/build_bus_regions.py | 16 +++++----- scripts/build_cutout.py | 14 ++++----- scripts/build_hydro_profile.py | 16 +++++----- scripts/build_load_data.py | 20 ++++++------ scripts/build_powerplants.py | 14 ++++----- scripts/build_renewable_profiles.py | 48 ++++++++++++++--------------- scripts/build_shapes.py | 20 ++++++------ scripts/cluster_network.py | 40 ++++++++++++------------ scripts/prepare_network.py | 30 +++++++++--------- scripts/retrieve_databundle.py | 6 ++-- scripts/solve_network.py | 20 ++++++------ scripts/solve_operations_network.py | 24 +++++++-------- 16 files changed, 152 insertions(+), 186 deletions(-) diff --git a/scripts/_helpers.py b/scripts/_helpers.py index a44a8133..f1e5e887 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -95,10 +95,6 @@ def pdbcast(v, h): return pd.DataFrame(v.values.reshape((-1, 1)) * h.values, index=v.index, columns=h.index) -def retrieve_snakemake_keys(snakemake): - return (snakemake.input, snakemake.config, snakemake.wildcards, - snakemake.log, 
snakemake.output) - def load_network_for_plots(fn, tech_costs, config, combine_hydro_ps=True): import pypsa diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index fcddea8c..7dffe60f 100755 --- a/scripts/add_electricity.py +++ b/scripts/add_electricity.py @@ -84,7 +84,7 @@ It further adds extendable ``generators`` with **zero** capacity for """ import logging -from _helpers import configure_logging, retrieve_snakemake_keys, update_p_nom_max +from _helpers import configure_logging, update_p_nom_max import pypsa import pandas as pd @@ -544,40 +544,38 @@ if __name__ == "__main__": snakemake = mock_snakemake('add_electricity') configure_logging(snakemake) - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - - n = pypsa.Network(paths.base_network) + n = pypsa.Network(snakemake.input.base_network) Nyears = n.snapshot_weightings.objective.sum() / 8760. - costs = load_costs(paths.tech_costs, config['costs'], config['electricity'], Nyears) - ppl = load_powerplants(paths.powerplants) + costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears) + ppl = load_powerplants(snakemake.input.powerplants) - attach_load(n, paths.regions, paths.load, paths.nuts3_shapes, config['countries'], - config['load']['scaling_factor']) + attach_load(n, snakemake.input.regions, snakemake.input.load, snakemake.input.nuts3_shapes, + snakemake.config['countries'], snakemake.config['load']['scaling_factor']) - update_transmission_costs(n, costs, config['lines']['length_factor']) + update_transmission_costs(n, costs, snakemake.config['lines']['length_factor']) - carriers = config['electricity']['conventional_carriers'] + carriers = snakemake.config['electricity']['conventional_carriers'] attach_conventional_generators(n, costs, ppl, carriers) - carriers = config['renewable'] - attach_wind_and_solar(n, costs, paths, carriers, config['lines']['length_factor']) + carriers = snakemake.config['renewable'] + 
attach_wind_and_solar(n, costs, snakemake.input, carriers, snakemake.config['lines']['length_factor']) - if 'hydro' in config['renewable']: - carriers = config['renewable']['hydro'].pop('carriers', []) - attach_hydro(n, costs, ppl, paths.profile_hydro, paths.hydro_capacities, - carriers, **config['renewable']['hydro']) + if 'hydro' in snakemake.config['renewable']: + carriers = snakemake.config['renewable']['hydro'].pop('carriers', []) + attach_hydro(n, costs, ppl, snakemake.input.profile_hydro, snakemake.input.hydro_capacities, + carriers, **snakemake.config['renewable']['hydro']) - carriers = config['electricity']['extendable_carriers']['Generator'] + carriers = snakemake.config['electricity']['extendable_carriers']['Generator'] attach_extendable_generators(n, costs, ppl, carriers) - tech_map = config['electricity'].get('estimate_renewable_capacities_from_capacity_stats', {}) + tech_map = snakemake.config['electricity'].get('estimate_renewable_capacities_from_capacity_stats', {}) estimate_renewable_capacities(n, tech_map) - techs = config['electricity'].get('renewable_capacities_from_OPSD', []) + techs = snakemake.config['electricity'].get('renewable_capacities_from_OPSD', []) attach_OPSD_renewables(n, techs) update_p_nom_max(n) - add_nice_carrier_names(n, config) + add_nice_carrier_names(n, snakemake.config) - n.export_to_netcdf(out[0]) + n.export_to_netcdf(snakemake.output[0]) diff --git a/scripts/add_extra_components.py b/scripts/add_extra_components.py index 0531c9fa..287dd66e 100644 --- a/scripts/add_extra_components.py +++ b/scripts/add_extra_components.py @@ -50,7 +50,7 @@ The rule :mod:`add_extra_components` attaches additional extendable components t - ``Stores`` of carrier 'H2' and/or 'battery' in combination with ``Links``. If this option is chosen, the script adds extra buses with corresponding carrier where energy ``Stores`` are attached and which are connected to the corresponding power buses via two links, one each for charging and discharging. 
This leads to three investment variables for the energy capacity, charging and discharging capacity of the storage unit. """ import logging -from _helpers import configure_logging, retrieve_snakemake_keys +from _helpers import configure_logging import pypsa import pandas as pd @@ -193,18 +193,16 @@ if __name__ == "__main__": simpl='', clusters=5) configure_logging(snakemake) - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - - n = pypsa.Network(paths.network) - elec_config = config['electricity'] + n = pypsa.Network(snakemake.input.network) + elec_config = snakemake.config['electricity'] Nyears = n.snapshot_weightings.objective.sum() / 8760. - costs = load_costs(paths.tech_costs, config['costs'], elec_config, Nyears) + costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], elec_config, Nyears) attach_storageunits(n, costs, elec_config) attach_stores(n, costs, elec_config) attach_hydrogen_pipelines(n, costs, elec_config) - add_nice_carrier_names(n, config) + add_nice_carrier_names(n, snakemake.config) - n.export_to_netcdf(out[0]) + n.export_to_netcdf(snakemake.output[0]) diff --git a/scripts/base_network.py b/scripts/base_network.py index b9c9f37f..28d804cd 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -63,7 +63,7 @@ Description """ import logging -from _helpers import configure_logging, retrieve_snakemake_keys +from _helpers import configure_logging import pypsa import yaml @@ -588,10 +588,8 @@ if __name__ == "__main__": snakemake = mock_snakemake('base_network') configure_logging(snakemake) - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + n = base_network(snakemake.input.eg_buses, snakemake.input.eg_converters, snakemake.input.eg_transformers, snakemake.input.eg_lines, snakemake.input.eg_links, + snakemake.input.links_p_nom, snakemake.input.links_tyndp, snakemake.input.europe_shape, snakemake.input.country_shapes, snakemake.input.offshore_shapes, + 
snakemake.input.parameter_corrections, snakemake.config) - n = base_network(paths.eg_buses, paths.eg_converters, paths.eg_transformers, paths.eg_lines, paths.eg_links, - paths.links_p_nom, paths.links_tyndp, paths.europe_shape, paths.country_shapes, paths.offshore_shapes, - paths.parameter_corrections, config) - - n.export_to_netcdf(out[0]) + n.export_to_netcdf(snakemake.output[0]) diff --git a/scripts/build_bus_regions.py b/scripts/build_bus_regions.py index 78e2070d..d91d0575 100644 --- a/scripts/build_bus_regions.py +++ b/scripts/build_bus_regions.py @@ -42,7 +42,7 @@ Description """ import logging -from _helpers import configure_logging, retrieve_snakemake_keys +from _helpers import configure_logging import pypsa import os @@ -67,14 +67,12 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_bus_regions') configure_logging(snakemake) - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + countries = snakemake.config['countries'] - countries = config['countries'] + n = pypsa.Network(snakemake.input.base_network) - n = pypsa.Network(paths.base_network) - - country_shapes = gpd.read_file(paths.country_shapes).set_index('name')['geometry'] - offshore_shapes = gpd.read_file(paths.offshore_shapes).set_index('name')['geometry'] + country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index('name')['geometry'] + offshore_shapes = gpd.read_file(snakemake.input.offshore_shapes).set_index('name')['geometry'] onshore_regions = [] offshore_regions = [] @@ -105,6 +103,6 @@ if __name__ == "__main__": offshore_regions_c = offshore_regions_c.loc[offshore_regions_c.area > 1e-2] offshore_regions.append(offshore_regions_c) - save_to_geojson(pd.concat(onshore_regions, ignore_index=True), out.regions_onshore) + save_to_geojson(pd.concat(onshore_regions, ignore_index=True), snakemake.output.regions_onshore) - save_to_geojson(pd.concat(offshore_regions, ignore_index=True), out.regions_offshore) + save_to_geojson(pd.concat(offshore_regions, 
ignore_index=True), snakemake.output.regions_offshore) diff --git a/scripts/build_cutout.py b/scripts/build_cutout.py index 4b3e2bdc..78eafac6 100644 --- a/scripts/build_cutout.py +++ b/scripts/build_cutout.py @@ -95,7 +95,7 @@ import logging import atlite import geopandas as gpd import pandas as pd -from _helpers import configure_logging, retrieve_snakemake_keys +from _helpers import configure_logging logger = logging.getLogger(__name__) @@ -106,18 +106,16 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_cutout', cutout='europe-2013-era5') configure_logging(snakemake) - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + cutout_params = snakemake.config['atlite']['cutouts'][snakemake.wildcards.cutout] - cutout_params = config['atlite']['cutouts'][wildcards.cutout] - - snapshots = pd.date_range(freq='h', **config['snapshots']) + snapshots = pd.date_range(freq='h', **snakemake.config['snapshots']) time = [snapshots[0], snapshots[-1]] cutout_params['time'] = slice(*cutout_params.get('time', time)) if {'x', 'y', 'bounds'}.isdisjoint(cutout_params): # Determine the bounds from bus regions with a buffer of two grid cells - onshore = gpd.read_file(paths.regions_onshore) - offshore = gpd.read_file(paths.regions_offshore) + onshore = gpd.read_file(snakemake.input.regions_onshore) + offshore = gpd.read_file(snakemake.input.regions_offshore) regions = onshore.append(offshore) d = max(cutout_params.get('dx', 0.25), cutout_params.get('dy', 0.25))*2 cutout_params['bounds'] = regions.total_bounds + [-d, -d, d, d] @@ -128,5 +126,5 @@ if __name__ == "__main__": logging.info(f"Preparing cutout with parameters {cutout_params}.") features = cutout_params.pop('features', None) - cutout = atlite.Cutout(out[0], **cutout_params) + cutout = atlite.Cutout(snakemake.output[0], **cutout_params) cutout.prepare(features=features) diff --git a/scripts/build_hydro_profile.py b/scripts/build_hydro_profile.py index 563c8ecb..74efc2ef 100644 --- 
a/scripts/build_hydro_profile.py +++ b/scripts/build_hydro_profile.py @@ -60,7 +60,7 @@ Description """ import logging -from _helpers import configure_logging, retrieve_snakemake_keys +from _helpers import configure_logging import atlite import geopandas as gpd @@ -74,18 +74,16 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_hydro_profile') configure_logging(snakemake) - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + config_hydro = snakemake.config['renewable']['hydro'] + cutout = atlite.Cutout(snakemake.input.cutout) - config_hydro = config['renewable']['hydro'] - cutout = atlite.Cutout(paths.cutout) - - countries = config['countries'] - country_shapes = (gpd.read_file(paths.country_shapes) + countries = snakemake.config['countries'] + country_shapes = (gpd.read_file(snakemake.input.country_shapes) .set_index('name')['geometry'].reindex(countries)) country_shapes.index.name = 'countries' eia_stats = vhydro.get_eia_annual_hydro_generation( - paths.eia_hydro_generation).reindex(columns=countries) + snakemake.input.eia_hydro_generation).reindex(columns=countries) inflow = cutout.runoff(shapes=country_shapes, smooth=True, lower_threshold_quantile=True, @@ -94,4 +92,4 @@ if __name__ == "__main__": if 'clip_min_inflow' in config_hydro: inflow = inflow.where(inflow > config_hydro['clip_min_inflow'], 0) - inflow.to_netcdf(out[0]) + inflow.to_netcdf(snakemake.output[0]) diff --git a/scripts/build_load_data.py b/scripts/build_load_data.py index 144037a9..10921782 100755 --- a/scripts/build_load_data.py +++ b/scripts/build_load_data.py @@ -37,7 +37,7 @@ Outputs import logging logger = logging.getLogger(__name__) -from _helpers import configure_logging, retrieve_snakemake_keys +from _helpers import configure_logging import pandas as pd import numpy as np @@ -196,18 +196,16 @@ if __name__ == "__main__": configure_logging(snakemake) - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - - powerstatistics = 
config['load']['power_statistics'] - interpolate_limit = config['load']['interpolate_limit'] - countries = config['countries'] - snapshots = pd.date_range(freq='h', **config['snapshots']) + powerstatistics = snakemake.config['load']['power_statistics'] + interpolate_limit = snakemake.config['load']['interpolate_limit'] + countries = snakemake.config['countries'] + snapshots = pd.date_range(freq='h', **snakemake.config['snapshots']) years = slice(snapshots[0], snapshots[-1]) - time_shift = config['load']['time_shift_for_large_gaps'] + time_shift = snakemake.config['load']['time_shift_for_large_gaps'] - load = load_timeseries(paths[0], years, countries, powerstatistics) + load = load_timeseries(snakemake.input[0], years, countries, powerstatistics) - if config['load']['manual_adjustments']: + if snakemake.config['load']['manual_adjustments']: load = manual_adjustment(load, powerstatistics) logger.info(f"Linearly interpolate gaps of size {interpolate_limit} and less.") @@ -222,5 +220,5 @@ if __name__ == "__main__": '`time_shift_for_large_gaps` or modify the `manual_adjustment` function ' 'for implementing the needed load data modifications.') - load.to_csv(out[0]) + load.to_csv(snakemake.output[0]) diff --git a/scripts/build_powerplants.py b/scripts/build_powerplants.py index 4b9d13a1..d4ad4989 100755 --- a/scripts/build_powerplants.py +++ b/scripts/build_powerplants.py @@ -72,7 +72,7 @@ The configuration options ``electricity: powerplants_filter`` and ``electricity: """ import logging -from _helpers import configure_logging, retrieve_snakemake_keys +from _helpers import configure_logging import pypsa import powerplantmatching as pm @@ -100,9 +100,7 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_powerplants') configure_logging(snakemake) - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - - n = pypsa.Network(paths.base_network) + n = pypsa.Network(snakemake.input.base_network) countries = n.buses.country.unique() ppl = 
(pm.powerplants(from_url=True) @@ -116,13 +114,13 @@ if __name__ == "__main__": df.Technology.replace('Steam Turbine', 'OCGT').fillna('OCGT'))))) - ppl_query = config['electricity']['powerplants_filter'] + ppl_query = snakemake.config['electricity']['powerplants_filter'] if isinstance(ppl_query, str): ppl.query(ppl_query, inplace=True) # add carriers from own powerplant files: - custom_ppl_query = config['electricity']['custom_powerplants'] - ppl = add_custom_powerplants(ppl, paths.custom_powerplants, custom_ppl_query) + custom_ppl_query = snakemake.config['electricity']['custom_powerplants'] + ppl = add_custom_powerplants(ppl, snakemake.input.custom_powerplants, custom_ppl_query) cntries_without_ppl = [c for c in countries if c not in ppl.Country.unique()] @@ -141,4 +139,4 @@ if __name__ == "__main__": if bus_null_b.any(): logging.warning(f"Couldn't find close bus for {bus_null_b.sum()} powerplants") - ppl.to_csv(out[0]) + ppl.to_csv(snakemake.output[0]) diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index 944d6f39..b37e6825 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -190,7 +190,7 @@ from pypsa.geo import haversine from shapely.geometry import LineString import time -from _helpers import configure_logging, retrieve_snakemake_keys +from _helpers import configure_logging logger = logging.getLogger(__name__) @@ -202,55 +202,53 @@ if __name__ == '__main__': configure_logging(snakemake) pgb.streams.wrap_stderr() - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - - nprocesses = config['atlite'].get('nprocesses') - noprogress = not config['atlite'].get('show_progress', True) - config = config['renewable'][wildcards.technology] + nprocesses = snakemake.config['atlite'].get('nprocesses') + noprogress = not snakemake.config['atlite'].get('show_progress', True) + config = snakemake.config['renewable'][snakemake.wildcards.technology] resource = 
config['resource'] # pv panel config / wind turbine config
-    correction_factor = config.get('correction_factor', 1.)
+    correction_factor = config.get('correction_factor', 1.)
     capacity_per_sqkm = config['capacity_per_sqkm']
-    p_nom_max_meth = config.get('potential', 'conservative')
+    p_nom_max_meth = config.get('potential', 'conservative')
 
     if isinstance(config.get("corine", {}), list):
-        config['corine'] = {'grid_codes': config['corine']}
+        config['corine'] = {'grid_codes': config['corine']}
 
     if correction_factor != 1.:
         logger.info(f'correction_factor is set as {correction_factor}')
 
-    cutout = atlite.Cutout(paths['cutout'])
-    regions = gpd.read_file(paths.regions).set_index('name').rename_axis('bus')
+    cutout = atlite.Cutout(snakemake.input['cutout'])
+    regions = gpd.read_file(snakemake.input.regions).set_index('name').rename_axis('bus')
     buses = regions.index
 
     excluder = atlite.ExclusionContainer(crs=3035, res=100)
     if config['natura']:
-        excluder.add_raster(paths.natura, nodata=0, allow_no_overlap=True)
+        excluder.add_raster(snakemake.input.natura, nodata=0, allow_no_overlap=True)
 
-    corine = config.get("corine", {})
+    corine = config.get("corine", {})
     if "grid_codes" in corine:
         codes = corine["grid_codes"]
-        excluder.add_raster(paths.corine, codes=codes, invert=True, crs=3035)
+        excluder.add_raster(snakemake.input.corine, codes=codes, invert=True, crs=3035)
     if corine.get("distance", 0.) 
> 0.: codes = corine["distance_grid_codes"] buffer = corine["distance"] - excluder.add_raster(paths.corine, codes=codes, buffer=buffer, crs=3035) + excluder.add_raster(snakemake.input.corine, codes=codes, buffer=buffer, crs=3035) if "max_depth" in config: # lambda not supported for atlite + multiprocessing # use named function np.greater with partially frozen argument instead # and exclude areas where: -max_depth > grid cell depth func = functools.partial(np.greater,-config['max_depth']) - excluder.add_raster(paths.gebco, codes=func, crs=4236, nodata=-1000) + excluder.add_raster(snakemake.input.gebco, codes=func, crs=4236, nodata=-1000) if 'min_shore_distance' in config: buffer = config['min_shore_distance'] - excluder.add_geometry(paths.country_shapes, buffer=buffer) + excluder.add_geometry(snakemake.input.country_shapes, buffer=buffer) if 'max_shore_distance' in config: buffer = config['max_shore_distance'] - excluder.add_geometry(paths.country_shapes, buffer=buffer, invert=True) + excluder.add_geometry(snakemake.input.country_shapes, buffer=buffer, invert=True) kwargs = dict(nprocesses=nprocesses, disable_progressbar=noprogress) if noprogress: @@ -315,9 +313,9 @@ if __name__ == '__main__': average_distance.rename('average_distance')]) - if wildcards.technology.startswith("offwind"): + if snakemake.wildcards.technology.startswith("offwind"): logger.info('Calculate underwater fraction of connections.') - offshore_shape = gpd.read_file(paths['offshore_shapes']).unary_union + offshore_shape = gpd.read_file(snakemake.input['offshore_shapes']).unary_union underwater_fraction = [] for bus in buses: p = centre_of_mass.sel(bus=bus).data @@ -328,11 +326,11 @@ if __name__ == '__main__': ds['underwater_fraction'] = xr.DataArray(underwater_fraction, [buses]) # select only buses with some capacity and minimal capacity factor - ds = ds.sel(bus=((ds['profile'].mean('time') > config.get('min_p_max_pu', 0.)) & - (ds['p_nom_max'] > config.get('min_p_nom_max', 0.)))) + ds = 
ds.sel(bus=((ds['profile'].mean('time') > config.get('min_p_max_pu', 0.)) &
+                     (ds['p_nom_max'] > config.get('min_p_nom_max', 0.))))
 
-    if 'clip_p_max_pu' in config:
-        min_p_max_pu = config['clip_p_max_pu']
+    if 'clip_p_max_pu' in config:
+        min_p_max_pu = config['clip_p_max_pu']
         ds['profile'] = ds['profile'].where(ds['profile'] >= min_p_max_pu, 0)
 
-    ds.to_netcdf(out.profile)
+    ds.to_netcdf(snakemake.output.profile)
 
diff --git a/scripts/build_shapes.py b/scripts/build_shapes.py
index 515cbc13..95867d89 100644
--- a/scripts/build_shapes.py
+++ b/scripts/build_shapes.py
@@ -68,7 +68,7 @@ Description
 """
 import logging
-from _helpers import configure_logging, retrieve_snakemake_keys
+from _helpers import configure_logging
 
 import os
 import numpy as np
@@ -217,18 +217,16 @@ if __name__ == "__main__":
         snakemake = mock_snakemake('build_shapes')
     configure_logging(snakemake)
 
-    paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
+    country_shapes = countries(snakemake.input.naturalearth, snakemake.config['countries'])
+    save_to_geojson(country_shapes, snakemake.output.country_shapes)
 
-    country_shapes = countries(paths.naturalearth, config['countries'])
-    save_to_geojson(country_shapes, out.country_shapes)
-
-    offshore_shapes = eez(country_shapes, paths.eez, config['countries'])
-    save_to_geojson(offshore_shapes, out.offshore_shapes)
+    offshore_shapes = eez(country_shapes, snakemake.input.eez, snakemake.config['countries'])
+    save_to_geojson(offshore_shapes, snakemake.output.offshore_shapes)
 
     europe_shape = country_cover(country_shapes, offshore_shapes)
-    save_to_geojson(gpd.GeoSeries(europe_shape), out.europe_shape)
+    save_to_geojson(gpd.GeoSeries(europe_shape), snakemake.output.europe_shape)
 
-    nuts3_shapes = nuts3(country_shapes, paths.nuts3, paths.nuts3pop,
-                         paths.nuts3gdp, paths.ch_cantons, paths.ch_popgdp)
+    nuts3_shapes = nuts3(country_shapes, snakemake.input.nuts3, snakemake.input.nuts3pop,
+                         
snakemake.input.nuts3gdp, snakemake.input.ch_cantons, snakemake.input.ch_popgdp) - save_to_geojson(nuts3_shapes, out.nuts3_shapes) + save_to_geojson(nuts3_shapes, snakemake.output.nuts3_shapes) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 041fb259..525196fc 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -122,7 +122,7 @@ Exemplary unsolved network clustered to 37 nodes: """ import logging -from _helpers import configure_logging, retrieve_snakemake_keys, update_p_nom_max +from _helpers import configure_logging, update_p_nom_max import pypsa import os @@ -331,21 +331,19 @@ if __name__ == "__main__": snakemake = mock_snakemake('cluster_network', network='elec', simpl='', clusters='5') configure_logging(snakemake) - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + n = pypsa.Network(snakemake.input.network) - n = pypsa.Network(paths.network) - - focus_weights = config.get('focus_weights', None) + focus_weights = snakemake.config.get('focus_weights', None) renewable_carriers = pd.Index([tech for tech in n.generators.carrier.unique() - if tech in config['renewable']]) + if tech in snakemake.config['renewable']]) - if wildcards.clusters.endswith('m'): - n_clusters = int(wildcards.clusters[:-1]) + if snakemake.wildcards.clusters.endswith('m'): + n_clusters = int(snakemake.wildcards.clusters[:-1]) aggregate_carriers = pd.Index(n.generators.carrier.unique()).difference(renewable_carriers) else: - n_clusters = int(wildcards.clusters) + n_clusters = int(snakemake.wildcards.clusters) aggregate_carriers = None # All if n_clusters == len(n.buses): @@ -354,11 +352,10 @@ if __name__ == "__main__": linemap = n.lines.index.to_series() clustering = pypsa.networkclustering.Clustering(n, busmap, linemap, linemap, pd.Series(dtype='O')) else: - line_length_factor = config['lines']['length_factor'] + line_length_factor = snakemake.config['lines']['length_factor'] Nyears = 
n.snapshot_weightings.objective.sum()/8760 - hvac_overhead_cost = (load_costs(tech_costs = paths.tech_costs, - config = config['costs'], - elec_config=config['electricity'], Nyears = Nyears) + + hvac_overhead_cost = (load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears) .at['HVAC overhead', 'capital_cost']) def consense(x): @@ -367,22 +364,23 @@ if __name__ == "__main__": "The `potential` configuration option must agree for all renewable carriers, for now!" ) return v - potential_mode = consense(pd.Series([config['renewable'][tech]['potential'] + potential_mode = consense(pd.Series([snakemake.config['renewable'][tech]['potential'] for tech in renewable_carriers])) - custom_busmap = config["enable"].get("custom_busmap", False) + custom_busmap = snakemake.config["enable"].get("custom_busmap", False) if custom_busmap: - custom_busmap = pd.read_csv(paths.custom_busmap, index_col=0, squeeze=True) + custom_busmap = pd.read_csv(snakemake.input.custom_busmap, index_col=0, squeeze=True) custom_busmap.index = custom_busmap.index.astype(str) - logger.info(f"Imported custom busmap from {paths.custom_busmap}") + logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}") clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap, aggregate_carriers, - line_length_factor, potential_mode, config['solving']['solver']['name'], + line_length_factor, potential_mode, + snakemake.config['solving']['solver']['name'], "kmeans", hvac_overhead_cost, focus_weights) update_p_nom_max(n) - clustering.network.export_to_netcdf(out.network) + clustering.network.export_to_netcdf(snakemake.output.network) for attr in ('busmap', 'linemap'): #also available: linemap_positive, linemap_negative - getattr(clustering, attr).to_csv(out[attr]) + getattr(clustering, attr).to_csv(snakemake.output[attr]) - cluster_regions((clustering.busmap,), paths, out) + cluster_regions((clustering.busmap,), snakemake.input, snakemake.output) 
diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index e0b488f5..f984ace6 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -56,7 +56,7 @@ Description """ import logging -from _helpers import configure_logging, retrieve_snakemake_keys +from _helpers import configure_logging import re import pypsa @@ -70,7 +70,7 @@ idx = pd.IndexSlice logger = logging.getLogger(__name__) -def add_co2limit(n, co2limit=1.487e+9, Nyears=1.): +def add_co2limit(n, co2limit, Nyears=1.): n.add("GlobalConstraint", "CO2Limit", carrier_attribute="co2_emissions", sense="<=", @@ -206,15 +206,13 @@ if __name__ == "__main__": clusters='40', ll='v0.3', opts='Co2L-24H') configure_logging(snakemake) - paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + opts = snakemake.wildcards.opts.split('-') - opts = wildcards.opts.split('-') - - n = pypsa.Network(paths[0]) + n = pypsa.Network(snakemake.input[0]) Nyears = n.snapshot_weightings.objective.sum() / 8760. 
-    costs = load_costs(paths.tech_costs, config['costs'], config['electricity'], Nyears)
+    costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears)
 
-    set_line_s_max_pu(n, config['lines']['s_max_pu'])
+    set_line_s_max_pu(n, snakemake.config['lines']['s_max_pu'])
 
     for o in opts:
         m = re.match(r'^\d+h$', o, re.IGNORECASE)
@@ -225,7 +223,7 @@ if __name__ == "__main__":
     for o in opts:
         m = re.match(r'^\d+seg$', o, re.IGNORECASE)
         if m is not None:
-            solver_name = config["solving"]["solver"]["name"]
+            solver_name = snakemake.config["solving"]["solver"]["name"]
             n = apply_time_segmentation(n, m.group(0)[:-3], solver_name)
             break
 
@@ -233,10 +231,10 @@ if __name__ == "__main__":
         if "Co2L" in o:
             m = re.findall("[0-9]*\.?[0-9]+$", o)
             if len(m) > 0:
-                co2limit = float(m[0]) * config['electricity']['co2base']
+                co2limit = float(m[0]) * snakemake.config['electricity']['co2base']
                 add_co2limit(n, co2limit, Nyears)
             else:
-                add_co2limit(n, config['electricity']['co2limit'], Nyears)
+                add_co2limit(n, snakemake.config['electricity']['co2limit'], Nyears)
             break
 
     for o in opts:
@@ -257,17 +255,17 @@ if __name__ == "__main__":
                 c.df.loc[sel,attr] *= factor
 
     if 'Ep' in opts:
-        add_emission_prices(n, config['costs']['emission_prices'])
+        add_emission_prices(n, snakemake.config['costs']['emission_prices'])
 
-    ll_type, factor = wildcards.ll[0], wildcards.ll[1:]
+    ll_type, factor = snakemake.wildcards.ll[0], snakemake.wildcards.ll[1:]
     set_transmission_limit(n, ll_type, factor, costs, Nyears)
 
-    set_line_nom_max(n, s_nom_max_set=config["lines"].get("s_nom_max,", np.inf),
-                     p_nom_max_set=config["links"].get("p_nom_max,", np.inf))
+    set_line_nom_max(n, s_nom_max_set=snakemake.config["lines"].get("s_nom_max", np.inf),
+                     p_nom_max_set=snakemake.config["links"].get("p_nom_max", np.inf))
 
     if "ATK" in opts:
         enforce_autarky(n)
     elif "ATKc" in opts:
         enforce_autarky(n, only_crossborder=True)
 
-    n.export_to_netcdf(out[0])
+    
n.export_to_netcdf(snakemake.output[0]) diff --git a/scripts/retrieve_databundle.py b/scripts/retrieve_databundle.py index c5a31f81..86869879 100644 --- a/scripts/retrieve_databundle.py +++ b/scripts/retrieve_databundle.py @@ -33,7 +33,7 @@ The :ref:`tutorial` uses a smaller `data bundle