diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index 9e64ad29..7dffe60f 100755 --- a/scripts/add_electricity.py +++ b/scripts/add_electricity.py @@ -117,12 +117,7 @@ def _add_missing_carriers_from_costs(n, costs, carriers): n.import_components_from_dataframe(emissions, 'Carrier') -def load_costs(Nyears=1., tech_costs=None, config=None, elec_config=None): - if tech_costs is None: - tech_costs = snakemake.input.tech_costs - - if config is None: - config = snakemake.config['costs'] +def load_costs(tech_costs, config, elec_config, Nyears=1.): # set all asset costs and other parameters costs = pd.read_csv(tech_costs, index_col=list(range(3))).sort_index() @@ -168,8 +163,6 @@ def load_costs(Nyears=1., tech_costs=None, config=None, elec_config=None): marginal_cost=0., co2_emissions=0.)) - if elec_config is None: - elec_config = snakemake.config['electricity'] max_hours = elec_config['max_hours'] costs.loc["battery"] = \ costs_for_storage(costs.loc["battery storage"], costs.loc["battery inverter"], @@ -187,9 +180,7 @@ def load_costs(Nyears=1., tech_costs=None, config=None, elec_config=None): return costs -def load_powerplants(ppl_fn=None): - if ppl_fn is None: - ppl_fn = snakemake.input.powerplants +def load_powerplants(ppl_fn): carrier_dict = {'ocgt': 'OCGT', 'ccgt': 'CCGT', 'bioenergy': 'biomass', 'ccgt, thermal': 'CCGT', 'hard coal': 'coal'} return (pd.read_csv(ppl_fn, index_col=0, dtype={'bus': 'str'}) @@ -198,18 +189,18 @@ def load_powerplants(ppl_fn=None): .replace({'carrier': carrier_dict})) -def attach_load(n): - substation_lv_i = n.buses.index[n.buses['substation_lv']] - regions = (gpd.read_file(snakemake.input.regions).set_index('name') - .reindex(substation_lv_i)) - opsd_load = (pd.read_csv(snakemake.input.load, index_col=0, parse_dates=True) - .filter(items=snakemake.config['countries'])) +def attach_load(n, regions, load, nuts3_shapes, countries, scaling=1.): + + substation_lv_i = n.buses.index[n.buses['substation_lv']] + regions = 
(gpd.read_file(regions).set_index('name') + .reindex(substation_lv_i)) + opsd_load = (pd.read_csv(load, index_col=0, parse_dates=True) + .filter(items=countries)) - scaling = snakemake.config.get('load', {}).get('scaling_factor', 1.0) logger.info(f"Load data scaled with scalling factor {scaling}.") opsd_load *= scaling - nuts3 = gpd.read_file(snakemake.input.nuts3_shapes).set_index('index') + nuts3 = gpd.read_file(nuts3_shapes).set_index('index') def upsample(cntry, group): l = opsd_load[cntry] @@ -237,6 +228,9 @@ def attach_load(n): def update_transmission_costs(n, costs, length_factor=1.0, simple_hvdc_costs=False): + # TODO: line length factor of lines is applied to lines and links. + # Separate the function to distinguish. + n.lines['capital_cost'] = (n.lines['length'] * length_factor * costs.at['HVAC overhead', 'capital_cost']) @@ -261,18 +255,20 @@ def update_transmission_costs(n, costs, length_factor=1.0, simple_hvdc_costs=Fal n.links.loc[dc_b, 'capital_cost'] = costs -def attach_wind_and_solar(n, costs): - for tech in snakemake.config['renewable']: +def attach_wind_and_solar(n, costs, input_profiles, technologies, line_length_factor=1): + # TODO: rename tech -> carrier, technologies -> carriers + + for tech in technologies: if tech == 'hydro': continue n.add("Carrier", name=tech) - with xr.open_dataset(getattr(snakemake.input, 'profile_' + tech)) as ds: + with xr.open_dataset(getattr(input_profiles, 'profile_' + tech)) as ds: if ds.indexes['bus'].empty: continue suptech = tech.split('-', 2)[0] if suptech == 'offwind': underwater_fraction = ds['underwater_fraction'].to_pandas() - connection_cost = (snakemake.config['lines']['length_factor'] * + connection_cost = (line_length_factor * ds['average_distance'].to_pandas() * (underwater_fraction * costs.at[tech + '-connection-submarine', 'capital_cost'] + @@ -298,8 +294,7 @@ def attach_wind_and_solar(n, costs): p_max_pu=ds['profile'].transpose('time', 'bus').to_pandas()) -def attach_conventional_generators(n, 
costs, ppl): - carriers = snakemake.config['electricity']['conventional_carriers'] +def attach_conventional_generators(n, costs, ppl, carriers): _add_missing_carriers_from_costs(n, costs, carriers) @@ -320,10 +315,7 @@ def attach_conventional_generators(n, costs, ppl): logger.warning(f'Capital costs for conventional generators put to 0 EUR/MW.') -def attach_hydro(n, costs, ppl): - if 'hydro' not in snakemake.config['renewable']: return - c = snakemake.config['renewable']['hydro'] - carriers = c.get('carriers', ['ror', 'PHS', 'hydro']) +def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **config): _add_missing_carriers_from_costs(n, costs, carriers) @@ -339,11 +331,11 @@ def attach_hydro(n, costs, ppl): if not inflow_idx.empty: dist_key = ppl.loc[inflow_idx, 'p_nom'].groupby(country).transform(normed) - with xr.open_dataarray(snakemake.input.profile_hydro) as inflow: + with xr.open_dataarray(profile_hydro) as inflow: inflow_countries = pd.Index(country[inflow_idx]) missing_c = (inflow_countries.unique() .difference(inflow.indexes['countries'])) - assert missing_c.empty, (f"'{snakemake.input.profile_hydro}' is missing " + assert missing_c.empty, (f"'{profile_hydro}' is missing " f"inflow time-series for at least one country: {', '.join(missing_c)}") inflow_t = (inflow.sel(countries=inflow_countries) @@ -368,7 +360,8 @@ def attach_hydro(n, costs, ppl): if 'PHS' in carriers and not phs.empty: # fill missing max hours to config value and # assume no natural inflow due to lack of data - phs = phs.replace({'max_hours': {0: c['PHS_max_hours']}}) + max_hours = config.get('PHS_max_hours', 6) + phs = phs.replace({'max_hours': {0: max_hours}}) n.madd('StorageUnit', phs.index, carrier='PHS', bus=phs['bus'], @@ -380,8 +373,11 @@ def attach_hydro(n, costs, ppl): cyclic_state_of_charge=True) if 'hydro' in carriers and not hydro.empty: - hydro_max_hours = c.get('hydro_max_hours') - hydro_stats = pd.read_csv(snakemake.input.hydro_capacities, + hydro_max_hours 
= config.get('hydro_max_hours') + + assert hydro_max_hours is not None, "No path for hydro capacities given." + + hydro_stats = pd.read_csv(hydro_capacities, comment="#", na_values='-', index_col=0) e_target = hydro_stats["E_store[TWh]"].clip(lower=0.2) * 1e6 e_installed = hydro.eval('p_nom * max_hours').groupby(hydro.country).sum() @@ -409,8 +405,7 @@ def attach_hydro(n, costs, ppl): bus=hydro['bus'], p_nom=hydro['p_nom'], max_hours=hydro_max_hours, - capital_cost=(costs.at['hydro', 'capital_cost'] - if c.get('hydro_capital_cost') else 0.), + capital_cost=costs.at['hydro', 'capital_cost'], marginal_cost=costs.at['hydro', 'marginal_cost'], p_max_pu=1., # dispatch p_min_pu=0., # store @@ -420,9 +415,7 @@ def attach_hydro(n, costs, ppl): inflow=inflow_t.loc[:, hydro.index]) -def attach_extendable_generators(n, costs, ppl): - elec_opts = snakemake.config['electricity'] - carriers = pd.Index(elec_opts['extendable_carriers']['Generator']) +def attach_extendable_generators(n, costs, ppl, carriers): _add_missing_carriers_from_costs(n, costs, carriers) @@ -470,12 +463,11 @@ def attach_extendable_generators(n, costs, ppl): -def attach_OPSD_renewables(n): +def attach_OPSD_renewables(n, techs): available = ['DE', 'FR', 'PL', 'CH', 'DK', 'CZ', 'SE', 'GB'] tech_map = {'Onshore': 'onwind', 'Offshore': 'offwind', 'Solar': 'solar'} countries = set(available) & set(n.buses.country) - techs = snakemake.config['electricity'].get('renewable_capacities_from_OPSD', []) tech_map = {k: v for k, v in tech_map.items() if v in techs} if not tech_map: @@ -503,10 +495,7 @@ def attach_OPSD_renewables(n): -def estimate_renewable_capacities(n, tech_map=None): - if tech_map is None: - tech_map = (snakemake.config['electricity'] - .get('estimate_renewable_capacities_from_capacity_stats', {})) +def estimate_renewable_capacities(n, tech_map): if len(tech_map) == 0: return @@ -538,8 +527,7 @@ def estimate_renewable_capacities(n, tech_map=None): n.generators.loc[tech_i, 'p_nom_min'] = 
n.generators.loc[tech_i, 'p_nom'] -def add_nice_carrier_names(n, config=None): - if config is None: config = snakemake.config +def add_nice_carrier_names(n, config): carrier_i = n.carriers.index nice_names = (pd.Series(config['plotting']['nice_names']) .reindex(carrier_i).fillna(carrier_i.to_series().str.title())) @@ -547,11 +535,9 @@ def add_nice_carrier_names(n, config=None): colors = pd.Series(config['plotting']['tech_colors']).reindex(carrier_i) if colors.isna().any(): missing_i = list(colors.index[colors.isna()]) - logger.warning(f'tech_colors for carriers {missing_i} not defined ' - 'in config.') + logger.warning(f'tech_colors for carriers {missing_i} not defined in config.') n.carriers['color'] = colors - if __name__ == "__main__": if 'snakemake' not in globals(): from _helpers import mock_snakemake @@ -561,22 +547,35 @@ if __name__ == "__main__": n = pypsa.Network(snakemake.input.base_network) Nyears = n.snapshot_weightings.objective.sum() / 8760. - costs = load_costs(Nyears) - ppl = load_powerplants() + costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears) + ppl = load_powerplants(snakemake.input.powerplants) - attach_load(n) + attach_load(n, snakemake.input.regions, snakemake.input.load, snakemake.input.nuts3_shapes, + snakemake.config['countries'], snakemake.config['load']['scaling_factor']) - update_transmission_costs(n, costs) + update_transmission_costs(n, costs, snakemake.config['lines']['length_factor']) - attach_conventional_generators(n, costs, ppl) - attach_wind_and_solar(n, costs) - attach_hydro(n, costs, ppl) - attach_extendable_generators(n, costs, ppl) + carriers = snakemake.config['electricity']['conventional_carriers'] + attach_conventional_generators(n, costs, ppl, carriers) + + carriers = snakemake.config['renewable'] + attach_wind_and_solar(n, costs, snakemake.input, carriers, snakemake.config['lines']['length_factor']) + + if 'hydro' in snakemake.config['renewable']: + 
carriers = snakemake.config['renewable']['hydro'].pop('carriers', []) + attach_hydro(n, costs, ppl, snakemake.input.profile_hydro, snakemake.input.hydro_capacities, + carriers, **snakemake.config['renewable']['hydro']) + + carriers = snakemake.config['electricity']['extendable_carriers']['Generator'] + attach_extendable_generators(n, costs, ppl, carriers) + + tech_map = snakemake.config['electricity'].get('estimate_renewable_capacities_from_capacity_stats', {}) + estimate_renewable_capacities(n, tech_map) + techs = snakemake.config['electricity'].get('renewable_capacities_from_OPSD', []) + attach_OPSD_renewables(n, techs) - estimate_renewable_capacities(n) - attach_OPSD_renewables(n) update_p_nom_max(n) - add_nice_carrier_names(n) + add_nice_carrier_names(n, snakemake.config) n.export_to_netcdf(snakemake.output[0]) diff --git a/scripts/add_extra_components.py b/scripts/add_extra_components.py index 88f7d35c..287dd66e 100644 --- a/scripts/add_extra_components.py +++ b/scripts/add_extra_components.py @@ -64,8 +64,7 @@ idx = pd.IndexSlice logger = logging.getLogger(__name__) -def attach_storageunits(n, costs): - elec_opts = snakemake.config['electricity'] +def attach_storageunits(n, costs, elec_opts): carriers = elec_opts['extendable_carriers']['StorageUnit'] max_hours = elec_opts['max_hours'] @@ -89,8 +88,7 @@ def attach_storageunits(n, costs): cyclic_state_of_charge=True) -def attach_stores(n, costs): - elec_opts = snakemake.config['electricity'] +def attach_stores(n, costs, elec_opts): carriers = elec_opts['extendable_carriers']['Store'] _add_missing_carriers_from_costs(n, costs, carriers) @@ -156,8 +154,7 @@ def attach_stores(n, costs): marginal_cost=costs.at["battery inverter", "marginal_cost"]) -def attach_hydrogen_pipelines(n, costs): - elec_opts = snakemake.config['electricity'] +def attach_hydrogen_pipelines(n, costs, elec_opts): ext_carriers = elec_opts['extendable_carriers'] as_stores = ext_carriers.get('Store', []) @@ -197,15 +194,15 @@ if __name__ == 
"__main__": configure_logging(snakemake) n = pypsa.Network(snakemake.input.network) + elec_config = snakemake.config['electricity'] + Nyears = n.snapshot_weightings.objective.sum() / 8760. - costs = load_costs(Nyears, tech_costs=snakemake.input.tech_costs, - config=snakemake.config['costs'], - elec_config=snakemake.config['electricity']) + costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], elec_config, Nyears) - attach_storageunits(n, costs) - attach_stores(n, costs) - attach_hydrogen_pipelines(n, costs) + attach_storageunits(n, costs, elec_config) + attach_stores(n, costs, elec_config) + attach_hydrogen_pipelines(n, costs, elec_config) - add_nice_carrier_names(n, config=snakemake.config) + add_nice_carrier_names(n, snakemake.config) n.export_to_netcdf(snakemake.output[0]) diff --git a/scripts/base_network.py b/scripts/base_network.py index 1f2b9241..28d804cd 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -112,8 +112,8 @@ def _find_closest_links(links, new_links, distance_upper_bound=1.5): .sort_index()['i'] -def _load_buses_from_eg(): - buses = (pd.read_csv(snakemake.input.eg_buses, quotechar="'", +def _load_buses_from_eg(eg_buses, europe_shape, config_elec): + buses = (pd.read_csv(eg_buses, quotechar="'", true_values=['t'], false_values=['f'], dtype=dict(bus_id="str")) .set_index("bus_id") @@ -124,18 +124,18 @@ def _load_buses_from_eg(): buses['under_construction'] = buses['under_construction'].fillna(False).astype(bool) # remove all buses outside of all countries including exclusive economic zones (offshore) - europe_shape = gpd.read_file(snakemake.input.europe_shape).loc[0, 'geometry'] + europe_shape = gpd.read_file(europe_shape).loc[0, 'geometry'] europe_shape_prepped = shapely.prepared.prep(europe_shape) buses_in_europe_b = buses[['x', 'y']].apply(lambda p: europe_shape_prepped.contains(Point(p)), axis=1) - buses_with_v_nom_to_keep_b = buses.v_nom.isin(snakemake.config['electricity']['voltages']) | 
buses.v_nom.isnull() - logger.info("Removing buses with voltages {}".format(pd.Index(buses.v_nom.unique()).dropna().difference(snakemake.config['electricity']['voltages']))) + buses_with_v_nom_to_keep_b = buses.v_nom.isin(config_elec['voltages']) | buses.v_nom.isnull() + logger.info("Removing buses with voltages {}".format(pd.Index(buses.v_nom.unique()).dropna().difference(config_elec['voltages']))) return pd.DataFrame(buses.loc[buses_in_europe_b & buses_with_v_nom_to_keep_b]) -def _load_transformers_from_eg(buses): - transformers = (pd.read_csv(snakemake.input.eg_transformers, quotechar="'", +def _load_transformers_from_eg(buses, eg_transformers): + transformers = (pd.read_csv(eg_transformers, quotechar="'", true_values=['t'], false_values=['f'], dtype=dict(transformer_id='str', bus0='str', bus1='str')) .set_index('transformer_id')) @@ -145,8 +145,8 @@ def _load_transformers_from_eg(buses): return transformers -def _load_converters_from_eg(buses): - converters = (pd.read_csv(snakemake.input.eg_converters, quotechar="'", +def _load_converters_from_eg(buses, eg_converters): + converters = (pd.read_csv(eg_converters, quotechar="'", true_values=['t'], false_values=['f'], dtype=dict(converter_id='str', bus0='str', bus1='str')) .set_index('converter_id')) @@ -158,8 +158,8 @@ def _load_converters_from_eg(buses): return converters -def _load_links_from_eg(buses): - links = (pd.read_csv(snakemake.input.eg_links, quotechar="'", true_values=['t'], false_values=['f'], +def _load_links_from_eg(buses, eg_links): + links = (pd.read_csv(eg_links, quotechar="'", true_values=['t'], false_values=['f'], dtype=dict(link_id='str', bus0='str', bus1='str', under_construction="bool")) .set_index('link_id')) @@ -176,11 +176,11 @@ def _load_links_from_eg(buses): return links -def _add_links_from_tyndp(buses, links): - links_tyndp = pd.read_csv(snakemake.input.links_tyndp) +def _add_links_from_tyndp(buses, links, links_tyndp, europe_shape): + links_tyndp = pd.read_csv(links_tyndp) # remove 
all links from list which lie outside all of the desired countries - europe_shape = gpd.read_file(snakemake.input.europe_shape).loc[0, 'geometry'] + europe_shape = gpd.read_file(europe_shape).loc[0, 'geometry'] europe_shape_prepped = shapely.prepared.prep(europe_shape) x1y1_in_europe_b = links_tyndp[['x1', 'y1']].apply(lambda p: europe_shape_prepped.contains(Point(p)), axis=1) x2y2_in_europe_b = links_tyndp[['x2', 'y2']].apply(lambda p: europe_shape_prepped.contains(Point(p)), axis=1) @@ -248,8 +248,8 @@ def _add_links_from_tyndp(buses, links): return buses, links.append(links_tyndp, sort=True) -def _load_lines_from_eg(buses): - lines = (pd.read_csv(snakemake.input.eg_lines, quotechar="'", true_values=['t'], false_values=['f'], +def _load_lines_from_eg(buses, eg_lines): + lines = (pd.read_csv(eg_lines, quotechar="'", true_values=['t'], false_values=['f'], dtype=dict(line_id='str', bus0='str', bus1='str', underground="bool", under_construction="bool")) .set_index('line_id') @@ -262,8 +262,8 @@ def _load_lines_from_eg(buses): return lines -def _apply_parameter_corrections(n): - with open(snakemake.input.parameter_corrections) as f: +def _apply_parameter_corrections(n, parameter_corrections): + with open(parameter_corrections) as f: corrections = yaml.safe_load(f) if corrections is None: return @@ -285,14 +285,14 @@ def _apply_parameter_corrections(n): df.loc[inds, attr] = r[inds].astype(df[attr].dtype) -def _set_electrical_parameters_lines(lines): - v_noms = snakemake.config['electricity']['voltages'] - linetypes = snakemake.config['lines']['types'] +def _set_electrical_parameters_lines(lines, config): + v_noms = config['electricity']['voltages'] + linetypes = config['lines']['types'] for v_nom in v_noms: lines.loc[lines["v_nom"] == v_nom, 'type'] = linetypes[v_nom] - lines['s_max_pu'] = snakemake.config['lines']['s_max_pu'] + lines['s_max_pu'] = config['lines']['s_max_pu'] return lines @@ -304,14 +304,14 @@ def _set_lines_s_nom_from_linetypes(n): ) -def 
_set_electrical_parameters_links(links): +def _set_electrical_parameters_links(links, config, links_p_nom): if links.empty: return links - p_max_pu = snakemake.config['links'].get('p_max_pu', 1.) + p_max_pu = config['links'].get('p_max_pu', 1.) links['p_max_pu'] = p_max_pu links['p_min_pu'] = -p_max_pu - links_p_nom = pd.read_csv(snakemake.input.links_p_nom) + links_p_nom = pd.read_csv(links_p_nom) # filter links that are not in operation anymore removed_b = links_p_nom.Remarks.str.contains('Shut down|Replaced', na=False) @@ -331,8 +331,8 @@ def _set_electrical_parameters_links(links): return links -def _set_electrical_parameters_converters(converters): - p_max_pu = snakemake.config['links'].get('p_max_pu', 1.) +def _set_electrical_parameters_converters(converters, config): + p_max_pu = config['links'].get('p_max_pu', 1.) converters['p_max_pu'] = p_max_pu converters['p_min_pu'] = -p_max_pu @@ -345,8 +345,8 @@ def _set_electrical_parameters_converters(converters): return converters -def _set_electrical_parameters_transformers(transformers): - config = snakemake.config['transformers'] +def _set_electrical_parameters_transformers(transformers, config): + config = config['transformers'] ## Add transformer parameters transformers["x"] = config.get('x', 0.1) @@ -373,7 +373,7 @@ def _remove_unconnected_components(network): return network[component == component_sizes.index[0]] -def _set_countries_and_substations(n): +def _set_countries_and_substations(n, config, country_shapes, offshore_shapes): buses = n.buses @@ -386,9 +386,9 @@ def _set_countries_and_substations(n): index=buses.index ) - countries = snakemake.config['countries'] - country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index('name')['geometry'] - offshore_shapes = gpd.read_file(snakemake.input.offshore_shapes).set_index('name')['geometry'] + countries = config['countries'] + country_shapes = gpd.read_file(country_shapes).set_index('name')['geometry'] + offshore_shapes = 
gpd.read_file(offshore_shapes).set_index('name')['geometry'] substation_b = buses['symbol'].str.contains('substation|converter station', case=False) def prefer_voltage(x, which): @@ -498,19 +498,19 @@ def _replace_b2b_converter_at_country_border_by_link(n): .format(i, b0, line, linkcntry.at[i], buscntry.at[b1])) -def _set_links_underwater_fraction(n): +def _set_links_underwater_fraction(n, offshore_shapes): if n.links.empty: return if not hasattr(n.links, 'geometry'): n.links['underwater_fraction'] = 0. else: - offshore_shape = gpd.read_file(snakemake.input.offshore_shapes).unary_union + offshore_shape = gpd.read_file(offshore_shapes).unary_union links = gpd.GeoSeries(n.links.geometry.dropna().map(shapely.wkt.loads)) n.links['underwater_fraction'] = links.intersection(offshore_shape).length / links.length -def _adjust_capacities_of_under_construction_branches(n): - lines_mode = snakemake.config['lines'].get('under_construction', 'undef') +def _adjust_capacities_of_under_construction_branches(n, config): + lines_mode = config['lines'].get('under_construction', 'undef') if lines_mode == 'zero': n.lines.loc[n.lines.under_construction, 'num_parallel'] = 0. n.lines.loc[n.lines.under_construction, 's_nom'] = 0. @@ -519,7 +519,7 @@ def _adjust_capacities_of_under_construction_branches(n): elif lines_mode != 'keep': logger.warning("Unrecognized configuration for `lines: under_construction` = `{}`. Keeping under construction lines.") - links_mode = snakemake.config['links'].get('under_construction', 'undef') + links_mode = config['links'].get('under_construction', 'undef') if links_mode == 'zero': n.links.loc[n.links.under_construction, "p_nom"] = 0. 
elif links_mode == 'remove': @@ -534,27 +534,30 @@ def _adjust_capacities_of_under_construction_branches(n): return n -def base_network(): - buses = _load_buses_from_eg() +def base_network(eg_buses, eg_converters, eg_transformers, eg_lines, eg_links, + links_p_nom, links_tyndp, europe_shape, country_shapes, offshore_shapes, + parameter_corrections, config): - links = _load_links_from_eg(buses) - if snakemake.config['links'].get('include_tyndp'): - buses, links = _add_links_from_tyndp(buses, links) + buses = _load_buses_from_eg(eg_buses, europe_shape, config['electricity']) - converters = _load_converters_from_eg(buses) + links = _load_links_from_eg(buses, eg_links) + if config['links'].get('include_tyndp'): + buses, links = _add_links_from_tyndp(buses, links, links_tyndp, europe_shape) - lines = _load_lines_from_eg(buses) - transformers = _load_transformers_from_eg(buses) + converters = _load_converters_from_eg(buses, eg_converters) - lines = _set_electrical_parameters_lines(lines) - transformers = _set_electrical_parameters_transformers(transformers) - links = _set_electrical_parameters_links(links) - converters = _set_electrical_parameters_converters(converters) + lines = _load_lines_from_eg(buses, eg_lines) + transformers = _load_transformers_from_eg(buses, eg_transformers) + + lines = _set_electrical_parameters_lines(lines, config) + transformers = _set_electrical_parameters_transformers(transformers, config) + links = _set_electrical_parameters_links(links, config, links_p_nom) + converters = _set_electrical_parameters_converters(converters, config) n = pypsa.Network() n.name = 'PyPSA-Eur' - n.set_snapshots(pd.date_range(freq='h', **snakemake.config['snapshots'])) + n.set_snapshots(pd.date_range(freq='h', **config['snapshots'])) n.snapshot_weightings[:] *= 8760. 
/ n.snapshot_weightings.sum() n.import_components_from_dataframe(buses, "Bus") @@ -565,17 +568,17 @@ def base_network(): _set_lines_s_nom_from_linetypes(n) - _apply_parameter_corrections(n) + _apply_parameter_corrections(n, parameter_corrections) n = _remove_unconnected_components(n) - _set_countries_and_substations(n) + _set_countries_and_substations(n, config, country_shapes, offshore_shapes) - _set_links_underwater_fraction(n) + _set_links_underwater_fraction(n, offshore_shapes) _replace_b2b_converter_at_country_border_by_link(n) - n = _adjust_capacities_of_under_construction_branches(n) + n = _adjust_capacities_of_under_construction_branches(n, config) return n @@ -585,6 +588,8 @@ if __name__ == "__main__": snakemake = mock_snakemake('base_network') configure_logging(snakemake) - n = base_network() + n = base_network(snakemake.input.eg_buses, snakemake.input.eg_converters, snakemake.input.eg_transformers, snakemake.input.eg_lines, snakemake.input.eg_links, + snakemake.input.links_p_nom, snakemake.input.links_tyndp, snakemake.input.europe_shape, snakemake.input.country_shapes, snakemake.input.offshore_shapes, + snakemake.input.parameter_corrections, snakemake.config) n.export_to_netcdf(snakemake.output[0]) diff --git a/scripts/build_hydro_profile.py b/scripts/build_hydro_profile.py index 6ac59262..74efc2ef 100644 --- a/scripts/build_hydro_profile.py +++ b/scripts/build_hydro_profile.py @@ -74,7 +74,7 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_hydro_profile') configure_logging(snakemake) - config = snakemake.config['renewable']['hydro'] + config_hydro = snakemake.config['renewable']['hydro'] cutout = atlite.Cutout(snakemake.input.cutout) countries = snakemake.config['countries'] @@ -89,7 +89,7 @@ if __name__ == "__main__": lower_threshold_quantile=True, normalize_using_yearly=eia_stats) - if 'clip_min_inflow' in config: - inflow = inflow.where(inflow > config['clip_min_inflow'], 0) + if 'clip_min_inflow' in config_hydro: + inflow = 
inflow.where(inflow > config_hydro['clip_min_inflow'], 0) inflow.to_netcdf(snakemake.output[0]) diff --git a/scripts/build_load_data.py b/scripts/build_load_data.py index 840cb6c7..10921782 100755 --- a/scripts/build_load_data.py +++ b/scripts/build_load_data.py @@ -196,17 +196,16 @@ if __name__ == "__main__": configure_logging(snakemake) - config = snakemake.config - powerstatistics = config['load']['power_statistics'] - interpolate_limit = config['load']['interpolate_limit'] - countries = config['countries'] - snapshots = pd.date_range(freq='h', **config['snapshots']) + powerstatistics = snakemake.config['load']['power_statistics'] + interpolate_limit = snakemake.config['load']['interpolate_limit'] + countries = snakemake.config['countries'] + snapshots = pd.date_range(freq='h', **snakemake.config['snapshots']) years = slice(snapshots[0], snapshots[-1]) - time_shift = config['load']['time_shift_for_large_gaps'] + time_shift = snakemake.config['load']['time_shift_for_large_gaps'] load = load_timeseries(snakemake.input[0], years, countries, powerstatistics) - if config['load']['manual_adjustments']: + if snakemake.config['load']['manual_adjustments']: load = manual_adjustment(load, powerstatistics) logger.info(f"Linearly interpolate gaps of size {interpolate_limit} and less.") diff --git a/scripts/build_natura_raster.py b/scripts/build_natura_raster.py index f7a923d6..71d2c45e 100644 --- a/scripts/build_natura_raster.py +++ b/scripts/build_natura_raster.py @@ -40,7 +40,7 @@ Description """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import atlite import geopandas as gpd @@ -73,18 +73,19 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_natura_raster') configure_logging(snakemake) + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - cutouts = snakemake.input.cutouts + cutouts = paths.cutouts xs, Xs, ys, Ys = zip(*(determine_cutout_xXyY(cutout) for cutout 
in cutouts)) bounds = transform_bounds(4326, 3035, min(xs), min(ys), max(Xs), max(Ys)) transform, out_shape = get_transform_and_shape(bounds, res=100) # adjusted boundaries - shapes = gpd.read_file(snakemake.input.natura).to_crs(3035) + shapes = gpd.read_file(paths.natura).to_crs(3035) raster = ~geometry_mask(shapes.geometry, out_shape[::-1], transform) raster = raster.astype(rio.uint8) - with rio.open(snakemake.output[0], 'w', driver='GTiff', dtype=rio.uint8, + with rio.open(out[0], 'w', driver='GTiff', dtype=rio.uint8, count=1, transform=transform, crs=3035, compress='lzw', width=raster.shape[1], height=raster.shape[0]) as dst: dst.write(raster, indexes=1) diff --git a/scripts/build_powerplants.py b/scripts/build_powerplants.py index ab000631..d4ad4989 100755 --- a/scripts/build_powerplants.py +++ b/scripts/build_powerplants.py @@ -84,11 +84,10 @@ from scipy.spatial import cKDTree as KDTree logger = logging.getLogger(__name__) -def add_custom_powerplants(ppl): - custom_ppl_query = snakemake.config['electricity']['custom_powerplants'] +def add_custom_powerplants(ppl, custom_powerplants, custom_ppl_query=False): if not custom_ppl_query: return ppl - add_ppls = pd.read_csv(snakemake.input.custom_powerplants, index_col=0, + add_ppls = pd.read_csv(custom_powerplants, index_col=0, dtype={'bus': 'str'}) if isinstance(custom_ppl_query, str): add_ppls.query(custom_ppl_query, inplace=True) @@ -119,7 +118,9 @@ if __name__ == "__main__": if isinstance(ppl_query, str): ppl.query(ppl_query, inplace=True) - ppl = add_custom_powerplants(ppl) # add carriers from own powerplant files + # add carriers from own powerplant files: + custom_ppl_query = snakemake.config['electricity']['custom_powerplants'] + ppl = add_custom_powerplants(ppl, snakemake.input.custom_powerplants, custom_ppl_query) cntries_without_ppl = [c for c in countries if c not in ppl.Country.unique()] diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index 9ce83de3..b37e6825 
100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -201,54 +201,54 @@ if __name__ == '__main__': snakemake = mock_snakemake('build_renewable_profiles', technology='solar') configure_logging(snakemake) pgb.streams.wrap_stderr() - paths = snakemake.input + nprocesses = snakemake.config['atlite'].get('nprocesses') noprogress = not snakemake.config['atlite'].get('show_progress', True) config = snakemake.config['renewable'][snakemake.wildcards.technology] resource = config['resource'] # pv panel config / wind turbine config - correction_factor = config.get('correction_factor', 1.) + correction_factor = config.get('correction_factor', 1.) capacity_per_sqkm = config['capacity_per_sqkm'] - p_nom_max_meth = config.get('potential', 'conservative') + p_nom_max_meth = config.get('potential', 'conservative') if isinstance(config.get("corine", {}), list): - config['corine'] = {'grid_codes': config['corine']} + config['corine'] = {'grid_codes': config['corine']} if correction_factor != 1.: logger.info(f'correction_factor is set as {correction_factor}') - cutout = atlite.Cutout(paths['cutout']) - regions = gpd.read_file(paths.regions).set_index('name').rename_axis('bus') + cutout = atlite.Cutout(snakemake.input['cutout']) + regions = gpd.read_file(snakemake.input.regions).set_index('name').rename_axis('bus') buses = regions.index excluder = atlite.ExclusionContainer(crs=3035, res=100) if config['natura']: - excluder.add_raster(paths.natura, nodata=0, allow_no_overlap=True) + excluder.add_raster(snakemake.input.natura, nodata=0, allow_no_overlap=True) - corine = config.get("corine", {}) + corine = config.get("corine", {}) if "grid_codes" in corine: codes = corine["grid_codes"] - excluder.add_raster(paths.corine, codes=codes, invert=True, crs=3035) + excluder.add_raster(snakemake.input.corine, codes=codes, invert=True, crs=3035) if corine.get("distance", 0.) 
> 0.: codes = corine["distance_grid_codes"] buffer = corine["distance"] - excluder.add_raster(paths.corine, codes=codes, buffer=buffer, crs=3035) + excluder.add_raster(snakemake.input.corine, codes=codes, buffer=buffer, crs=3035) if "max_depth" in config: # lambda not supported for atlite + multiprocessing # use named function np.greater with partially frozen argument instead # and exclude areas where: -max_depth > grid cell depth func = functools.partial(np.greater,-config['max_depth']) - excluder.add_raster(paths.gebco, codes=func, crs=4236, nodata=-1000) + excluder.add_raster(snakemake.input.gebco, codes=func, crs=4236, nodata=-1000) if 'min_shore_distance' in config: buffer = config['min_shore_distance'] - excluder.add_geometry(paths.country_shapes, buffer=buffer) + excluder.add_geometry(snakemake.input.country_shapes, buffer=buffer) if 'max_shore_distance' in config: buffer = config['max_shore_distance'] - excluder.add_geometry(paths.country_shapes, buffer=buffer, invert=True) + excluder.add_geometry(snakemake.input.country_shapes, buffer=buffer, invert=True) kwargs = dict(nprocesses=nprocesses, disable_progressbar=noprogress) if noprogress: @@ -315,7 +315,7 @@ if __name__ == '__main__': if snakemake.wildcards.technology.startswith("offwind"): logger.info('Calculate underwater fraction of connections.') - offshore_shape = gpd.read_file(paths['offshore_shapes']).unary_union + offshore_shape = gpd.read_file(snakemake.input['offshore_shapes']).unary_union underwater_fraction = [] for bus in buses: p = centre_of_mass.sel(bus=bus).data @@ -326,11 +326,11 @@ if __name__ == '__main__': ds['underwater_fraction'] = xr.DataArray(underwater_fraction, [buses]) # select only buses with some capacity and minimal capacity factor - ds = ds.sel(bus=((ds['profile'].mean('time') > config.get('min_p_max_pu', 0.)) & - (ds['p_nom_max'] > config.get('min_p_nom_max', 0.)))) + ds = ds.sel(bus=((ds['profile'].mean('time') > config.get('min_p_max_pu', 0.)) & + (ds['p_nom_max'] 
> snakemake.config.get('min_p_nom_max', 0.)))) - if 'clip_p_max_pu' in config: - min_p_max_pu = config['clip_p_max_pu'] + if 'clip_p_max_pu' in snakemake.config: + min_p_max_pu = snakemake.config['clip_p_max_pu'] ds['profile'] = ds['profile'].where(ds['profile'] >= min_p_max_pu, 0) ds.to_netcdf(snakemake.output.profile) diff --git a/scripts/build_shapes.py b/scripts/build_shapes.py index 366cb820..95867d89 100644 --- a/scripts/build_shapes.py +++ b/scripts/build_shapes.py @@ -107,26 +107,25 @@ def _simplify_polys(polys, minarea=0.1, tolerance=0.01, filterremote=True): return polys.simplify(tolerance=tolerance) -def countries(): - cntries = snakemake.config['countries'] - if 'RS' in cntries: cntries.append('KV') +def countries(naturalearth, country_list): + if 'RS' in country_list: country_list.append('KV') - df = gpd.read_file(snakemake.input.naturalearth) + df = gpd.read_file(naturalearth) # Names are a hassle in naturalearth, try several fields fieldnames = (df[x].where(lambda s: s!='-99') for x in ('ISO_A2', 'WB_A2', 'ADM0_A3')) df['name'] = reduce(lambda x,y: x.fillna(y), fieldnames, next(fieldnames)).str[0:2] - df = df.loc[df.name.isin(cntries) & ((df['scalerank'] == 0) | (df['scalerank'] == 5))] + df = df.loc[df.name.isin(country_list) & ((df['scalerank'] == 0) | (df['scalerank'] == 5))] s = df.set_index('name')['geometry'].map(_simplify_polys) - if 'RS' in cntries: s['RS'] = s['RS'].union(s.pop('KV')) + if 'RS' in country_list: s['RS'] = s['RS'].union(s.pop('KV')) return s -def eez(country_shapes): - df = gpd.read_file(snakemake.input.eez) - df = df.loc[df['ISO_3digit'].isin([_get_country('alpha_3', alpha_2=c) for c in snakemake.config['countries']])] +def eez(country_shapes, eez, country_list): + df = gpd.read_file(eez) + df = df.loc[df['ISO_3digit'].isin([_get_country('alpha_3', alpha_2=c) for c in country_list])] df['name'] = df['ISO_3digit'].map(lambda c: _get_country('alpha_2', alpha_3=c)) s = df.set_index('name').geometry.map(lambda s: 
_simplify_polys(s, filterremote=False)) s = gpd.GeoSeries({k:v for k,v in s.iteritems() if v.distance(country_shapes[k]) < 1e-3}) @@ -145,29 +144,29 @@ def country_cover(country_shapes, eez_shapes=None): return Polygon(shell=europe_shape.exterior) -def nuts3(country_shapes): - df = gpd.read_file(snakemake.input.nuts3) +def nuts3(country_shapes, nuts3, nuts3pop, nuts3gdp, ch_cantons, ch_popgdp): + df = gpd.read_file(nuts3) df = df.loc[df['STAT_LEVL_'] == 3] df['geometry'] = df['geometry'].map(_simplify_polys) df = df.rename(columns={'NUTS_ID': 'id'})[['id', 'geometry']].set_index('id') - pop = pd.read_table(snakemake.input.nuts3pop, na_values=[':'], delimiter=' ?\t', engine='python') + pop = pd.read_table(nuts3pop, na_values=[':'], delimiter=' ?\t', engine='python') pop = (pop .set_index(pd.MultiIndex.from_tuples(pop.pop('unit,geo\\time').str.split(','))).loc['THS'] .applymap(lambda x: pd.to_numeric(x, errors='coerce')) .fillna(method='bfill', axis=1))['2014'] - gdp = pd.read_table(snakemake.input.nuts3gdp, na_values=[':'], delimiter=' ?\t', engine='python') + gdp = pd.read_table(nuts3gdp, na_values=[':'], delimiter=' ?\t', engine='python') gdp = (gdp .set_index(pd.MultiIndex.from_tuples(gdp.pop('unit,geo\\time').str.split(','))).loc['EUR_HAB'] .applymap(lambda x: pd.to_numeric(x, errors='coerce')) .fillna(method='bfill', axis=1))['2014'] - cantons = pd.read_csv(snakemake.input.ch_cantons) + cantons = pd.read_csv(ch_cantons) cantons = cantons.set_index(cantons['HASC'].str[3:])['NUTS'] cantons = cantons.str.pad(5, side='right', fillchar='0') - swiss = pd.read_excel(snakemake.input.ch_popgdp, skiprows=3, index_col=0) + swiss = pd.read_excel(ch_popgdp, skiprows=3, index_col=0) swiss.columns = swiss.columns.to_series().map(cantons) pop = pop.append(pd.to_numeric(swiss.loc['Residents in 1000', 'CH040':])) @@ -218,16 +217,16 @@ if __name__ == "__main__": snakemake = mock_snakemake('build_shapes') configure_logging(snakemake) - out = snakemake.output + country_shapes = 
countries(snakemake.input.naturalearth, snakemake.config['countries']) + save_to_geojson(country_shapes, snakemake.output.country_shapes) - country_shapes = countries() - save_to_geojson(country_shapes, out.country_shapes) - - offshore_shapes = eez(country_shapes) - save_to_geojson(offshore_shapes, out.offshore_shapes) + offshore_shapes = eez(country_shapes, snakemake.input.eez, snakemake.config['countries']) + save_to_geojson(offshore_shapes, snakemake.output.offshore_shapes) europe_shape = country_cover(country_shapes, offshore_shapes) - save_to_geojson(gpd.GeoSeries(europe_shape), out.europe_shape) + save_to_geojson(gpd.GeoSeries(europe_shape), snakemake.output.europe_shape) - nuts3_shapes = nuts3(country_shapes) - save_to_geojson(nuts3_shapes, out.nuts3_shapes) + nuts3_shapes = nuts3(country_shapes, snakemake.input.nuts3, snakemake.input.nuts3pop, + snakemake.input.nuts3gdp, snakemake.input.ch_cantons, snakemake.input.ch_popgdp) + + save_to_geojson(nuts3_shapes, snakemake.output.nuts3_shapes) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 4b9db466..525196fc 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -173,12 +173,9 @@ def weighting_for_country(n, x): return (w * (100. 
/ w.max())).clip(lower=1.).astype(int) -def distribute_clusters(n, n_clusters, focus_weights=None, solver_name=None): +def distribute_clusters(n, n_clusters, focus_weights=None, solver_name="cbc"): """Determine the number of clusters per country""" - if solver_name is None: - solver_name = snakemake.config['solving']['solver']['name'] - L = (n.loads_t.p_set.mean() .groupby(n.loads.bus).sum() .groupby([n.buses.country, n.buses.sub_network]).sum() @@ -271,12 +268,10 @@ def clustering_for_n_clusters(n, n_clusters, custom_busmap=False, aggregate_carr else: raise AttributeError(f"potential_mode should be one of 'simple' or 'conservative' but is '{potential_mode}'") - if custom_busmap: - busmap = pd.read_csv(snakemake.input.custom_busmap, index_col=0, squeeze=True) - busmap.index = busmap.index.astype(str) - logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}") - else: + if not custom_busmap: busmap = busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights, algorithm) + else: + busmap = custom_busmap clustering = get_clustering_from_busmap( n, busmap, @@ -309,8 +304,6 @@ def save_to_geojson(s, fn): def cluster_regions(busmaps, input=None, output=None): - if input is None: input = snakemake.input - if output is None: output = snakemake.output busmap = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0]) @@ -361,10 +354,8 @@ if __name__ == "__main__": else: line_length_factor = snakemake.config['lines']['length_factor'] Nyears = n.snapshot_weightings.objective.sum()/8760 - hvac_overhead_cost = (load_costs(Nyears, - tech_costs=snakemake.input.tech_costs, - config=snakemake.config['costs'], - elec_config=snakemake.config['electricity']) + + hvac_overhead_cost = (load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears) .at['HVAC overhead', 'capital_cost']) def consense(x): @@ -376,12 +367,15 @@ if __name__ == "__main__": potential_mode = 
consense(pd.Series([snakemake.config['renewable'][tech]['potential'] for tech in renewable_carriers])) custom_busmap = snakemake.config["enable"].get("custom_busmap", False) + if custom_busmap: + custom_busmap = pd.read_csv(snakemake.input.custom_busmap, index_col=0, squeeze=True) + custom_busmap.index = custom_busmap.index.astype(str) + logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}") + clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap, aggregate_carriers, - line_length_factor=line_length_factor, - potential_mode=potential_mode, - solver_name=snakemake.config['solving']['solver']['name'], - extended_link_costs=hvac_overhead_cost, - focus_weights=focus_weights) + line_length_factor, potential_mode, + snakemake.config['solving']['solver']['name'], + "kmeans", hvac_overhead_cost, focus_weights) update_p_nom_max(n) @@ -389,4 +383,4 @@ if __name__ == "__main__": for attr in ('busmap', 'linemap'): #also available: linemap_positive, linemap_negative getattr(clustering, attr).to_csv(snakemake.output[attr]) - cluster_regions((clustering.busmap,)) + cluster_regions((clustering.busmap,), snakemake.input, snakemake.output) diff --git a/scripts/make_summary.py b/scripts/make_summary.py index cff5318c..3f8ee728 100644 --- a/scripts/make_summary.py +++ b/scripts/make_summary.py @@ -54,7 +54,7 @@ Replacing '/summaries/' with '/plots/' creates nice colored maps of the results. """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import os import pypsa @@ -378,7 +378,7 @@ outputs = ["costs", ] -def make_summaries(networks_dict, country='all'): +def make_summaries(networks_dict, paths, config, country='all'): columns = pd.MultiIndex.from_tuples(networks_dict.keys(),names=["simpl","clusters","ll","opts"]) @@ -403,8 +403,7 @@ def make_summaries(networks_dict, country='all'): n = n[n.buses.country == country] Nyears = n.snapshot_weightings.objective.sum() / 8760. 
- costs = load_costs(Nyears, snakemake.input[0], - snakemake.config['costs'], snakemake.config['electricity']) + costs = load_costs(paths[0], config['costs'], config['electricity'], Nyears) update_transmission_costs(n, costs, simple_hvdc_costs=False) assign_carriers(n) @@ -415,8 +414,7 @@ def make_summaries(networks_dict, country='all'): return dfs -def to_csv(dfs): - dir = snakemake.output[0] +def to_csv(dfs, dir): os.makedirs(dir, exist_ok=True) for key, df in dfs.items(): df.to_csv(os.path.join(dir, f"{key}.csv")) @@ -432,25 +430,27 @@ if __name__ == "__main__": network_dir = os.path.join('results', 'networks') configure_logging(snakemake) - def expand_from_wildcard(key): - w = getattr(snakemake.wildcards, key) - return snakemake.config["scenario"][key] if w == "all" else [w] + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - if snakemake.wildcards.ll.endswith("all"): - ll = snakemake.config["scenario"]["ll"] - if len(snakemake.wildcards.ll) == 4: - ll = [l for l in ll if l[0] == snakemake.wildcards.ll[0]] + def expand_from_wildcard(key, config): + w = getattr(wildcards, key) + return config["scenario"][key] if w == "all" else [w] + + if wildcards.ll.endswith("all"): + ll = config["scenario"]["ll"] + if len(wildcards.ll) == 4: + ll = [l for l in ll if l[0] == wildcards.ll[0]] else: - ll = [snakemake.wildcards.ll] + ll = [wildcards.ll] networks_dict = {(simpl,clusters,l,opts) : os.path.join(network_dir, f'elec_s{simpl}_' f'{clusters}_ec_l{l}_{opts}.nc') - for simpl in expand_from_wildcard("simpl") - for clusters in expand_from_wildcard("clusters") + for simpl in expand_from_wildcard("simpl", config) + for clusters in expand_from_wildcard("clusters", config) for l in ll - for opts in expand_from_wildcard("opts")} + for opts in expand_from_wildcard("opts", config)} - dfs = make_summaries(networks_dict, country=snakemake.wildcards.country) + dfs = make_summaries(networks_dict, paths, config, country=wildcards.country) - to_csv(dfs) + 
to_csv(dfs, out[0]) diff --git a/scripts/plot_network.py b/scripts/plot_network.py index 456bf50f..645c8c39 100755 --- a/scripts/plot_network.py +++ b/scripts/plot_network.py @@ -20,8 +20,8 @@ Description """ import logging -from _helpers import (load_network_for_plots, aggregate_p, aggregate_costs, - configure_logging) +from _helpers import (retrieve_snakemake_keys, load_network_for_plots, + aggregate_p, aggregate_costs, configure_logging) import pandas as pd import numpy as np @@ -259,18 +259,19 @@ if __name__ == "__main__": set_plot_style() - opts = snakemake.config['plotting'] - map_figsize = opts['map']['figsize'] - map_boundaries = opts['map']['boundaries'] + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) - n = load_network_for_plots(snakemake.input.network, snakemake.input.tech_costs, snakemake.config) + map_figsize = config['map']['figsize'] + map_boundaries = config['map']['boundaries'] - scenario_opts = snakemake.wildcards.opts.split('-') + n = load_network_for_plots(paths.network, paths.tech_costs, config) + + scenario_opts = wildcards.opts.split('-') fig, ax = plt.subplots(figsize=map_figsize, subplot_kw={"projection": ccrs.PlateCarree()}) - plot_map(n, ax, snakemake.wildcards.attr, opts) + plot_map(n, ax, wildcards.attr, config) - fig.savefig(snakemake.output.only_map, dpi=150, bbox_inches='tight') + fig.savefig(out.only_map, dpi=150, bbox_inches='tight') ax1 = fig.add_axes([-0.115, 0.625, 0.2, 0.2]) plot_total_energy_pie(n, ax1) @@ -278,12 +279,12 @@ if __name__ == "__main__": ax2 = fig.add_axes([-0.075, 0.1, 0.1, 0.45]) plot_total_cost_bar(n, ax2) - ll = snakemake.wildcards.ll + ll = wildcards.ll ll_type = ll[0] ll_factor = ll[1:] lbl = dict(c='line cost', v='line volume')[ll_type] amnt = '{ll} x today\'s'.format(ll=ll_factor) if ll_factor != 'opt' else 'optimal' fig.suptitle('Expansion to {amount} {label} at {clusters} clusters' - .format(amount=amnt, label=lbl, clusters=snakemake.wildcards.clusters)) + .format(amount=amnt, 
label=lbl, clusters=wildcards.clusters)) - fig.savefig(snakemake.output.ext, transparent=True, bbox_inches='tight') + fig.savefig(out.ext, transparent=True, bbox_inches='tight') diff --git a/scripts/plot_p_nom_max.py b/scripts/plot_p_nom_max.py index e79ad274..ea66d612 100644 --- a/scripts/plot_p_nom_max.py +++ b/scripts/plot_p_nom_max.py @@ -19,7 +19,7 @@ Description """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import pypsa import pandas as pd @@ -53,11 +53,13 @@ if __name__ == "__main__": clusts= '5,full', country= 'all') configure_logging(snakemake) + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + plot_kwds = dict(drawstyle="steps-post") - clusters = snakemake.wildcards.clusts.split(',') - techs = snakemake.wildcards.techs.split(',') - country = snakemake.wildcards.country + clusters = wildcards.clusts.split(',') + techs = wildcards.techs.split(',') + country = wildcards.country if country == 'all': country = None else: @@ -66,7 +68,7 @@ if __name__ == "__main__": fig, axes = plt.subplots(1, len(techs)) for j, cluster in enumerate(clusters): - net = pypsa.Network(snakemake.input[j]) + net = pypsa.Network(paths[j]) for i, tech in enumerate(techs): cum_p_nom_max(net, tech, country).plot(x="p_max_pu", y="cum_p_nom_max", @@ -79,4 +81,4 @@ if __name__ == "__main__": plt.legend(title="Cluster level") - fig.savefig(snakemake.output[0], transparent=True, bbox_inches='tight') + fig.savefig(out[0], transparent=True, bbox_inches='tight') diff --git a/scripts/plot_summary.py b/scripts/plot_summary.py index a34611de..48f064b0 100644 --- a/scripts/plot_summary.py +++ b/scripts/plot_summary.py @@ -21,7 +21,7 @@ Description import os import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import pandas as pd import matplotlib.pyplot as plt @@ -55,7 +55,7 @@ def rename_techs(label): preferred_order = 
pd.Index(["transmission lines","hydroelectricity","hydro reservoir","run of river","pumped hydro storage","onshore wind","offshore wind ac", "offshore wind dc","solar PV","solar thermal","OCGT","hydrogen storage","battery storage"]) -def plot_costs(infn, fn=None): +def plot_costs(infn, config, fn=None): ## For now ignore the simpl header cost_df = pd.read_csv(infn,index_col=list(range(3)),header=[1,2,3]) @@ -67,7 +67,7 @@ def plot_costs(infn, fn=None): df = df.groupby(df.index.map(rename_techs)).sum() - to_drop = df.index[df.max(axis=1) < snakemake.config['plotting']['costs_threshold']] + to_drop = df.index[df.max(axis=1) < config['plotting']['costs_threshold']] print("dropping") @@ -84,7 +84,7 @@ def plot_costs(infn, fn=None): fig, ax = plt.subplots() fig.set_size_inches((12,8)) - df.loc[new_index,new_columns].T.plot(kind="bar",ax=ax,stacked=True,color=[snakemake.config['plotting']['tech_colors'][i] for i in new_index]) + df.loc[new_index,new_columns].T.plot(kind="bar",ax=ax,stacked=True,color=[config['plotting']['tech_colors'][i] for i in new_index]) handles,labels = ax.get_legend_handles_labels() @@ -92,7 +92,7 @@ def plot_costs(infn, fn=None): handles.reverse() labels.reverse() - ax.set_ylim([0,snakemake.config['plotting']['costs_max']]) + ax.set_ylim([0,config['plotting']['costs_max']]) ax.set_ylabel("System Cost [EUR billion per year]") @@ -109,7 +109,7 @@ def plot_costs(infn, fn=None): fig.savefig(fn, transparent=True) -def plot_energy(infn, fn=None): +def plot_energy(infn, config, fn=None): energy_df = pd.read_csv(infn, index_col=list(range(2)),header=[1,2,3]) @@ -120,7 +120,7 @@ def plot_energy(infn, fn=None): df = df.groupby(df.index.map(rename_techs)).sum() - to_drop = df.index[df.abs().max(axis=1) < snakemake.config['plotting']['energy_threshold']] + to_drop = df.index[df.abs().max(axis=1) < config['plotting']['energy_threshold']] print("dropping") @@ -137,7 +137,7 @@ def plot_energy(infn, fn=None): fig, ax = plt.subplots() fig.set_size_inches((12,8)) - 
df.loc[new_index,new_columns].T.plot(kind="bar",ax=ax,stacked=True,color=[snakemake.config['plotting']['tech_colors'][i] for i in new_index]) + df.loc[new_index,new_columns].T.plot(kind="bar",ax=ax,stacked=True,color=[config['plotting']['tech_colors'][i] for i in new_index]) handles,labels = ax.get_legend_handles_labels() @@ -145,7 +145,7 @@ def plot_energy(infn, fn=None): handles.reverse() labels.reverse() - ax.set_ylim([snakemake.config['plotting']['energy_min'],snakemake.config['plotting']['energy_max']]) + ax.set_ylim([config['plotting']['energy_min'], config['plotting']['energy_max']]) ax.set_ylabel("Energy [TWh/a]") @@ -170,10 +170,12 @@ if __name__ == "__main__": attr='', ext='png', country='all') configure_logging(snakemake) - summary = snakemake.wildcards.summary + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + + summary = wildcards.summary try: func = globals()[f"plot_{summary}"] except KeyError: raise RuntimeError(f"plotting function for {summary} has not been defined") - func(os.path.join(snakemake.input[0], f"{summary}.csv"), snakemake.output[0]) + func(os.path.join(paths[0], f"{summary}.csv"), config, out[0]) diff --git a/scripts/prepare_links_p_nom.py b/scripts/prepare_links_p_nom.py index b83089d6..6bd4bca4 100644 --- a/scripts/prepare_links_p_nom.py +++ b/scripts/prepare_links_p_nom.py @@ -37,7 +37,7 @@ Description """ import logging -from _helpers import configure_logging +from _helpers import configure_logging, retrieve_snakemake_keys import pandas as pd @@ -63,6 +63,8 @@ if __name__ == "__main__": snakemake = mock_snakemake('prepare_links_p_nom', simpl='', network='elec') configure_logging(snakemake) + paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake) + links_p_nom = pd.read_html('https://en.wikipedia.org/wiki/List_of_HVDC_projects', header=0, match="SwePol")[0] mw = "Power (MW)" @@ -74,4 +76,4 @@ if __name__ == "__main__": links_p_nom['x1'], links_p_nom['y1'] = 
extract_coordinates(links_p_nom['Converterstation 1']) links_p_nom['x2'], links_p_nom['y2'] = extract_coordinates(links_p_nom['Converterstation 2']) - links_p_nom.dropna(subset=['x1', 'y1', 'x2', 'y2']).to_csv(snakemake.output[0], index=False) + links_p_nom.dropna(subset=['x1', 'y1', 'x2', 'y2']).to_csv(out[0], index=False) diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index ed33abb7..f984ace6 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -70,21 +70,14 @@ idx = pd.IndexSlice logger = logging.getLogger(__name__) -def add_co2limit(n, Nyears=1., factor=None): - - if factor is not None: - annual_emissions = factor*snakemake.config['electricity']['co2base'] - else: - annual_emissions = snakemake.config['electricity']['co2limit'] +def add_co2limit(n, co2limit, Nyears=1.): n.add("GlobalConstraint", "CO2Limit", carrier_attribute="co2_emissions", sense="<=", - constant=annual_emissions * Nyears) + constant=co2limit * Nyears) -def add_emission_prices(n, emission_prices=None, exclude_co2=False): - if emission_prices is None: - emission_prices = snakemake.config['costs']['emission_prices'] +def add_emission_prices(n, emission_prices={'co2': 0.}, exclude_co2=False): if exclude_co2: emission_prices.pop('co2') ep = (pd.Series(emission_prices).rename(lambda x: x+'_emissions') * n.carriers.filter(like='_emissions')).sum(axis=1) @@ -94,13 +87,12 @@ def add_emission_prices(n, emission_prices=None, exclude_co2=False): n.storage_units['marginal_cost'] += su_ep -def set_line_s_max_pu(n): - s_max_pu = snakemake.config['lines']['s_max_pu'] +def set_line_s_max_pu(n, s_max_pu = 0.7): n.lines['s_max_pu'] = s_max_pu logger.info(f"N-1 security margin of lines set to {s_max_pu}") -def set_transmission_limit(n, ll_type, factor, Nyears=1): +def set_transmission_limit(n, ll_type, factor, costs, Nyears=1): links_dc_b = n.links.carrier == 'DC' if not n.links.empty else pd.Series() _lines_s_nom = (np.sqrt(3) * n.lines.type.map(n.line_types.i_nom) * 
@@ -112,9 +104,6 @@ def set_transmission_limit(n, ll_type, factor, Nyears=1): ref = (lines_s_nom @ n.lines[col] + n.links.loc[links_dc_b, "p_nom"] @ n.links.loc[links_dc_b, col]) - costs = load_costs(Nyears, snakemake.input.tech_costs, - snakemake.config['costs'], - snakemake.config['electricity']) update_transmission_costs(n, costs, simple_hvdc_costs=False) if factor == 'opt' or float(factor) > 1.0: @@ -151,7 +140,7 @@ def average_every_nhours(n, offset): return m -def apply_time_segmentation(n, segments): +def apply_time_segmentation(n, segments, solver_name="cbc"): logger.info(f"Aggregating time series to {segments} segments.") try: import tsam.timeseriesaggregation as tsam @@ -170,8 +159,6 @@ def apply_time_segmentation(n, segments): raw = pd.concat([p_max_pu, load, inflow], axis=1, sort=False) - solver_name = snakemake.config["solving"]["solver"]["name"] - agg = tsam.TimeSeriesAggregation(raw, hoursPerPeriod=len(raw), noTypicalPeriods=1, noSegments=int(segments), segmentation=True, solver=solver_name) @@ -208,9 +195,7 @@ def enforce_autarky(n, only_crossborder=False): n.mremove("Line", lines_rm) n.mremove("Link", links_rm) -def set_line_nom_max(n): - s_nom_max_set = snakemake.config["lines"].get("s_nom_max,", np.inf) - p_nom_max_set = snakemake.config["links"].get("p_nom_max", np.inf) +def set_line_nom_max(n, s_nom_max_set=np.inf, p_nom_max_set=np.inf): n.lines.s_nom_max.clip(upper=s_nom_max_set, inplace=True) n.links.p_nom_max.clip(upper=p_nom_max_set, inplace=True) @@ -225,8 +210,9 @@ if __name__ == "__main__": n = pypsa.Network(snakemake.input[0]) Nyears = n.snapshot_weightings.objective.sum() / 8760. 
+    costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears) -    set_line_s_max_pu(n) +    set_line_s_max_pu(n, snakemake.config['lines']['s_max_pu']) for o in opts: m = re.match(r'^\d+h$', o, re.IGNORECASE) @@ -237,16 +223,18 @@ if __name__ == "__main__": for o in opts: m = re.match(r'^\d+seg$', o, re.IGNORECASE) if m is not None: - n = apply_time_segmentation(n, m.group(0)[:-3]) + solver_name = snakemake.config["solving"]["solver"]["name"] + n = apply_time_segmentation(n, m.group(0)[:-3], solver_name) break for o in opts: if "Co2L" in o: m = re.findall("[0-9]*\.?[0-9]+$", o) if len(m) > 0: - add_co2limit(n, Nyears, float(m[0])) + co2limit = float(m[0]) * snakemake.config['electricity']['co2base'] + add_co2limit(n, co2limit, Nyears) else: - add_co2limit(n, Nyears) + add_co2limit(n, snakemake.config['electricity']['co2limit'], Nyears) break for o in opts: @@ -267,12 +255,13 @@ if __name__ == "__main__": c.df.loc[sel,attr] *= factor if 'Ep' in opts: - add_emission_prices(n) + add_emission_prices(n, snakemake.config['costs']['emission_prices']) ll_type, factor = snakemake.wildcards.ll[0], snakemake.wildcards.ll[1:] - set_transmission_limit(n, ll_type, factor, Nyears) + set_transmission_limit(n, ll_type, factor, costs, Nyears) - set_line_nom_max(n) + set_line_nom_max(n, s_nom_max_set=snakemake.config["lines"].get("s_nom_max", np.inf), + p_nom_max_set=snakemake.config["links"].get("p_nom_max", np.inf)) if "ATK" in opts: enforce_autarky(n) diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index 85bc4d15..70f27bf2 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -138,19 +138,15 @@ def simplify_network_to_380(n): return n, trafo_map -def _prepare_connection_costs_per_link(n): +def _prepare_connection_costs_per_link(n, costs, config): if n.links.empty: return {} - Nyears = n.snapshot_weightings.objective.sum() / 8760 - costs = load_costs(Nyears, 
snakemake.input.tech_costs, - snakemake.config['costs'], snakemake.config['electricity']) - connection_costs_per_link = {} - for tech in snakemake.config['renewable']: + for tech in config['renewable']: if tech.startswith('offwind'): connection_costs_per_link[tech] = ( - n.links.length * snakemake.config['lines']['length_factor'] * + n.links.length * config['lines']['length_factor'] * (n.links.underwater_fraction * costs.at[tech + '-connection-submarine', 'capital_cost'] + (1. - n.links.underwater_fraction) * costs.at[tech + '-connection-underground', 'capital_cost']) ) @@ -158,9 +154,9 @@ def _prepare_connection_costs_per_link(n): return connection_costs_per_link -def _compute_connection_costs_to_bus(n, busmap, connection_costs_per_link=None, buses=None): +def _compute_connection_costs_to_bus(n, busmap, costs, config, connection_costs_per_link=None, buses=None): if connection_costs_per_link is None: - connection_costs_per_link = _prepare_connection_costs_per_link(n) + connection_costs_per_link = _prepare_connection_costs_per_link(n, costs, config) if buses is None: buses = busmap.index[busmap.index != busmap.values] @@ -178,7 +174,7 @@ def _compute_connection_costs_to_bus(n, busmap, connection_costs_per_link=None, return connection_costs_to_bus -def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus): +def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus, output): connection_costs = {} for tech in connection_costs_to_bus: tech_b = n.generators.carrier == tech @@ -188,11 +184,11 @@ def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus): logger.info("Displacing {} generator(s) and adding connection costs to capital_costs: {} " .format(tech, ", ".join("{:.0f} Eur/MW/a for `{}`".format(d, b) for b, d in costs.iteritems()))) connection_costs[tech] = costs - pd.DataFrame(connection_costs).to_csv(snakemake.output.connection_costs) + pd.DataFrame(connection_costs).to_csv(output.connection_costs) -def 
_aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate_one_ports={"Load", "StorageUnit"}): +def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, output, aggregate_one_ports={"Load", "StorageUnit"}): def replace_components(n, c, df, pnl): n.mremove(c, n.df(c).index) @@ -201,7 +197,7 @@ def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate if not df.empty: import_series_from_dataframe(n, df, c, attr) - _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus) + _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus, output) generators, generators_pnl = aggregategenerators(n, busmap, custom_strategies={'p_nom_min': np.sum}) replace_components(n, "Generator", generators, generators_pnl) @@ -217,7 +213,7 @@ def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate n.mremove(c, df.index[df.bus0.isin(buses_to_del) | df.bus1.isin(buses_to_del)]) -def simplify_links(n): +def simplify_links(n, costs, config, output): ## Complex multi-node links are folded into end-points logger.info("Simplifying connected link components") @@ -264,7 +260,7 @@ def simplify_links(n): busmap = n.buses.index.to_series() - connection_costs_per_link = _prepare_connection_costs_per_link(n) + connection_costs_per_link = _prepare_connection_costs_per_link(n, costs, config) connection_costs_to_bus = pd.DataFrame(0., index=n.buses.index, columns=list(connection_costs_per_link)) for lbl in labels.value_counts().loc[lambda s: s > 2].index: @@ -278,11 +274,11 @@ def simplify_links(n): m = sp.spatial.distance_matrix(n.buses.loc[b, ['x', 'y']], n.buses.loc[buses[1:-1], ['x', 'y']]) busmap.loc[buses] = b[np.r_[0, m.argmin(axis=0), 1]] - connection_costs_to_bus.loc[buses] += _compute_connection_costs_to_bus(n, busmap, connection_costs_per_link, buses) + connection_costs_to_bus.loc[buses] += _compute_connection_costs_to_bus(n, busmap, costs, config, connection_costs_per_link, buses) 
all_links = [i for _, i in sum(links, [])] - p_max_pu = snakemake.config['links'].get('p_max_pu', 1.) + p_max_pu = config['links'].get('p_max_pu', 1.) lengths = n.links.loc[all_links, 'length'] name = lengths.idxmax() + '+{}'.format(len(links) - 1) params = dict( @@ -309,17 +305,17 @@ def simplify_links(n): logger.debug("Collecting all components using the busmap") - _aggregate_and_move_components(n, busmap, connection_costs_to_bus) + _aggregate_and_move_components(n, busmap, connection_costs_to_bus, output) return n, busmap -def remove_stubs(n): +def remove_stubs(n, costs, config, output): logger.info("Removing stubs") busmap = busmap_by_stubs(n) # ['country']) - connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap) + connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap, costs, config) - _aggregate_and_move_components(n, busmap, connection_costs_to_bus) + _aggregate_and_move_components(n, busmap, connection_costs_to_bus, output) return n, busmap @@ -360,25 +356,25 @@ def aggregate_to_substations(n, buses_i=None): return clustering.network, busmap -def cluster(n, n_clusters): +def cluster(n, n_clusters, config): logger.info(f"Clustering to {n_clusters} buses") - focus_weights = snakemake.config.get('focus_weights', None) + focus_weights = config.get('focus_weights', None) renewable_carriers = pd.Index([tech for tech in n.generators.carrier.unique() - if tech.split('-', 2)[0] in snakemake.config['renewable']]) + if tech.split('-', 2)[0] in config['renewable']]) def consense(x): v = x.iat[0] assert ((x == v).all() or x.isnull().all()), ( "The `potential` configuration option must agree for all renewable carriers, for now!" 
) return v - potential_mode = (consense(pd.Series([snakemake.config['renewable'][tech]['potential'] + potential_mode = (consense(pd.Series([config['renewable'][tech]['potential'] for tech in renewable_carriers])) if len(renewable_carriers) > 0 else 'conservative') clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap=False, potential_mode=potential_mode, - solver_name=snakemake.config['solving']['solver']['name'], + solver_name=config['solving']['solver']['name'], focus_weights=focus_weights) return clustering.network, clustering.busmap @@ -394,9 +390,13 @@ if __name__ == "__main__": n, trafo_map = simplify_network_to_380(n) - n, simplify_links_map = simplify_links(n) + Nyears = n.snapshot_weightings.objective.sum() / 8760 - n, stub_map = remove_stubs(n) + technology_costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears) + + n, simplify_links_map = simplify_links(n, technology_costs, snakemake.config, snakemake.output) + + n, stub_map = remove_stubs(n, technology_costs, snakemake.config, snakemake.output) busmaps = [trafo_map, simplify_links_map, stub_map] @@ -405,7 +405,7 @@ if __name__ == "__main__": busmaps.append(substation_map) if snakemake.wildcards.simpl: - n, cluster_map = cluster(n, int(snakemake.wildcards.simpl)) + n, cluster_map = cluster(n, int(snakemake.wildcards.simpl), snakemake.config) busmaps.append(cluster_map) # some entries in n.buses are not updated in previous functions, therefore can be wrong. as they are not needed diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 6619f2d7..b902f525 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -283,8 +283,7 @@ if __name__ == "__main__": with memory_logger(filename=fn, interval=30.) 
as mem: n = pypsa.Network(snakemake.input[0]) n = prepare_network(n, solve_opts) - n = solve_network(n, config=snakemake.config, opts=opts, - solver_dir=tmpdir, + n = solve_network(n, snakemake.config, opts, solver_dir=tmpdir, solver_logfile=snakemake.log.solver) n.export_to_netcdf(snakemake.output[0]) diff --git a/scripts/solve_operations_network.py b/scripts/solve_operations_network.py index 74506e5a..47bb713f 100644 --- a/scripts/solve_operations_network.py +++ b/scripts/solve_operations_network.py @@ -109,15 +109,13 @@ if __name__ == "__main__": n = set_parameters_from_optimized(n, n_optim) del n_optim - config = snakemake.config opts = snakemake.wildcards.opts.split('-') - config['solving']['options']['skip_iterations'] = False + snakemake.config['solving']['options']['skip_iterations'] = False fn = getattr(snakemake.log, 'memory', None) with memory_logger(filename=fn, interval=30.) as mem: - n = prepare_network(n, solve_opts=snakemake.config['solving']['options']) - n = solve_network(n, config=config, opts=opts, - solver_dir=tmpdir, + n = prepare_network(n, snakemake.config['solving']['options']) + n = solve_network(n, snakemake.config, opts, solver_dir=tmpdir, solver_logfile=snakemake.log.solver) n.export_to_netcdf(snakemake.output[0])