diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 88685f3b..5acfbf42 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -30,7 +30,7 @@ repos: # Find common spelling mistakes in comments and docstrings - repo: https://github.com/codespell-project/codespell - rev: v2.2.4 + rev: v2.2.5 hooks: - id: codespell args: ['--ignore-regex="(\b[A-Z]+\b)"', '--ignore-words-list=fom,appartment,bage,ore,setis,tabacco,berfore'] # Ignore capital case words, e.g. country codes @@ -39,7 +39,7 @@ repos: # Make docstrings PEP 257 compliant - repo: https://github.com/PyCQA/docformatter - rev: v1.6.5 + rev: v1.7.2 hooks: - id: docformatter args: ["--in-place", "--make-summary-multi-line", "--pre-summary-newline"] @@ -67,7 +67,7 @@ repos: # Do YAML formatting (before the linter checks it for misses) - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks - rev: v2.8.0 + rev: v2.9.0 hooks: - id: pretty-format-yaml args: [--autofix, --indent, "2", --preserve-quotes] diff --git a/README.md b/README.md index 02cc3163..79cdfa65 100644 --- a/README.md +++ b/README.md @@ -96,7 +96,7 @@ repository](https://doi.org/10.5281/zenodo.3601881). We strongly welcome anyone interested in contributing to this project. If you have any ideas, suggestions or encounter problems, feel invited to file issues or make pull requests on GitHub. - In case of code-related **questions**, please post on [stack overflow](https://stackoverflow.com/questions/tagged/pypsa). - For non-programming related and more general questions please refer to the [mailing list](https://groups.google.com/group/pypsa). -- To **discuss** with other PyPSA users, organise projects, share news, and get in touch with the community you can use the [discord server](https://discord.gg/JTdvaEBb). +- To **discuss** with other PyPSA users, organise projects, share news, and get in touch with the community you can use the [discord server](https://discord.com/invite/AnuJBk23FU). - For **bugs and feature requests**, please use the [PyPSA-Eur Github Issues page](https://github.com/PyPSA/pypsa-eur/issues). 
# Licence diff --git a/config/config.default.yaml b/config/config.default.yaml index 5733b942..76d0aaba 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -625,9 +625,9 @@ clustering: solving: #tmpdir: "path/to/tmp" options: - formulation: kirchhoff clip_p_max_pu: 1.e-2 load_shedding: false + transmission_losses: 0 noisy_costs: true skip_iterations: true track_iterations: false diff --git a/config/test/config.myopic.yaml b/config/test/config.myopic.yaml index efa03136..0bb85ec6 100644 --- a/config/test/config.myopic.yaml +++ b/config/test/config.myopic.yaml @@ -31,6 +31,14 @@ snapshots: end: "2013-03-08" electricity: + co2limit: 100.e+6 + + extendable_carriers: + Generator: [OCGT] + StorageUnit: [battery] + Store: [H2] + Link: [H2 pipeline] + renewable_carriers: [solar, onwind, offwind-ac, offwind-dc] atlite: diff --git a/config/test/config.overnight.yaml b/config/test/config.overnight.yaml index fb468ded..a2a0f5a4 100644 --- a/config/test/config.overnight.yaml +++ b/config/test/config.overnight.yaml @@ -28,6 +28,14 @@ snapshots: end: "2013-03-08" electricity: + co2limit: 100.e+6 + + extendable_carriers: + Generator: [OCGT] + StorageUnit: [battery] + Store: [H2] + Link: [H2 pipeline] + renewable_carriers: [solar, onwind, offwind-ac, offwind-dc] atlite: diff --git a/doc/configtables/solving.csv b/doc/configtables/solving.csv index cba28cbe..c252ff32 100644 --- a/doc/configtables/solving.csv +++ b/doc/configtables/solving.csv @@ -1,7 +1,7 @@ ,Unit,Values,Description options,,, --- formulation,--,"Any of {'angles', 'kirchhoff', 'cycles', 'ptdf'}","Specifies which variant of linearized power flow formulations to use in the optimisation problem. Recommended is 'kirchhoff'. Explained in `this article `_." -- load_shedding,bool/float,"{'true','false', float}","Add generators with very high marginal cost to simulate load shedding and avoid problem infeasibilities. If load shedding is a float, it denotes the marginal cost in EUR/kWh." +-- transmission_losses,int,"[0-9]","Add piecewise linear approximation of transmission losses based on n tangents. Defaults to 0, which means losses are ignored." -- noisy_costs,bool,"{'true','false'}","Add random noise to marginal cost of generators by :math:`\mathcal{U}(0.009,0,011)` and capital cost of lines and links by :math:`\mathcal{U}(0.09,0,11)`." -- min_iterations,--,int,"Minimum number of solving iterations in between which resistance and reactence (``x/r``) are updated for branches according to ``s_nom_opt`` of the previous run." -- max_iterations,--,int,"Maximum number of solving iterations in between which resistance and reactence (``x/r``) are updated for branches according to ``s_nom_opt`` of the previous run." diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 818227d8..b9e24ad4 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -10,6 +10,7 @@ Release Notes Upcoming Release ================ +* ``params:`` sections in rule definitions are added to track changed settings in ``config.yaml``. The goal is to automatically re-execute rules whose parameters have changed. See `Non-file parameters for rules `_ in the Snakemake documentation. * **Important:** The configuration files are now located in the ``config`` directory. This counts for ``config.default.yaml``, ``config.yaml`` as well as the test configuration files which are now located in ``config/test``. Config files that are still in the root directory will be ignored.
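For readers unfamiliar with the mechanism behind that release note: values listed under a rule's ``params:`` section enter Snakemake's provenance tracking, so the rule is re-executed when those config values change, and the script reads them via ``snakemake.params`` instead of reaching into ``snakemake.config``. Below is a minimal sketch of the pattern used throughout this diff; the rule name ``build_demand`` and the script path are hypothetical and only illustrate the idea.

.. code:: python

    # Snakefile sketch: config values listed under params: are tracked by
    # Snakemake, so the rule re-runs when they change in config.yaml.
    rule build_demand:
        params:
            snapshots=config["snapshots"],
            countries=config["countries"],
        input:
            "data/load_raw.csv",
        output:
            "resources/load.csv",
        script:
            "scripts/build_demand.py"

.. code:: python

    # scripts/build_demand.py sketch: read the tracked values from
    # snakemake.params rather than from snakemake.config.
    import pandas as pd

    snapshots = pd.date_range(freq="h", **snakemake.params.snapshots)
    countries = snakemake.params.countries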
@@ -29,6 +30,11 @@ Upcoming Release max_extension:``` to control the maximum capacity addition per line or link in MW. +* Add option to include a piecewise linear approximation of transmission losses, + e.g. by setting ``solving: options: transmission_losses: 2`` for an + approximation with two tangents. + + PyPSA-Eur 0.8.0 (18th March 2023) ================================= diff --git a/doc/supply_demand.rst b/doc/supply_demand.rst index 16242405..b043268b 100644 --- a/doc/supply_demand.rst +++ b/doc/supply_demand.rst @@ -133,12 +133,12 @@ The coefficient of performance (COP) of air- and ground-sourced heat pumps depen For the sink water temperature Tsink we assume 55 °C [`Config `_ file]. For the time- and location-dependent source temperatures Tsource, we rely on the `ERA5 `_ reanalysis weather data. The temperature differences are converted into COP time series using results from a regression analysis performed in the study by `Stafell et al. `_. For air-sourced heat pumps (ASHP), we use the function: .. math:: - COP (\Delta T) = 6.81 + 0.121\Delta T + 0.000630\Delta T^2 + COP (\Delta T) = 6.81 - 0.121\Delta T + 0.000630\Delta T^2 for ground-sourced heat pumps (GSHP), we use the function: .. math:: - COP(\Delta T) = 8.77 + 0.150\Delta T + 0.000734\Delta T^2 + COP(\Delta T) = 8.77 - 0.150\Delta T + 0.000734\Delta T^2 **Resistive heaters** diff --git a/doc/support.rst b/doc/support.rst index 36d1a2dd..1d512d59 100644 --- a/doc/support.rst +++ b/doc/support.rst @@ -9,6 +9,6 @@ Support * In case of code-related **questions**, please post on `stack overflow `_. * For non-programming related and more general questions please refer to the `mailing list `_. -* To **discuss** with other PyPSA users, organise projects, share news, and get in touch with the community you can use the [discord server](https://discord.gg/JTdvaEBb). +* To **discuss** with other PyPSA users, organise projects, share news, and get in touch with the community you can use the `discord server `_. * For **bugs and feature requests**, please use the `issue tracker `_. * We strongly welcome anyone interested in providing **contributions** to this project. If you have any ideas, suggestions or encounter problems, feel invited to file issues or make pull requests on `Github `_. For further information on how to contribute, please refer to :ref:`contributing`. 
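As a quick cross-check of the corrected heat pump regressions above (note the now-negative linear term), the sketch below evaluates the COP for both heat pump types. It mirrors the ``coefficient_of_performance(delta_T, source)`` call visible in ``scripts/build_cop_profiles.py`` further down in this diff, but the function body shown here is only an illustration of the Staffell et al. fits, and the example source temperature of 5 °C is an assumption.

.. code:: python

    def coefficient_of_performance(delta_T, source="air"):
        # Staffell et al. regression fits with the corrected sign of the linear term
        if source == "air":   # air-sourced heat pump (ASHP)
            return 6.81 - 0.121 * delta_T + 0.000630 * delta_T**2
        if source == "soil":  # ground-sourced heat pump (GSHP)
            return 8.77 - 0.150 * delta_T + 0.000734 * delta_T**2
        raise NotImplementedError(f"COP not implemented for source '{source}'")

    # e.g. Tsink = 55 °C and an assumed Tsource = 5 °C give delta_T = 50 K
    cop_ashp = coefficient_of_performance(50.0, source="air")   # approx. 2.34
    cop_gshp = coefficient_of_performance(50.0, source="soil")  # approx. 3.11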
diff --git a/envs/environment.yaml b/envs/environment.yaml index f970c9ba..9d800fdc 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -10,7 +10,7 @@ dependencies: - python>=3.8 - pip -- pypsa>=0.21.3 +- pypsa>=0.23 - atlite>=0.2.9 - dask diff --git a/rules/build_electricity.smk b/rules/build_electricity.smk index 133ceb6e..44ebf404 100644 --- a/rules/build_electricity.smk +++ b/rules/build_electricity.smk @@ -19,6 +19,10 @@ if config["enable"].get("prepare_links_p_nom", False): rule build_electricity_demand: + params: + snapshots=config["snapshots"], + countries=config["countries"], + load=config["load"], input: ancient("data/load_raw.csv"), output: @@ -34,6 +38,10 @@ rule build_electricity_demand: rule build_powerplants: + params: + powerplants_filter=config["electricity"]["powerplants_filter"], + custom_powerplants=config["electricity"]["custom_powerplants"], + countries=config["countries"], input: base_network=RESOURCES + "networks/base.nc", custom_powerplants="data/custom_powerplants.csv", @@ -79,6 +87,8 @@ rule base_network: rule build_shapes: + params: + countries=config["countries"], input: naturalearth=ancient("data/bundle/naturalearth/ne_10m_admin_0_countries.shp"), eez=ancient("data/bundle/eez/World_EEZ_v8_2014.shp"), @@ -104,6 +114,8 @@ rule build_shapes: rule build_bus_regions: + params: + countries=config["countries"], input: country_shapes=RESOURCES + "country_shapes.geojson", offshore_shapes=RESOURCES + "offshore_shapes.geojson", @@ -125,6 +137,9 @@ rule build_bus_regions: if config["enable"].get("build_cutout", False): rule build_cutout: + params: + snapshots=config["snapshots"], + cutouts=config["atlite"]["cutouts"], input: regions_onshore=RESOURCES + "regions_onshore.geojson", regions_offshore=RESOURCES + "regions_offshore.geojson", @@ -186,6 +201,8 @@ rule build_ship_raster: rule build_renewable_profiles: + params: + renewable=config["renewable"], input: base_network=RESOURCES + "networks/base.nc", corine=ancient("data/bundle/corine/g250_clc06_V18_5.tif"), @@ -235,6 +252,9 @@ rule build_renewable_profiles: rule build_hydro_profile: + params: + hydro=config["renewable"]["hydro"], + countries=config["countries"], input: country_shapes=RESOURCES + "country_shapes.geojson", eia_hydro_generation="data/eia_hydro_annual_generation.csv", @@ -252,6 +272,14 @@ rule build_hydro_profile: rule add_electricity: + params: + length_factor=config["lines"]["length_factor"], + scaling_factor=config["load"]["scaling_factor"], + countries=config["countries"], + renewable=config["renewable"], + electricity=config["electricity"], + conventional=config.get("conventional", {}), + costs=config["costs"], input: **{ f"profile_{tech}": RESOURCES + f"profile_{tech}.nc" @@ -287,6 +315,15 @@ rule add_electricity: rule simplify_network: + params: + simplify_network=config["clustering"]["simplify_network"], + aggregation_strategies=config["clustering"].get("aggregation_strategies", {}), + focus_weights=config.get("focus_weights", None), + renewable_carriers=config["electricity"]["renewable_carriers"], + max_hours=config["electricity"]["max_hours"], + length_factor=config["lines"]["length_factor"], + p_max_pu=config["links"].get("p_max_pu", 1.0), + costs=config["costs"], input: network=RESOURCES + "networks/elec.nc", tech_costs=COSTS, @@ -312,6 +349,16 @@ rule simplify_network: rule cluster_network: + params: + cluster_network=config["clustering"]["cluster_network"], + aggregation_strategies=config["clustering"].get("aggregation_strategies", {}), + 
custom_busmap=config["enable"].get("custom_busmap", False), + focus_weights=config.get("focus_weights", None), + renewable_carriers=config["electricity"]["renewable_carriers"], + conventional_carriers=config["electricity"].get("conventional_carriers", []), + max_hours=config["electricity"]["max_hours"], + length_factor=config["lines"]["length_factor"], + costs=config["costs"], input: network=RESOURCES + "networks/elec_s{simpl}.nc", regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}.geojson", @@ -343,6 +390,10 @@ rule cluster_network: rule add_extra_components: + params: + extendable_carriers=config["electricity"]["extendable_carriers"], + max_hours=config["electricity"]["max_hours"], + costs=config["costs"], input: network=RESOURCES + "networks/elec_s{simpl}_{clusters}.nc", tech_costs=COSTS, @@ -362,6 +413,14 @@ rule add_extra_components: rule prepare_network: + params: + links=config["links"], + lines=config["lines"], + co2base=config["electricity"]["co2base"], + co2limit=config["electricity"]["co2limit"], + gaslimit=config["electricity"].get("gaslimit"), + max_hours=config["electricity"]["max_hours"], + costs=config["costs"], input: RESOURCES + "networks/elec_s{simpl}_{clusters}_ec.nc", tech_costs=COSTS, diff --git a/rules/build_sector.smk b/rules/build_sector.smk index 1b724d1a..1c3507fd 100644 --- a/rules/build_sector.smk +++ b/rules/build_sector.smk @@ -140,6 +140,8 @@ if not (config["sector"]["gas_network"] or config["sector"]["H2_retrofit"]): rule build_heat_demands: + params: + snapshots=config["snapshots"], input: pop_layout=RESOURCES + "pop_layout_{scope}.nc", regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", @@ -160,6 +162,8 @@ rule build_heat_demands: rule build_temperature_profiles: + params: + snapshots=config["snapshots"], input: pop_layout=RESOURCES + "pop_layout_{scope}.nc", regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", @@ -181,6 +185,8 @@ rule build_temperature_profiles: rule build_cop_profiles: + params: + heat_pump_sink_T=config["sector"]["heat_pump_sink_T"], input: temp_soil_total=RESOURCES + "temp_soil_total_elec_s{simpl}_{clusters}.nc", temp_soil_rural=RESOURCES + "temp_soil_rural_elec_s{simpl}_{clusters}.nc", @@ -208,6 +214,9 @@ rule build_cop_profiles: rule build_solar_thermal_profiles: + params: + snapshots=config["snapshots"], + solar_thermal=config["solar_thermal"], input: pop_layout=RESOURCES + "pop_layout_{scope}.nc", regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", @@ -228,6 +237,9 @@ rule build_solar_thermal_profiles: rule build_energy_totals: + params: + countries=config["countries"], + energy=config["energy"], input: nuts3_shapes=RESOURCES + "nuts3_shapes.geojson", co2="data/eea/UNFCCC_v23.csv", @@ -253,6 +265,8 @@ rule build_energy_totals: rule build_biomass_potentials: + params: + biomass=config["biomass"], input: enspreso_biomass=HTTP.remote( "https://cidportal.jrc.ec.europa.eu/ftp/jrc-opendata/ENSPRESO/ENSPRESO_BIOMASS.xlsx", @@ -315,6 +329,10 @@ if not config["sector"]["biomass_transport"]: if config["sector"]["regional_co2_sequestration_potential"]["enable"]: rule build_sequestration_potentials: + params: + sequestration_potential=config["sector"][ + "regional_co2_sequestration_potential" + ], input: sequestration_potential=HTTP.remote( "https://raw.githubusercontent.com/ericzhou571/Co2Storage/main/resources/complete_map_2020_unit_Mt.geojson", @@ -368,6 +386,8 @@ rule build_salt_cavern_potentials: rule build_ammonia_production: + params: + 
countries=config["countries"], input: usgs="data/myb1-2017-nitro.xls", output: @@ -386,6 +406,9 @@ rule build_ammonia_production: rule build_industry_sector_ratios: + params: + industry=config["industry"], + ammonia=config["sector"].get("ammonia", False), input: ammonia_production=RESOURCES + "ammonia_production.csv", idees="data/jrc-idees-2015", @@ -405,6 +428,9 @@ rule build_industry_sector_ratios: rule build_industrial_production_per_country: + params: + industry=config["industry"], + countries=config["countries"], input: ammonia_production=RESOURCES + "ammonia_production.csv", jrc="data/jrc-idees-2015", @@ -426,6 +452,8 @@ rule build_industrial_production_per_country: rule build_industrial_production_per_country_tomorrow: + params: + industry=config["industry"], input: industrial_production_per_country=RESOURCES + "industrial_production_per_country.csv", @@ -450,6 +478,9 @@ rule build_industrial_production_per_country_tomorrow: rule build_industrial_distribution_key: + params: + hotmaps_locate_missing=config["industry"].get("hotmaps_locate_missing", False), + countries=config["countries"], input: regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv", @@ -524,6 +555,9 @@ rule build_industrial_energy_demand_per_node: rule build_industrial_energy_demand_per_country_today: + params: + countries=config["countries"], + industry=config["industry"], input: jrc="data/jrc-idees-2015", ammonia_production=RESOURCES + "ammonia_production.csv", @@ -570,6 +604,9 @@ rule build_industrial_energy_demand_per_node_today: if config["sector"]["retrofitting"]["retro_endogen"]: rule build_retro_cost: + params: + retrofitting=config["sector"]["retrofitting"], + countries=config["countries"], input: building_stock="data/retro/data_building_stock.csv", data_tabula="data/retro/tabula-calculator-calcsetbuilding.csv", @@ -640,6 +677,9 @@ rule build_shipping_demand: rule build_transport_demand: + params: + snapshots=config["snapshots"], + sector=config["sector"], input: clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv", pop_weighted_energy_totals=RESOURCES @@ -666,6 +706,18 @@ rule build_transport_demand: rule prepare_sector_network: params: + co2_budget=config["co2_budget"], + conventional_carriers=config["existing_capacities"]["conventional_carriers"], + foresight=config["foresight"], + costs=config["costs"], + sector=config["sector"], + industry=config["industry"], + pypsa_eur=config["pypsa_eur"], + length_factor=config["lines"]["length_factor"], + planning_horizons=config["scenario"]["planning_horizons"], + countries=config["countries"], + emissions_scope=config["energy"]["emissions"], + eurostat_report_year=config["energy"]["eurostat_report_year"], RDIR=RDIR, input: **build_retro_cost_output, diff --git a/rules/postprocess.smk b/rules/postprocess.smk index fae0f856..ac80cd10 100644 --- a/rules/postprocess.smk +++ b/rules/postprocess.smk @@ -9,6 +9,9 @@ localrules: rule plot_network: + params: + foresight=config["foresight"], + plotting=config["plotting"], input: overrides="data/override_component_attrs", network=RESULTS @@ -67,6 +70,10 @@ rule copy_conda_env: rule make_summary: params: + foresight=config["foresight"], + costs=config["costs"], + snapshots=config["snapshots"], + scenario=config["scenario"], RDIR=RDIR, input: overrides="data/override_component_attrs", @@ -114,6 +121,10 @@ rule make_summary: rule plot_summary: params: + countries=config["countries"], + 
planning_horizons=config["scenario"]["planning_horizons"], + sector_opts=config["scenario"]["sector_opts"], + plotting=config["plotting"], RDIR=RDIR, input: costs=RESULTS + "csvs/costs.csv", diff --git a/rules/retrieve.smk b/rules/retrieve.smk index 0a96406a..4bfbd6c6 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -19,6 +19,8 @@ if config["enable"].get("retrieve_databundle", True): datafiles.extend(["natura/Natura2000_end2015.shp", "GEBCO_2014_2D.nc"]) rule retrieve_databundle: + params: + tutorial=config["tutorial"], output: expand("data/bundle/{file}", file=datafiles), log: diff --git a/rules/solve_electricity.smk b/rules/solve_electricity.smk index fc70ce42..f73a5b0c 100644 --- a/rules/solve_electricity.smk +++ b/rules/solve_electricity.smk @@ -4,6 +4,13 @@ rule solve_network: + params: + solving=config["solving"], + foresight=config["foresight"], + planning_horizons=config["scenario"]["planning_horizons"], + co2_sequestration_potential=config["sector"].get( + "co2_sequestration_potential", 200 + ), input: network=RESOURCES + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", output: @@ -28,6 +35,8 @@ rule solve_network: rule solve_operations_network: + params: + options=config["solving"]["options"], input: network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", output: diff --git a/rules/solve_myopic.smk b/rules/solve_myopic.smk index ec4281ff..086375c2 100644 --- a/rules/solve_myopic.smk +++ b/rules/solve_myopic.smk @@ -4,6 +4,11 @@ rule add_existing_baseyear: + params: + baseyear=config["scenario"]["planning_horizons"][0], + sector=config["sector"], + existing_capacities=config["existing_capacities"], + costs=config["costs"], input: overrides="data/override_component_attrs", network=RESULTS @@ -42,6 +47,10 @@ rule add_existing_baseyear: rule add_brownfield: + params: + H2_retrofit=config["sector"]["H2_retrofit"], + H2_retrofit_capacity_per_CH4=config["sector"]["H2_retrofit_capacity_per_CH4"], + threshold_capacity=config["existing_capacities"]["threshold_capacity"], input: overrides="data/override_component_attrs", network=RESULTS @@ -74,6 +83,13 @@ ruleorder: add_existing_baseyear > add_brownfield rule solve_sector_network_myopic: + params: + solving=config["solving"], + foresight=config["foresight"], + planning_horizons=config["scenario"]["planning_horizons"], + co2_sequestration_potential=config["sector"].get( + "co2_sequestration_potential", 200 + ), input: overrides="data/override_component_attrs", network=RESULTS diff --git a/rules/solve_overnight.smk b/rules/solve_overnight.smk index f05925b0..fc430f4d 100644 --- a/rules/solve_overnight.smk +++ b/rules/solve_overnight.smk @@ -4,6 +4,13 @@ rule solve_sector_network: + params: + solving=config["solving"], + foresight=config["foresight"], + planning_horizons=config["scenario"]["planning_horizons"], + co2_sequestration_potential=config["sector"].get( + "co2_sequestration_potential", 200 + ), input: overrides="data/override_component_attrs", network=RESULTS diff --git a/scripts/_helpers.py b/scripts/_helpers.py index a67fb105..512cb87a 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -82,7 +82,7 @@ def load_network(import_name=None, custom_components=None): As in pypsa.Network(import_name) custom_components : dict Dictionary listing custom components. - For using ``snakemake.config['override_components']`` + For using ``snakemake.params['override_components']`` in ``config/config.yaml`` define: .. 
code:: yaml @@ -385,10 +385,11 @@ def mock_snakemake(rulename, configfiles=[], **wildcards): def override_component_attrs(directory): - """Tell PyPSA that links can have multiple outputs by - overriding the component_attrs. This can be done for - as many buses as you need with format busi for i = 2,3,4,5,.... - See https://pypsa.org/doc/components.html#link-with-multiple-outputs-or-inputs + """ + Tell PyPSA that links can have multiple outputs by overriding the + component_attrs. This can be done for as many buses as you need with format + busi for i = 2,3,4,5,.... See https://pypsa.org/doc/components.html#link- + with-multiple-outputs-or-inputs. Parameters ---------- diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index e08b86d7..b33d2f19 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -49,7 +49,7 @@ def add_brownfield(n, n_p, year): ) ] - threshold = snakemake.config["existing_capacities"]["threshold_capacity"] + threshold = snakemake.params.threshold_capacity if not chp_heat.empty: threshold_chp_heat = ( @@ -87,7 +87,7 @@ def add_brownfield(n, n_p, year): # deal with gas network pipe_carrier = ["gas pipeline"] - if snakemake.config["sector"]["H2_retrofit"]: + if snakemake.params.H2_retrofit: # drop capacities of previous year to avoid duplicating to_drop = n.links.carrier.isin(pipe_carrier) & (n.links.build_year != year) n.mremove("Link", n.links.loc[to_drop].index) @@ -98,7 +98,7 @@ def add_brownfield(n, n_p, year): & (n.links.build_year != year) ].index gas_pipes_i = n.links[n.links.carrier.isin(pipe_carrier)].index - CH4_per_H2 = 1 / snakemake.config["sector"]["H2_retrofit_capacity_per_CH4"] + CH4_per_H2 = 1 / snakemake.params.H2_retrofit_capacity_per_CH4 fr = "H2 pipeline retrofitted" to = "gas pipeline" # today's pipe capacity diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index f910dee4..594988bd 100755 --- a/scripts/add_electricity.py +++ b/scripts/add_electricity.py @@ -137,7 +137,7 @@ def _add_missing_carriers_from_costs(n, costs, carriers): n.import_components_from_dataframe(emissions, "Carrier") -def load_costs(tech_costs, config, elec_config, Nyears=1.0): +def load_costs(tech_costs, config, max_hours, Nyears=1.0): # set all asset costs and other parameters costs = pd.read_csv(tech_costs, index_col=[0, 1]).sort_index() @@ -180,7 +180,6 @@ def load_costs(tech_costs, config, elec_config, Nyears=1.0): dict(capital_cost=capital_cost, marginal_cost=0.0, co2_emissions=0.0) ) - max_hours = elec_config["max_hours"] costs.loc["battery"] = costs_for_storage( costs.loc["battery storage"], costs.loc["battery inverter"], @@ -371,7 +370,7 @@ def attach_conventional_generators( ppl, conventional_carriers, extendable_carriers, - conventional_config, + conventional_params, conventional_inputs, ): carriers = set(conventional_carriers) | set(extendable_carriers["Generator"]) @@ -408,17 +407,19 @@ def attach_conventional_generators( lifetime=(ppl.dateout - ppl.datein).fillna(np.inf), ) - for carrier in conventional_config: + for carrier in conventional_params: # Generators with technology affected idx = n.generators.query("carrier == @carrier").index - for attr in list(set(conventional_config[carrier]) & set(n.generators)): - values = conventional_config[carrier][attr] + for attr in list(set(conventional_params[carrier]) & set(n.generators)): + values = conventional_params[carrier][attr] if f"conventional_{carrier}_{attr}" in conventional_inputs: # Values affecting generators of technology k country-specific # First map generator 
buses to countries; then map countries to p_max_pu - values = pd.read_csv(values, index_col=0).iloc[:, 0] + values = pd.read_csv( + snakemake.input[f"conventional_{carrier}_{attr}"], index_col=0 + ).iloc[:, 0] bus_values = n.buses.country.map(values) n.generators[attr].update( n.generators.loc[idx].bus.map(bus_values).dropna() @@ -428,7 +429,7 @@ def attach_conventional_generators( n.generators.loc[idx, attr] = values -def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **config): +def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **params): _add_missing_carriers_from_costs(n, costs, carriers) ppl = ( @@ -483,9 +484,9 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **con ) if "PHS" in carriers and not phs.empty: - # fill missing max hours to config value and + # fill missing max hours to params value and # assume no natural inflow due to lack of data - max_hours = config.get("PHS_max_hours", 6) + max_hours = params.get("PHS_max_hours", 6) phs = phs.replace({"max_hours": {0: max_hours}}) n.madd( "StorageUnit", @@ -501,7 +502,7 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **con ) if "hydro" in carriers and not hydro.empty: - hydro_max_hours = config.get("hydro_max_hours") + hydro_max_hours = params.get("hydro_max_hours") assert hydro_max_hours is not None, "No path for hydro capacities given." @@ -651,16 +652,7 @@ def attach_OPSD_renewables(n, tech_map): n.generators.p_nom_min.update(gens.bus.map(caps).dropna()) -def estimate_renewable_capacities(n, config): - year = config["electricity"]["estimate_renewable_capacities"]["year"] - tech_map = config["electricity"]["estimate_renewable_capacities"][ - "technology_mapping" - ] - countries = config["countries"] - expansion_limit = config["electricity"]["estimate_renewable_capacities"][ - "expansion_limit" - ] - +def estimate_renewable_capacities(n, year, tech_map, expansion_limit, countries): if not len(countries) or not len(tech_map): return @@ -723,48 +715,33 @@ if __name__ == "__main__": snakemake = mock_snakemake("add_electricity") configure_logging(snakemake) + params = snakemake.params + n = pypsa.Network(snakemake.input.base_network) Nyears = n.snapshot_weightings.objective.sum() / 8760.0 costs = load_costs( snakemake.input.tech_costs, - snakemake.config["costs"], - snakemake.config["electricity"], + params.costs, + params.electricity["max_hours"], Nyears, ) ppl = load_powerplants(snakemake.input.powerplants) - if "renewable_carriers" in snakemake.config["electricity"]: - renewable_carriers = set(snakemake.config["electricity"]["renewable_carriers"]) - else: - logger.warning( - "Missing key `renewable_carriers` under config entry `electricity`. " - "In future versions, this will raise an error. " - "Falling back to carriers listed under `renewable`." - ) - renewable_carriers = snakemake.config["renewable"] - - extendable_carriers = snakemake.config["electricity"]["extendable_carriers"] - if not (set(renewable_carriers) & set(extendable_carriers["Generator"])): - logger.warning( - "No renewables found in config entry `extendable_carriers`. " - "In future versions, these have to be explicitly listed. " - "Falling back to all renewables." 
- ) - - conventional_carriers = snakemake.config["electricity"]["conventional_carriers"] - attach_load( n, snakemake.input.regions, snakemake.input.load, snakemake.input.nuts3_shapes, - snakemake.config["countries"], - snakemake.config["load"]["scaling_factor"], + params.countries, + params.scaling_factor, ) - update_transmission_costs(n, costs, snakemake.config["lines"]["length_factor"]) + update_transmission_costs(n, costs, params.length_factor) + renewable_carriers = set(params.electricity["renewable_carriers"]) + extendable_carriers = params.electricity["extendable_carriers"] + conventional_carriers = params.electricity["conventional_carriers"] conventional_inputs = { k: v for k, v in snakemake.input.items() if k.startswith("conventional_") } @@ -774,7 +751,7 @@ if __name__ == "__main__": ppl, conventional_carriers, extendable_carriers, - snakemake.config.get("conventional", {}), + params.conventional, conventional_inputs, ) @@ -784,67 +761,32 @@ if __name__ == "__main__": snakemake.input, renewable_carriers, extendable_carriers, - snakemake.config["lines"]["length_factor"], + params.length_factor, ) if "hydro" in renewable_carriers: - conf = snakemake.config["renewable"]["hydro"] + para = params.renewable["hydro"] attach_hydro( n, costs, ppl, snakemake.input.profile_hydro, snakemake.input.hydro_capacities, - conf.pop("carriers", []), - **conf, + para.pop("carriers", []), + **para, ) - if "estimate_renewable_capacities" not in snakemake.config["electricity"]: - logger.warning( - "Missing key `estimate_renewable_capacities` under config entry `electricity`. " - "In future versions, this will raise an error. " - "Falling back to whether ``estimate_renewable_capacities_from_capacity_stats`` is in the config." - ) - if ( - "estimate_renewable_capacities_from_capacity_stats" - in snakemake.config["electricity"] - ): - estimate_renewable_caps = { - "enable": True, - **snakemake.config["electricity"][ - "estimate_renewable_capacities_from_capacity_stats" - ], - } - else: - estimate_renewable_caps = {"enable": False} - else: - estimate_renewable_caps = snakemake.config["electricity"][ - "estimate_renewable_capacities" - ] - if "enable" not in estimate_renewable_caps: - logger.warning( - "Missing key `enable` under config entry `estimate_renewable_capacities`. " - "In future versions, this will raise an error. Falling back to False." - ) - estimate_renewable_caps = {"enable": False} - if "from_opsd" not in estimate_renewable_caps: - logger.warning( - "Missing key `from_opsd` under config entry `estimate_renewable_capacities`. " - "In future versions, this will raise an error. " - "Falling back to whether `renewable_capacities_from_opsd` is non-empty." 
- ) - from_opsd = bool( - snakemake.config["electricity"].get("renewable_capacities_from_opsd", False) - ) - estimate_renewable_caps["from_opsd"] = from_opsd - + estimate_renewable_caps = params.electricity["estimate_renewable_capacities"] if estimate_renewable_caps["enable"]: + tech_map = estimate_renewable_caps["technology_mapping"] + expansion_limit = estimate_renewable_caps["expansion_limit"] + year = estimate_renewable_caps["year"] + if estimate_renewable_caps["from_opsd"]: - tech_map = snakemake.config["electricity"]["estimate_renewable_capacities"][ - "technology_mapping" - ] attach_OPSD_renewables(n, tech_map) - estimate_renewable_capacities(n, snakemake.config) + estimate_renewable_capacities( + n, year, tech_map, expansion_limit, params.countries + ) update_p_nom_max(n) diff --git a/scripts/add_existing_baseyear.py b/scripts/add_existing_baseyear.py index 5bc0960d..52cb585e 100644 --- a/scripts/add_existing_baseyear.py +++ b/scripts/add_existing_baseyear.py @@ -157,7 +157,7 @@ def add_power_capacities_installed_before_baseyear(n, grouping_years, costs, bas # Fill missing DateOut dateout = ( df_agg.loc[biomass_i, "DateIn"] - + snakemake.config["costs"]["fill_values"]["lifetime"] + + snakemake.params.costs["fill_values"]["lifetime"] ) df_agg.loc[biomass_i, "DateOut"] = df_agg.loc[biomass_i, "DateOut"].fillna(dateout) @@ -218,7 +218,7 @@ def add_power_capacities_installed_before_baseyear(n, grouping_years, costs, bas capacity = df.loc[grouping_year, generator] capacity = capacity[~capacity.isna()] capacity = capacity[ - capacity > snakemake.config["existing_capacities"]["threshold_capacity"] + capacity > snakemake.params.existing_capacities["threshold_capacity"] ] suffix = "-ac" if generator == "offwind" else "" name_suffix = f" {generator}{suffix}-{grouping_year}" @@ -582,7 +582,7 @@ def add_heating_capacities_installed_before_baseyear( ) # delete links with capacities below threshold - threshold = snakemake.config["existing_capacities"]["threshold_capacity"] + threshold = snakemake.params.existing_capacities["threshold_capacity"] n.mremove( "Link", [ @@ -612,10 +612,10 @@ if __name__ == "__main__": update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts) - options = snakemake.config["sector"] + options = snakemake.params.sector opts = snakemake.wildcards.sector_opts.split("-") - baseyear = snakemake.config["scenario"]["planning_horizons"][0] + baseyear = snakemake.params.baseyear overrides = override_component_attrs(snakemake.input.overrides) n = pypsa.Network(snakemake.input.network, override_component_attrs=overrides) @@ -626,14 +626,12 @@ if __name__ == "__main__": Nyears = n.snapshot_weightings.generators.sum() / 8760.0 costs = prepare_costs( snakemake.input.costs, - snakemake.config["costs"], + snakemake.params.costs, Nyears, ) - grouping_years_power = snakemake.config["existing_capacities"][ - "grouping_years_power" - ] - grouping_years_heat = snakemake.config["existing_capacities"]["grouping_years_heat"] + grouping_years_power = snakemake.params.existing_capacities["grouping_years_power"] + grouping_years_heat = snakemake.params.existing_capacities["grouping_years_heat"] add_power_capacities_installed_before_baseyear( n, grouping_years_power, costs, baseyear ) @@ -650,7 +648,7 @@ if __name__ == "__main__": .to_pandas() .reindex(index=n.snapshots) ) - default_lifetime = snakemake.config["costs"]["fill_values"]["lifetime"] + default_lifetime = snakemake.params.costs["fill_values"]["lifetime"] add_heating_capacities_installed_before_baseyear( n, 
baseyear, diff --git a/scripts/add_extra_components.py b/scripts/add_extra_components.py index 020370e5..f034a7f2 100644 --- a/scripts/add_extra_components.py +++ b/scripts/add_extra_components.py @@ -67,9 +67,8 @@ idx = pd.IndexSlice logger = logging.getLogger(__name__) -def attach_storageunits(n, costs, elec_opts): - carriers = elec_opts["extendable_carriers"]["StorageUnit"] - max_hours = elec_opts["max_hours"] +def attach_storageunits(n, costs, extendable_carriers, max_hours): + carriers = extendable_carriers["StorageUnit"] _add_missing_carriers_from_costs(n, costs, carriers) @@ -99,8 +98,8 @@ def attach_storageunits(n, costs, elec_opts): ) -def attach_stores(n, costs, elec_opts): - carriers = elec_opts["extendable_carriers"]["Store"] +def attach_stores(n, costs, extendable_carriers): + carriers = extendable_carriers["Store"] _add_missing_carriers_from_costs(n, costs, carriers) @@ -187,11 +186,10 @@ def attach_stores(n, costs, elec_opts): ) -def attach_hydrogen_pipelines(n, costs, elec_opts): - ext_carriers = elec_opts["extendable_carriers"] - as_stores = ext_carriers.get("Store", []) +def attach_hydrogen_pipelines(n, costs, extendable_carriers): + as_stores = extendable_carriers.get("Store", []) - if "H2 pipeline" not in ext_carriers.get("Link", []): + if "H2 pipeline" not in extendable_carriers.get("Link", []): return assert "H2" in as_stores, ( @@ -235,16 +233,17 @@ if __name__ == "__main__": configure_logging(snakemake) n = pypsa.Network(snakemake.input.network) - elec_config = snakemake.config["electricity"] + extendable_carriers = snakemake.params.extendable_carriers + max_hours = snakemake.params.max_hours Nyears = n.snapshot_weightings.objective.sum() / 8760.0 costs = load_costs( - snakemake.input.tech_costs, snakemake.config["costs"], elec_config, Nyears + snakemake.input.tech_costs, snakemake.params.costs, max_hours, Nyears ) - attach_storageunits(n, costs, elec_config) - attach_stores(n, costs, elec_config) - attach_hydrogen_pipelines(n, costs, elec_config) + attach_storageunits(n, costs, extendable_carriers, max_hours) + attach_stores(n, costs, extendable_carriers) + attach_hydrogen_pipelines(n, costs, extendable_carriers) add_nice_carrier_names(n, snakemake.config) diff --git a/scripts/build_ammonia_production.py b/scripts/build_ammonia_production.py index d78d627e..1bcdf9ae 100644 --- a/scripts/build_ammonia_production.py +++ b/scripts/build_ammonia_production.py @@ -30,7 +30,7 @@ if __name__ == "__main__": ammonia.index = cc.convert(ammonia.index, to="iso2") years = [str(i) for i in range(2013, 2018)] - countries = ammonia.index.intersection(snakemake.config["countries"]) + countries = ammonia.index.intersection(snakemake.params.countries) ammonia = ammonia.loc[countries, years].astype(float) # convert from ktonN to ktonNH3 diff --git a/scripts/build_biomass_potentials.py b/scripts/build_biomass_potentials.py index 21d0e623..d200a78e 100644 --- a/scripts/build_biomass_potentials.py +++ b/scripts/build_biomass_potentials.py @@ -210,9 +210,9 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_biomass_potentials", simpl="", clusters="5") - config = snakemake.config["biomass"] - year = config["year"] - scenario = config["scenario"] + params = snakemake.params.biomass + year = params["year"] + scenario = params["scenario"] enspreso = enspreso_biomass_potentials(year, scenario) @@ -228,7 +228,7 @@ if __name__ == "__main__": df.to_csv(snakemake.output.biomass_potentials_all) - grouper = {v: k for k, vv in config["classes"].items() for v in vv} + grouper = {v: k for 
k, vv in params["classes"].items() for v in vv} df = df.groupby(grouper, axis=1).sum() df *= 1e6 # TWh/a to MWh/a diff --git a/scripts/build_bus_regions.py b/scripts/build_bus_regions.py index 6dc3b5a4..a6500bb0 100644 --- a/scripts/build_bus_regions.py +++ b/scripts/build_bus_regions.py @@ -116,7 +116,7 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_bus_regions") configure_logging(snakemake) - countries = snakemake.config["countries"] + countries = snakemake.params.countries n = pypsa.Network(snakemake.input.base_network) diff --git a/scripts/build_cop_profiles.py b/scripts/build_cop_profiles.py index 5d36cd5b..4b1d952e 100644 --- a/scripts/build_cop_profiles.py +++ b/scripts/build_cop_profiles.py @@ -39,7 +39,7 @@ if __name__ == "__main__": for source in ["air", "soil"]: source_T = xr.open_dataarray(snakemake.input[f"temp_{source}_{area}"]) - delta_T = snakemake.config["sector"]["heat_pump_sink_T"] - source_T + delta_T = snakemake.params.heat_pump_sink_T - source_T cop = coefficient_of_performance(delta_T, source) diff --git a/scripts/build_cutout.py b/scripts/build_cutout.py index 365797d2..9a7f9e00 100644 --- a/scripts/build_cutout.py +++ b/scripts/build_cutout.py @@ -106,9 +106,9 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_cutout", cutout="europe-2013-era5") configure_logging(snakemake) - cutout_params = snakemake.config["atlite"]["cutouts"][snakemake.wildcards.cutout] + cutout_params = snakemake.params.cutouts[snakemake.wildcards.cutout] - snapshots = pd.date_range(freq="h", **snakemake.config["snapshots"]) + snapshots = pd.date_range(freq="h", **snakemake.params.snapshots) time = [snapshots[0], snapshots[-1]] cutout_params["time"] = slice(*cutout_params.get("time", time)) diff --git a/scripts/build_electricity_demand.py b/scripts/build_electricity_demand.py index b86b4a5f..ba1fb881 100755 --- a/scripts/build_electricity_demand.py +++ b/scripts/build_electricity_demand.py @@ -279,16 +279,16 @@ if __name__ == "__main__": configure_logging(snakemake) - powerstatistics = snakemake.config["load"]["power_statistics"] - interpolate_limit = snakemake.config["load"]["interpolate_limit"] - countries = snakemake.config["countries"] - snapshots = pd.date_range(freq="h", **snakemake.config["snapshots"]) + powerstatistics = snakemake.params.load["power_statistics"] + interpolate_limit = snakemake.params.load["interpolate_limit"] + countries = snakemake.params.countries + snapshots = pd.date_range(freq="h", **snakemake.params.snapshots) years = slice(snapshots[0], snapshots[-1]) - time_shift = snakemake.config["load"]["time_shift_for_large_gaps"] + time_shift = snakemake.params.load["time_shift_for_large_gaps"] load = load_timeseries(snakemake.input[0], years, countries, powerstatistics) - if snakemake.config["load"]["manual_adjustments"]: + if snakemake.params.load["manual_adjustments"]: load = manual_adjustment(load, snakemake.input[0], powerstatistics) logger.info(f"Linearly interpolate gaps of size {interpolate_limit} and less.") diff --git a/scripts/build_energy_totals.py b/scripts/build_energy_totals.py index 45fc960f..891c4e2a 100644 --- a/scripts/build_energy_totals.py +++ b/scripts/build_energy_totals.py @@ -737,16 +737,16 @@ if __name__ == "__main__": logging.basicConfig(level=snakemake.config["logging"]["level"]) - config = snakemake.config["energy"] + params = snakemake.params.energy nuts3 = gpd.read_file(snakemake.input.nuts3_shapes).set_index("index") population = nuts3["pop"].groupby(nuts3.country).sum() - countries = 
snakemake.config["countries"] + countries = snakemake.params.countries idees_countries = pd.Index(countries).intersection(eu28) - data_year = config["energy_totals_year"] - report_year = snakemake.config["energy"]["eurostat_report_year"] + data_year = params["energy_totals_year"] + report_year = snakemake.params.energy["eurostat_report_year"] input_eurostat = snakemake.input.eurostat eurostat = build_eurostat(input_eurostat, countries, report_year, data_year) swiss = build_swiss(data_year) @@ -755,8 +755,8 @@ if __name__ == "__main__": energy = build_energy_totals(countries, eurostat, swiss, idees) energy.to_csv(snakemake.output.energy_name) - base_year_emissions = config["base_emissions_year"] - emissions_scope = snakemake.config["energy"]["emissions"] + base_year_emissions = params["base_emissions_year"] + emissions_scope = snakemake.params.energy["emissions"] eea_co2 = build_eea_co2(snakemake.input.co2, base_year_emissions, emissions_scope) eurostat_co2 = build_eurostat_co2( input_eurostat, countries, report_year, base_year_emissions diff --git a/scripts/build_heat_demand.py b/scripts/build_heat_demand.py index 56ceb4b7..73494260 100644 --- a/scripts/build_heat_demand.py +++ b/scripts/build_heat_demand.py @@ -27,7 +27,7 @@ if __name__ == "__main__": cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1) client = Client(cluster, asynchronous=True) - time = pd.date_range(freq="h", **snakemake.config["snapshots"]) + time = pd.date_range(freq="h", **snakemake.params.snapshots) cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time) clustered_regions = ( diff --git a/scripts/build_hydro_profile.py b/scripts/build_hydro_profile.py index 0e8cfa27..bed666f2 100644 --- a/scripts/build_hydro_profile.py +++ b/scripts/build_hydro_profile.py @@ -130,10 +130,10 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_hydro_profile") configure_logging(snakemake) - config_hydro = snakemake.config["renewable"]["hydro"] + params_hydro = snakemake.params.hydro cutout = atlite.Cutout(snakemake.input.cutout) - countries = snakemake.config["countries"] + countries = snakemake.params.countries country_shapes = ( gpd.read_file(snakemake.input.country_shapes) .set_index("name")["geometry"] @@ -151,7 +151,7 @@ if __name__ == "__main__": normalize_using_yearly=eia_stats, ) - if "clip_min_inflow" in config_hydro: - inflow = inflow.where(inflow > config_hydro["clip_min_inflow"], 0) + if "clip_min_inflow" in params_hydro: + inflow = inflow.where(inflow > params_hydro["clip_min_inflow"], 0) inflow.to_netcdf(snakemake.output[0]) diff --git a/scripts/build_industrial_distribution_key.py b/scripts/build_industrial_distribution_key.py index 69daf64d..e93e43c2 100644 --- a/scripts/build_industrial_distribution_key.py +++ b/scripts/build_industrial_distribution_key.py @@ -73,7 +73,7 @@ def prepare_hotmaps_database(regions): df[["srid", "coordinates"]] = df.geom.str.split(";", expand=True) - if snakemake.config["industry"].get("hotmaps_locate_missing", False): + if snakemake.params.hotmaps_locate_missing: df = locate_missing_industrial_sites(df) # remove those sites without valid locations @@ -143,7 +143,7 @@ if __name__ == "__main__": logging.basicConfig(level=snakemake.config["logging"]["level"]) - countries = snakemake.config["countries"] + countries = snakemake.params.countries regions = gpd.read_file(snakemake.input.regions_onshore).set_index("name") diff --git a/scripts/build_industrial_energy_demand_per_country_today.py b/scripts/build_industrial_energy_demand_per_country_today.py index 
703997b1..9ca0d003 100644 --- a/scripts/build_industrial_energy_demand_per_country_today.py +++ b/scripts/build_industrial_energy_demand_per_country_today.py @@ -101,8 +101,8 @@ def add_ammonia_energy_demand(demand): def get_ammonia_by_fuel(x): fuels = { - "gas": config["MWh_CH4_per_tNH3_SMR"], - "electricity": config["MWh_elec_per_tNH3_SMR"], + "gas": params["MWh_CH4_per_tNH3_SMR"], + "electricity": params["MWh_elec_per_tNH3_SMR"], } return pd.Series({k: x * v for k, v in fuels.items()}) @@ -112,7 +112,7 @@ def add_ammonia_energy_demand(demand): index=demand.index, fill_value=0.0 ) - ammonia = pd.DataFrame({"ammonia": ammonia * config["MWh_NH3_per_tNH3"]}).T + ammonia = pd.DataFrame({"ammonia": ammonia * params["MWh_NH3_per_tNH3"]}).T demand["Ammonia"] = ammonia.unstack().reindex(index=demand.index, fill_value=0.0) @@ -178,9 +178,9 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_industrial_energy_demand_per_country_today") - config = snakemake.config["industry"] - year = config.get("reference_year", 2015) - countries = pd.Index(snakemake.config["countries"]) + params = snakemake.params.industry + year = params.get("reference_year", 2015) + countries = pd.Index(snakemake.params.countries) demand = industrial_energy_demand(countries.intersection(eu28), year) diff --git a/scripts/build_industrial_production_per_country.py b/scripts/build_industrial_production_per_country.py index 437806b3..74cb1949 100644 --- a/scripts/build_industrial_production_per_country.py +++ b/scripts/build_industrial_production_per_country.py @@ -264,9 +264,9 @@ def separate_basic_chemicals(demand, year): # assume HVC, methanol, chlorine production proportional to non-ammonia basic chemicals distribution_key = demand["Basic chemicals"] / demand["Basic chemicals"].sum() - demand["HVC"] = config["HVC_production_today"] * 1e3 * distribution_key - demand["Chlorine"] = config["chlorine_production_today"] * 1e3 * distribution_key - demand["Methanol"] = config["methanol_production_today"] * 1e3 * distribution_key + demand["HVC"] = params["HVC_production_today"] * 1e3 * distribution_key + demand["Chlorine"] = params["chlorine_production_today"] * 1e3 * distribution_key + demand["Methanol"] = params["methanol_production_today"] * 1e3 * distribution_key demand.drop(columns=["Basic chemicals"], inplace=True) @@ -279,11 +279,11 @@ if __name__ == "__main__": logging.basicConfig(level=snakemake.config["logging"]["level"]) - countries = snakemake.config["countries"] + countries = snakemake.params.countries - year = snakemake.config["industry"]["reference_year"] + year = snakemake.params.industry["reference_year"] - config = snakemake.config["industry"] + params = snakemake.params.industry jrc_dir = snakemake.input.jrc eurostat_dir = snakemake.input.eurostat diff --git a/scripts/build_industrial_production_per_country_tomorrow.py b/scripts/build_industrial_production_per_country_tomorrow.py index 6c445608..ffed5195 100644 --- a/scripts/build_industrial_production_per_country_tomorrow.py +++ b/scripts/build_industrial_production_per_country_tomorrow.py @@ -15,7 +15,7 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_industrial_production_per_country_tomorrow") - config = snakemake.config["industry"] + params = snakemake.params.industry investment_year = int(snakemake.wildcards.planning_horizons) @@ -25,8 +25,8 @@ if __name__ == "__main__": keys = ["Integrated steelworks", "Electric arc"] total_steel = production[keys].sum(axis=1) - st_primary_fraction = get(config["St_primary_fraction"], investment_year) 
- dri_fraction = get(config["DRI_fraction"], investment_year) + st_primary_fraction = get(params["St_primary_fraction"], investment_year) + dri_fraction = get(params["DRI_fraction"], investment_year) int_steel = production["Integrated steelworks"].sum() fraction_persistent_primary = st_primary_fraction * total_steel.sum() / int_steel @@ -51,7 +51,7 @@ if __name__ == "__main__": key_pri = "Aluminium - primary production" key_sec = "Aluminium - secondary production" - al_primary_fraction = get(config["Al_primary_fraction"], investment_year) + al_primary_fraction = get(params["Al_primary_fraction"], investment_year) fraction_persistent_primary = ( al_primary_fraction * total_aluminium.sum() / production[key_pri].sum() ) @@ -60,15 +60,15 @@ if __name__ == "__main__": production[key_sec] = total_aluminium - production[key_pri] production["HVC (mechanical recycling)"] = ( - get(config["HVC_mechanical_recycling_fraction"], investment_year) + get(params["HVC_mechanical_recycling_fraction"], investment_year) * production["HVC"] ) production["HVC (chemical recycling)"] = ( - get(config["HVC_chemical_recycling_fraction"], investment_year) + get(params["HVC_chemical_recycling_fraction"], investment_year) * production["HVC"] ) - production["HVC"] *= get(config["HVC_primary_fraction"], investment_year) + production["HVC"] *= get(params["HVC_primary_fraction"], investment_year) fn = snakemake.output.industrial_production_per_country_tomorrow production.to_csv(fn, float_format="%.2f") diff --git a/scripts/build_industry_sector_ratios.py b/scripts/build_industry_sector_ratios.py index 54f2cfdc..45705002 100644 --- a/scripts/build_industry_sector_ratios.py +++ b/scripts/build_industry_sector_ratios.py @@ -185,10 +185,10 @@ def iron_and_steel(): df[sector] = df["Electric arc"] # add H2 consumption for DRI at 1.7 MWh H2 /ton steel - df.at["hydrogen", sector] = config["H2_DRI"] + df.at["hydrogen", sector] = params["H2_DRI"] # add electricity consumption in DRI shaft (0.322 MWh/tSl) - df.at["elec", sector] += config["elec_DRI"] + df.at["elec", sector] += params["elec_DRI"] ## Integrated steelworks # could be used in combination with CCS) @@ -383,19 +383,19 @@ def chemicals_industry(): assert s_emi.index[0] == sector # convert from MtHVC/a to ktHVC/a - s_out = config["HVC_production_today"] * 1e3 + s_out = params["HVC_production_today"] * 1e3 # tCO2/t material df.loc["process emission", sector] += ( s_emi["Process emissions"] - - config["petrochemical_process_emissions"] * 1e3 - - config["NH3_process_emissions"] * 1e3 + - params["petrochemical_process_emissions"] * 1e3 + - params["NH3_process_emissions"] * 1e3 ) / s_out # emissions originating from feedstock, could be non-fossil origin # tCO2/t material df.loc["process emission from feedstock", sector] += ( - config["petrochemical_process_emissions"] * 1e3 + params["petrochemical_process_emissions"] * 1e3 ) / s_out # convert from ktoe/a to GWh/a @@ -405,18 +405,18 @@ def chemicals_industry(): # subtract ammonia energy demand (in ktNH3/a) ammonia = pd.read_csv(snakemake.input.ammonia_production, index_col=0) ammonia_total = ammonia.loc[ammonia.index.intersection(eu28), str(year)].sum() - df.loc["methane", sector] -= ammonia_total * config["MWh_CH4_per_tNH3_SMR"] - df.loc["elec", sector] -= ammonia_total * config["MWh_elec_per_tNH3_SMR"] + df.loc["methane", sector] -= ammonia_total * params["MWh_CH4_per_tNH3_SMR"] + df.loc["elec", sector] -= ammonia_total * params["MWh_elec_per_tNH3_SMR"] # subtract chlorine demand - chlorine_total = 
config["chlorine_production_today"] - df.loc["hydrogen", sector] -= chlorine_total * config["MWh_H2_per_tCl"] - df.loc["elec", sector] -= chlorine_total * config["MWh_elec_per_tCl"] + chlorine_total = params["chlorine_production_today"] + df.loc["hydrogen", sector] -= chlorine_total * params["MWh_H2_per_tCl"] + df.loc["elec", sector] -= chlorine_total * params["MWh_elec_per_tCl"] # subtract methanol demand - methanol_total = config["methanol_production_today"] - df.loc["methane", sector] -= methanol_total * config["MWh_CH4_per_tMeOH"] - df.loc["elec", sector] -= methanol_total * config["MWh_elec_per_tMeOH"] + methanol_total = params["methanol_production_today"] + df.loc["methane", sector] -= methanol_total * params["MWh_CH4_per_tMeOH"] + df.loc["elec", sector] -= methanol_total * params["MWh_elec_per_tMeOH"] # MWh/t material df.loc[sources, sector] = df.loc[sources, sector] / s_out @@ -427,37 +427,37 @@ def chemicals_industry(): sector = "HVC (mechanical recycling)" df[sector] = 0.0 - df.loc["elec", sector] = config["MWh_elec_per_tHVC_mechanical_recycling"] + df.loc["elec", sector] = params["MWh_elec_per_tHVC_mechanical_recycling"] # HVC chemical recycling sector = "HVC (chemical recycling)" df[sector] = 0.0 - df.loc["elec", sector] = config["MWh_elec_per_tHVC_chemical_recycling"] + df.loc["elec", sector] = params["MWh_elec_per_tHVC_chemical_recycling"] # Ammonia sector = "Ammonia" df[sector] = 0.0 - if snakemake.config["sector"].get("ammonia", False): - df.loc["ammonia", sector] = config["MWh_NH3_per_tNH3"] + if snakemake.params.ammonia: + df.loc["ammonia", sector] = params["MWh_NH3_per_tNH3"] else: - df.loc["hydrogen", sector] = config["MWh_H2_per_tNH3_electrolysis"] - df.loc["elec", sector] = config["MWh_elec_per_tNH3_electrolysis"] + df.loc["hydrogen", sector] = params["MWh_H2_per_tNH3_electrolysis"] + df.loc["elec", sector] = params["MWh_elec_per_tNH3_electrolysis"] # Chlorine sector = "Chlorine" df[sector] = 0.0 - df.loc["hydrogen", sector] = config["MWh_H2_per_tCl"] - df.loc["elec", sector] = config["MWh_elec_per_tCl"] + df.loc["hydrogen", sector] = params["MWh_H2_per_tCl"] + df.loc["elec", sector] = params["MWh_elec_per_tCl"] # Methanol sector = "Methanol" df[sector] = 0.0 - df.loc["methane", sector] = config["MWh_CH4_per_tMeOH"] - df.loc["elec", sector] = config["MWh_elec_per_tMeOH"] + df.loc["methane", sector] = params["MWh_CH4_per_tMeOH"] + df.loc["elec", sector] = params["MWh_elec_per_tMeOH"] # Other chemicals @@ -1465,10 +1465,10 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_industry_sector_ratios") - # TODO make config option + # TODO make params option year = 2015 - config = snakemake.config["industry"] + params = snakemake.params.industry df = pd.concat( [ diff --git a/scripts/build_powerplants.py b/scripts/build_powerplants.py index 9ca67a53..7f396e1e 100755 --- a/scripts/build_powerplants.py +++ b/scripts/build_powerplants.py @@ -115,7 +115,7 @@ if __name__ == "__main__": configure_logging(snakemake) n = pypsa.Network(snakemake.input.base_network) - countries = snakemake.config["countries"] + countries = snakemake.params.countries ppl = ( pm.powerplants(from_url=True) @@ -134,12 +134,12 @@ if __name__ == "__main__": ppl = ppl.query('not (Country in @available_countries and Fueltype == "Bioenergy")') ppl = pd.concat([ppl, opsd]) - ppl_query = snakemake.config["electricity"]["powerplants_filter"] + ppl_query = snakemake.params.powerplants_filter if isinstance(ppl_query, str): ppl.query(ppl_query, inplace=True) # add carriers from own powerplant files: - 
custom_ppl_query = snakemake.config["electricity"]["custom_powerplants"] + custom_ppl_query = snakemake.params.custom_powerplants ppl = add_custom_powerplants( ppl, snakemake.input.custom_powerplants, custom_ppl_query ) diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index c0288aee..9f1f1a51 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -64,7 +64,7 @@ Inputs - ``resources/offshore_shapes.geojson``: confer :ref:`shapes` - ``resources/regions_onshore.geojson``: (if not offshore wind), confer :ref:`busregions` - ``resources/regions_offshore.geojson``: (if offshore wind), :ref:`busregions` -- ``"cutouts/" + config["renewable"][{technology}]['cutout']``: :ref:`cutout` +- ``"cutouts/" + params["renewable"][{technology}]['cutout']``: :ref:`cutout` - ``networks/base.nc``: :ref:`base` Outputs @@ -204,14 +204,14 @@ if __name__ == "__main__": nprocesses = int(snakemake.threads) noprogress = snakemake.config["run"].get("disable_progressbar", True) - config = snakemake.config["renewable"][snakemake.wildcards.technology] - resource = config["resource"] # pv panel config / wind turbine config - correction_factor = config.get("correction_factor", 1.0) - capacity_per_sqkm = config["capacity_per_sqkm"] - p_nom_max_meth = config.get("potential", "conservative") + params = snakemake.params.renewable[snakemake.wildcards.technology] + resource = params["resource"] # pv panel params / wind turbine params + correction_factor = params.get("correction_factor", 1.0) + capacity_per_sqkm = params["capacity_per_sqkm"] + p_nom_max_meth = params.get("potential", "conservative") - if isinstance(config.get("corine", {}), list): - config["corine"] = {"grid_codes": config["corine"]} + if isinstance(params.get("corine", {}), list): + params["corine"] = {"grid_codes": params["corine"]} if correction_factor != 1.0: logger.info(f"correction_factor is set as {correction_factor}") @@ -229,13 +229,13 @@ if __name__ == "__main__": regions = regions.set_index("name").rename_axis("bus") buses = regions.index - res = config.get("excluder_resolution", 100) + res = params.get("excluder_resolution", 100) excluder = atlite.ExclusionContainer(crs=3035, res=res) - if config["natura"]: + if params["natura"]: excluder.add_raster(snakemake.input.natura, nodata=0, allow_no_overlap=True) - corine = config.get("corine", {}) + corine = params.get("corine", {}) if "grid_codes" in corine: codes = corine["grid_codes"] excluder.add_raster(snakemake.input.corine, codes=codes, invert=True, crs=3035) @@ -246,28 +246,28 @@ if __name__ == "__main__": snakemake.input.corine, codes=codes, buffer=buffer, crs=3035 ) - if "ship_threshold" in config: + if "ship_threshold" in params: shipping_threshold = ( - config["ship_threshold"] * 8760 * 6 + params["ship_threshold"] * 8760 * 6 ) # approximation because 6 years of data which is hourly collected func = functools.partial(np.less, shipping_threshold) excluder.add_raster( snakemake.input.ship_density, codes=func, crs=4326, allow_no_overlap=True ) - if config.get("max_depth"): + if params.get("max_depth"): # lambda not supported for atlite + multiprocessing # use named function np.greater with partially frozen argument instead # and exclude areas where: -max_depth > grid cell depth - func = functools.partial(np.greater, -config["max_depth"]) + func = functools.partial(np.greater, -params["max_depth"]) excluder.add_raster(snakemake.input.gebco, codes=func, crs=4326, nodata=-1000) - if "min_shore_distance" in config: - buffer = 
config["min_shore_distance"] + if "min_shore_distance" in params: + buffer = params["min_shore_distance"] excluder.add_geometry(snakemake.input.country_shapes, buffer=buffer) - if "max_shore_distance" in config: - buffer = config["max_shore_distance"] + if "max_shore_distance" in params: + buffer = params["max_shore_distance"] excluder.add_geometry( snakemake.input.country_shapes, buffer=buffer, invert=True ) @@ -358,13 +358,13 @@ if __name__ == "__main__": # select only buses with some capacity and minimal capacity factor ds = ds.sel( bus=( - (ds["profile"].mean("time") > config.get("min_p_max_pu", 0.0)) - & (ds["p_nom_max"] > config.get("min_p_nom_max", 0.0)) + (ds["profile"].mean("time") > params.get("min_p_max_pu", 0.0)) + & (ds["p_nom_max"] > params.get("min_p_nom_max", 0.0)) ) ) - if "clip_p_max_pu" in config: - min_p_max_pu = config["clip_p_max_pu"] + if "clip_p_max_pu" in params: + min_p_max_pu = params["clip_p_max_pu"] ds["profile"] = ds["profile"].where(ds["profile"] >= min_p_max_pu, 0) ds.to_netcdf(snakemake.output.profile) diff --git a/scripts/build_retro_cost.py b/scripts/build_retro_cost.py index 9dbfc375..c830415e 100644 --- a/scripts/build_retro_cost.py +++ b/scripts/build_retro_cost.py @@ -305,7 +305,7 @@ def prepare_building_stock_data(): u_values.set_index(["country_code", "subsector", "bage", "type"], inplace=True) # only take in config.yaml specified countries into account - countries = snakemake.config["countries"] + countries = snakemake.params.countries area_tot = area_tot.loc[countries] return u_values, country_iso_dic, countries, area_tot, area @@ -698,8 +698,8 @@ def get_solar_gains_per_year(window_area): def map_to_lstrength(l_strength, df): """ - renames column names from a pandas dataframe to map tabula retrofitting - strengths [2 = moderate, 3 = ambitious] to l_strength + Renames column names from a pandas dataframe to map tabula retrofitting + strengths [2 = moderate, 3 = ambitious] to l_strength. 
""" middle = len(l_strength) // 2 map_to_l = pd.MultiIndex.from_arrays( @@ -1040,7 +1040,7 @@ if __name__ == "__main__": # ******** config ********************************************************* - retro_opts = snakemake.config["sector"]["retrofitting"] + retro_opts = snakemake.params.retrofitting interest_rate = retro_opts["interest_rate"] annualise_cost = retro_opts["annualise_cost"] # annualise the investment costs tax_weighting = retro_opts[ diff --git a/scripts/build_sequestration_potentials.py b/scripts/build_sequestration_potentials.py index 012effe8..e19a96da 100644 --- a/scripts/build_sequestration_potentials.py +++ b/scripts/build_sequestration_potentials.py @@ -41,7 +41,7 @@ if __name__ == "__main__": "build_sequestration_potentials", simpl="", clusters="181" ) - cf = snakemake.config["sector"]["regional_co2_sequestration_potential"] + cf = snakemake.params.sequestration_potential gdf = gpd.read_file(snakemake.input.sequestration_potential[0]) diff --git a/scripts/build_shapes.py b/scripts/build_shapes.py index 50d21e12..eb837409 100644 --- a/scripts/build_shapes.py +++ b/scripts/build_shapes.py @@ -234,6 +234,7 @@ def nuts3(country_shapes, nuts3, nuts3pop, nuts3gdp, ch_cantons, ch_popgdp): manual = gpd.GeoDataFrame( [["BA1", "BA", 3871.0], ["RS1", "RS", 7210.0], ["AL1", "AL", 2893.0]], columns=["NUTS_ID", "country", "pop"], + geometry=gpd.GeoSeries(), ) manual["geometry"] = manual["country"].map(country_shapes) manual = manual.dropna() @@ -254,13 +255,11 @@ if __name__ == "__main__": snakemake = mock_snakemake("build_shapes") configure_logging(snakemake) - country_shapes = countries( - snakemake.input.naturalearth, snakemake.config["countries"] - ) + country_shapes = countries(snakemake.input.naturalearth, snakemake.params.countries) country_shapes.reset_index().to_file(snakemake.output.country_shapes) offshore_shapes = eez( - country_shapes, snakemake.input.eez, snakemake.config["countries"] + country_shapes, snakemake.input.eez, snakemake.params.countries ) offshore_shapes.reset_index().to_file(snakemake.output.offshore_shapes) diff --git a/scripts/build_solar_thermal_profiles.py b/scripts/build_solar_thermal_profiles.py index f4eb1557..d285691a 100644 --- a/scripts/build_solar_thermal_profiles.py +++ b/scripts/build_solar_thermal_profiles.py @@ -27,9 +27,9 @@ if __name__ == "__main__": cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1) client = Client(cluster, asynchronous=True) - config = snakemake.config["solar_thermal"] + config = snakemake.params.solar_thermal - time = pd.date_range(freq="h", **snakemake.config["snapshots"]) + time = pd.date_range(freq="h", **snakemake.params.snapshots) cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time) clustered_regions = ( diff --git a/scripts/build_temperature_profiles.py b/scripts/build_temperature_profiles.py index 8f6d6c6c..9db37c25 100644 --- a/scripts/build_temperature_profiles.py +++ b/scripts/build_temperature_profiles.py @@ -27,7 +27,7 @@ if __name__ == "__main__": cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1) client = Client(cluster, asynchronous=True) - time = pd.date_range(freq="h", **snakemake.config["snapshots"]) + time = pd.date_range(freq="h", **snakemake.params.snapshots) cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time) clustered_regions = ( diff --git a/scripts/build_transport_demand.py b/scripts/build_transport_demand.py index 6b8bd04f..c5bf4632 100644 --- a/scripts/build_transport_demand.py +++ b/scripts/build_transport_demand.py @@ -175,9 +175,9 @@ if 
__name__ == "__main__": snakemake.input.pop_weighted_energy_totals, index_col=0 ) - options = snakemake.config["sector"] + options = snakemake.params.sector - snapshots = pd.date_range(freq="h", **snakemake.config["snapshots"], tz="UTC") + snapshots = pd.date_range(freq="h", **snakemake.params.snapshots, tz="UTC") nyears = len(snapshots) / 8760 diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 7572d3b3..52685af2 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -186,7 +186,7 @@ def get_feature_for_hac(n, buses_i=None, feature=None): if "offwind" in carriers: carriers.remove("offwind") carriers = np.append( - carriers, network.generators.carrier.filter(like="offwind").unique() + carriers, n.generators.carrier.filter(like="offwind").unique() ) if feature.split("-")[1] == "cap": @@ -424,7 +424,10 @@ def clustering_for_n_clusters( n.links.eval("underwater_fraction * length").div(nc.links.length).dropna() ) nc.links["capital_cost"] = nc.links["capital_cost"].add( - (nc.links.length - n.links.length).clip(lower=0).mul(extended_link_costs), + (nc.links.length - n.links.length) + .clip(lower=0) + .mul(extended_link_costs) + .dropna(), fill_value=0, ) @@ -460,28 +463,18 @@ if __name__ == "__main__": snakemake = mock_snakemake("cluster_network", simpl="", clusters="5") configure_logging(snakemake) + params = snakemake.params + solver_name = snakemake.config["solving"]["solver"]["name"] + n = pypsa.Network(snakemake.input.network) - focus_weights = snakemake.config.get("focus_weights", None) - - renewable_carriers = pd.Index( - [ - tech - for tech in n.generators.carrier.unique() - if tech in snakemake.config["renewable"] - ] - ) - - exclude_carriers = snakemake.config["clustering"]["cluster_network"].get( - "exclude_carriers", [] - ) + exclude_carriers = params.cluster_network["exclude_carriers"] aggregate_carriers = set(n.generators.carrier) - set(exclude_carriers) if snakemake.wildcards.clusters.endswith("m"): n_clusters = int(snakemake.wildcards.clusters[:-1]) - conventional = set( - snakemake.config["electricity"].get("conventional_carriers", []) + aggregate_carriers = set(params.conventional_carriers).intersection( + aggregate_carriers ) - aggregate_carriers = conventional.intersection(aggregate_carriers) elif snakemake.wildcards.clusters == "all": n_clusters = len(n.buses) else: @@ -495,13 +488,12 @@ if __name__ == "__main__": n, busmap, linemap, linemap, pd.Series(dtype="O") ) else: - line_length_factor = snakemake.config["lines"]["length_factor"] Nyears = n.snapshot_weightings.objective.sum() / 8760 hvac_overhead_cost = load_costs( snakemake.input.tech_costs, - snakemake.config["costs"], - snakemake.config["electricity"], + params.costs, + params.max_hours, Nyears, ).at["HVAC overhead", "capital_cost"] @@ -512,16 +504,16 @@ if __name__ == "__main__": ).all() or x.isnull().all(), "The `potential` configuration option must agree for all renewable carriers, for now!" 
return v - aggregation_strategies = snakemake.config["clustering"].get( - "aggregation_strategies", {} - ) # translate str entries of aggregation_strategies to pd.Series functions: aggregation_strategies = { - p: {k: getattr(pd.Series, v) for k, v in aggregation_strategies[p].items()} - for p in aggregation_strategies.keys() + p: { + k: getattr(pd.Series, v) + for k, v in params.aggregation_strategies[p].items() + } + for p in params.aggregation_strategies.keys() } - custom_busmap = snakemake.config["enable"].get("custom_busmap", False) + custom_busmap = params.custom_busmap if custom_busmap: custom_busmap = pd.read_csv( snakemake.input.custom_busmap, index_col=0, squeeze=True @@ -529,21 +521,18 @@ if __name__ == "__main__": custom_busmap.index = custom_busmap.index.astype(str) logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}") - cluster_config = snakemake.config.get("clustering", {}).get( - "cluster_network", {} - ) clustering = clustering_for_n_clusters( n, n_clusters, custom_busmap, aggregate_carriers, - line_length_factor, - aggregation_strategies, - snakemake.config["solving"]["solver"]["name"], - cluster_config.get("algorithm", "hac"), - cluster_config.get("feature", "solar+onwind-time"), + params.length_factor, + params.aggregation_strategies, + solver_name, + params.cluster_network["algorithm"], + params.cluster_network["feature"], hvac_overhead_cost, - focus_weights, + params.focus_weights, ) update_p_nom_max(clustering.network) diff --git a/scripts/make_summary.py b/scripts/make_summary.py index 3d743942..00fca2fd 100644 --- a/scripts/make_summary.py +++ b/scripts/make_summary.py @@ -198,7 +198,7 @@ def calculate_costs(n, label, costs): def calculate_cumulative_cost(): - planning_horizons = snakemake.config["scenario"]["planning_horizons"] + planning_horizons = snakemake.params.scenario["planning_horizons"] cumulative_cost = pd.DataFrame( index=df["costs"].sum().index, @@ -688,19 +688,19 @@ if __name__ == "__main__": (cluster, ll, opt + sector_opt, planning_horizon): "results/" + snakemake.params.RDIR + f"/postnetworks/elec_s{simpl}_{cluster}_l{ll}_{opt}_{sector_opt}_{planning_horizon}.nc" - for simpl in snakemake.config["scenario"]["simpl"] - for cluster in snakemake.config["scenario"]["clusters"] - for opt in snakemake.config["scenario"]["opts"] - for sector_opt in snakemake.config["scenario"]["sector_opts"] - for ll in snakemake.config["scenario"]["ll"] - for planning_horizon in snakemake.config["scenario"]["planning_horizons"] + for simpl in snakemake.params.scenario["simpl"] + for cluster in snakemake.params.scenario["clusters"] + for opt in snakemake.params.scenario["opts"] + for sector_opt in snakemake.params.scenario["sector_opts"] + for ll in snakemake.params.scenario["ll"] + for planning_horizon in snakemake.params.scenario["planning_horizons"] } - Nyears = len(pd.date_range(freq="h", **snakemake.config["snapshots"])) / 8760 + Nyears = len(pd.date_range(freq="h", **snakemake.params.snapshots)) / 8760 costs_db = prepare_costs( snakemake.input.costs, - snakemake.config["costs"], + snakemake.params.costs, Nyears, ) @@ -710,7 +710,7 @@ if __name__ == "__main__": to_csv(df) - if snakemake.config["foresight"] == "myopic": + if snakemake.params.foresight == "myopic": cumulative_cost = calculate_cumulative_cost() cumulative_cost.to_csv( "results/" + snakemake.params.RDIR + "/csvs/cumulative_cost.csv" diff --git a/scripts/plot_network.py b/scripts/plot_network.py index 0a22b2e5..184d86f0 100644 --- a/scripts/plot_network.py +++ b/scripts/plot_network.py 
@@ -70,7 +70,7 @@ def plot_map( transmission=False, with_legend=True, ): - tech_colors = snakemake.config["plotting"]["tech_colors"] + tech_colors = snakemake.params.plotting["tech_colors"] n = network.copy() assign_location(n) @@ -116,9 +116,7 @@ def plot_map( costs = costs.stack() # .sort_index() # hack because impossible to drop buses... - eu_location = snakemake.config["plotting"].get( - "eu_node_location", dict(x=-5.5, y=46) - ) + eu_location = snakemake.params.plotting.get("eu_node_location", dict(x=-5.5, y=46)) n.buses.loc["EU gas", "x"] = eu_location["x"] n.buses.loc["EU gas", "y"] = eu_location["y"] @@ -315,7 +313,7 @@ def plot_h2_map(network, regions): h2_new = n.links[n.links.carrier == "H2 pipeline"] h2_retro = n.links[n.links.carrier == "H2 pipeline retrofitted"] - if snakemake.config["foresight"] == "myopic": + if snakemake.params.foresight == "myopic": # sum capacitiy for pipelines from different investment periods h2_new = group_pipes(h2_new) @@ -558,7 +556,7 @@ def plot_ch4_map(network): link_widths_used = max_usage / linewidth_factor link_widths_used[max_usage < line_lower_threshold] = 0.0 - tech_colors = snakemake.config["plotting"]["tech_colors"] + tech_colors = snakemake.params.plotting["tech_colors"] pipe_colors = { "gas pipeline": "#f08080", @@ -700,7 +698,7 @@ def plot_map_without(network): # hack because impossible to drop buses... if "EU gas" in n.buses.index: - eu_location = snakemake.config["plotting"].get( + eu_location = snakemake.params.plotting.get( "eu_node_location", dict(x=-5.5, y=46) ) n.buses.loc["EU gas", "x"] = eu_location["x"] @@ -876,7 +874,7 @@ def plot_series(network, carrier="AC", name="test"): stacked=True, linewidth=0.0, color=[ - snakemake.config["plotting"]["tech_colors"][i.replace(suffix, "")] + snakemake.params.plotting["tech_colors"][i.replace(suffix, "")] for i in new_columns ], ) @@ -937,7 +935,7 @@ if __name__ == "__main__": regions = gpd.read_file(snakemake.input.regions).set_index("name") - map_opts = snakemake.config["plotting"]["map"] + map_opts = snakemake.params.plotting["map"] if map_opts["boundaries"] is None: map_opts["boundaries"] = regions.total_bounds[[0, 2, 1, 3]] + [-1, 1, -1, 1] diff --git a/scripts/plot_summary.py b/scripts/plot_summary.py index cfa8e361..e7de5473 100644 --- a/scripts/plot_summary.py +++ b/scripts/plot_summary.py @@ -142,10 +142,10 @@ def plot_costs(): df = df.groupby(df.index.map(rename_techs)).sum() - to_drop = df.index[df.max(axis=1) < snakemake.config["plotting"]["costs_threshold"]] + to_drop = df.index[df.max(axis=1) < snakemake.params.plotting["costs_threshold"]] logger.info( - f"Dropping technology with costs below {snakemake.config['plotting']['costs_threshold']} EUR billion per year" + f"Dropping technology with costs below {snakemake.params['plotting']['costs_threshold']} EUR billion per year" ) logger.debug(df.loc[to_drop]) @@ -165,7 +165,7 @@ def plot_costs(): kind="bar", ax=ax, stacked=True, - color=[snakemake.config["plotting"]["tech_colors"][i] for i in new_index], + color=[snakemake.params.plotting["tech_colors"][i] for i in new_index], ) handles, labels = ax.get_legend_handles_labels() @@ -173,7 +173,7 @@ def plot_costs(): handles.reverse() labels.reverse() - ax.set_ylim([0, snakemake.config["plotting"]["costs_max"]]) + ax.set_ylim([0, snakemake.params.plotting["costs_max"]]) ax.set_ylabel("System Cost [EUR billion per year]") @@ -201,11 +201,11 @@ def plot_energy(): df = df.groupby(df.index.map(rename_techs)).sum() to_drop = df.index[ - df.abs().max(axis=1) < 
snakemake.config["plotting"]["energy_threshold"] + df.abs().max(axis=1) < snakemake.params.plotting["energy_threshold"] ] logger.info( - f"Dropping all technology with energy consumption or production below {snakemake.config['plotting']['energy_threshold']} TWh/a" + f"Dropping all technology with energy consumption or production below {snakemake.params['plotting']['energy_threshold']} TWh/a" ) logger.debug(df.loc[to_drop]) @@ -227,7 +227,7 @@ def plot_energy(): kind="bar", ax=ax, stacked=True, - color=[snakemake.config["plotting"]["tech_colors"][i] for i in new_index], + color=[snakemake.params.plotting["tech_colors"][i] for i in new_index], ) handles, labels = ax.get_legend_handles_labels() @@ -237,8 +237,8 @@ def plot_energy(): ax.set_ylim( [ - snakemake.config["plotting"]["energy_min"], - snakemake.config["plotting"]["energy_max"], + snakemake.params.plotting["energy_min"], + snakemake.params.plotting["energy_max"], ] ) @@ -287,7 +287,7 @@ def plot_balances(): df = df.groupby(df.index.map(rename_techs)).sum() to_drop = df.index[ - df.abs().max(axis=1) < snakemake.config["plotting"]["energy_threshold"] / 10 + df.abs().max(axis=1) < snakemake.params.plotting["energy_threshold"] / 10 ] if v[0] in co2_carriers: @@ -296,7 +296,7 @@ def plot_balances(): units = "TWh/a" logger.debug( - f"Dropping technology energy balance smaller than {snakemake.config['plotting']['energy_threshold']/10} {units}" + f"Dropping technology energy balance smaller than {snakemake.params['plotting']['energy_threshold']/10} {units}" ) logger.debug(df.loc[to_drop]) @@ -317,7 +317,7 @@ def plot_balances(): kind="bar", ax=ax, stacked=True, - color=[snakemake.config["plotting"]["tech_colors"][i] for i in new_index], + color=[snakemake.params.plotting["tech_colors"][i] for i in new_index], ) handles, labels = ax.get_legend_handles_labels() @@ -455,10 +455,10 @@ def plot_carbon_budget_distribution(input_eurostat): ax1 = plt.subplot(gs1[0, 0]) ax1.set_ylabel("CO$_2$ emissions (Gt per year)", fontsize=22) ax1.set_ylim([0, 5]) - ax1.set_xlim([1990, snakemake.config["scenario"]["planning_horizons"][-1] + 1]) + ax1.set_xlim([1990, snakemake.params.planning_horizons[-1] + 1]) path_cb = "results/" + snakemake.params.RDIR + "/csvs/" - countries = snakemake.config["countries"] + countries = snakemake.params.countries e_1990 = co2_emissions_year(countries, input_eurostat, opts, year=1990) CO2_CAP = pd.read_csv(path_cb + "carbon_budget_distribution.csv", index_col=0) @@ -555,7 +555,7 @@ if __name__ == "__main__": plot_balances() - for sector_opts in snakemake.config["scenario"]["sector_opts"]: + for sector_opts in snakemake.params.sector_opts: opts = sector_opts.split("-") for o in opts: if "cb" in o: diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index e16339ba..5e5cdbca 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -268,12 +268,12 @@ if __name__ == "__main__": Nyears = n.snapshot_weightings.objective.sum() / 8760.0 costs = load_costs( snakemake.input.tech_costs, - snakemake.config["costs"], - snakemake.config["electricity"], + snakemake.params.costs, + snakemake.params.max_hours, Nyears, ) - set_line_s_max_pu(n, snakemake.config["lines"]["s_max_pu"]) + set_line_s_max_pu(n, snakemake.params.lines["s_max_pu"]) for o in opts: m = re.match(r"^\d+h$", o, re.IGNORECASE) @@ -292,11 +292,11 @@ if __name__ == "__main__": if "Co2L" in o: m = re.findall("[0-9]*\.?[0-9]+$", o) if len(m) > 0: - co2limit = float(m[0]) * snakemake.config["electricity"]["co2base"] + co2limit = float(m[0]) * 
snakemake.params.co2base
                 add_co2limit(n, co2limit, Nyears)
                 logger.info("Setting CO2 limit according to wildcard value.")
             else:
-                add_co2limit(n, snakemake.config["electricity"]["co2limit"], Nyears)
+                add_co2limit(n, snakemake.params.co2limit, Nyears)
                 logger.info("Setting CO2 limit according to config value.")
             break

@@ -308,7 +308,7 @@ if __name__ == "__main__":
             add_gaslimit(n, limit, Nyears)
             logger.info("Setting gas usage limit according to wildcard value.")
         else:
-            add_gaslimit(n, snakemake.config["electricity"].get("gaslimit"), Nyears)
+            add_gaslimit(n, snakemake.params.gaslimit, Nyears)
             logger.info("Setting gas usage limit according to config value.")
         break

@@ -337,7 +337,7 @@ if __name__ == "__main__":
             add_emission_prices(n, dict(co2=float(m[0])))
         else:
             logger.info("Setting emission prices according to config value.")
-            add_emission_prices(n, snakemake.config["costs"]["emission_prices"])
+            add_emission_prices(n, snakemake.params.costs["emission_prices"])
         break

     ll_type, factor = snakemake.wildcards.ll[0], snakemake.wildcards.ll[1:]
@@ -345,10 +345,10 @@ if __name__ == "__main__":

     set_line_nom_max(
         n,
-        s_nom_max_set=snakemake.config["lines"]["s_nom_max"],
-        p_nom_max_set=snakemake.config["links"]["p_nom_max"],
-        s_nom_max_ext=snakemake.config["lines"]["max_extension"],
-        p_nom_max_ext=snakemake.config["links"]["max_extension"],
+        s_nom_max_set=snakemake.params.lines.get("s_nom_max", np.inf),
+        p_nom_max_set=snakemake.params.links.get("p_nom_max", np.inf),
+        s_nom_max_ext=snakemake.params.lines.get("max_extension", np.inf),
+        p_nom_max_ext=snakemake.params.links.get("max_extension", np.inf),
     )

     if "ATK" in opts:
diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py
index 3009bfd9..c8b2002e 100644
--- a/scripts/prepare_sector_network.py
+++ b/scripts/prepare_sector_network.py
@@ -200,12 +200,12 @@ def co2_emissions_year(
     """
     Calculate CO2 emissions in one specific year (e.g. 1990 or 2018).
""" - emissions_scope = snakemake.config["energy"]["emissions"] + emissions_scope = snakemake.params.energy["emissions"] eea_co2 = build_eea_co2(snakemake.input.co2, year, emissions_scope) # TODO: read Eurostat data from year > 2014 # this only affects the estimation of CO2 emissions for BA, RS, AL, ME, MK - report_year = snakemake.config["energy"]["eurostat_report_year"] + report_year = snakemake.params.energy["eurostat_report_year"] if year > 2014: eurostat_co2 = build_eurostat_co2( input_eurostat, countries, report_year, year=2014 @@ -241,7 +241,7 @@ def build_carbon_budget(o, input_eurostat, fn, emissions_scope, report_year): carbon_budget = float(o[o.find("cb") + 2 : o.find("ex")]) r = float(o[o.find("ex") + 2 :]) - countries = snakemake.config["countries"] + countries = snakemake.params.countries e_1990 = co2_emissions_year( countries, input_eurostat, opts, emissions_scope, report_year, year=1990 @@ -252,7 +252,7 @@ def build_carbon_budget(o, input_eurostat, fn, emissions_scope, report_year): countries, input_eurostat, opts, emissions_scope, report_year, year=2018 ) - planning_horizons = snakemake.config["scenario"]["planning_horizons"] + planning_horizons = snakemake.params.planning_horizons t_0 = planning_horizons[0] if "be" in o: @@ -391,7 +391,7 @@ def update_wind_solar_costs(n, costs): with xr.open_dataset(profile) as ds: underwater_fraction = ds["underwater_fraction"].to_pandas() connection_cost = ( - snakemake.config["lines"]["length_factor"] + snakemake.params.length_factor * ds["average_distance"].to_pandas() * ( underwater_fraction @@ -483,8 +483,8 @@ def remove_elec_base_techs(n): batteries and H2) from base electricity-only network, since they're added here differently using links. """ - for c in n.iterate_components(snakemake.config["pypsa_eur"]): - to_keep = snakemake.config["pypsa_eur"][c.name] + for c in n.iterate_components(snakemake.params.pypsa_eur): + to_keep = snakemake.params.pypsa_eur[c.name] to_remove = pd.Index(c.df.carrier.unique()).symmetric_difference(to_keep) if to_remove.empty: continue @@ -674,7 +674,7 @@ def add_dac(n, costs): def add_co2limit(n, nyears=1.0, limit=0.0): logger.info(f"Adding CO2 budget limit as per unit of 1990 levels of {limit}") - countries = snakemake.config["countries"] + countries = snakemake.params.countries sectors = emission_sectors_from_opts(opts) @@ -727,7 +727,7 @@ def cycling_shift(df, steps=1): return df -def prepare_costs(cost_file, config, nyears): +def prepare_costs(cost_file, params, nyears): # set all asset costs and other parameters costs = pd.read_csv(cost_file, index_col=[0, 1]).sort_index() @@ -739,7 +739,7 @@ def prepare_costs(cost_file, config, nyears): costs.loc[:, "value"].unstack(level=1).groupby("technology").sum(min_count=1) ) - costs = costs.fillna(config["fill_values"]) + costs = costs.fillna(params["fill_values"]) def annuity_factor(v): return calculate_annuity(v["lifetime"], v["discount rate"]) + v["FOM"] / 100 @@ -787,7 +787,7 @@ def add_ammonia(n, costs): nodes = pop_layout.index - cf_industry = snakemake.config["industry"] + cf_industry = snakemake.params.industry n.add("Carrier", "NH3") @@ -1102,10 +1102,14 @@ def add_storage_and_grids(n, costs): lifetime=costs.at["OCGT", "lifetime"], ) - cavern_types = snakemake.config["sector"]["hydrogen_underground_storage_locations"] + cavern_types = snakemake.params.sector["hydrogen_underground_storage_locations"] h2_caverns = pd.read_csv(snakemake.input.h2_cavern, index_col=0) - if not h2_caverns.empty and options["hydrogen_underground_storage"]: + if ( + not 
h2_caverns.empty + and options["hydrogen_underground_storage"] + and set(cavern_types).intersection(h2_caverns.columns) + ): h2_caverns = h2_caverns[cavern_types].sum(axis=1) # only use sites with at least 2 TWh potential @@ -3269,7 +3273,7 @@ if __name__ == "__main__": update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts) - options = snakemake.config["sector"] + options = snakemake.params.sector opts = snakemake.wildcards.sector_opts.split("-") @@ -3284,7 +3288,7 @@ if __name__ == "__main__": costs = prepare_costs( snakemake.input.costs, - snakemake.config["costs"], + snakemake.params.costs, nyears, ) @@ -3296,10 +3300,10 @@ if __name__ == "__main__": spatial = define_spatial(pop_layout.index, options) - if snakemake.config["foresight"] == "myopic": + if snakemake.params.foresight == "myopic": add_lifetime_wind_solar(n, costs) - conventional = snakemake.config["existing_capacities"]["conventional_carriers"] + conventional = snakemake.params.conventional_carriers for carrier in conventional: add_carrier_buses(n, carrier) @@ -3368,15 +3372,15 @@ if __name__ == "__main__": n = set_temporal_aggregation(n, opts, solver_name) limit_type = "config" - limit = get(snakemake.config["co2_budget"], investment_year) + limit = get(snakemake.params.co2_budget, investment_year) for o in opts: if "cb" not in o: continue limit_type = "carbon budget" fn = "results/" + snakemake.params.RDIR + "/csvs/carbon_budget_distribution.csv" if not os.path.exists(fn): - emissions_scope = snakemake.config["energy"]["emissions"] - report_year = snakemake.config["energy"]["eurostat_report_year"] + emissions_scope = snakemake.params.emissions_scope + report_year = snakemake.params.eurostat_report_year build_carbon_budget( o, snakemake.input.eurostat, fn, emissions_scope, report_year ) @@ -3411,8 +3415,8 @@ if __name__ == "__main__": if options["electricity_grid_connection"]: add_electricity_grid_connection(n, costs) - first_year_myopic = (snakemake.config["foresight"] == "myopic") and ( - snakemake.config["scenario"]["planning_horizons"][0] == investment_year + first_year_myopic = (snakemake.params.foresight == "myopic") and ( + snakemake.params.planning_horizons[0] == investment_year ) if options.get("cluster_heat_buses", False) and not first_year_myopic: diff --git a/scripts/retrieve_databundle.py b/scripts/retrieve_databundle.py index 0c6a7feb..c7053bed 100644 --- a/scripts/retrieve_databundle.py +++ b/scripts/retrieve_databundle.py @@ -53,14 +53,13 @@ if __name__ == "__main__": snakemake ) # TODO Make logging compatible with progressbar (see PR #102) - if snakemake.config["tutorial"]: + if snakemake.params.tutorial: url = "https://zenodo.org/record/3517921/files/pypsa-eur-tutorial-data-bundle.tar.xz" else: url = "https://zenodo.org/record/3517935/files/pypsa-eur-data-bundle.tar.xz" - # Save locations tarball_fn = Path(f"{rootpath}/bundle.tar.xz") - to_fn = Path(f"{rootpath}/data") + to_fn = Path(rootpath) / Path(snakemake.output[0]).parent.parent logger.info(f"Downloading databundle from '{url}'.") disable_progress = snakemake.config["run"].get("disable_progressbar", False) diff --git a/scripts/retrieve_gas_infrastructure_data.py b/scripts/retrieve_gas_infrastructure_data.py index dda7bd8c..42b726db 100644 --- a/scripts/retrieve_gas_infrastructure_data.py +++ b/scripts/retrieve_gas_infrastructure_data.py @@ -29,7 +29,7 @@ if __name__ == "__main__": # Save locations zip_fn = Path(f"{rootpath}/IGGIELGN.zip") - to_fn = Path(f"{rootpath}/data/gas_network/scigrid-gas") + to_fn = Path(rootpath) / 
Path(snakemake.output[0]).parent.parent logger.info(f"Downloading databundle from '{url}'.") disable_progress = snakemake.config["run"].get("disable_progressbar", False) diff --git a/scripts/retrieve_sector_databundle.py b/scripts/retrieve_sector_databundle.py index 97426ab2..0d172c8d 100644 --- a/scripts/retrieve_sector_databundle.py +++ b/scripts/retrieve_sector_databundle.py @@ -10,23 +10,25 @@ import logging logger = logging.getLogger(__name__) -import os -import sys import tarfile from pathlib import Path -# Add pypsa-eur scripts to path for import of _helpers -sys.path.insert(0, os.getcwd() + "/../pypsa-eur/scripts") - from _helpers import configure_logging, progress_retrieve if __name__ == "__main__": + if "snakemake" not in globals(): + from _helpers import mock_snakemake + + snakemake = mock_snakemake("retrieve_databundle") + rootpath = ".." + else: + rootpath = "." configure_logging(snakemake) url = "https://zenodo.org/record/5824485/files/pypsa-eur-sec-data-bundle.tar.gz" - tarball_fn = Path("sector-bundle.tar.gz") - to_fn = Path("data") + tarball_fn = Path(f"{rootpath}/sector-bundle.tar.gz") + to_fn = Path(rootpath) / Path(snakemake.output[0]).parent.parent logger.info(f"Downloading databundle from '{url}'.") disable_progress = snakemake.config["run"].get("disable_progressbar", False) diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index 2be8c36a..79161760 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -149,17 +149,17 @@ def simplify_network_to_380(n): return n, trafo_map -def _prepare_connection_costs_per_link(n, costs, config): +def _prepare_connection_costs_per_link(n, costs, renewable_carriers, length_factor): if n.links.empty: return {} connection_costs_per_link = {} - for tech in config["renewable"]: + for tech in renewable_carriers: if tech.startswith("offwind"): connection_costs_per_link[tech] = ( n.links.length - * config["lines"]["length_factor"] + * length_factor * ( n.links.underwater_fraction * costs.at[tech + "-connection-submarine", "capital_cost"] @@ -172,10 +172,18 @@ def _prepare_connection_costs_per_link(n, costs, config): def _compute_connection_costs_to_bus( - n, busmap, costs, config, connection_costs_per_link=None, buses=None + n, + busmap, + costs, + renewable_carriers, + length_factor, + connection_costs_per_link=None, + buses=None, ): if connection_costs_per_link is None: - connection_costs_per_link = _prepare_connection_costs_per_link(n, costs, config) + connection_costs_per_link = _prepare_connection_costs_per_link( + n, costs, renewable_carriers, length_factor + ) if buses is None: buses = busmap.index[busmap.index != busmap.values] @@ -265,7 +273,16 @@ def _aggregate_and_move_components( n.mremove(c, df.index[df.bus0.isin(buses_to_del) | df.bus1.isin(buses_to_del)]) -def simplify_links(n, costs, config, output, aggregation_strategies=dict()): +def simplify_links( + n, + costs, + renewables, + length_factor, + p_max_pu, + exclude_carriers, + output, + aggregation_strategies=dict(), +): ## Complex multi-node links are folded into end-points logger.info("Simplifying connected link components") @@ -315,7 +332,9 @@ def simplify_links(n, costs, config, output, aggregation_strategies=dict()): busmap = n.buses.index.to_series() - connection_costs_per_link = _prepare_connection_costs_per_link(n, costs, config) + connection_costs_per_link = _prepare_connection_costs_per_link( + n, costs, renewables, length_factor + ) connection_costs_to_bus = pd.DataFrame( 0.0, index=n.buses.index, 
columns=list(connection_costs_per_link) ) @@ -333,12 +352,17 @@ def simplify_links(n, costs, config, output, aggregation_strategies=dict()): ) busmap.loc[buses] = b[np.r_[0, m.argmin(axis=0), 1]] connection_costs_to_bus.loc[buses] += _compute_connection_costs_to_bus( - n, busmap, costs, config, connection_costs_per_link, buses + n, + busmap, + costs, + renewables, + length_factor, + connection_costs_per_link, + buses, ) all_links = [i for _, i in sum(links, [])] - p_max_pu = config["links"].get("p_max_pu", 1.0) lengths = n.links.loc[all_links, "length"] name = lengths.idxmax() + "+{}".format(len(links) - 1) params = dict( @@ -377,10 +401,6 @@ def simplify_links(n, costs, config, output, aggregation_strategies=dict()): logger.debug("Collecting all components using the busmap") - exclude_carriers = config["clustering"]["simplify_network"].get( - "exclude_carriers", [] - ) - _aggregate_and_move_components( n, busmap, @@ -392,19 +412,23 @@ def simplify_links(n, costs, config, output, aggregation_strategies=dict()): return n, busmap -def remove_stubs(n, costs, config, output, aggregation_strategies=dict()): +def remove_stubs( + n, + costs, + renewable_carriers, + length_factor, + simplify_network, + output, + aggregation_strategies=dict(), +): logger.info("Removing stubs") - across_borders = config["clustering"]["simplify_network"].get( - "remove_stubs_across_borders", True - ) + across_borders = simplify_network["remove_stubs_across_borders"] matching_attrs = [] if across_borders else ["country"] busmap = busmap_by_stubs(n, matching_attrs) - connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap, costs, config) - - exclude_carriers = config["clustering"]["simplify_network"].get( - "exclude_carriers", [] + connection_costs_to_bus = _compute_connection_costs_to_bus( + n, busmap, costs, renewable_carriers, length_factor ) _aggregate_and_move_components( @@ -413,7 +437,7 @@ def remove_stubs(n, costs, config, output, aggregation_strategies=dict()): connection_costs_to_bus, output, aggregation_strategies=aggregation_strategies, - exclude_carriers=exclude_carriers, + exclude_carriers=simplify_network["exclude_carriers"], ) return n, busmap @@ -473,26 +497,22 @@ def aggregate_to_substations(n, aggregation_strategies=dict(), buses_i=None): def cluster( - n, n_clusters, config, algorithm="hac", feature=None, aggregation_strategies=dict() + n, + n_clusters, + focus_weights, + solver_name, + algorithm="hac", + feature=None, + aggregation_strategies=dict(), ): logger.info(f"Clustering to {n_clusters} buses") - focus_weights = config.get("focus_weights", None) - - renewable_carriers = pd.Index( - [ - tech - for tech in n.generators.carrier.unique() - if tech.split("-", 2)[0] in config["renewable"] - ] - ) - clustering = clustering_for_n_clusters( n, n_clusters, custom_busmap=False, aggregation_strategies=aggregation_strategies, - solver_name=config["solving"]["solver"]["name"], + solver_name=solver_name, algorithm=algorithm, feature=feature, focus_weights=focus_weights, @@ -508,67 +528,69 @@ if __name__ == "__main__": snakemake = mock_snakemake("simplify_network", simpl="") configure_logging(snakemake) - n = pypsa.Network(snakemake.input.network) + params = snakemake.params + solver_name = snakemake.config["solving"]["solver"]["name"] + + n = pypsa.Network(snakemake.input.network) + Nyears = n.snapshot_weightings.objective.sum() / 8760 - aggregation_strategies = snakemake.config["clustering"].get( - "aggregation_strategies", {} - ) # translate str entries of aggregation_strategies to pd.Series 
functions: aggregation_strategies = { - p: {k: getattr(pd.Series, v) for k, v in aggregation_strategies[p].items()} - for p in aggregation_strategies.keys() + p: { + k: getattr(pd.Series, v) + for k, v in params.aggregation_strategies[p].items() + } + for p in params.aggregation_strategies.keys() } n, trafo_map = simplify_network_to_380(n) - Nyears = n.snapshot_weightings.objective.sum() / 8760 - technology_costs = load_costs( snakemake.input.tech_costs, - snakemake.config["costs"], - snakemake.config["electricity"], + params.costs, + params.max_hours, Nyears, ) n, simplify_links_map = simplify_links( - n, technology_costs, snakemake.config, snakemake.output, aggregation_strategies + n, + technology_costs, + params.renewable_carriers, + params.length_factor, + params.p_max_pu, + params.simplify_network["exclude_carriers"], + snakemake.output, + aggregation_strategies, ) busmaps = [trafo_map, simplify_links_map] - cluster_config = snakemake.config["clustering"]["simplify_network"] - if cluster_config.get("remove_stubs", True): + if params.simplify_network["remove_stubs"]: n, stub_map = remove_stubs( n, technology_costs, - snakemake.config, + params.renewable_carriers, + params.length_factor, + params.simplify_network, snakemake.output, aggregation_strategies=aggregation_strategies, ) busmaps.append(stub_map) - if cluster_config.get("to_substations", False): + if params.simplify_network["to_substations"]: n, substation_map = aggregate_to_substations(n, aggregation_strategies) busmaps.append(substation_map) # treatment of outliers (nodes without a profile for considered carrier): # all nodes that have no profile of the given carrier are being aggregated to closest neighbor - if ( - snakemake.config.get("clustering", {}) - .get("cluster_network", {}) - .get("algorithm", "hac") - == "hac" - or cluster_config.get("algorithm", "hac") == "hac" - ): - carriers = ( - cluster_config.get("feature", "solar+onwind-time").split("-")[0].split("+") - ) + if params.simplify_network["algorithm"] == "hac": + carriers = params.simplify_network["feature"].split("-")[0].split("+") for carrier in carriers: buses_i = list( set(n.buses.index) - set(n.generators.query("carrier == @carrier").bus) ) logger.info( - f"clustering preparaton (hac): aggregating {len(buses_i)} buses of type {carrier}." + f"clustering preparation (hac): aggregating {len(buses_i)} buses of type {carrier}." 
)
            n, busmap_hac = aggregate_to_substations(n, aggregation_strategies, buses_i)
            busmaps.append(busmap_hac)
@@ -577,9 +599,10 @@ if __name__ == "__main__":
         n, cluster_map = cluster(
             n,
             int(snakemake.wildcards.simpl),
-            snakemake.config,
-            cluster_config.get("algorithm", "hac"),
-            cluster_config.get("feature", None),
+            params.focus_weights,
+            solver_name,
+            params.simplify_network["algorithm"],
+            params.simplify_network["feature"],
             aggregation_strategies,
         )
         busmaps.append(cluster_map)
diff --git a/scripts/solve_network.py b/scripts/solve_network.py
index 8f4fdf5b..d80e38a0 100644
--- a/scripts/solve_network.py
+++ b/scripts/solve_network.py
@@ -44,14 +44,14 @@ pypsa.pf.logger.setLevel(logging.WARNING)
 from pypsa.descriptors import get_switchable_as_dense as get_as_dense


-def add_land_use_constraint(n, config):
+def add_land_use_constraint(n, planning_horizons, config):
     if "m" in snakemake.wildcards.clusters:
-        _add_land_use_constraint_m(n, config)
+        _add_land_use_constraint_m(n, planning_horizons, config)
     else:
-        _add_land_use_constraint(n, config)
+        _add_land_use_constraint(n)


-def _add_land_use_constraint(n, config):
+def _add_land_use_constraint(n):
     # warning: this will miss existing offwind which is not classed AC-DC and has carrier 'offwind'

     for carrier in ["solar", "onwind", "offwind-ac", "offwind-dc"]:
@@ -80,10 +80,10 @@ def _add_land_use_constraint(n, config):
     n.generators.p_nom_max.clip(lower=0, inplace=True)


-def _add_land_use_constraint_m(n, config):
+def _add_land_use_constraint_m(n, planning_horizons, config):
     # if generators clustering is lower than network clustering, land_use accounting is at generators clusters

-    planning_horizons = config["scenario"]["planning_horizons"]
+    # planning_horizons is now passed in as a function argument
     grouping_years = config["existing_capacities"]["grouping_years"]
     current_horizon = snakemake.wildcards.planning_horizons

@@ -141,7 +141,14 @@ def add_co2_sequestration_limit(n, limit=200):
     )


-def prepare_network(n, solve_opts=None, config=None):
+def prepare_network(
+    n,
+    solve_opts=None,
+    config=None,
+    foresight=None,
+    planning_horizons=None,
+    co2_sequestration_potential=None,
+):
     if "clip_p_max_pu" in solve_opts:
         for df in (
             n.generators_t.p_max_pu,
@@ -191,11 +198,11 @@ def prepare_network(n, solve_opts=None, config=None):
         n.set_snapshots(n.snapshots[:nhours])
         n.snapshot_weightings[:] = 8760.0 / nhours

-    if config["foresight"] == "myopic":
-        add_land_use_constraint(n, config)
+    if foresight == "myopic":
+        add_land_use_constraint(n, planning_horizons, config)

     if n.stores.carrier.eq("co2 stored").any():
-        limit = config["sector"].get("co2_sequestration_potential", 200)
+        limit = co2_sequestration_potential
         add_co2_sequestration_limit(n, limit=limit)

     return n
@@ -590,16 +597,15 @@ def extra_functionality(n, snapshots):
         add_pipe_retrofit_constraint(n)


-def solve_network(n, config, opts="", **kwargs):
-    set_of_options = config["solving"]["solver"]["options"]
-    solver_options = (
-        config["solving"]["solver_options"][set_of_options] if set_of_options else {}
-    )
-    solver_name = config["solving"]["solver"]["name"]
-    cf_solving = config["solving"]["options"]
+def solve_network(n, config, solving, opts="", **kwargs):
+    set_of_options = solving["solver"]["options"]
+    solver_options = solving["solver_options"][set_of_options] if set_of_options else {}
+    solver_name = solving["solver"]["name"]
+    cf_solving = solving["options"]
     track_iterations = cf_solving.get("track_iterations", False)
     min_iterations = cf_solving.get("min_iterations", 4)
     max_iterations =
cf_solving.get("max_iterations", 6) + transmission_losses = cf_solving.get("transmission_losses", 0) # add to network for extra_functionality n.config = config @@ -613,6 +619,7 @@ def solve_network(n, config, opts="", **kwargs): if skip_iterations: status, condition = n.optimize( solver_name=solver_name, + transmission_losses=transmission_losses, extra_functionality=extra_functionality, **solver_options, **kwargs, @@ -623,6 +630,7 @@ def solve_network(n, config, opts="", **kwargs): track_iterations=track_iterations, min_iterations=min_iterations, max_iterations=max_iterations, + transmission_losses=transmission_losses, extra_functionality=extra_functionality, **solver_options, **kwargs, @@ -662,7 +670,7 @@ if __name__ == "__main__": if "sector_opts" in snakemake.wildcards.keys(): opts += "-" + snakemake.wildcards.sector_opts opts = [o for o in opts.split("-") if o != ""] - solve_opts = snakemake.config["solving"]["options"] + solve_opts = snakemake.params.solving["options"] np.random.seed(solve_opts.get("seed", 123)) @@ -672,10 +680,21 @@ if __name__ == "__main__": else: n = pypsa.Network(snakemake.input.network) - n = prepare_network(n, solve_opts, config=snakemake.config) + n = prepare_network( + n, + solve_opts, + config=snakemake.config, + foresight=snakemake.params.foresight, + planning_horizons=snakemake.params.planning_horizons, + co2_sequestration_potential=snakemake.params["co2_sequestration_potential"], + ) n = solve_network( - n, config=snakemake.config, opts=opts, log_fn=snakemake.log.solver + n, + config=snakemake.config, + solving=snakemake.params.solving, + opts=opts, + log_fn=snakemake.log.solver, ) n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) diff --git a/scripts/solve_operations_network.py b/scripts/solve_operations_network.py index 27520485..37e853e5 100644 --- a/scripts/solve_operations_network.py +++ b/scripts/solve_operations_network.py @@ -41,7 +41,7 @@ if __name__ == "__main__": opts = (snakemake.wildcards.opts + "-" + snakemake.wildcards.sector_opts).split("-") opts = [o for o in opts if o != ""] - solve_opts = snakemake.config["solving"]["options"] + solve_opts = snakemake.params.options np.random.seed(solve_opts.get("seed", 123))