From 527c77824c8eba0695327fb60b9cff23df0d3744 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 5 Jan 2023 16:41:49 +0100 Subject: [PATCH 001/122] allow carbon capture in biogas upgrading (closes #49) --- scripts/prepare_sector_network.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index b6c052be..540b3f23 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -1874,10 +1874,12 @@ def add_biomass(n, costs): bus0=spatial.gas.biogas, bus1=spatial.gas.nodes, bus2="co2 atmosphere", + bus3="co2 stored", carrier="biogas to gas", capital_cost=costs.loc["biogas upgrading", "fixed"], marginal_cost=costs.loc["biogas upgrading", "VOM"], efficiency2=-costs.at['gas', 'CO2 intensity'], + efficiency3=costs.at["biogas", "CO2 stored"], p_nom_extendable=True ) From 54346b071e07fd7e4f9eed00698a5d3bfc62a678 Mon Sep 17 00:00:00 2001 From: euronion <42553970+euronion@users.noreply.github.com> Date: Sat, 22 Apr 2023 20:33:38 +0200 Subject: [PATCH 002/122] Address pandas deprecation of date_parser for pandas.read_csv --- doc/release_notes.rst | 2 ++ envs/environment.yaml | 2 +- scripts/build_electricity_demand.py | 5 +---- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index f859646b..7f4dac1c 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -17,6 +17,8 @@ Upcoming Release * Renamed script file from PyPSA-EUR ``build_load_data`` to ``build_electricity_demand``. +* Adressed deprecation warnings for ``pandas=2.0``. ``pandas=2.0`` is now minimum requirement. 
+ PyPSA-Eur 0.8.0 (18th March 2023) ================================= diff --git a/envs/environment.yaml b/envs/environment.yaml index 0a9891a5..baa84f69 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -26,7 +26,7 @@ dependencies: - lxml - powerplantmatching>=0.5.5 - numpy<1.24 -- pandas>=1.4 +- pandas>=2.0 - geopandas>=0.11.0 - xarray - rioxarray diff --git a/scripts/build_electricity_demand.py b/scripts/build_electricity_demand.py index b86b4a5f..790a958f 100755 --- a/scripts/build_electricity_demand.py +++ b/scripts/build_electricity_demand.py @@ -80,11 +80,8 @@ def load_timeseries(fn, years, countries, powerstatistics=True): def rename(s): return s[: -len(pattern)] - def date_parser(x): - return dateutil.parser.parse(x, ignoretz=True) - return ( - pd.read_csv(fn, index_col=0, parse_dates=[0], date_parser=date_parser) + pd.read_csv(fn, index_col=0, parse_dates=[0], date_format="%Y-%m-%dT%H:%M:%SZ") .filter(like=pattern) .rename(columns=rename) .dropna(how="all", axis=0) From f4ff3dffc2567dd6cef17f4459bb9a5ef3818d64 Mon Sep 17 00:00:00 2001 From: Fabian Hofmann Date: Mon, 24 Apr 2023 13:09:04 +0200 Subject: [PATCH 003/122] Update doc/release_notes.rst --- doc/release_notes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 7f4dac1c..120e8fbd 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -17,7 +17,7 @@ Upcoming Release * Renamed script file from PyPSA-EUR ``build_load_data`` to ``build_electricity_demand``. -* Adressed deprecation warnings for ``pandas=2.0``. ``pandas=2.0`` is now minimum requirement. +* Addressed deprecation warnings for ``pandas=2.0``. ``pandas=2.0`` is now minimum requirement. 
PyPSA-Eur 0.8.0 (18th March 2023) ================================= From cc162a9e028fb7a2bac5289e27b90ab57e46f10b Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 31 Jul 2023 17:09:59 +0200 Subject: [PATCH 004/122] option for losses on bidirectional links via link splitting --- config/config.default.yaml | 5 ++++ scripts/prepare_sector_network.py | 40 +++++++++++++++++++++++++++++++ scripts/solve_network.py | 22 +++++++++++++++++ 3 files changed, 67 insertions(+) diff --git a/config/config.default.yaml b/config/config.default.yaml index b162b75d..4413b8f5 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -478,6 +478,11 @@ sector: electricity_distribution_grid: true electricity_distribution_grid_cost_factor: 1.0 electricity_grid_connection: true + transmission_losses: + # per 1000 km + DC: 0 + H2 pipeline: 0 + gas pipeline: 0 H2_network: true gas_network: false H2_retrofit: false diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 11406bff..8719c281 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3280,6 +3280,34 @@ def set_temporal_aggregation(n, opts, solver_name): return n +def lossy_bidirectional_links(n, carrier, losses_per_thousand_km=0.0): + "Split bidirectional links into two unidirectional links to include transmission losses." + + carrier_i = n.links.query("carrier == @carrier").index + + if not losses_per_thousand_km or carrier_i.empty: + return + + logger.info( + f"Specified losses for {carrier} transmission. Splitting bidirectional links." 
+ ) + + carrier_i = n.links.query("carrier == @carrier").index + n.links.loc[carrier_i, "p_min_pu"] = 0 + n.links["reversed"] = False + n.links.loc[carrier_i, "efficiency"] = ( + 1 - n.links.loc[carrier_i, "length"] * losses_per_thousand_km / 1e3 + ) + rev_links = ( + n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) + ) + rev_links.capital_cost = 0 + rev_links.reversed = True + rev_links.index = rev_links.index.map(lambda x: x + "-reversed") + + n.links = pd.concat([n.links, rev_links], sort=False) + + if __name__ == "__main__": if "snakemake" not in globals(): from _helpers import mock_snakemake @@ -3446,6 +3474,18 @@ if __name__ == "__main__": if options["electricity_grid_connection"]: add_electricity_grid_connection(n, costs) + for k, v in options["transmission_losses"].items(): + lossy_bidirectional_links(n, k, v) + + # Workaround: Remove lines with conflicting (and unrealistic) properties + # cf. https://github.com/PyPSA/pypsa-eur/issues/444 + if snakemake.config["solving"]["options"]["transmission_losses"]: + idx = n.lines.query("num_parallel == 0").index + logger.info( + f"Removing {len(idx)} line(s) with properties conflicting with transmission losses functionality." 
+ ) + n.mremove("Line", idx) + first_year_myopic = (snakemake.params.foresight == "myopic") and ( snakemake.params.planning_horizons[0] == investment_year ) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 836544b4..a68ca074 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -494,6 +494,27 @@ def add_battery_constraints(n): n.model.add_constraints(lhs == 0, name="Link-charger_ratio") +def add_lossy_bidirectional_link_constraints(n): + if not n.links.p_nom_extendable.any() or not "reversed" in n.links.columns: + return + + carriers = n.links.loc[n.links.reversed, "carrier"].unique() + + backward_i = n.links.query( + "carrier in @carriers and reversed and p_nom_extendable" + ).index + forward_i = n.links.query( + "carrier in @carriers and ~reversed and p_nom_extendable" + ).index + + assert len(forward_i) == len(backward_i) + + lhs = n.model["Link-p_nom"].loc[backward_i] + rhs = n.model["Link-p_nom"].loc[forward_i] + + n.model.add_constraints(lhs == rhs, name="Link-bidirectional_sync") + + def add_chp_constraints(n): electric = ( n.links.index.str.contains("urban central") @@ -593,6 +614,7 @@ def extra_functionality(n, snapshots): if "EQ" in o: add_EQ_constraints(n, o) add_battery_constraints(n) + add_lossy_bidirectional_link_constraints(n) add_pipe_retrofit_constraint(n) From e4eff27e508406055284ba77f4727df7e2dcbc6c Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 3 Aug 2023 13:09:12 +0200 Subject: [PATCH 005/122] fix capacity synchronisation between forward and backward lossy links --- scripts/prepare_sector_network.py | 4 ++-- scripts/solve_network.py | 6 +----- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 8719c281..b8eb8bc1 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3294,7 +3294,6 @@ def lossy_bidirectional_links(n, carrier, losses_per_thousand_km=0.0): carrier_i = 
n.links.query("carrier == @carrier").index n.links.loc[carrier_i, "p_min_pu"] = 0 - n.links["reversed"] = False n.links.loc[carrier_i, "efficiency"] = ( 1 - n.links.loc[carrier_i, "length"] * losses_per_thousand_km / 1e3 ) @@ -3302,10 +3301,11 @@ def lossy_bidirectional_links(n, carrier, losses_per_thousand_km=0.0): n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) ) rev_links.capital_cost = 0 - rev_links.reversed = True + rev_links["reversed"] = True rev_links.index = rev_links.index.map(lambda x: x + "-reversed") n.links = pd.concat([n.links, rev_links], sort=False) + n.links["reversed"] = n.links["reversed"].fillna(False) if __name__ == "__main__": diff --git a/scripts/solve_network.py b/scripts/solve_network.py index a68ca074..5e8c0356 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -500,14 +500,10 @@ def add_lossy_bidirectional_link_constraints(n): carriers = n.links.loc[n.links.reversed, "carrier"].unique() - backward_i = n.links.query( - "carrier in @carriers and reversed and p_nom_extendable" - ).index forward_i = n.links.query( "carrier in @carriers and ~reversed and p_nom_extendable" ).index - - assert len(forward_i) == len(backward_i) + backward_i = forward_i + "-reversed" lhs = n.model["Link-p_nom"].loc[backward_i] rhs = n.model["Link-p_nom"].loc[forward_i] From d7cb13246b807e7907c49ad1214559be92d2f363 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 7 Aug 2023 14:31:19 +0200 Subject: [PATCH 006/122] link losses: exponential rather than linear model --- config/config.default.yaml | 13 ++++++++----- scripts/prepare_sector_network.py | 15 ++++++++++----- 2 files changed, 18 insertions(+), 10 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index 4413b8f5..1b0a2260 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -478,11 +478,14 @@ sector: electricity_distribution_grid: true electricity_distribution_grid_cost_factor: 1.0 
electricity_grid_connection: true - transmission_losses: - # per 1000 km - DC: 0 - H2 pipeline: 0 - gas pipeline: 0 + transmission_efficiency: + DC: + efficiency_static: 0.98 + efficiency_per_1000km: 0.977 + H2 pipeline: + efficiency_per_1000km: 0.979 + gas pipeline: + efficiency_per_1000km: 0.977 H2_network: true gas_network: false H2_retrofit: false diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index b8eb8bc1..48f5f41f 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3280,22 +3280,27 @@ def set_temporal_aggregation(n, opts, solver_name): return n -def lossy_bidirectional_links(n, carrier, losses_per_thousand_km=0.0): +def lossy_bidirectional_links(n, carrier, efficiencies={}): "Split bidirectional links into two unidirectional links to include transmission losses." carrier_i = n.links.query("carrier == @carrier").index - if not losses_per_thousand_km or carrier_i.empty: + if not any(v != 1. for v in efficiencies.values()) or carrier_i.empty: return + efficiency_static = efficiencies.get("efficiency_static", 1) + efficiency_per_1000km = efficiencies.get("efficiency_per_1000km", 1) + logger.info( - f"Specified losses for {carrier} transmission. Splitting bidirectional links." + f"Specified losses for {carrier} transmission" + f"(static: {efficiency_static}, per 1000km: {efficiency_per_1000km})." + "Splitting bidirectional links." 
) carrier_i = n.links.query("carrier == @carrier").index n.links.loc[carrier_i, "p_min_pu"] = 0 n.links.loc[carrier_i, "efficiency"] = ( - 1 - n.links.loc[carrier_i, "length"] * losses_per_thousand_km / 1e3 + efficiency_static * efficiency_per_1000km ** (n.links.loc[carrier_i, "length"] / 1e3) ) rev_links = ( n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) @@ -3474,7 +3479,7 @@ if __name__ == "__main__": if options["electricity_grid_connection"]: add_electricity_grid_connection(n, costs) - for k, v in options["transmission_losses"].items(): + for k, v in options["transmission_efficiency"].items(): lossy_bidirectional_links(n, k, v) # Workaround: Remove lines with conflicting (and unrealistic) properties From 118cabe8a60b238ef11aafc980a406011ea9f0fb Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 8 Aug 2023 17:56:22 +0200 Subject: [PATCH 007/122] add option to consider compression losses in pipelines as electricity demand --- config/config.default.yaml | 6 ++++-- scripts/prepare_sector_network.py | 11 ++++++++++- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index 1b0a2260..81a26a0b 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -483,9 +483,11 @@ sector: efficiency_static: 0.98 efficiency_per_1000km: 0.977 H2 pipeline: - efficiency_per_1000km: 0.979 + efficiency_per_1000km: 1 # 0.979 + compression_per_1000km: 0.019 gas pipeline: - efficiency_per_1000km: 0.977 + efficiency_per_1000km: 1 #0.977 + compression_per_1000km: 0.01 H2_network: true gas_network: false H2_retrofit: false diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 48f5f41f..7b58329c 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3285,11 +3285,16 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): carrier_i = n.links.query("carrier == @carrier").index +<<<<<<< HEAD if not 
any(v != 1. for v in efficiencies.values()) or carrier_i.empty: +======= + if not any((v != 1.0) or (v >= 0) for v in efficiencies.values()) or carrier_i.empty: +>>>>>>> 5822adb0 (add option to consider compression losses in pipelines as electricity demand) return efficiency_static = efficiencies.get("efficiency_static", 1) efficiency_per_1000km = efficiencies.get("efficiency_per_1000km", 1) + compression_per_1000km = efficiencies.get("compression_per_1000km", 0) logger.info( f"Specified losses for {carrier} transmission" @@ -3297,7 +3302,6 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): "Splitting bidirectional links." ) - carrier_i = n.links.query("carrier == @carrier").index n.links.loc[carrier_i, "p_min_pu"] = 0 n.links.loc[carrier_i, "efficiency"] = ( efficiency_static * efficiency_per_1000km ** (n.links.loc[carrier_i, "length"] / 1e3) @@ -3312,6 +3316,11 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): n.links = pd.concat([n.links, rev_links], sort=False) n.links["reversed"] = n.links["reversed"].fillna(False) + # do compression losses after concatenation to take electricity consumption at bus0 in either direction + carrier_i = n.links.query("carrier == @carrier").index + if compression_per_1000km > 0: + n.links.loc[carrier_i, "bus2"] = n.links.loc[carrier_i, "bus0"].map(n.buses.location) # electricity + n.links.loc[carrier_i, "efficiency2"] = - compression_per_1000km * n.links.loc[carrier_i, "length"] / 1e3 if __name__ == "__main__": if "snakemake" not in globals(): From 592bc4eee7f57ef93e104f266595cb6d8ded754d Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 12 Sep 2023 17:28:42 +0200 Subject: [PATCH 008/122] cherry-pick --- scripts/prepare_sector_network.py | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 7b58329c..de02095d 100644 --- a/scripts/prepare_sector_network.py +++ 
b/scripts/prepare_sector_network.py @@ -3285,11 +3285,10 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): carrier_i = n.links.query("carrier == @carrier").index -<<<<<<< HEAD - if not any(v != 1. for v in efficiencies.values()) or carrier_i.empty: -======= - if not any((v != 1.0) or (v >= 0) for v in efficiencies.values()) or carrier_i.empty: ->>>>>>> 5822adb0 (add option to consider compression losses in pipelines as electricity demand) + if ( + not any((v != 1.0) or (v >= 0) for v in efficiencies.values()) + or carrier_i.empty + ): return efficiency_static = efficiencies.get("efficiency_static", 1) @@ -3303,8 +3302,10 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): ) n.links.loc[carrier_i, "p_min_pu"] = 0 - n.links.loc[carrier_i, "efficiency"] = ( - efficiency_static * efficiency_per_1000km ** (n.links.loc[carrier_i, "length"] / 1e3) + n.links.loc[ + carrier_i, "efficiency" + ] = efficiency_static * efficiency_per_1000km ** ( + n.links.loc[carrier_i, "length"] / 1e3 ) rev_links = ( n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) @@ -3319,8 +3320,13 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): # do compression losses after concatenation to take electricity consumption at bus0 in either direction carrier_i = n.links.query("carrier == @carrier").index if compression_per_1000km > 0: - n.links.loc[carrier_i, "bus2"] = n.links.loc[carrier_i, "bus0"].map(n.buses.location) # electricity - n.links.loc[carrier_i, "efficiency2"] = - compression_per_1000km * n.links.loc[carrier_i, "length"] / 1e3 + n.links.loc[carrier_i, "bus2"] = n.links.loc[carrier_i, "bus0"].map( + n.buses.location + ) # electricity + n.links.loc[carrier_i, "efficiency2"] = ( + -compression_per_1000km * n.links.loc[carrier_i, "length"] / 1e3 + ) + if __name__ == "__main__": if "snakemake" not in globals(): From 666e79e2fdb7b86348a81e097a0c6e200872b661 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 10 Aug 2023 17:13:19 
+0200 Subject: [PATCH 009/122] improve logging for lossy bidirectional links --- scripts/prepare_sector_network.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index de02095d..6355f603 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3296,8 +3296,8 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): compression_per_1000km = efficiencies.get("compression_per_1000km", 0) logger.info( - f"Specified losses for {carrier} transmission" - f"(static: {efficiency_static}, per 1000km: {efficiency_per_1000km})." + f"Specified losses for {carrier} transmission " + f"(static: {efficiency_static}, per 1000km: {efficiency_per_1000km}, compression per 1000km: {compression_per_1000km}). " "Splitting bidirectional links." ) From bde04eeac9dad86b9d05ce6d23f48d98a728ba7f Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 29 Aug 2023 16:32:01 +0200 Subject: [PATCH 010/122] lossy_bidirectional_links: set length of reversed lines to 0 to avoid double counting in line volume limit --- scripts/prepare_sector_network.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 6355f603..cd5d9570 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3311,6 +3311,7 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) ) rev_links.capital_cost = 0 + rev_links.length = 0 rev_links["reversed"] = True rev_links.index = rev_links.index.map(lambda x: x + "-reversed") From 9b9090c76cb1fbd601626342ce569d87e490d9d0 Mon Sep 17 00:00:00 2001 From: chrstphtrs Date: Wed, 18 Oct 2023 16:59:49 +0200 Subject: [PATCH 011/122] add option for additional national carbon budget constraints --- config/config.default.yaml | 17 +++++++ rules/solve_myopic.smk | 2 + 
scripts/solve_network.py | 99 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 118 insertions(+) diff --git a/config/config.default.yaml b/config/config.default.yaml index 7dc0cf76..325bbbaa 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -84,6 +84,22 @@ co2_budget: 2045: 0.032 2050: 0.000 +co2_budget_national: + 2030: + 'DE': 0.350 + 'AT': 0.450 + 'BE': 0.450 + 'CH': 0.450 + 'CZ': 0.450 + 'DK': 0.450 + 'FR': 0.450 + 'GB': 0.450 + 'LU': 0.450 + 'NL': 0.450 + 'NO': 0.450 + 'PL': 0.450 + 'SE': 0.450 + # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#electricity electricity: voltages: [220., 300., 380.] @@ -454,6 +470,7 @@ sector: hydrogen_turbine: false SMR: true SMR_cc: true + co2_budget_national: false regional_co2_sequestration_potential: enable: false attribute: 'conservative estimate Mt' diff --git a/rules/solve_myopic.smk b/rules/solve_myopic.smk index 8a93d24a..06fd9b79 100644 --- a/rules/solve_myopic.smk +++ b/rules/solve_myopic.smk @@ -88,11 +88,13 @@ rule solve_sector_network_myopic: co2_sequestration_potential=config["sector"].get( "co2_sequestration_potential", 200 ), + countries=config["countries"], input: network=RESULTS + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", costs="data/costs_{planning_horizons}.csv", config=RESULTS + "config.yaml", + co2_totals_name=RESOURCES + "co2_totals.csv", output: RESULTS + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 224d4714..f5dd79e0 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -41,6 +41,8 @@ logger = logging.getLogger(__name__) pypsa.pf.logger.setLevel(logging.WARNING) from pypsa.descriptors import get_switchable_as_dense as get_as_dense +from prepare_sector_network import emission_sectors_from_opts + def add_land_use_constraint(n, planning_horizons, config): if 
"m" in snakemake.wildcards.clusters: @@ -762,6 +764,92 @@ def add_pipe_retrofit_constraint(n): n.model.add_constraints(lhs == rhs, name="Link-pipe_retrofit") +def add_co2limit_country(n, config, limit_countries, nyears=1.0): + """ + Add a set of emissions limit constraints for specified countries. + + The countries and emissions limits are specified in the config file entry 'co2_budget_country_{investment_year}'. + + Parameters + ---------- + n : pypsa.Network + config : dict + limit_countries : dict + nyears: float, optional + Used to scale the emissions constraint to the number of snapshots of the base network. + """ + logger.info(f"Adding CO2 budget limit for each country as per unit of 1990 levels") + + # TODO: n.config (submodule) vs snakemake.config (main module, overwrite/overwritten config)? + # countries = config.countries + # print(config) + countries = ['AT', 'BE', 'CH', 'CZ', 'DE', 'DK', 'FR', 'GB', 'LU', 'NL', 'NO', 'PL', 'SE'] + + # TODO: import function from prepare_sector_network? Move to common place? 
+ sectors = emission_sectors_from_opts(opts) + + # convert Mt to tCO2 + co2_totals = 1e6 * pd.read_csv(snakemake.input.co2_totals_name, index_col=0) + + co2_limit_countries = co2_totals.loc[countries, sectors].sum(axis=1) + co2_limit_countries = co2_limit_countries.loc[co2_limit_countries.index.isin(limit_countries.keys())] + + co2_limit_countries *= co2_limit_countries.index.map(limit_countries) * nyears + + p = n.model["Link-p"] # dimension: (time, component) + + # NB: Most country-specific links retain their locational information in bus1 (except for DAC, where it is in bus2) + country = n.links.bus1.map(n.buses.location).map(n.buses.country) + country_DAC = ( + n.links[n.links.carrier == "DAC"] + .bus2.map(n.buses.location) + .map(n.buses.country) + ) + country[country_DAC.index] = country_DAC + + lhs = [] + for port in [col[3:] for col in n.links if col.startswith("bus")]: + if port == str(0): + efficiency = ( + n.links["efficiency"].apply(lambda x: 1.0).rename("efficiency0") + ) + elif port == str(1): + efficiency = n.links["efficiency"].rename("efficiency1") + else: + efficiency = n.links[f"efficiency{port}"] + mask = n.links[f"bus{port}"].map(n.buses.carrier).eq("co2") + + idx = n.links[mask].index + + grouping = country.loc[idx] + + if not grouping.isnull().all(): + expr = ( + (p.loc[:, idx] * efficiency[idx]) + .groupby(grouping, axis=1) + .sum() + .sum(dims="snapshot") + ) + lhs.append(expr) + + lhs = sum(lhs) # dimension: (country) + lhs = lhs.rename({list(lhs.dims.keys())[0]: "country"}) + rhs = pd.Series(co2_limit_countries) # dimension: (country) + + for ct in lhs.indexes["country"]: + n.model.add_constraints( + lhs.loc[ct] <= rhs[ct], + name=f"GlobalConstraint-co2_limit_per_country{ct}", + ) + n.add( + "GlobalConstraint", + f"co2_limit_per_country{ct}", + constant=rhs[ct], + sense="<=", + type="", + ) + + def extra_functionality(n, snapshots): """ Collects supplementary constraints which will be passed to @@ -792,6 +880,17 @@ def 
extra_functionality(n, snapshots): add_carbon_budget_constraint(n, snapshots) add_retrofit_gas_boiler_constraint(n, snapshots) + if n.config["sector"]["co2_budget_national"]: + # prepare co2 constraint + nhours = n.snapshot_weightings.generators.sum() + nyears = nhours / 8760 + investment_year = int(snakemake.wildcards.planning_horizons[-4:]) + limit_countries = snakemake.config["co2_budget_national"][investment_year] + + # add co2 constraint for each country + logger.info(f"Add CO2 limit for each country") + add_co2limit_country(n, config, limit_countries, nyears) + def solve_network(n, config, solving, opts="", **kwargs): set_of_options = solving["solver"]["options"] From a35f5479aedd933773634a931e407d0535a6da64 Mon Sep 17 00:00:00 2001 From: chrstphtrs Date: Tue, 24 Oct 2023 14:06:17 +0200 Subject: [PATCH 012/122] add links instead of equal-and-opposite fuel/emissions load pairs for land transport oil (ICEs), naphtha for industry and kerosene for aviation (before summed as 'oil'), shipping oil, shipping methanol, agriculture machinery oil --- scripts/prepare_sector_network.py | 256 +++++++++++++++++++----------- 1 file changed, 165 insertions(+), 91 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index ee2f0e3c..989bdb78 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -135,6 +135,7 @@ def define_spatial(nodes, options): spatial.oil = SimpleNamespace() spatial.oil.nodes = ["EU oil"] spatial.oil.locations = ["EU"] + spatial.oil.land_transport = nodes + " land transport oil" # uranium spatial.uranium = SimpleNamespace() @@ -1467,8 +1468,8 @@ def add_land_transport(n, costs): n.madd( "Bus", nodes, - location=nodes, suffix=" EV battery", + location=nodes, carrier="Li ion", unit="MWh_el", ) @@ -1568,28 +1569,31 @@ def add_land_transport(n, costs): ice_efficiency = options["transport_internal_combustion_efficiency"] n.madd( - "Load", - nodes, - suffix=" land transport oil", - 
bus=spatial.oil.nodes, + "Bus", + spatial.oil.land_transport, + location=nodes, carrier="land transport oil", - p_set=ice_share / ice_efficiency * transport[nodes], + unit="land transport", ) - co2 = ( - ice_share - / ice_efficiency - * transport[nodes].sum().sum() - / nhours - * costs.at["oil", "CO2 intensity"] - ) - - n.add( + n.madd( "Load", - "land transport oil emissions", - bus="co2 atmosphere", - carrier="land transport oil emissions", - p_set=-co2, + spatial.oil.land_transport, + bus=spatial.oil.land_transport, + carrier="land transport oil", + p_set=ice_share / ice_efficiency * transport[nodes].rename(columns=lambda x: x + " land transport oil"), + ) + + n.madd( + "Link", + spatial.oil.land_transport, + bus0=spatial.oil.nodes, + bus1=spatial.oil.land_transport, + bus2="co2 atmosphere", + carrier="land transport oil", + efficiency=ice_efficiency, + efficiency2=costs.at["oil", "CO2 intensity"], + p_nom_extendable=True, ) @@ -2611,46 +2615,36 @@ def add_industry(n, costs): ) p_set_methanol = shipping_methanol_share * p_set.sum() * efficiency - n.madd( + n.add( + "Bus", + "EU shipping methanol", + location="EU", + carrier="shipping methanol", + unit="MWh_LHV", + ) + + n.add( "Load", - spatial.methanol.nodes, - suffix=" shipping methanol", - bus=spatial.methanol.nodes, + "shipping methanol", + bus="EU shipping methanol", carrier="shipping methanol", p_set=p_set_methanol, ) - # CO2 intensity methanol based on stoichiometric calculation with 22.7 GJ/t methanol (32 g/mol), CO2 (44 g/mol), 277.78 MWh/TJ = 0.218 t/MWh - co2 = p_set_methanol / options["MWh_MeOH_per_tCO2"] - - n.add( - "Load", - "shipping methanol emissions", - bus="co2 atmosphere", - carrier="shipping methanol emissions", - p_set=-co2, - ) - - if shipping_oil_share: - p_set_oil = shipping_oil_share * p_set.sum() + if len(spatial.methanol.nodes) == 1: + link_names = ["EU shipping methanol"] + else: + link_names = nodes + " shipping methanol" n.madd( - "Load", - spatial.oil.nodes, - suffix=" shipping 
oil", - bus=spatial.oil.nodes, - carrier="shipping oil", - p_set=p_set_oil, - ) - - co2 = p_set_oil * costs.at["oil", "CO2 intensity"] - - n.add( - "Load", - "shipping oil emissions", - bus="co2 atmosphere", - carrier="shipping oil emissions", - p_set=-co2, + "Link", + link_names, + bus0=spatial.methanol.nodes, + bus1="EU shipping methanol", + bus2="co2 atmosphere", + carrier="shipping methanol", + p_nom_extendable=True, + efficiency2=1 / options["MWh_MeOH_per_tCO2"], # CO2 intensity methanol based on stoichiometric calculation with 22.7 GJ/t methanol (32 g/mol), CO2 (44 g/mol), 277.78 MWh/TJ = 0.218 t/MWh ) if "oil" not in n.buses.carrier.unique(): @@ -2683,6 +2677,41 @@ def add_industry(n, costs): marginal_cost=costs.at["oil", "fuel"], ) + if shipping_oil_share: + p_set_oil = shipping_oil_share * p_set.sum() + + n.add( + "Bus", + "EU shipping oil", + location="EU", + carrier="shipping oil", + unit="MWh_LHV", + ) + + n.add( + "Load", + "shipping oil", + bus="EU shipping oil", + carrier="shipping oil", + p_set=p_set_oil, + ) + + if len(spatial.oil.nodes) == 1: + link_names = ["EU shipping oil"] + else: + link_names = nodes + " shipping oil" + + n.madd( + "Link", + link_names, + bus0=spatial.oil.nodes, + bus1="EU shipping oil", + bus2="co2 atmosphere", + carrier="shipping oil", + p_nom_extendable=True, + efficiency2=costs.at["oil", "CO2 intensity"], + ) + if options["oil_boilers"]: nodes_heat = create_nodes_for_heat_sector()[0] @@ -2724,19 +2753,49 @@ def add_industry(n, costs): lifetime=costs.at["Fischer-Tropsch", "lifetime"], ) + # naphtha demand_factor = options.get("HVC_demand_factor", 1) - p_set = demand_factor * industrial_demand.loc[nodes, "naphtha"].sum() / nhours if demand_factor != 1: logger.warning(f"Changing HVC demand by {demand_factor*100-100:+.2f}%.") - n.madd( + # NB: CO2 gets released again to atmosphere when plastics decay + # except for the process emissions when naphtha is used for petrochemicals, which can be captured with other industry process 
emissions + # convert process emissions from feedstock from MtCO2 to energy demand + p_set = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]).sum() / nhours + + n.add( + "Bus", + "EU naphtha for industry", + location="EU", + carrier="naphtha for industry", + unit="MWh_LHV", + ) + + n.add( "Load", - ["naphtha for industry"], - bus=spatial.oil.nodes, + "naphtha for industry", + bus="EU naphtha for industry", carrier="naphtha for industry", p_set=p_set, ) + if len(spatial.oil.nodes) == 1: + link_names = ["EU naphtha for industry"] + else: + link_names = nodes + " naphtha for industry" + + n.madd( + "Link", + link_names, + bus0=spatial.oil.nodes, + bus1="EU naphtha for industry", + bus2="co2 atmosphere", + carrier="naphtha for industry", + p_nom_extendable=True, + efficiency2=costs.at["oil", "CO2 intensity"], + ) + + # aviation demand_factor = options.get("aviation_demand_factor", 1) all_aviation = ["total international aviation", "total domestic aviation"] p_set = ( @@ -2748,29 +2807,36 @@ def add_industry(n, costs): if demand_factor != 1: logger.warning(f"Changing aviation demand by {demand_factor*100-100:+.2f}%.") - n.madd( - "Load", - ["kerosene for aviation"], - bus=spatial.oil.nodes, + n.add( + "Bus", + "EU kerosene for aviation", + location="EU", carrier="kerosene for aviation", - p_set=p_set, - ) - - # NB: CO2 gets released again to atmosphere when plastics decay or kerosene is burned - # except for the process emissions when naphtha is used for petrochemicals, which can be captured with other industry process emissions - # tco2 per hour - co2_release = ["naphtha for industry", "kerosene for aviation"] - co2 = ( - n.loads.loc[co2_release, "p_set"].sum() * costs.at["oil", "CO2 intensity"] - - industrial_demand.loc[nodes, "process emission from feedstock"].sum() / nhours + unit="MWh_LHV", ) n.add( "Load", - "oil emissions", - bus="co2 atmosphere", - 
carrier="oil emissions", - p_set=-co2, + "kerosene for aviation", + bus="EU kerosene for aviation", + carrier="kerosene for aviation", + p_set=p_set, + ) + + if len(spatial.oil.nodes) == 1: + link_names = ["EU kerosene for aviation"] + else: + link_names = nodes + " kerosene for aviation" + + n.madd( + "Link", + link_names, + bus0=spatial.oil.nodes, + bus1="EU kerosene for aviation", + bus2="co2 atmosphere", + carrier="kerosene for aviation", + p_nom_extendable=True, + efficiency2=costs.at["oil", "CO2 intensity"], ) # TODO simplify bus expression @@ -3018,28 +3084,36 @@ def add_agriculture(n, costs): ) if oil_share > 0: - n.madd( - "Load", - ["agriculture machinery oil"], - bus=spatial.oil.nodes, + n.add( + "Bus", + "EU agriculture machinery oil", + location="EU", carrier="agriculture machinery oil", - p_set=oil_share * machinery_nodal_energy.sum() * 1e6 / nhours, - ) - - co2 = ( - oil_share - * machinery_nodal_energy.sum() - * 1e6 - / nhours - * costs.at["oil", "CO2 intensity"] + unit="MWh_LHV", ) n.add( "Load", - "agriculture machinery oil emissions", - bus="co2 atmosphere", - carrier="agriculture machinery oil emissions", - p_set=-co2, + "agriculture machinery oil", + bus="EU agriculture machinery oil", + carrier="agriculture machinery oil", + p_set=oil_share * machinery_nodal_energy.sum() * 1e6 / nhours, + ) + + if len(spatial.oil.nodes) == 1: + link_names = ["EU agriculture machinery oil"] + else: + link_names = nodes + " agriculture machinery oil" + + n.madd( + "Link", + link_names, + bus0=spatial.oil.nodes, + bus1="EU agriculture machinery oil", + bus2="co2 atmosphere", + carrier="agriculture machinery oil", + p_nom_extendable=True, + efficiency2=costs.at["oil", "CO2 intensity"], ) From 94afba7c5d195b2cf6d7d17016e78040c4659440 Mon Sep 17 00:00:00 2001 From: chrstphtrs Date: Tue, 24 Oct 2023 16:39:33 +0200 Subject: [PATCH 013/122] add coal tech_color to config --- config/config.default.yaml | 1 + 1 file changed, 1 insertion(+) diff --git 
a/config/config.default.yaml b/config/config.default.yaml index 325bbbaa..cafb9d1d 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -816,6 +816,7 @@ plotting: Coal: '#545454' coal: '#545454' Coal marginal: '#545454' + coal for industry: '#343434' solid: '#545454' Lignite: '#826837' lignite: '#826837' From 7cb677d0e6056f52560e6ddb53e88f76861d8fc2 Mon Sep 17 00:00:00 2001 From: chrstphtrs Date: Tue, 24 Oct 2023 16:39:58 +0200 Subject: [PATCH 014/122] clean up function add_co2limit_country --- scripts/solve_network.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index f5dd79e0..b372b366 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -764,7 +764,7 @@ def add_pipe_retrofit_constraint(n): n.model.add_constraints(lhs == rhs, name="Link-pipe_retrofit") -def add_co2limit_country(n, config, limit_countries, nyears=1.0): +def add_co2limit_country(n, limit_countries, nyears=1.0): """ Add a set of emissions limit constraints for specified countries. @@ -780,10 +780,7 @@ def add_co2limit_country(n, config, limit_countries, nyears=1.0): """ logger.info(f"Adding CO2 budget limit for each country as per unit of 1990 levels") - # TODO: n.config (submodule) vs snakemake.config (main module, overwrite/overwritten config)? - # countries = config.countries - # print(config) - countries = ['AT', 'BE', 'CH', 'CZ', 'DE', 'DK', 'FR', 'GB', 'LU', 'NL', 'NO', 'PL', 'SE'] + countries = n.config["countries"] # TODO: import function from prepare_sector_network? Move to common place? 
sectors = emission_sectors_from_opts(opts) @@ -814,7 +811,7 @@ def add_co2limit_country(n, config, limit_countries, nyears=1.0): n.links["efficiency"].apply(lambda x: 1.0).rename("efficiency0") ) elif port == str(1): - efficiency = n.links["efficiency"].rename("efficiency1") + efficiency = n.links["efficiency"] else: efficiency = n.links[f"efficiency{port}"] mask = n.links[f"bus{port}"].map(n.buses.carrier).eq("co2") @@ -889,7 +886,7 @@ def extra_functionality(n, snapshots): # add co2 constraint for each country logger.info(f"Add CO2 limit for each country") - add_co2limit_country(n, config, limit_countries, nyears) + add_co2limit_country(n, limit_countries, nyears) def solve_network(n, config, solving, opts="", **kwargs): From e2b2eafbc12e17254a0a517c87ae76bf08962585 Mon Sep 17 00:00:00 2001 From: chrstphtrs Date: Tue, 24 Oct 2023 16:46:58 +0200 Subject: [PATCH 015/122] add geographical resolution to oil and methanol for options['co2_budget_national'] to include all necessary links in national co2 budget constraints --- scripts/add_existing_baseyear.py | 2 +- scripts/prepare_sector_network.py | 187 +++++++++++++++++------------- 2 files changed, 106 insertions(+), 83 deletions(-) diff --git a/scripts/add_existing_baseyear.py b/scripts/add_existing_baseyear.py index 1474b004..7ddc6b1d 100644 --- a/scripts/add_existing_baseyear.py +++ b/scripts/add_existing_baseyear.py @@ -303,7 +303,7 @@ def add_power_capacities_installed_before_baseyear(n, grouping_years, costs, bas else: bus0 = vars(spatial)[carrier[generator]].nodes if "EU" not in vars(spatial)[carrier[generator]].locations: - bus0 = bus0.intersection(capacity.index + " gas") + bus0 = bus0.intersection(capacity.index + " " + carrier[generator]) # check for missing bus missing_bus = pd.Index(bus0).difference(n.buses.index) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 989bdb78..34bfdce7 100644 --- a/scripts/prepare_sector_network.py +++ 
b/scripts/prepare_sector_network.py @@ -128,13 +128,33 @@ def define_spatial(nodes, options): # methanol spatial.methanol = SimpleNamespace() - spatial.methanol.nodes = ["EU methanol"] - spatial.methanol.locations = ["EU"] + + if options["co2_budget_national"]: + spatial.methanol.nodes = nodes + " methanol" + spatial.methanol.locations = nodes + spatial.methanol.shipping = nodes + " shipping methanol" + else: + spatial.methanol.nodes = ["EU methanol"] + spatial.methanol.locations = ["EU"] + spatial.methanol.shipping = ["EU shipping methanol"] # oil spatial.oil = SimpleNamespace() - spatial.oil.nodes = ["EU oil"] - spatial.oil.locations = ["EU"] + + if options["co2_budget_national"]: + spatial.oil.nodes = nodes + " oil" + spatial.oil.locations = nodes + spatial.oil.naphtha = nodes + " naphtha for industry" + spatial.oil.kerosene = nodes + " kerosene for aviation" + spatial.oil.shipping = nodes + " shipping oil" + spatial.oil.agriculture_machinery = nodes + " agriculture machinery oil" + else: + spatial.oil.nodes = ["EU oil"] + spatial.oil.locations = ["EU"] + spatial.oil.naphtha = ["EU naphtha for industry"] + spatial.oil.kerosene = ["EU kerosene for aviation"] + spatial.oil.shipping = ["EU shipping oil"] + spatial.oil.agriculture_machinery = ["EU agriculture machinery oil"] spatial.oil.land_transport = nodes + " land transport oil" # uranium @@ -2613,34 +2633,34 @@ def add_industry(n, costs): efficiency = ( options["shipping_oil_efficiency"] / options["shipping_methanol_efficiency"] ) - p_set_methanol = shipping_methanol_share * p_set.sum() * efficiency - n.add( + # need to aggregate potentials if methanol not nodally resolved + if options["co2_budget_national"]: + p_set_methanol = shipping_methanol_share * p_set * efficiency + else: + p_set_methanol = shipping_methanol_share * p_set.sum() * efficiency + + n.madd( "Bus", - "EU shipping methanol", - location="EU", + spatial.methanol.shipping, + location=spatial.methanol.locations, carrier="shipping methanol", 
unit="MWh_LHV", ) - n.add( + n.madd( "Load", - "shipping methanol", - bus="EU shipping methanol", + spatial.methanol.shipping, + bus=spatial.methanol.shipping, carrier="shipping methanol", p_set=p_set_methanol, ) - if len(spatial.methanol.nodes) == 1: - link_names = ["EU shipping methanol"] - else: - link_names = nodes + " shipping methanol" - n.madd( "Link", - link_names, + spatial.methanol.shipping, bus0=spatial.methanol.nodes, - bus1="EU shipping methanol", + bus1=spatial.methanol.shipping, bus2="co2 atmosphere", carrier="shipping methanol", p_nom_extendable=True, @@ -2678,34 +2698,33 @@ def add_industry(n, costs): ) if shipping_oil_share: - p_set_oil = shipping_oil_share * p_set.sum() + # need to aggregate potentials if oil not nodally resolved + if options["co2_budget_national"]: + p_set_oil = shipping_oil_share * p_set + else: + p_set_oil = shipping_oil_share * p_set.sum() - n.add( + n.madd( "Bus", - "EU shipping oil", - location="EU", + spatial.oil.shipping, + location=spatial.oil.locations, carrier="shipping oil", unit="MWh_LHV", ) - n.add( + n.madd( "Load", - "shipping oil", - bus="EU shipping oil", + spatial.oil.shipping, + bus=spatial.oil.shipping, carrier="shipping oil", p_set=p_set_oil, ) - if len(spatial.oil.nodes) == 1: - link_names = ["EU shipping oil"] - else: - link_names = nodes + " shipping oil" - n.madd( "Link", - link_names, + spatial.oil.shipping, bus0=spatial.oil.nodes, - bus1="EU shipping oil", + bus1=spatial.oil.shipping, bus2="co2 atmosphere", carrier="shipping oil", p_nom_extendable=True, @@ -2761,34 +2780,33 @@ def add_industry(n, costs): # NB: CO2 gets released again to atmosphere when plastics decay # except for the process emissions when naphtha is used for petrochemicals, which can be captured with other industry process emissions # convert process emissions from feedstock from MtCO2 to energy demand - p_set = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / 
costs.at["oil", "CO2 intensity"]).sum() / nhours + # need to aggregate potentials if oil not nodally resolved + if options["co2_budget_national"]: + p_set = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]) / nhours + else: + p_set = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]).sum() / nhours - n.add( + n.madd( "Bus", - "EU naphtha for industry", - location="EU", + spatial.oil.naphtha, + location=spatial.oil.locations, carrier="naphtha for industry", unit="MWh_LHV", ) - n.add( + n.madd( "Load", - "naphtha for industry", - bus="EU naphtha for industry", + spatial.oil.naphtha, + bus=spatial.oil.naphtha, carrier="naphtha for industry", p_set=p_set, ) - if len(spatial.oil.nodes) == 1: - link_names = ["EU naphtha for industry"] - else: - link_names = nodes + " naphtha for industry" - n.madd( "Link", - link_names, + spatial.oil.naphtha, bus0=spatial.oil.nodes, - bus1="EU naphtha for industry", + bus1=spatial.oil.naphtha, bus2="co2 atmosphere", carrier="naphtha for industry", p_nom_extendable=True, @@ -2797,42 +2815,47 @@ def add_industry(n, costs): # aviation demand_factor = options.get("aviation_demand_factor", 1) - all_aviation = ["total international aviation", "total domestic aviation"] - p_set = ( - demand_factor - * pop_weighted_energy_totals.loc[nodes, all_aviation].sum(axis=1).sum() - * 1e6 - / nhours - ) if demand_factor != 1: logger.warning(f"Changing aviation demand by {demand_factor*100-100:+.2f}%.") - n.add( + all_aviation = ["total international aviation", "total domestic aviation"] + # need to aggregate potentials if oil not nodally resolved + if options["co2_budget_national"]: + p_set = ( + demand_factor + * pop_weighted_energy_totals.loc[nodes, all_aviation].sum(axis=1) + * 1e6 + / nhours + ) + else: + p_set = ( + demand_factor + * 
pop_weighted_energy_totals.loc[nodes, all_aviation].sum(axis=1).sum() + * 1e6 + / nhours + ) + + n.madd( "Bus", - "EU kerosene for aviation", - location="EU", + spatial.oil.kerosene, + location=spatial.oil.locations, carrier="kerosene for aviation", unit="MWh_LHV", ) - n.add( + n.madd( "Load", - "kerosene for aviation", - bus="EU kerosene for aviation", + spatial.oil.kerosene, + bus=spatial.oil.kerosene, carrier="kerosene for aviation", p_set=p_set, ) - if len(spatial.oil.nodes) == 1: - link_names = ["EU kerosene for aviation"] - else: - link_names = nodes + " kerosene for aviation" - n.madd( "Link", - link_names, + spatial.oil.kerosene, bus0=spatial.oil.nodes, - bus1="EU kerosene for aviation", + bus1=spatial.oil.kerosene, bus2="co2 atmosphere", carrier="kerosene for aviation", p_nom_extendable=True, @@ -3062,7 +3085,7 @@ def add_agriculture(n, costs): machinery_nodal_energy = pop_weighted_energy_totals.loc[ nodes, "total agriculture machinery" - ] + ] * 1e6 if electric_share > 0: efficiency_gain = ( @@ -3079,37 +3102,37 @@ def add_agriculture(n, costs): p_set=electric_share / efficiency_gain * machinery_nodal_energy - * 1e6 / nhours, ) if oil_share > 0: - n.add( + # need to aggregate potentials if oil not nodally resolved + if options["co2_budget_national"]: + p_set = oil_share * machinery_nodal_energy / nhours + else: + p_set = oil_share * machinery_nodal_energy.sum() / nhours + + n.madd( "Bus", - "EU agriculture machinery oil", - location="EU", + spatial.oil.agriculture_machinery, + location=spatial.oil.locations, carrier="agriculture machinery oil", unit="MWh_LHV", ) - n.add( + n.madd( "Load", - "agriculture machinery oil", - bus="EU agriculture machinery oil", + spatial.oil.agriculture_machinery, + bus=spatial.oil.agriculture_machinery, carrier="agriculture machinery oil", - p_set=oil_share * machinery_nodal_energy.sum() * 1e6 / nhours, + p_set=p_set, ) - if len(spatial.oil.nodes) == 1: - link_names = ["EU agriculture machinery oil"] - else: - link_names = 
nodes + " agriculture machinery oil" - n.madd( "Link", - link_names, + spatial.oil.agriculture_machinery, bus0=spatial.oil.nodes, - bus1="EU agriculture machinery oil", + bus1=spatial.oil.agriculture_machinery, bus2="co2 atmosphere", carrier="agriculture machinery oil", p_nom_extendable=True, From 2ad9ca8f7b10155d2b8f738d11a4948ac5f17fb1 Mon Sep 17 00:00:00 2001 From: chrstphtrs Date: Thu, 26 Oct 2023 11:17:57 +0200 Subject: [PATCH 016/122] add regionalised oil load for process emissions from naphtha as feedstock --- scripts/prepare_sector_network.py | 30 +++++++++++++++++++++++++++--- 1 file changed, 27 insertions(+), 3 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 34bfdce7..54830106 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2782,9 +2782,25 @@ def add_industry(n, costs): # convert process emissions from feedstock from MtCO2 to energy demand # need to aggregate potentials if oil not nodally resolved if options["co2_budget_national"]: - p_set = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]) / nhours + p_set_plastics = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]) / nhours else: - p_set = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]).sum() / nhours + p_set_plastics = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]).sum() / nhours + + if options["co2_budget_national"]: + p_set_process_emissions = ( + demand_factor + * (industrial_demand.loc[nodes, "process emission from feedstock"] + / costs.at["oil", "CO2 intensity"]) + / 
nhours + ) + else: + p_set_process_emissions = ( + demand_factor + * (industrial_demand.loc[nodes, "process emission from feedstock"] + / costs.at["oil", "CO2 intensity"] + ).sum() + / nhours + ) n.madd( "Bus", @@ -2799,7 +2815,15 @@ def add_industry(n, costs): spatial.oil.naphtha, bus=spatial.oil.naphtha, carrier="naphtha for industry", - p_set=p_set, + p_set=p_set_plastics, + ) + + n.madd( + "Load", + ["naphtha for industry into process emissions from feedstock"], + bus=spatial.oil.nodes, + carrier="naphtha for industry", + p_set=p_set_process_emissions, ) n.madd( From 82ac430fd92f2724918ff0e25568fdd57b75a9a5 Mon Sep 17 00:00:00 2001 From: chrstphtrs Date: Wed, 8 Nov 2023 09:57:24 +0100 Subject: [PATCH 017/122] fix spatial resolution for solid biomass links and naphtha oil loads under 'co2_spatial: true' flag --- scripts/prepare_sector_network.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 54830106..a5ca8941 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -145,6 +145,7 @@ def define_spatial(nodes, options): spatial.oil.nodes = nodes + " oil" spatial.oil.locations = nodes spatial.oil.naphtha = nodes + " naphtha for industry" + spatial.oil.naphtha_process_emissions = nodes + " naphtha process emissions" spatial.oil.kerosene = nodes + " kerosene for aviation" spatial.oil.shipping = nodes + " shipping oil" spatial.oil.agriculture_machinery = nodes + " agriculture machinery oil" @@ -152,6 +153,7 @@ def define_spatial(nodes, options): spatial.oil.nodes = ["EU oil"] spatial.oil.locations = ["EU"] spatial.oil.naphtha = ["EU naphtha for industry"] + spatial.oil.naphtha_process_emissions = "EU naphtha process emissions" spatial.oil.kerosene = ["EU kerosene for aviation"] spatial.oil.shipping = ["EU shipping oil"] spatial.oil.agriculture_machinery = ["EU agriculture machinery oil"] @@ -2443,9 +2445,14 @@ def add_industry(n, 
costs): efficiency=1.0, ) + if len(spatial.biomass.industry_cc)<=1 and len(spatial.co2.nodes)>1: + link_names = nodes + " " + spatial.biomass.industry_cc + else: + link_names = spatial.biomass.industry_cc + n.madd( "Link", - spatial.biomass.industry_cc, + link_names, bus0=spatial.biomass.nodes, bus1=spatial.biomass.industry, bus2="co2 atmosphere", @@ -2820,7 +2827,7 @@ def add_industry(n, costs): n.madd( "Load", - ["naphtha for industry into process emissions from feedstock"], + spatial.oil.naphtha_process_emissions, bus=spatial.oil.nodes, carrier="naphtha for industry", p_set=p_set_process_emissions, From 014a4cd62e3bc2f41e9e0ccd8e04ff6c169e9a60 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sun, 12 Nov 2023 18:42:53 +0100 Subject: [PATCH 018/122] fix for losses with multi-period investment --- scripts/solve_network.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 83281284..fa59f7a3 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -697,7 +697,8 @@ def add_lossy_bidirectional_link_constraints(n): if not n.links.p_nom_extendable.any() or not "reversed" in n.links.columns: return - carriers = n.links.loc[n.links.reversed, "carrier"].unique() + reversed_links = n.links.reversed.fillna(0).astype(bool) + carriers = n.links.loc[reversed_links, "carrier"].unique() forward_i = n.links.query( "carrier in @carriers and ~reversed and p_nom_extendable" From d9ec127f996f854cc775cdfcc6db18cd26cf3ea5 Mon Sep 17 00:00:00 2001 From: chrstphtrs Date: Tue, 21 Nov 2023 14:55:32 +0100 Subject: [PATCH 019/122] Add process emissions to country emissions constraint, fix snapshot weighting --- scripts/solve_network.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index b372b366..e2edb2eb 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -795,7 +795,7 @@ def 
add_co2limit_country(n, limit_countries, nyears=1.0): p = n.model["Link-p"] # dimension: (time, component) - # NB: Most country-specific links retain their locational information in bus1 (except for DAC, where it is in bus2) + # NB: Most country-specific links retain their locational information in bus1 (except for DAC, where it is in bus2, and process emissions, where it is in bus0) country = n.links.bus1.map(n.buses.location).map(n.buses.country) country_DAC = ( n.links[n.links.carrier == "DAC"] @@ -803,6 +803,12 @@ def add_co2limit_country(n, limit_countries, nyears=1.0): .map(n.buses.country) ) country[country_DAC.index] = country_DAC + country_process_emissions = ( + n.links[n.links.carrier.str.contains("process emissions")] + .bus0.map(n.buses.location) + .map(n.buses.country) + ) + country[country_process_emissions.index] = country_process_emissions lhs = [] for port in [col[3:] for col in n.links if col.startswith("bus")]: @@ -818,13 +824,18 @@ def add_co2limit_country(n, limit_countries, nyears=1.0): idx = n.links[mask].index + international = n.links.carrier.map( + lambda x: 0.4 if x in ["kerosene for aviation", "shipping oil"] else 1.0 + ) grouping = country.loc[idx] if not grouping.isnull().all(): expr = ( - (p.loc[:, idx] * efficiency[idx]) + ((p.loc[:, idx] * efficiency[idx] * international[idx]) .groupby(grouping, axis=1) .sum() + *n.snapshot_weightings.generators + ) .sum(dims="snapshot") ) lhs.append(expr) @@ -935,6 +946,10 @@ def solve_network(n, config, solving, opts="", **kwargs): f"Solving status '{status}' with termination condition '{condition}'" ) if "infeasible" in condition: + m = n.model + labels = m.compute_infeasibilities() + print(labels) + m.print_infeasibilities() raise RuntimeError("Solving status 'infeasible'") return n From e8324b9c2788339837caf8718898b14f879ea4d8 Mon Sep 17 00:00:00 2001 From: lisazeyen <35347358+lisazeyen@users.noreply.github.com> Date: Fri, 24 Nov 2023 09:58:24 +0100 Subject: [PATCH 020/122] fix bug when oil 
copper plated --- scripts/prepare_sector_network.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index a5ca8941..81e4d6e3 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -153,7 +153,7 @@ def define_spatial(nodes, options): spatial.oil.nodes = ["EU oil"] spatial.oil.locations = ["EU"] spatial.oil.naphtha = ["EU naphtha for industry"] - spatial.oil.naphtha_process_emissions = "EU naphtha process emissions" + spatial.oil.naphtha_process_emissions = ["EU naphtha process emissions"] spatial.oil.kerosene = ["EU kerosene for aviation"] spatial.oil.shipping = ["EU shipping oil"] spatial.oil.agriculture_machinery = ["EU agriculture machinery oil"] From 3ff925e797574afc11193d7f63316fdbdde03e12 Mon Sep 17 00:00:00 2001 From: lisazeyen <35347358+lisazeyen@users.noreply.github.com> Date: Fri, 24 Nov 2023 10:00:07 +0100 Subject: [PATCH 021/122] add load shedding for all energy carriers --- scripts/solve_network.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index e2edb2eb..97c78dad 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -352,7 +352,7 @@ def prepare_network( # http://journal.frontiersin.org/article/10.3389/fenrg.2015.00055/full # TODO: retrieve color and nice name from config n.add("Carrier", "load", color="#dd2e23", nice_name="Load shedding") - buses_i = n.buses.query("carrier == 'AC'").index + buses_i = n.buses.index if not np.isscalar(load_shedding): # TODO: do not scale via sign attribute (use Eur/MWh instead of Eur/kWh) load_shedding = 1e2 # Eur/kWh From cea62de438b7c358bf23ad306dccb300b13beab7 Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Mon, 4 Dec 2023 16:46:11 +0100 Subject: [PATCH 022/122] solve_network: quick fix so duals can be read from CO2 constraint --- scripts/solve_network.py | 4 ++-- 1 file changed, 2 insertions(+), 2 
deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 97c78dad..2413f4c9 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -841,10 +841,10 @@ def add_co2limit_country(n, limit_countries, nyears=1.0): lhs.append(expr) lhs = sum(lhs) # dimension: (country) - lhs = lhs.rename({list(lhs.dims.keys())[0]: "country"}) + lhs = lhs.rename({list(lhs.dims.keys())[0]: "snapshot"}) rhs = pd.Series(co2_limit_countries) # dimension: (country) - for ct in lhs.indexes["country"]: + for ct in lhs.indexes["snapshot"]: n.model.add_constraints( lhs.loc[ct] <= rhs[ct], name=f"GlobalConstraint-co2_limit_per_country{ct}", From 66178a5a27625b7055d029403c66bd7ac6df1da5 Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Mon, 4 Dec 2023 16:46:45 +0100 Subject: [PATCH 023/122] solve_network: fix sign for country CO2 when bus0=atmosphere So that DAC extracts CO2 rather than pumping into air; for p>0, link withdraws from bus0, but injects into bus1/2/3, so you have to take account of this sign difference- --- scripts/solve_network.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 2413f4c9..53170da9 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -814,7 +814,7 @@ def add_co2limit_country(n, limit_countries, nyears=1.0): for port in [col[3:] for col in n.links if col.startswith("bus")]: if port == str(0): efficiency = ( - n.links["efficiency"].apply(lambda x: 1.0).rename("efficiency0") + n.links["efficiency"].apply(lambda x: -1.0).rename("efficiency0") ) elif port == str(1): efficiency = n.links["efficiency"] From bbf9ca2d9be0af6fe80ffcc667556405ab0bddbc Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Fri, 8 Dec 2023 11:58:28 +0100 Subject: [PATCH 024/122] bug fix: naming of p_set when co2_national is True Without this naming fix, the p_set is a NaN once added --- scripts/prepare_sector_network.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 
deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 81e4d6e3..606e17b3 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2643,7 +2643,7 @@ def add_industry(n, costs): # need to aggregate potentials if methanol not nodally resolved if options["co2_budget_national"]: - p_set_methanol = shipping_methanol_share * p_set * efficiency + p_set_methanol = shipping_methanol_share * p_set.rename(lambda x : x + " shipping methanol") * efficiency else: p_set_methanol = shipping_methanol_share * p_set.sum() * efficiency @@ -2707,7 +2707,7 @@ def add_industry(n, costs): if shipping_oil_share: # need to aggregate potentials if oil not nodally resolved if options["co2_budget_national"]: - p_set_oil = shipping_oil_share * p_set + p_set_oil = shipping_oil_share * p_set.rename(lambda x: x + " shipping oil") else: p_set_oil = shipping_oil_share * p_set.sum() @@ -2789,15 +2789,16 @@ def add_industry(n, costs): # convert process emissions from feedstock from MtCO2 to energy demand # need to aggregate potentials if oil not nodally resolved if options["co2_budget_national"]: - p_set_plastics = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]) / nhours + p_set_plastics = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]).rename(lambda x: x + " naphtha for industry") / nhours else: p_set_plastics = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]).sum() / nhours + if options["co2_budget_national"]: p_set_process_emissions = ( demand_factor * (industrial_demand.loc[nodes, "process emission from feedstock"] - / costs.at["oil", "CO2 intensity"]) + / costs.at["oil", "CO2 
intensity"]).rename(lambda x: x + " naphtha process emissions") / nhours ) else: @@ -2857,7 +2858,7 @@ def add_industry(n, costs): * pop_weighted_energy_totals.loc[nodes, all_aviation].sum(axis=1) * 1e6 / nhours - ) + ).rename(lambda x: x + " kerosene for aviation") else: p_set = ( demand_factor @@ -3139,7 +3140,7 @@ def add_agriculture(n, costs): if oil_share > 0: # need to aggregate potentials if oil not nodally resolved if options["co2_budget_national"]: - p_set = oil_share * machinery_nodal_energy / nhours + p_set = oil_share * machinery_nodal_energy.rename(lambda x: x + " agriculture machinery oil") / nhours else: p_set = oil_share * machinery_nodal_energy.sum() / nhours From 2d323d1b879751bc96303bd6c6a54fda2c90eccb Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Fri, 8 Dec 2023 12:27:07 +0100 Subject: [PATCH 025/122] bug fix: ICE efficiency for land transport was applied twice This was overestimating ICE oil demand by factor 1/0.3. --- scripts/prepare_sector_network.py | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 606e17b3..342a6b15 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -1613,7 +1613,6 @@ def add_land_transport(n, costs): bus1=spatial.oil.land_transport, bus2="co2 atmosphere", carrier="land transport oil", - efficiency=ice_efficiency, efficiency2=costs.at["oil", "CO2 intensity"], p_nom_extendable=True, ) From 00e86e6435816fe007fb25d62de90cf58fbc01c4 Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Fri, 8 Dec 2023 13:28:08 +0100 Subject: [PATCH 026/122] bug fix: route process emissions from steam cracker to correct bus Now naphtha demand causes process emissions from steam crackers to route to process emissions bus, then rest of CO2 goes to atmosphere. 
--- scripts/prepare_sector_network.py | 46 +++++++------------------------ 1 file changed, 10 insertions(+), 36 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 342a6b15..8e995dd6 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -145,7 +145,6 @@ def define_spatial(nodes, options): spatial.oil.nodes = nodes + " oil" spatial.oil.locations = nodes spatial.oil.naphtha = nodes + " naphtha for industry" - spatial.oil.naphtha_process_emissions = nodes + " naphtha process emissions" spatial.oil.kerosene = nodes + " kerosene for aviation" spatial.oil.shipping = nodes + " shipping oil" spatial.oil.agriculture_machinery = nodes + " agriculture machinery oil" @@ -153,7 +152,6 @@ def define_spatial(nodes, options): spatial.oil.nodes = ["EU oil"] spatial.oil.locations = ["EU"] spatial.oil.naphtha = ["EU naphtha for industry"] - spatial.oil.naphtha_process_emissions = ["EU naphtha process emissions"] spatial.oil.kerosene = ["EU kerosene for aviation"] spatial.oil.shipping = ["EU shipping oil"] spatial.oil.agriculture_machinery = ["EU agriculture machinery oil"] @@ -2783,31 +2781,10 @@ def add_industry(n, costs): if demand_factor != 1: logger.warning(f"Changing HVC demand by {demand_factor*100-100:+.2f}%.") - # NB: CO2 gets released again to atmosphere when plastics decay - # except for the process emissions when naphtha is used for petrochemicals, which can be captured with other industry process emissions - # convert process emissions from feedstock from MtCO2 to energy demand - # need to aggregate potentials if oil not nodally resolved if options["co2_budget_national"]: - p_set_plastics = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]).rename(lambda x: x + " naphtha for industry") / nhours + p_set_plastics = demand_factor * industrial_demand.loc[nodes, "naphtha"].rename(lambda 
x: x + " naphtha for industry") / nhours else: - p_set_plastics = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]).sum() / nhours - - - if options["co2_budget_national"]: - p_set_process_emissions = ( - demand_factor - * (industrial_demand.loc[nodes, "process emission from feedstock"] - / costs.at["oil", "CO2 intensity"]).rename(lambda x: x + " naphtha process emissions") - / nhours - ) - else: - p_set_process_emissions = ( - demand_factor - * (industrial_demand.loc[nodes, "process emission from feedstock"] - / costs.at["oil", "CO2 intensity"] - ).sum() - / nhours - ) + p_set_plastics = demand_factor * industrial_demand.loc[nodes, "naphtha"].sum() / nhours n.madd( "Bus", @@ -2825,13 +2802,10 @@ def add_industry(n, costs): p_set=p_set_plastics, ) - n.madd( - "Load", - spatial.oil.naphtha_process_emissions, - bus=spatial.oil.nodes, - carrier="naphtha for industry", - p_set=p_set_process_emissions, - ) + # some CO2 from naphtha are process emissions from steam cracker + # rest of CO2 released to atmosphere either in waste-to-energy or decay + process_co2_per_naphtha = industrial_demand.loc[nodes, "process emission from feedstock"].sum() / industrial_demand.loc[nodes, "naphtha"].sum() + emitted_co2_per_naphtha = costs.at["oil", "CO2 intensity"] - process_co2_per_naphtha n.madd( "Link", @@ -2839,9 +2813,11 @@ def add_industry(n, costs): bus0=spatial.oil.nodes, bus1=spatial.oil.naphtha, bus2="co2 atmosphere", + bus3=spatial.co2.process_emissions, carrier="naphtha for industry", p_nom_extendable=True, - efficiency2=costs.at["oil", "CO2 intensity"], + efficiency2=emitted_co2_per_naphtha, + efficiency3=process_co2_per_naphtha, ) # aviation @@ -2941,7 +2917,7 @@ def add_industry(n, costs): unit="t_co2", ) - sel = ["process emission", "process emission from feedstock"] + sel = ["process emission"] if options["co2_spatial"] or options["co2network"]: p_set = ( 
-industrial_demand.loc[nodes, sel] @@ -2952,8 +2928,6 @@ def add_industry(n, costs): else: p_set = -industrial_demand.loc[nodes, sel].sum(axis=1).sum() / nhours - # this should be process emissions fossil+feedstock - # then need load on atmosphere for feedstock emissions that are currently going to atmosphere via Link Fischer-Tropsch demand n.madd( "Load", spatial.co2.process_emissions, From 326ed63329d55d5a84f9230840161cdb3673e27a Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Fri, 8 Dec 2023 17:53:28 +0100 Subject: [PATCH 027/122] add_brownfield: disable grid expansion if LV already hit Numerical problems were causing infeasibilities otherwise --- scripts/add_brownfield.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index 74102580..fb1453fd 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -119,6 +119,32 @@ def add_brownfield(n, n_p, year): n.links.loc[new_pipes, "p_nom"] = 0.0 n.links.loc[new_pipes, "p_nom_min"] = 0.0 +def disable_grid_expansion_if_LV_limit_hit(n): + if not "lv_limit" in n.global_constraints.index: + return + + #calculate minimum LV + attr = "nom_min" + dc = n.links.index[n.links.carrier == "DC"] + tot = (n.lines["s_" + attr]*n.lines["length"]).sum() + (n.links.loc[dc,"p_" + attr]*n.links.loc[dc,"length"]).sum() + + diff = n.global_constraints.at["lv_limit","constant"]-tot + + #allow small numerical differences + limit = 1 + + if diff < limit: + logger.info(f"LV is already reached (gap {diff}), disabling expansion and LV limit") + expandable_acs = n.lines.index[n.lines.s_nom_extendable] + n.lines.loc[expandable_acs,"s_nom_extendable"] = False + n.lines.loc[expandable_acs,"s_nom"] = n.lines.loc[expandable_acs,"s_nom_min"] + + expandable_dcs = n.links.index[n.links.p_nom_extendable & (n.links.carrier == "DC")] + n.links.loc[expandable_dcs,"p_nom_extendable"] = False + n.links.loc[expandable_dcs,"p_nom"] = 
n.links.loc[expandable_dcs,"p_nom_min"] + + n.global_constraints.drop("lv_limit", + inplace=True) if __name__ == "__main__": if "snakemake" not in globals(): @@ -150,5 +176,7 @@ if __name__ == "__main__": add_brownfield(n, n_p, year) + disable_grid_expansion_if_LV_limit_hit(n) + n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) n.export_to_netcdf(snakemake.output[0]) From d6a11c28c937ff058c90b88a4028d87f5da46e65 Mon Sep 17 00:00:00 2001 From: millingermarkus Date: Mon, 11 Dec 2023 10:49:44 +0100 Subject: [PATCH 028/122] Added co2 storage shadow price output to csv --- scripts/make_summary.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/scripts/make_summary.py b/scripts/make_summary.py index 3ec01b66..5afc247e 100644 --- a/scripts/make_summary.py +++ b/scripts/make_summary.py @@ -446,6 +446,8 @@ def calculate_metrics(n, label, metrics): if "CO2Limit" in n.global_constraints.index: metrics.at["co2_shadow", label] = n.global_constraints.at["CO2Limit", "mu"] + if "co2_sequestration_limit" in n.global_constraints.index: + metrics.at["co2_storage_shadow", label] = n.global_constraints.at["co2_sequestration_limit", "mu"] return metrics From 889a5dd5a29cfdb140270405f090252d0fb13720 Mon Sep 17 00:00:00 2001 From: Thomas Gilon Date: Mon, 11 Dec 2023 17:18:31 +0100 Subject: [PATCH 029/122] Fix typo in buses definition for oil boilers in add_industry in prepare_sector_network --- doc/release_notes.rst | 1 + scripts/prepare_sector_network.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index c111e9bc..ff11dfe4 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -40,6 +40,7 @@ Upcoming Release * A bug preventing custom powerplants specified in ``data/custom_powerplants.csv`` was fixed. 
(https://github.com/PyPSA/pypsa-eur/pull/732) * Fix nodal fraction in ``add_existing_year`` when using distributed generators +* Fix typo in buses definition for oil boilers in ``add_industry`` in ``prepare_sector_network`` PyPSA-Eur 0.8.1 (27th July 2023) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 8a4f98ce..d5ca27a7 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2709,7 +2709,7 @@ def add_industry(n, costs): nodes_heat[name] + f" {name} oil boiler", p_nom_extendable=True, bus0=spatial.oil.nodes, - bus1=nodes_heat[name] + f" {name} heat", + bus1=nodes_heat[name] + f" {name} heat", bus2="co2 atmosphere", carrier=f"{name} oil boiler", efficiency=costs.at["decentral oil boiler", "efficiency"], From f0eae99a07c022d7b1e6ee37b12133d21c904c5e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 14 Dec 2023 12:51:46 +0000 Subject: [PATCH 030/122] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/cluster_network.py | 2 +- scripts/make_summary.py | 4 +++- scripts/simplify_network.py | 2 +- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 0f3f351f..28f08396 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -470,7 +470,7 @@ if __name__ == "__main__": n = pypsa.Network(snakemake.input.network) # remove integer outputs for compatibility with PyPSA v0.26.0 - n.generators.drop("n_mod", axis=1, inplace=True, errors='ignore') + n.generators.drop("n_mod", axis=1, inplace=True, errors="ignore") exclude_carriers = params.cluster_network["exclude_carriers"] aggregate_carriers = set(n.generators.carrier) - set(exclude_carriers) diff --git a/scripts/make_summary.py b/scripts/make_summary.py index 5afc247e..fb13e91e 100644 --- a/scripts/make_summary.py +++ 
b/scripts/make_summary.py @@ -447,7 +447,9 @@ def calculate_metrics(n, label, metrics): metrics.at["co2_shadow", label] = n.global_constraints.at["CO2Limit", "mu"] if "co2_sequestration_limit" in n.global_constraints.index: - metrics.at["co2_storage_shadow", label] = n.global_constraints.at["co2_sequestration_limit", "mu"] + metrics.at["co2_storage_shadow", label] = n.global_constraints.at[ + "co2_sequestration_limit", "mu" + ] return metrics diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index d12062c2..f88d10d4 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -537,7 +537,7 @@ if __name__ == "__main__": Nyears = n.snapshot_weightings.objective.sum() / 8760 # remove integer outputs for compatibility with PyPSA v0.26.0 - n.generators.drop("n_mod", axis=1, inplace=True, errors='ignore') + n.generators.drop("n_mod", axis=1, inplace=True, errors="ignore") n, trafo_map = simplify_network_to_380(n) From 830019a6e5d5ced3403bd4d5d9e28e2d66fe621b Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Fri, 15 Dec 2023 09:50:47 +0100 Subject: [PATCH 031/122] add rule that allows cost data to be modified --- rules/build_sector.smk | 4 ++-- rules/retrieve.smk | 14 ++++++++++++++ scripts/modify_cost_data.py | 12 ++++++++++++ 3 files changed, 28 insertions(+), 2 deletions(-) create mode 100644 scripts/modify_cost_data.py diff --git a/rules/build_sector.smk b/rules/build_sector.smk index 5a9e8646..596c0305 100644 --- a/rules/build_sector.smk +++ b/rules/build_sector.smk @@ -743,9 +743,9 @@ rule prepare_sector_network: else RESOURCES + "biomass_potentials_s{simpl}_{clusters}_{planning_horizons}.csv", heat_profile="data/heat_load_profile_BDEW.csv", - costs="data/costs_{}.csv".format(config["costs"]["year"]) + costs="data/costs_{}-modified.csv".format(config["costs"]["year"]) if config["foresight"] == "overnight" - else "data/costs_{planning_horizons}.csv", + else "data/costs_{planning_horizons}-modified.csv", profile_offwind_ac=RESOURCES 
+ "profile_offwind-ac.nc", profile_offwind_dc=RESOURCES + "profile_offwind-dc.nc", h2_cavern=RESOURCES + "salt_cavern_potentials_s{simpl}_{clusters}.csv", diff --git a/rules/retrieve.smk b/rules/retrieve.smk index b830be25..18b424ff 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -259,3 +259,17 @@ if config["enable"]["retrieve"]: "../envs/environment.yaml" script: "../scripts/retrieve_monthly_fuel_prices.py" + + +rule modify_cost_data: + input: + costs="data/costs_{year}.csv", + output: + "data/costs_{year}-modified.csv" + log: + LOGS + "modify_cost_data_{year}.log", + resources: + mem_mb=1000, + retries: 2 + script: + "../scripts/modify_cost_data.py" diff --git a/scripts/modify_cost_data.py b/scripts/modify_cost_data.py new file mode 100644 index 00000000..3e1f12f4 --- /dev/null +++ b/scripts/modify_cost_data.py @@ -0,0 +1,12 @@ + +import pandas as pd + +costs = pd.read_csv(snakemake.input.costs, index_col=[0, 1]).sort_index() + +if "modifications" in snakemake.input.keys(): + modifications = pd.read_csv(snakemake.input.modifications, index_col=[0, 1]).sort_index() + costs.loc[modifications.index] = modifications + print(modifications) + print( costs.loc[modifications.index]) + +costs.to_csv(snakemake.output[0]) From c5a123b4f443a29e8f0f7446ed45ab48230ed1a9 Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Fri, 15 Dec 2023 14:57:03 +0100 Subject: [PATCH 032/122] allow additional functionality for solving to be added by file To add this, overwrite the rule with a new argument: snakemake.input.additional_functionality --- scripts/solve_network.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 53170da9..dce63efe 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -44,6 +44,7 @@ from pypsa.descriptors import get_switchable_as_dense as get_as_dense from prepare_sector_network import emission_sectors_from_opts + def add_land_use_constraint(n, planning_horizons, config): if 
"m" in snakemake.wildcards.clusters: _add_land_use_constraint_m(n, planning_horizons, config) @@ -899,6 +900,13 @@ def extra_functionality(n, snapshots): logger.info(f"Add CO2 limit for each country") add_co2limit_country(n, limit_countries, nyears) + if "additional_functionality" in snakemake.input.keys(): + import importlib, os, sys + sys.path.append(os.path.dirname(snakemake.input.additional_functionality)) + additional_functionality = importlib.import_module(os.path.splitext(os.path.basename(snakemake.input.additional_functionality))[0]) + + additional_functionality.additional_functionality(n, snapshots, snakemake.wildcards.planning_horizons) + def solve_network(n, config, solving, opts="", **kwargs): set_of_options = solving["solver"]["options"] From 1a7f093e037ed177468c163863a7bbd929d322c3 Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Fri, 15 Dec 2023 17:18:36 +0100 Subject: [PATCH 033/122] solve: pass wildcards and config to additional_functionality --- scripts/solve_network.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index dce63efe..6f88b904 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -905,7 +905,7 @@ def extra_functionality(n, snapshots): sys.path.append(os.path.dirname(snakemake.input.additional_functionality)) additional_functionality = importlib.import_module(os.path.splitext(os.path.basename(snakemake.input.additional_functionality))[0]) - additional_functionality.additional_functionality(n, snapshots, snakemake.wildcards.planning_horizons) + additional_functionality.additional_functionality(n, snapshots, snakemake.wildcards, config) def solve_network(n, config, solving, opts="", **kwargs): From b3753d73d75eedaddb8da9f6dcfc4bcf8793831a Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Wed, 20 Dec 2023 09:22:40 +0100 Subject: [PATCH 034/122] undo addition of script to allow cost modifications This undoes commit 830019a6e5d5ced3403bd4d5d9e28e2d66fe621b. 
Reason: this was introduced for the PyPSA-Ariadne derivative, but can be handled more elegantly within the derivative repository. --- rules/build_sector.smk | 4 ++-- rules/retrieve.smk | 14 -------------- scripts/modify_cost_data.py | 12 ------------ 3 files changed, 2 insertions(+), 28 deletions(-) delete mode 100644 scripts/modify_cost_data.py diff --git a/rules/build_sector.smk b/rules/build_sector.smk index 596c0305..5a9e8646 100644 --- a/rules/build_sector.smk +++ b/rules/build_sector.smk @@ -743,9 +743,9 @@ rule prepare_sector_network: else RESOURCES + "biomass_potentials_s{simpl}_{clusters}_{planning_horizons}.csv", heat_profile="data/heat_load_profile_BDEW.csv", - costs="data/costs_{}-modified.csv".format(config["costs"]["year"]) + costs="data/costs_{}.csv".format(config["costs"]["year"]) if config["foresight"] == "overnight" - else "data/costs_{planning_horizons}-modified.csv", + else "data/costs_{planning_horizons}.csv", profile_offwind_ac=RESOURCES + "profile_offwind-ac.nc", profile_offwind_dc=RESOURCES + "profile_offwind-dc.nc", h2_cavern=RESOURCES + "salt_cavern_potentials_s{simpl}_{clusters}.csv", diff --git a/rules/retrieve.smk b/rules/retrieve.smk index 18b424ff..b830be25 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -259,17 +259,3 @@ if config["enable"]["retrieve"]: "../envs/environment.yaml" script: "../scripts/retrieve_monthly_fuel_prices.py" - - -rule modify_cost_data: - input: - costs="data/costs_{year}.csv", - output: - "data/costs_{year}-modified.csv" - log: - LOGS + "modify_cost_data_{year}.log", - resources: - mem_mb=1000, - retries: 2 - script: - "../scripts/modify_cost_data.py" diff --git a/scripts/modify_cost_data.py b/scripts/modify_cost_data.py deleted file mode 100644 index 3e1f12f4..00000000 --- a/scripts/modify_cost_data.py +++ /dev/null @@ -1,12 +0,0 @@ - -import pandas as pd - -costs = pd.read_csv(snakemake.input.costs, index_col=[0, 1]).sort_index() - -if "modifications" in snakemake.input.keys(): - modifications = 
pd.read_csv(snakemake.input.modifications, index_col=[0, 1]).sort_index() - costs.loc[modifications.index] = modifications - print(modifications) - print( costs.loc[modifications.index]) - -costs.to_csv(snakemake.output[0]) From 8a55a55d20215dfe32cc49bae5de3e0f05411f1f Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Thu, 21 Dec 2023 16:08:43 +0100 Subject: [PATCH 035/122] copperplate oil/methanol supply; allow demand to be regional Force a single supply bus for oil/methanol (until we allow oil/methanol transport). Introduce new config switches "regional_oil/methanol_demand" that allow demand to be regionalised. This is important if regional CO2 budgets need to be enforced. --- config/config.default.yaml | 2 + scripts/prepare_sector_network.py | 103 ++++++++++++++++-------------- 2 files changed, 57 insertions(+), 48 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index cafb9d1d..c1e7ed0f 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -471,6 +471,8 @@ sector: SMR: true SMR_cc: true co2_budget_national: false + regional_methanol_demand: false #set to true if regional CO2 constraints needed + regional_oil_demand: false #set to true if regional CO2 constraints needed regional_co2_sequestration_potential: enable: false attribute: 'conservative estimate Mt' diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 8e995dd6..b5a0c0d5 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -127,35 +127,42 @@ def define_spatial(nodes, options): spatial.h2.locations = nodes # methanol + + #beware: unlike other carriers, uses locations rather than locations+carriername + #this allows to avoid separation between nodes and locations + spatial.methanol = SimpleNamespace() - if options["co2_budget_national"]: - spatial.methanol.nodes = nodes + " methanol" - spatial.methanol.locations = nodes + spatial.methanol.nodes = ["EU methanol"] + 
spatial.methanol.locations = ["EU"] + + if options["regional_methanol_demand"]: + spatial.methanol.demand_locations = nodes spatial.methanol.shipping = nodes + " shipping methanol" else: - spatial.methanol.nodes = ["EU methanol"] - spatial.methanol.locations = ["EU"] + spatial.methanol.demand_locations = ["EU"] spatial.methanol.shipping = ["EU shipping methanol"] # oil spatial.oil = SimpleNamespace() - if options["co2_budget_national"]: - spatial.oil.nodes = nodes + " oil" - spatial.oil.locations = nodes + spatial.oil.nodes = ["EU oil"] + spatial.oil.locations = ["EU"] + + if options["regional_oil_demand"]: + spatial.oil.demand_locations = nodes spatial.oil.naphtha = nodes + " naphtha for industry" spatial.oil.kerosene = nodes + " kerosene for aviation" spatial.oil.shipping = nodes + " shipping oil" spatial.oil.agriculture_machinery = nodes + " agriculture machinery oil" + spatial.oil.land_transport = nodes + " land transport oil" else: - spatial.oil.nodes = ["EU oil"] - spatial.oil.locations = ["EU"] + spatial.oil.demand_locations = ["EU"] spatial.oil.naphtha = ["EU naphtha for industry"] spatial.oil.kerosene = ["EU kerosene for aviation"] spatial.oil.shipping = ["EU shipping oil"] spatial.oil.agriculture_machinery = ["EU agriculture machinery oil"] - spatial.oil.land_transport = nodes + " land transport oil" + spatial.oil.land_transport = ["EU land transport oil"] # uranium spatial.uranium = SimpleNamespace() @@ -1588,10 +1595,15 @@ def add_land_transport(n, costs): ice_efficiency = options["transport_internal_combustion_efficiency"] + p_set_land_transport_oil = ice_share / ice_efficiency * transport[nodes].rename(columns=lambda x: x + " land transport oil") + + if not options["regional_oil_demand"]: + p_set_land_transport_oil = p_set_land_transport_oil.sum(axis=1).to_frame(name="EU land transport oil") + n.madd( "Bus", spatial.oil.land_transport, - location=nodes, + location=spatial.oil.demand_locations, carrier="land transport oil", unit="land transport", ) @@ 
-1601,7 +1613,7 @@ def add_land_transport(n, costs): spatial.oil.land_transport, bus=spatial.oil.land_transport, carrier="land transport oil", - p_set=ice_share / ice_efficiency * transport[nodes].rename(columns=lambda x: x + " land transport oil"), + p_set=p_set_land_transport_oil, ) n.madd( @@ -2638,16 +2650,15 @@ def add_industry(n, costs): options["shipping_oil_efficiency"] / options["shipping_methanol_efficiency"] ) - # need to aggregate potentials if methanol not nodally resolved - if options["co2_budget_national"]: - p_set_methanol = shipping_methanol_share * p_set.rename(lambda x : x + " shipping methanol") * efficiency - else: - p_set_methanol = shipping_methanol_share * p_set.sum() * efficiency + p_set_methanol = shipping_methanol_share * p_set.rename(lambda x : x + " shipping methanol") * efficiency + + if not options["regional_methanol_demand"]: + p_set_methanol = p_set_methanol.sum() n.madd( "Bus", spatial.methanol.shipping, - location=spatial.methanol.locations, + location=spatial.methanol.demand_locations, carrier="shipping methanol", unit="MWh_LHV", ) @@ -2684,7 +2695,8 @@ def add_industry(n, costs): # could correct to e.g. 
0.001 EUR/kWh * annuity and O&M n.madd( "Store", - [oil_bus + " Store" for oil_bus in spatial.oil.nodes], + spatial.oil.nodes, + suffix=" Store", bus=spatial.oil.nodes, e_nom_extendable=True, e_cyclic=True, @@ -2702,16 +2714,16 @@ def add_industry(n, costs): ) if shipping_oil_share: - # need to aggregate potentials if oil not nodally resolved - if options["co2_budget_national"]: - p_set_oil = shipping_oil_share * p_set.rename(lambda x: x + " shipping oil") - else: - p_set_oil = shipping_oil_share * p_set.sum() + + p_set_oil = shipping_oil_share * p_set.rename(lambda x: x + " shipping oil") + + if not options["regional_oil_demand"]: + p_set_oil = p_set_oil.sum() n.madd( "Bus", spatial.oil.shipping, - location=spatial.oil.locations, + location=spatial.oil.demand_locations, carrier="shipping oil", unit="MWh_LHV", ) @@ -2781,15 +2793,15 @@ def add_industry(n, costs): if demand_factor != 1: logger.warning(f"Changing HVC demand by {demand_factor*100-100:+.2f}%.") - if options["co2_budget_national"]: - p_set_plastics = demand_factor * industrial_demand.loc[nodes, "naphtha"].rename(lambda x: x + " naphtha for industry") / nhours - else: - p_set_plastics = demand_factor * industrial_demand.loc[nodes, "naphtha"].sum() / nhours + p_set_plastics = demand_factor * industrial_demand.loc[nodes, "naphtha"].rename(lambda x: x + " naphtha for industry") / nhours + + if not options["regional_oil_demand"]: + p_set_plastics = p_set_plastics.sum() n.madd( "Bus", spatial.oil.naphtha, - location=spatial.oil.locations, + location=spatial.oil.demand_locations, carrier="naphtha for industry", unit="MWh_LHV", ) @@ -2826,26 +2838,21 @@ def add_industry(n, costs): logger.warning(f"Changing aviation demand by {demand_factor*100-100:+.2f}%.") all_aviation = ["total international aviation", "total domestic aviation"] - # need to aggregate potentials if oil not nodally resolved - if options["co2_budget_national"]: - p_set = ( + + p_set = ( demand_factor * pop_weighted_energy_totals.loc[nodes, 
all_aviation].sum(axis=1) * 1e6 / nhours ).rename(lambda x: x + " kerosene for aviation") - else: - p_set = ( - demand_factor - * pop_weighted_energy_totals.loc[nodes, all_aviation].sum(axis=1).sum() - * 1e6 - / nhours - ) + + if not options["regional_oil_demand"]: + p_set = p_set.sum() n.madd( "Bus", spatial.oil.kerosene, - location=spatial.oil.locations, + location=spatial.oil.demand_locations, carrier="kerosene for aviation", unit="MWh_LHV", ) @@ -3111,16 +3118,16 @@ def add_agriculture(n, costs): ) if oil_share > 0: - # need to aggregate potentials if oil not nodally resolved - if options["co2_budget_national"]: - p_set = oil_share * machinery_nodal_energy.rename(lambda x: x + " agriculture machinery oil") / nhours - else: - p_set = oil_share * machinery_nodal_energy.sum() / nhours + + p_set = oil_share * machinery_nodal_energy.rename(lambda x: x + " agriculture machinery oil") / nhours + + if not options["regional_oil_demand"]: + p_set = p_set.sum() n.madd( "Bus", spatial.oil.agriculture_machinery, - location=spatial.oil.locations, + location=spatial.oil.demand_locations, carrier="agriculture machinery oil", unit="MWh_LHV", ) From 25c5751565500aa6c7ea901f358a25186f6a734f Mon Sep 17 00:00:00 2001 From: euronion <42553970+euronion@users.noreply.github.com> Date: Thu, 21 Dec 2023 18:39:06 +0100 Subject: [PATCH 036/122] Check WDPA url also a month forward --- doc/release_notes.rst | 2 ++ rules/retrieve.smk | 35 ++++++++++++++++++++++------------- 2 files changed, 24 insertions(+), 13 deletions(-) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index ee5a954b..1c259cb5 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -31,6 +31,8 @@ Upcoming Release * Rule ``retrieve_irena`` get updated values for renewables capacities. +* Rule ``retrieve_wdpa`` updated to not only check for current and previous, but also potentially next months dataset availability. + * Split configuration to enable SMR and SMR CC. 
* The configuration setting for country focus weights when clustering the diff --git a/rules/retrieve.smk b/rules/retrieve.smk index ac89e360..b3969f41 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -246,22 +246,31 @@ if config["enable"]["retrieve"]: if config["enable"]["retrieve"]: - current_month = datetime.now().strftime("%b") - current_year = datetime.now().strftime("%Y") - bYYYY = f"{current_month}{current_year}" + + # Some logic to find the correct file URL + # Sometimes files are released delayed or ahead of schedule, check which file is currently available def check_file_exists(url): - response = requests.head(url) - return response.status_code == 200 + response = requests.head(url) + return response.status_code == 200 - url = f"https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip" + # Basic pattern where WDPA files can be found + url_pattern = "https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bY}_Public.zip" - if not check_file_exists(url): - prev_month = (datetime.now() - timedelta(30)).strftime("%b") - bYYYY = f"{prev_month}{current_year}" - assert check_file_exists( - f"https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip" - ), "The file does not exist." 
+ # 3-letter month + 4 digit year for current/previous/next month to test + current_monthyear = datetime.now().strftime("%b%Y") + prev_monthyear = (datetime.now() - timedelta(30)).strftime("%b%Y") + next_monthyear = (datetime.now() + timedelta(30)).strftime("%b%Y") + + # Test prioritised: current month -> previous -> next + for bY in [current_monthyear, prev_monthyear, next_monthyear]: + if check_file_exists(url := url_pattern.format(bY=bY)): + break + else: + # If None of the three URLs are working + url = False + + assert url, f"No WDPA files found at {url_pattern} for bY='{current_monthyear}, {prev_monthyear}, or {next_monthyear}'" # Downloading protected area database from WDPA # extract the main zip and then merge the contained 3 zipped shapefiles @@ -269,7 +278,7 @@ if config["enable"]["retrieve"]: rule download_wdpa: input: HTTP.remote( - f"d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public_shp.zip", + url, static=True, keep_local=True, ), From 3a474af71fcd5f4793e7ab64f90bce4c295fc173 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 21 Dec 2023 17:40:46 +0000 Subject: [PATCH 037/122] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- rules/retrieve.smk | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/rules/retrieve.smk b/rules/retrieve.smk index b3969f41..de3a6709 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -246,13 +246,12 @@ if config["enable"]["retrieve"]: if config["enable"]["retrieve"]: - # Some logic to find the correct file URL # Sometimes files are released delayed or ahead of schedule, check which file is currently available def check_file_exists(url): - response = requests.head(url) - return response.status_code == 200 + response = requests.head(url) + return response.status_code == 200 # Basic pattern where WDPA files can be found url_pattern = 
"https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bY}_Public.zip" @@ -270,7 +269,9 @@ if config["enable"]["retrieve"]: # If None of the three URLs are working url = False - assert url, f"No WDPA files found at {url_pattern} for bY='{current_monthyear}, {prev_monthyear}, or {next_monthyear}'" + assert ( + url + ), f"No WDPA files found at {url_pattern} for bY='{current_monthyear}, {prev_monthyear}, or {next_monthyear}'" # Downloading protected area database from WDPA # extract the main zip and then merge the contained 3 zipped shapefiles From d817212896a7fd754a5c764ca5d01217db843a2f Mon Sep 17 00:00:00 2001 From: euronion <42553970+euronion@users.noreply.github.com> Date: Fri, 22 Dec 2023 14:40:30 +0100 Subject: [PATCH 038/122] Fix missing bYYYY --- rules/retrieve.smk | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/rules/retrieve.smk b/rules/retrieve.smk index de3a6709..d08f94db 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -254,7 +254,7 @@ if config["enable"]["retrieve"]: return response.status_code == 200 # Basic pattern where WDPA files can be found - url_pattern = "https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bY}_Public.zip" + url_pattern = "https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip" # 3-letter month + 4 digit year for current/previous/next month to test current_monthyear = datetime.now().strftime("%b%Y") @@ -262,8 +262,8 @@ if config["enable"]["retrieve"]: next_monthyear = (datetime.now() + timedelta(30)).strftime("%b%Y") # Test prioritised: current month -> previous -> next - for bY in [current_monthyear, prev_monthyear, next_monthyear]: - if check_file_exists(url := url_pattern.format(bY=bY)): + for bYYYY in [current_monthyear, prev_monthyear, next_monthyear]: + if check_file_exists(url := url_pattern.format(bYYYY=bYYYY)): break else: # If None of the three URLs are working From fec641ce0a9d1cdc66f6a035b376cec44abed8c6 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 22 Dec 2023 13:41:23 +0000 Subject: [PATCH 039/122] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- rules/retrieve.smk | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/rules/retrieve.smk b/rules/retrieve.smk index d08f94db..2485a842 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -254,7 +254,9 @@ if config["enable"]["retrieve"]: return response.status_code == 200 # Basic pattern where WDPA files can be found - url_pattern = "https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip" + url_pattern = ( + "https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip" + ) # 3-letter month + 4 digit year for current/previous/next month to test current_monthyear = datetime.now().strftime("%b%Y") From 3c3d49017e525c9c73c6bf0f46d1a8e3d56bc4c3 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 25 Dec 2023 18:33:27 +0000 Subject: [PATCH 040/122] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/psf/black: 23.12.0 → 23.12.1](https://github.com/psf/black/compare/23.12.0...23.12.1) - [github.com/macisamuele/language-formatters-pre-commit-hooks: v2.11.0 → v2.12.0](https://github.com/macisamuele/language-formatters-pre-commit-hooks/compare/v2.11.0...v2.12.0) --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 82987bf8..7b9009c3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -51,7 +51,7 @@ repos: # Formatting with "black" coding style - repo: https://github.com/psf/black - rev: 23.12.0 + rev: 23.12.1 hooks: # Format Python files - id: black @@ -67,7 +67,7 @@ repos: # Do YAML formatting (before the linter checks it for 
misses) - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks - rev: v2.11.0 + rev: v2.12.0 hooks: - id: pretty-format-yaml args: [--autofix, --indent, "2", --preserve-quotes] From 471c97d4996c103201c72a3c131f65818a3020aa Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Fri, 29 Dec 2023 11:20:14 +0100 Subject: [PATCH 041/122] wdpa: correct _shp url --- rules/retrieve.smk | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rules/retrieve.smk b/rules/retrieve.smk index 2485a842..4657bc66 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -255,7 +255,7 @@ if config["enable"]["retrieve"]: # Basic pattern where WDPA files can be found url_pattern = ( - "https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip" + "https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public_shp.zip" ) # 3-letter month + 4 digit year for current/previous/next month to test From c5f48f957d311eedf652039d43946384d9ece727 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Fri, 29 Dec 2023 11:20:40 +0100 Subject: [PATCH 042/122] wdpa: give snakemake appearance of time-invariant wdpa files --- rules/retrieve.smk | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/rules/retrieve.smk b/rules/retrieve.smk index 4657bc66..4fe0cd7b 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -286,10 +286,10 @@ if config["enable"]["retrieve"]: keep_local=True, ), params: - zip=RESOURCES + f"WDPA_{bYYYY}_shp.zip", - folder=directory(RESOURCES + f"WDPA_{bYYYY}"), + zip=RESOURCES + f"WDPA_shp.zip", + folder=directory(RESOURCES + f"WDPA"), output: - gpkg=RESOURCES + f"WDPA_{bYYYY}.gpkg", + gpkg=RESOURCES + f"WDPA.gpkg", run: shell("cp {input} {params.zip}") shell("unzip -o {params.zip} -d {params.folder}") @@ -312,10 +312,10 @@ if config["enable"]["retrieve"]: keep_local=True, ), params: - zip=RESOURCES + f"WDPA_WDOECM_{bYYYY}_marine.zip", - folder=directory(RESOURCES + f"WDPA_WDOECM_{bYYYY}_marine"), + 
zip=RESOURCES + f"WDPA_WDOECM_marine.zip", + folder=directory(RESOURCES + f"WDPA_WDOECM_marine"), output: - gpkg=RESOURCES + f"WDPA_WDOECM_{bYYYY}_marine.gpkg", + gpkg=RESOURCES + f"WDPA_WDOECM_marine.gpkg", run: shell("cp {input} {params.zip}") shell("unzip -o {params.zip} -d {params.folder}") From accffe7bc279ea91904a265332c8054874b50938 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Fri, 29 Dec 2023 11:21:24 +0100 Subject: [PATCH 043/122] wdpa time-invariant filenames in build_electricity.smk --- rules/build_electricity.smk | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rules/build_electricity.smk b/rules/build_electricity.smk index 873e7c3a..c98d6a02 100644 --- a/rules/build_electricity.smk +++ b/rules/build_electricity.smk @@ -210,8 +210,8 @@ rule determine_availability_matrix_MD_UA: input: copernicus=RESOURCES + "Copernicus_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif", - wdpa=RESOURCES + f"WDPA_{bYYYY}.gpkg", - wdpa_marine=RESOURCES + f"WDPA_WDOECM_{bYYYY}_marine.gpkg", + wdpa=RESOURCES + f"WDPA.gpkg", + wdpa_marine=RESOURCES + f"WDPA_WDOECM_marine.gpkg", gebco=lambda w: ( "data/bundle/GEBCO_2014_2D.nc" if "max_depth" in config["renewable"][w.technology].keys() From 4f815a9cd78ca5bbb5a213db9de7ffc3281b6c03 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Fri, 29 Dec 2023 11:29:01 +0100 Subject: [PATCH 044/122] move wdpa and glc datasets to data directory and protect --- rules/build_electricity.smk | 7 +++---- rules/retrieve.smk | 15 +++++++-------- 2 files changed, 10 insertions(+), 12 deletions(-) diff --git a/rules/build_electricity.smk b/rules/build_electricity.smk index c98d6a02..6308552f 100644 --- a/rules/build_electricity.smk +++ b/rules/build_electricity.smk @@ -208,10 +208,9 @@ rule build_ship_raster: rule determine_availability_matrix_MD_UA: input: - copernicus=RESOURCES - + "Copernicus_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif", - wdpa=RESOURCES + f"WDPA.gpkg", - 
wdpa_marine=RESOURCES + f"WDPA_WDOECM_marine.gpkg", + copernicus="data/Copernicus_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif", + wdpa="data/WDPA.gpkg", + wdpa_marine="data/WDPA_WDOECM_marine.gpkg", gebco=lambda w: ( "data/bundle/GEBCO_2014_2D.nc" if "max_depth" in config["renewable"][w.technology].keys() diff --git a/rules/retrieve.smk b/rules/retrieve.smk index 4fe0cd7b..584894b1 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -239,8 +239,7 @@ if config["enable"]["retrieve"]: static=True, ), output: - RESOURCES - + "Copernicus_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif", + "data/Copernicus_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif", run: move(input[0], output[0]) @@ -286,10 +285,10 @@ if config["enable"]["retrieve"]: keep_local=True, ), params: - zip=RESOURCES + f"WDPA_shp.zip", - folder=directory(RESOURCES + f"WDPA"), + zip="data/WDPA_shp.zip", + folder=directory("data/WDPA"), output: - gpkg=RESOURCES + f"WDPA.gpkg", + gpkg=protected("data/WDPA.gpkg"), run: shell("cp {input} {params.zip}") shell("unzip -o {params.zip} -d {params.folder}") @@ -312,10 +311,10 @@ if config["enable"]["retrieve"]: keep_local=True, ), params: - zip=RESOURCES + f"WDPA_WDOECM_marine.zip", - folder=directory(RESOURCES + f"WDPA_WDOECM_marine"), + zip="data/WDPA_WDOECM_marine.zip", + folder=directory("data/WDPA_WDOECM_marine"), output: - gpkg=RESOURCES + f"WDPA_WDOECM_marine.gpkg", + gpkg=protected("data/WDPA_WDOECM_marine.gpkg"), run: shell("cp {input} {params.zip}") shell("unzip -o {params.zip} -d {params.folder}") From 02b0996dacd87321c0ce9463dbc928644d2a955b Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Fri, 29 Dec 2023 11:29:19 +0100 Subject: [PATCH 045/122] remove write-protection on resources/natura.tiff --- rules/retrieve.smk | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rules/retrieve.smk b/rules/retrieve.smk index 584894b1..0483a833 100644 --- 
a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -113,7 +113,7 @@ if config["enable"]["retrieve"] and config["enable"].get( static=True, ), output: - protected(RESOURCES + "natura.tiff"), + RESOURCES + "natura.tiff", log: LOGS + "retrieve_natura_raster.log", resources: From 97619888593d796ce6f92dc3750b7aafdd84b263 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Fri, 29 Dec 2023 11:32:53 +0100 Subject: [PATCH 046/122] increase pypsa min version to 0.26.1 --- envs/environment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/envs/environment.yaml b/envs/environment.yaml index 1d9592c6..535acbdb 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -11,7 +11,7 @@ dependencies: - pip - atlite>=0.2.9 -- pypsa>=0.26.0 +- pypsa>=0.26.1 - linopy - dask From 71985d5e3aa173386c0decb301750f09c5408041 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Fri, 29 Dec 2023 12:34:14 +0100 Subject: [PATCH 047/122] validate checksums for zenodo downloads --- doc/release_notes.rst | 3 ++ rules/common.smk | 3 ++ rules/retrieve.smk | 6 ++- scripts/_helpers.py | 59 +++++++++++++++++++++ scripts/retrieve_databundle.py | 4 +- scripts/retrieve_gas_infrastructure_data.py | 4 +- scripts/retrieve_sector_databundle.py | 4 +- 7 files changed, 79 insertions(+), 4 deletions(-) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index d7931f0e..7b1b6d73 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -54,6 +54,9 @@ Upcoming Release reconnected to the main Ukrainian grid with the configuration option `reconnect_crimea`. +* Validate downloads from Zenodo using MD5 checksums. This identifies corrupted + or incomplete downloads. 
+ **Bugs and Compatibility** diff --git a/rules/common.smk b/rules/common.smk index d3416050..a1537c10 100644 --- a/rules/common.smk +++ b/rules/common.smk @@ -2,6 +2,9 @@ # # SPDX-License-Identifier: MIT +import os, sys +sys.path.insert(0, os.path.abspath("scripts")) +from _helpers import validate_checksum def memory(w): factor = 3.0 diff --git a/rules/retrieve.smk b/rules/retrieve.smk index 4fe0cd7b..e2e63427 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -77,6 +77,7 @@ if config["enable"]["retrieve"] and config["enable"].get("retrieve_cutout", True retries: 2 run: move(input[0], output[0]) + validate_checksum(output[0], input[0]) if config["enable"]["retrieve"] and config["enable"].get("retrieve_cost_data", True): @@ -113,7 +114,7 @@ if config["enable"]["retrieve"] and config["enable"].get( static=True, ), output: - protected(RESOURCES + "natura.tiff"), + RESOURCES + "natura.tiff", log: LOGS + "retrieve_natura_raster.log", resources: @@ -121,6 +122,7 @@ if config["enable"]["retrieve"] and config["enable"].get( retries: 2 run: move(input[0], output[0]) + validate_checksum(output[0], input[0]) if config["enable"]["retrieve"] and config["enable"].get( @@ -226,6 +228,7 @@ if config["enable"]["retrieve"]: retries: 2 run: move(input[0], output[0]) + validate_checksum(output[0], input[0]) if config["enable"]["retrieve"]: @@ -243,6 +246,7 @@ if config["enable"]["retrieve"]: + "Copernicus_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif", run: move(input[0], output[0]) + validate_checksum(output[0], input[0]) if config["enable"]["retrieve"]: diff --git a/scripts/_helpers.py b/scripts/_helpers.py index 398f3a30..d906872d 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -4,6 +4,7 @@ # SPDX-License-Identifier: MIT import contextlib +import hashlib import logging import os import urllib @@ -11,6 +12,7 @@ from pathlib import Path import pandas as pd import pytz +import requests import yaml from pypsa.components import 
component_attrs, components from pypsa.descriptors import Dict @@ -318,3 +320,60 @@ def update_config_with_sector_opts(config, sector_opts): if o.startswith("CF+"): l = o.split("+")[1:] update_config(config, parse(l)) + + +def get_checksum_from_zenodo(file_url): + parts = file_url.split("/") + record_id = parts[parts.index("record") + 1] + filename = parts[-1] + + response = requests.get(f"https://zenodo.org/api/records/{record_id}", timeout=30) + response.raise_for_status() + data = response.json() + + for file in data["files"]: + if file["key"] == filename: + return file["checksum"] + return None + + +def validate_checksum(file_path, zenodo_url=None, checksum=None): + """ + Validate file checksum against provided or Zenodo-retrieved checksum. + Calculates the hash of a file using 64KB chunks. Compares it against a given + checksum or one from a Zenodo URL. + + Parameters + ---------- + file_path : str + Path to the file for checksum validation. + zenodo_url : str, optional + URL of the file on Zenodo to fetch the checksum. + checksum : str, optional + Checksum (format 'hash_type:checksum_value') for validation. + + Raises + ------ + AssertionError + If the checksum does not match, or if neither `checksum` nor `zenodo_url` is provided. + + + Examples + -------- + >>> validate_checksum('/path/to/file', checksum='md5:abc123...') + >>> validate_checksum('/path/to/file', zenodo_url='https://zenodo.org/record/12345/files/example.txt') + + If the checksum is invalid, an AssertionError will be raised. + """ + assert checksum or zenodo_url, "Either checksum or zenodo_url must be provided" + if zenodo_url: + checksum = get_checksum_from_zenodo(zenodo_url) + hash_type, checksum = checksum.split(":") + hasher = hashlib.new(hash_type) + with open(file_path, "rb") as f: + for chunk in iter(lambda: f.read(65536), b""): # 64kb chunks + hasher.update(chunk) + calculated_checksum = hasher.hexdigest() + assert ( + calculated_checksum == checksum + ), "Checksum is invalid. 
This may be due to an incomplete download. Delete the file and re-execute the rule." diff --git a/scripts/retrieve_databundle.py b/scripts/retrieve_databundle.py index 75d8519e..25894063 100644 --- a/scripts/retrieve_databundle.py +++ b/scripts/retrieve_databundle.py @@ -36,7 +36,7 @@ import logging import tarfile from pathlib import Path -from _helpers import configure_logging, progress_retrieve +from _helpers import configure_logging, progress_retrieve, validate_checksum logger = logging.getLogger(__name__) @@ -65,6 +65,8 @@ if __name__ == "__main__": disable_progress = snakemake.config["run"].get("disable_progressbar", False) progress_retrieve(url, tarball_fn, disable=disable_progress) + validate_checksum(tarball_fn, url) + logger.info("Extracting databundle.") tarfile.open(tarball_fn).extractall(to_fn) diff --git a/scripts/retrieve_gas_infrastructure_data.py b/scripts/retrieve_gas_infrastructure_data.py index 42b726db..d984b9fe 100644 --- a/scripts/retrieve_gas_infrastructure_data.py +++ b/scripts/retrieve_gas_infrastructure_data.py @@ -11,7 +11,7 @@ import logging import zipfile from pathlib import Path -from _helpers import progress_retrieve +from _helpers import progress_retrieve, validate_checksum logger = logging.getLogger(__name__) @@ -35,6 +35,8 @@ if __name__ == "__main__": disable_progress = snakemake.config["run"].get("disable_progressbar", False) progress_retrieve(url, zip_fn, disable=disable_progress) + validate_checksum(zip_fn, url) + logger.info("Extracting databundle.") zipfile.ZipFile(zip_fn).extractall(to_fn) diff --git a/scripts/retrieve_sector_databundle.py b/scripts/retrieve_sector_databundle.py index 0d172c8d..cb6cc969 100644 --- a/scripts/retrieve_sector_databundle.py +++ b/scripts/retrieve_sector_databundle.py @@ -13,7 +13,7 @@ logger = logging.getLogger(__name__) import tarfile from pathlib import Path -from _helpers import configure_logging, progress_retrieve +from _helpers import configure_logging, progress_retrieve, validate_checksum 
if __name__ == "__main__": if "snakemake" not in globals(): @@ -34,6 +34,8 @@ if __name__ == "__main__": disable_progress = snakemake.config["run"].get("disable_progressbar", False) progress_retrieve(url, tarball_fn, disable=disable_progress) + validate_checksum(tarball_fn, url) + logger.info("Extracting databundle.") tarfile.open(tarball_fn).extractall(to_fn) From de3b6c9573f075d2bdb93ea0af852c8b31ca03b3 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 29 Dec 2023 11:38:41 +0000 Subject: [PATCH 048/122] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- rules/common.smk | 2 ++ scripts/_helpers.py | 11 +++++++---- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/rules/common.smk b/rules/common.smk index a1537c10..2c8cf69c 100644 --- a/rules/common.smk +++ b/rules/common.smk @@ -3,9 +3,11 @@ # SPDX-License-Identifier: MIT import os, sys + sys.path.insert(0, os.path.abspath("scripts")) from _helpers import validate_checksum + def memory(w): factor = 3.0 for o in w.opts.split("-"): diff --git a/scripts/_helpers.py b/scripts/_helpers.py index d906872d..9945f70f 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -340,8 +340,8 @@ def get_checksum_from_zenodo(file_url): def validate_checksum(file_path, zenodo_url=None, checksum=None): """ Validate file checksum against provided or Zenodo-retrieved checksum. - Calculates the hash of a file using 64KB chunks. Compares it against a given - checksum or one from a Zenodo URL. + Calculates the hash of a file using 64KB chunks. Compares it against a + given checksum or one from a Zenodo URL. 
Parameters ---------- @@ -360,8 +360,11 @@ def validate_checksum(file_path, zenodo_url=None, checksum=None): Examples -------- - >>> validate_checksum('/path/to/file', checksum='md5:abc123...') - >>> validate_checksum('/path/to/file', zenodo_url='https://zenodo.org/record/12345/files/example.txt') + >>> validate_checksum("/path/to/file", checksum="md5:abc123...") + >>> validate_checksum( + ... "/path/to/file", + ... zenodo_url="https://zenodo.org/record/12345/files/example.txt", + ... ) If the checksum is invalid, an AssertionError will be raised. """ From 8a11bdb4b132f6803485c01c2d222cad8e9d3c66 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Fri, 29 Dec 2023 17:19:19 +0100 Subject: [PATCH 049/122] solve_network: option to inject custom extra functionalities from source file --- config/config.default.yaml | 1 + doc/configtables/solving.csv | 1 + doc/release_notes.rst | 5 +++++ rules/solve_myopic.smk | 3 +++ rules/solve_overnight.smk | 3 +++ rules/solve_perfect.smk | 3 +++ scripts/solve_network.py | 7 +++++++ 7 files changed, 23 insertions(+) diff --git a/config/config.default.yaml b/config/config.default.yaml index a6df173b..0ff742e7 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -627,6 +627,7 @@ solving: skip_iterations: true rolling_horizon: false seed: 123 + custom_extra_functionality: "data/custom_extra_functionality.py" # options that go into the optimize function track_iterations: false min_iterations: 4 diff --git a/doc/configtables/solving.csv b/doc/configtables/solving.csv index 45d50d84..9d47c043 100644 --- a/doc/configtables/solving.csv +++ b/doc/configtables/solving.csv @@ -6,6 +6,7 @@ options,,, -- skip_iterations,bool,"{'true','false'}","Skip iterating, do not update impedances of branches. Defaults to true." -- rolling_horizon,bool,"{'true','false'}","Whether to optimize the network in a rolling horizon manner, where the snapshot range is split into slices of size `horizon` which are solved consecutively." 
-- seed,--,int,Random seed for increased deterministic behaviour. +-- custom_extra_functionality,--,str,Path to a Python file with custom extra functionality code to be injected into the solving rules of the workflow. -- track_iterations,bool,"{'true','false'}",Flag whether to store the intermediate branch capacities and objective function values are recorded for each iteration in ``network.lines['s_nom_opt_X']`` (where ``X`` labels the iteration) -- min_iterations,--,int,Minimum number of solving iterations in between which resistance and reactence (``x/r``) are updated for branches according to ``s_nom_opt`` of the previous run. -- max_iterations,--,int,Maximum number of solving iterations in between which resistance and reactence (``x/r``) are updated for branches according to ``s_nom_opt`` of the previous run. diff --git a/doc/release_notes.rst b/doc/release_notes.rst index d7931f0e..a7644682 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -54,6 +54,11 @@ Upcoming Release reconnected to the main Ukrainian grid with the configuration option `reconnect_crimea`. +* Add option to reference an additional source file where users can specify + custom ``extra_functionality`` constraints in the configuration file. The + default setting points to an empty hull at + ``data/custom_extra_functionality.py``. 
+ **Bugs and Compatibility** diff --git a/rules/solve_myopic.smk b/rules/solve_myopic.smk index 8a93d24a..217547b9 100644 --- a/rules/solve_myopic.smk +++ b/rules/solve_myopic.smk @@ -88,6 +88,9 @@ rule solve_sector_network_myopic: co2_sequestration_potential=config["sector"].get( "co2_sequestration_potential", 200 ), + custom_extra_functionality=workflow.source_path( + config["solver"]["options"].get("custom_extra_functionality", "") + ), input: network=RESULTS + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", diff --git a/rules/solve_overnight.smk b/rules/solve_overnight.smk index c7700760..8ac56db8 100644 --- a/rules/solve_overnight.smk +++ b/rules/solve_overnight.smk @@ -11,6 +11,9 @@ rule solve_sector_network: co2_sequestration_potential=config["sector"].get( "co2_sequestration_potential", 200 ), + custom_extra_functionality=workflow.source_path( + config["solver"]["options"].get("custom_extra_functionality", "") + ), input: network=RESULTS + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", diff --git a/rules/solve_perfect.smk b/rules/solve_perfect.smk index ef4e367d..322ced8d 100644 --- a/rules/solve_perfect.smk +++ b/rules/solve_perfect.smk @@ -118,6 +118,9 @@ rule solve_sector_network_perfect: co2_sequestration_potential=config["sector"].get( "co2_sequestration_potential", 200 ), + custom_extra_functionality=workflow.source_path( + config["solver"]["options"].get("custom_extra_functionality", "") + ), input: network=RESULTS + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc", diff --git a/scripts/solve_network.py b/scripts/solve_network.py index ff2a2f23..539c4e72 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -26,7 +26,9 @@ Additionally, some extra constraints specified in :mod:`solve_network` are added the workflow for all scenarios in the configuration file (``scenario:``) based on the 
rule :mod:`solve_network`. """ +import importlib import logging +import os import re import numpy as np @@ -792,6 +794,11 @@ def extra_functionality(n, snapshots): add_carbon_budget_constraint(n, snapshots) add_retrofit_gas_boiler_constraint(n, snapshots) + if snakemake.params.custom_extra_functionality: + source_path = snakemake.params.custom_extra_functionality + module_name = os.path.splitext(os.path.basename(source_path))[0] + module = importlib.import_module(module_name) + module.custom_extra_functionality(n, snapshots) def solve_network(n, config, solving, opts="", **kwargs): set_of_options = solving["solver"]["options"] From fba320bfa7ae05a86e567426848577d42a25b337 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 29 Dec 2023 16:20:24 +0000 Subject: [PATCH 050/122] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/solve_network.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 539c4e72..d79a6342 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -800,6 +800,7 @@ def extra_functionality(n, snapshots): module = importlib.import_module(module_name) module.custom_extra_functionality(n, snapshots) + def solve_network(n, config, solving, opts="", **kwargs): set_of_options = solving["solver"]["options"] cf_solving = solving["options"] From 4b6dd2908324eda4c1722cb9ba41d330df6da443 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Fri, 29 Dec 2023 17:23:11 +0100 Subject: [PATCH 051/122] add dummy file and assert path exists --- data/custom_extra_functionality.py | 9 +++++++++ scripts/solve_network.py | 1 + 2 files changed, 10 insertions(+) create mode 100644 data/custom_extra_functionality.py diff --git a/data/custom_extra_functionality.py b/data/custom_extra_functionality.py new file mode 100644 index 00000000..98b0c026 --- /dev/null +++ 
b/data/custom_extra_functionality.py @@ -0,0 +1,9 @@ +# SPDX-FileCopyrightText: : 2023- The PyPSA-Eur Authors +# +# SPDX-License-Identifier: MIT + +def custom_extra_functionality(n, snapshots): + """ + Add custom extra functionality constraints. + """ + pass diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 539c4e72..ed28c51c 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -796,6 +796,7 @@ def extra_functionality(n, snapshots): if snakemake.params.custom_extra_functionality: source_path = snakemake.params.custom_extra_functionality + assert os.path.exists(source_path), f"{source_path} does not exist" module_name = os.path.splitext(os.path.basename(source_path))[0] module = importlib.import_module(module_name) module.custom_extra_functionality(n, snapshots) From 876a28b688719273cc8fd9531845ae78c0e7edbe Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 29 Dec 2023 16:23:52 +0000 Subject: [PATCH 052/122] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- data/custom_extra_functionality.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/data/custom_extra_functionality.py b/data/custom_extra_functionality.py index 98b0c026..0ac24cea 100644 --- a/data/custom_extra_functionality.py +++ b/data/custom_extra_functionality.py @@ -1,7 +1,9 @@ +# -*- coding: utf-8 -*- # SPDX-FileCopyrightText: : 2023- The PyPSA-Eur Authors # # SPDX-License-Identifier: MIT + def custom_extra_functionality(n, snapshots): """ Add custom extra functionality constraints. 
From a5ba2565a0abfcbe84d25c75a66fd2639bf08ca1 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Fri, 29 Dec 2023 17:30:03 +0100 Subject: [PATCH 053/122] correct config location --- rules/solve_myopic.smk | 2 +- rules/solve_overnight.smk | 2 +- rules/solve_perfect.smk | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/rules/solve_myopic.smk b/rules/solve_myopic.smk index 217547b9..afa8ad2c 100644 --- a/rules/solve_myopic.smk +++ b/rules/solve_myopic.smk @@ -89,7 +89,7 @@ rule solve_sector_network_myopic: "co2_sequestration_potential", 200 ), custom_extra_functionality=workflow.source_path( - config["solver"]["options"].get("custom_extra_functionality", "") + config["solving"]["options"].get("custom_extra_functionality", "") ), input: network=RESULTS diff --git a/rules/solve_overnight.smk b/rules/solve_overnight.smk index 8ac56db8..fc2f74df 100644 --- a/rules/solve_overnight.smk +++ b/rules/solve_overnight.smk @@ -12,7 +12,7 @@ rule solve_sector_network: "co2_sequestration_potential", 200 ), custom_extra_functionality=workflow.source_path( - config["solver"]["options"].get("custom_extra_functionality", "") + config["solving"]["options"].get("custom_extra_functionality", "") ), input: network=RESULTS diff --git a/rules/solve_perfect.smk b/rules/solve_perfect.smk index 322ced8d..63be5cc1 100644 --- a/rules/solve_perfect.smk +++ b/rules/solve_perfect.smk @@ -119,7 +119,7 @@ rule solve_sector_network_perfect: "co2_sequestration_potential", 200 ), custom_extra_functionality=workflow.source_path( - config["solver"]["options"].get("custom_extra_functionality", "") + config["solving"]["options"].get("custom_extra_functionality", "") ), input: network=RESULTS From 79ca64382b56e69315e641c9441e7a036a97313a Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sun, 31 Dec 2023 14:15:37 +0100 Subject: [PATCH 054/122] correct path for custom_extra_functionality --- config/config.default.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/config/config.default.yaml b/config/config.default.yaml index 0ff742e7..b8945f75 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -627,7 +627,7 @@ solving: skip_iterations: true rolling_horizon: false seed: 123 - custom_extra_functionality: "data/custom_extra_functionality.py" + custom_extra_functionality: "../data/custom_extra_functionality.py" # options that go into the optimize function track_iterations: false min_iterations: 4 From adf2c96dc13deb0b4cb3df077e3c552a3dbcddd1 Mon Sep 17 00:00:00 2001 From: Jess <122939887+jessLryan@users.noreply.github.com> Date: Mon, 1 Jan 2024 12:14:38 +0000 Subject: [PATCH 055/122] Update index.rst fixed 2 broken links --- doc/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/index.rst b/doc/index.rst index d30dd8b9..909a96a2 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -116,7 +116,7 @@ of the individual parts. topics we are working on. Please feel free to help or make suggestions. This project is currently maintained by the `Department of Digital -Transformation in Energy Systems `_ at the +Transformation in Energy Systems `_ at the `Technische Universität Berlin `_. 
Previous versions were developed within the `IAI `_ at the `Karlsruhe Institute of Technology (KIT) `_ which was funded by From f28e9b47d26cc108c99103c3ce63b34d0ed2011d Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 16:00:41 +0100 Subject: [PATCH 056/122] add custom_extra_functionality param to solve_electricity rule --- rules/solve_electricity.smk | 3 +++ 1 file changed, 3 insertions(+) diff --git a/rules/solve_electricity.smk b/rules/solve_electricity.smk index c396ebd5..2c956097 100644 --- a/rules/solve_electricity.smk +++ b/rules/solve_electricity.smk @@ -11,6 +11,9 @@ rule solve_network: co2_sequestration_potential=config["sector"].get( "co2_sequestration_potential", 200 ), + custom_extra_functionality=workflow.source_path( + config["solving"]["options"].get("custom_extra_functionality", "") + ), input: network=RESOURCES + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", config=RESULTS + "config.yaml", From 340bf778498a4a6ceffe246b8ab3245a9ccc84f7 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 16:01:28 +0100 Subject: [PATCH 057/122] clarify that source_path is relative to directory --- doc/configtables/solving.csv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/configtables/solving.csv b/doc/configtables/solving.csv index 9d47c043..dcff54e4 100644 --- a/doc/configtables/solving.csv +++ b/doc/configtables/solving.csv @@ -6,7 +6,7 @@ options,,, -- skip_iterations,bool,"{'true','false'}","Skip iterating, do not update impedances of branches. Defaults to true." -- rolling_horizon,bool,"{'true','false'}","Whether to optimize the network in a rolling horizon manner, where the snapshot range is split into slices of size `horizon` which are solved consecutively." -- seed,--,int,Random seed for increased deterministic behaviour. --- custom_extra_functionality,--,str,Path to a Python file with custom extra functionality code to be injected into the solving rules of the workflow. 
+-- custom_extra_functionality,--,str,Path to a Python file with custom extra functionality code to be injected into the solving rules of the workflow relative to ``rules`` directory. -- track_iterations,bool,"{'true','false'}",Flag whether to store the intermediate branch capacities and objective function values are recorded for each iteration in ``network.lines['s_nom_opt_X']`` (where ``X`` labels the iteration) -- min_iterations,--,int,Minimum number of solving iterations in between which resistance and reactence (``x/r``) are updated for branches according to ``s_nom_opt`` of the previous run. -- max_iterations,--,int,Maximum number of solving iterations in between which resistance and reactence (``x/r``) are updated for branches according to ``s_nom_opt`` of the previous run. From 1b569dde1bcbcd32175d41b0ba3ed265e76a1aad Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Tue, 2 Jan 2024 16:02:10 +0100 Subject: [PATCH 058/122] move code for national CO2 budgets out of extra_functionality This can be added by derived workflows like PyPSA-Eur via additional_functionality. Changed additional_functionality to pass snakemake rather than wildcards and config separately. This gives maximal flexibility. --- config/config.default.yaml | 17 ------ scripts/solve_network.py | 110 +------------------------------------ 2 files changed, 1 insertion(+), 126 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index c1e7ed0f..6d2ebd9f 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -84,22 +84,6 @@ co2_budget: 2045: 0.032 2050: 0.000 -co2_budget_national: - 2030: - 'DE': 0.350 - 'AT': 0.450 - 'BE': 0.450 - 'CH': 0.450 - 'CZ': 0.450 - 'DK': 0.450 - 'FR': 0.450 - 'GB': 0.450 - 'LU': 0.450 - 'NL': 0.450 - 'NO': 0.450 - 'PL': 0.450 - 'SE': 0.450 - # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#electricity electricity: voltages: [220., 300., 380.] 
@@ -470,7 +454,6 @@ sector: hydrogen_turbine: false SMR: true SMR_cc: true - co2_budget_national: false regional_methanol_demand: false #set to true if regional CO2 constraints needed regional_oil_demand: false #set to true if regional CO2 constraints needed regional_co2_sequestration_potential: diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 6f88b904..433b175b 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -41,9 +41,6 @@ logger = logging.getLogger(__name__) pypsa.pf.logger.setLevel(logging.WARNING) from pypsa.descriptors import get_switchable_as_dense as get_as_dense -from prepare_sector_network import emission_sectors_from_opts - - def add_land_use_constraint(n, planning_horizons, config): if "m" in snakemake.wildcards.clusters: @@ -765,100 +762,6 @@ def add_pipe_retrofit_constraint(n): n.model.add_constraints(lhs == rhs, name="Link-pipe_retrofit") -def add_co2limit_country(n, limit_countries, nyears=1.0): - """ - Add a set of emissions limit constraints for specified countries. - - The countries and emissions limits are specified in the config file entry 'co2_budget_country_{investment_year}'. - - Parameters - ---------- - n : pypsa.Network - config : dict - limit_countries : dict - nyears: float, optional - Used to scale the emissions constraint to the number of snapshots of the base network. - """ - logger.info(f"Adding CO2 budget limit for each country as per unit of 1990 levels") - - countries = n.config["countries"] - - # TODO: import function from prepare_sector_network? Move to common place? 
- sectors = emission_sectors_from_opts(opts) - - # convert Mt to tCO2 - co2_totals = 1e6 * pd.read_csv(snakemake.input.co2_totals_name, index_col=0) - - co2_limit_countries = co2_totals.loc[countries, sectors].sum(axis=1) - co2_limit_countries = co2_limit_countries.loc[co2_limit_countries.index.isin(limit_countries.keys())] - - co2_limit_countries *= co2_limit_countries.index.map(limit_countries) * nyears - - p = n.model["Link-p"] # dimension: (time, component) - - # NB: Most country-specific links retain their locational information in bus1 (except for DAC, where it is in bus2, and process emissions, where it is in bus0) - country = n.links.bus1.map(n.buses.location).map(n.buses.country) - country_DAC = ( - n.links[n.links.carrier == "DAC"] - .bus2.map(n.buses.location) - .map(n.buses.country) - ) - country[country_DAC.index] = country_DAC - country_process_emissions = ( - n.links[n.links.carrier.str.contains("process emissions")] - .bus0.map(n.buses.location) - .map(n.buses.country) - ) - country[country_process_emissions.index] = country_process_emissions - - lhs = [] - for port in [col[3:] for col in n.links if col.startswith("bus")]: - if port == str(0): - efficiency = ( - n.links["efficiency"].apply(lambda x: -1.0).rename("efficiency0") - ) - elif port == str(1): - efficiency = n.links["efficiency"] - else: - efficiency = n.links[f"efficiency{port}"] - mask = n.links[f"bus{port}"].map(n.buses.carrier).eq("co2") - - idx = n.links[mask].index - - international = n.links.carrier.map( - lambda x: 0.4 if x in ["kerosene for aviation", "shipping oil"] else 1.0 - ) - grouping = country.loc[idx] - - if not grouping.isnull().all(): - expr = ( - ((p.loc[:, idx] * efficiency[idx] * international[idx]) - .groupby(grouping, axis=1) - .sum() - *n.snapshot_weightings.generators - ) - .sum(dims="snapshot") - ) - lhs.append(expr) - - lhs = sum(lhs) # dimension: (country) - lhs = lhs.rename({list(lhs.dims.keys())[0]: "snapshot"}) - rhs = pd.Series(co2_limit_countries) # 
dimension: (country) - - for ct in lhs.indexes["snapshot"]: - n.model.add_constraints( - lhs.loc[ct] <= rhs[ct], - name=f"GlobalConstraint-co2_limit_per_country{ct}", - ) - n.add( - "GlobalConstraint", - f"co2_limit_per_country{ct}", - constant=rhs[ct], - sense="<=", - type="", - ) - - def extra_functionality(n, snapshots): """ Collects supplementary constraints which will be passed to @@ -889,23 +792,12 @@ def extra_functionality(n, snapshots): add_carbon_budget_constraint(n, snapshots) add_retrofit_gas_boiler_constraint(n, snapshots) - if n.config["sector"]["co2_budget_national"]: - # prepare co2 constraint - nhours = n.snapshot_weightings.generators.sum() - nyears = nhours / 8760 - investment_year = int(snakemake.wildcards.planning_horizons[-4:]) - limit_countries = snakemake.config["co2_budget_national"][investment_year] - - # add co2 constraint for each country - logger.info(f"Add CO2 limit for each country") - add_co2limit_country(n, limit_countries, nyears) - if "additional_functionality" in snakemake.input.keys(): import importlib, os, sys sys.path.append(os.path.dirname(snakemake.input.additional_functionality)) additional_functionality = importlib.import_module(os.path.splitext(os.path.basename(snakemake.input.additional_functionality))[0]) - additional_functionality.additional_functionality(n, snapshots, snakemake.wildcards, config) + additional_functionality.additional_functionality(n, snapshots, snakemake) def solve_network(n, config, solving, opts="", **kwargs): From f494dd85b969f9491a2d9bf81ea98008452440a7 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 15:21:49 +0000 Subject: [PATCH 059/122] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/add_brownfield.py | 31 ++++++++------ scripts/prepare_sector_network.py | 69 ++++++++++++++++++++----------- scripts/solve_network.py | 11 ++++- 3 files changed, 73 
insertions(+), 38 deletions(-) diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index fb1453fd..ffdaf46b 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -119,32 +119,39 @@ def add_brownfield(n, n_p, year): n.links.loc[new_pipes, "p_nom"] = 0.0 n.links.loc[new_pipes, "p_nom_min"] = 0.0 + def disable_grid_expansion_if_LV_limit_hit(n): if not "lv_limit" in n.global_constraints.index: return - #calculate minimum LV + # calculate minimum LV attr = "nom_min" dc = n.links.index[n.links.carrier == "DC"] - tot = (n.lines["s_" + attr]*n.lines["length"]).sum() + (n.links.loc[dc,"p_" + attr]*n.links.loc[dc,"length"]).sum() + tot = (n.lines["s_" + attr] * n.lines["length"]).sum() + ( + n.links.loc[dc, "p_" + attr] * n.links.loc[dc, "length"] + ).sum() - diff = n.global_constraints.at["lv_limit","constant"]-tot + diff = n.global_constraints.at["lv_limit", "constant"] - tot - #allow small numerical differences + # allow small numerical differences limit = 1 if diff < limit: - logger.info(f"LV is already reached (gap {diff}), disabling expansion and LV limit") + logger.info( + f"LV is already reached (gap {diff}), disabling expansion and LV limit" + ) expandable_acs = n.lines.index[n.lines.s_nom_extendable] - n.lines.loc[expandable_acs,"s_nom_extendable"] = False - n.lines.loc[expandable_acs,"s_nom"] = n.lines.loc[expandable_acs,"s_nom_min"] + n.lines.loc[expandable_acs, "s_nom_extendable"] = False + n.lines.loc[expandable_acs, "s_nom"] = n.lines.loc[expandable_acs, "s_nom_min"] - expandable_dcs = n.links.index[n.links.p_nom_extendable & (n.links.carrier == "DC")] - n.links.loc[expandable_dcs,"p_nom_extendable"] = False - n.links.loc[expandable_dcs,"p_nom"] = n.links.loc[expandable_dcs,"p_nom_min"] + expandable_dcs = n.links.index[ + n.links.p_nom_extendable & (n.links.carrier == "DC") + ] + n.links.loc[expandable_dcs, "p_nom_extendable"] = False + n.links.loc[expandable_dcs, "p_nom"] = n.links.loc[expandable_dcs, "p_nom_min"] + + 
n.global_constraints.drop("lv_limit", inplace=True) - n.global_constraints.drop("lv_limit", - inplace=True) if __name__ == "__main__": if "snakemake" not in globals(): diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index b5a0c0d5..f1ddce2d 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -128,8 +128,8 @@ def define_spatial(nodes, options): # methanol - #beware: unlike other carriers, uses locations rather than locations+carriername - #this allows to avoid separation between nodes and locations + # beware: unlike other carriers, uses locations rather than locations+carriername + # this allows to avoid separation between nodes and locations spatial.methanol = SimpleNamespace() @@ -1595,10 +1595,16 @@ def add_land_transport(n, costs): ice_efficiency = options["transport_internal_combustion_efficiency"] - p_set_land_transport_oil = ice_share / ice_efficiency * transport[nodes].rename(columns=lambda x: x + " land transport oil") + p_set_land_transport_oil = ( + ice_share + / ice_efficiency + * transport[nodes].rename(columns=lambda x: x + " land transport oil") + ) if not options["regional_oil_demand"]: - p_set_land_transport_oil = p_set_land_transport_oil.sum(axis=1).to_frame(name="EU land transport oil") + p_set_land_transport_oil = p_set_land_transport_oil.sum(axis=1).to_frame( + name="EU land transport oil" + ) n.madd( "Bus", @@ -2454,7 +2460,7 @@ def add_industry(n, costs): efficiency=1.0, ) - if len(spatial.biomass.industry_cc)<=1 and len(spatial.co2.nodes)>1: + if len(spatial.biomass.industry_cc) <= 1 and len(spatial.co2.nodes) > 1: link_names = nodes + " " + spatial.biomass.industry_cc else: link_names = spatial.biomass.industry_cc @@ -2650,7 +2656,11 @@ def add_industry(n, costs): options["shipping_oil_efficiency"] / options["shipping_methanol_efficiency"] ) - p_set_methanol = shipping_methanol_share * p_set.rename(lambda x : x + " shipping methanol") * efficiency + p_set_methanol = ( 
+ shipping_methanol_share + * p_set.rename(lambda x: x + " shipping methanol") + * efficiency + ) if not options["regional_methanol_demand"]: p_set_methanol = p_set_methanol.sum() @@ -2679,7 +2689,10 @@ def add_industry(n, costs): bus2="co2 atmosphere", carrier="shipping methanol", p_nom_extendable=True, - efficiency2=1 / options["MWh_MeOH_per_tCO2"], # CO2 intensity methanol based on stoichiometric calculation with 22.7 GJ/t methanol (32 g/mol), CO2 (44 g/mol), 277.78 MWh/TJ = 0.218 t/MWh + efficiency2=1 + / options[ + "MWh_MeOH_per_tCO2" + ], # CO2 intensity methanol based on stoichiometric calculation with 22.7 GJ/t methanol (32 g/mol), CO2 (44 g/mol), 277.78 MWh/TJ = 0.218 t/MWh ) if "oil" not in n.buses.carrier.unique(): @@ -2714,7 +2727,6 @@ def add_industry(n, costs): ) if shipping_oil_share: - p_set_oil = shipping_oil_share * p_set.rename(lambda x: x + " shipping oil") if not options["regional_oil_demand"]: @@ -2793,7 +2805,13 @@ def add_industry(n, costs): if demand_factor != 1: logger.warning(f"Changing HVC demand by {demand_factor*100-100:+.2f}%.") - p_set_plastics = demand_factor * industrial_demand.loc[nodes, "naphtha"].rename(lambda x: x + " naphtha for industry") / nhours + p_set_plastics = ( + demand_factor + * industrial_demand.loc[nodes, "naphtha"].rename( + lambda x: x + " naphtha for industry" + ) + / nhours + ) if not options["regional_oil_demand"]: p_set_plastics = p_set_plastics.sum() @@ -2816,7 +2834,10 @@ def add_industry(n, costs): # some CO2 from naphtha are process emissions from steam cracker # rest of CO2 released to atmosphere either in waste-to-energy or decay - process_co2_per_naphtha = industrial_demand.loc[nodes, "process emission from feedstock"].sum() / industrial_demand.loc[nodes, "naphtha"].sum() + process_co2_per_naphtha = ( + industrial_demand.loc[nodes, "process emission from feedstock"].sum() + / industrial_demand.loc[nodes, "naphtha"].sum() + ) emitted_co2_per_naphtha = costs.at["oil", "CO2 intensity"] - 
process_co2_per_naphtha n.madd( @@ -2840,11 +2861,11 @@ def add_industry(n, costs): all_aviation = ["total international aviation", "total domestic aviation"] p_set = ( - demand_factor - * pop_weighted_energy_totals.loc[nodes, all_aviation].sum(axis=1) - * 1e6 - / nhours - ).rename(lambda x: x + " kerosene for aviation") + demand_factor + * pop_weighted_energy_totals.loc[nodes, all_aviation].sum(axis=1) + * 1e6 + / nhours + ).rename(lambda x: x + " kerosene for aviation") if not options["regional_oil_demand"]: p_set = p_set.sum() @@ -3095,9 +3116,9 @@ def add_agriculture(n, costs): f"Total agriculture machinery shares sum up to {total_share:.2%}, corresponding to increased or decreased demand assumptions." ) - machinery_nodal_energy = pop_weighted_energy_totals.loc[ - nodes, "total agriculture machinery" - ] * 1e6 + machinery_nodal_energy = ( + pop_weighted_energy_totals.loc[nodes, "total agriculture machinery"] * 1e6 + ) if electric_share > 0: efficiency_gain = ( @@ -3111,15 +3132,15 @@ def add_agriculture(n, costs): suffix=" agriculture machinery electric", bus=nodes, carrier="agriculture machinery electric", - p_set=electric_share - / efficiency_gain - * machinery_nodal_energy - / nhours, + p_set=electric_share / efficiency_gain * machinery_nodal_energy / nhours, ) if oil_share > 0: - - p_set = oil_share * machinery_nodal_energy.rename(lambda x: x + " agriculture machinery oil") / nhours + p_set = ( + oil_share + * machinery_nodal_energy.rename(lambda x: x + " agriculture machinery oil") + / nhours + ) if not options["regional_oil_demand"]: p_set = p_set.sum() diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 433b175b..4bdbb543 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -793,9 +793,16 @@ def extra_functionality(n, snapshots): add_retrofit_gas_boiler_constraint(n, snapshots) if "additional_functionality" in snakemake.input.keys(): - import importlib, os, sys + import importlib + import os + import sys + 
sys.path.append(os.path.dirname(snakemake.input.additional_functionality)) - additional_functionality = importlib.import_module(os.path.splitext(os.path.basename(snakemake.input.additional_functionality))[0]) + additional_functionality = importlib.import_module( + os.path.splitext( + os.path.basename(snakemake.input.additional_functionality) + )[0] + ) additional_functionality.additional_functionality(n, snapshots, snakemake) From e3539b0e69cb753e01af5b1c9e3538087b72165e Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 16:31:16 +0100 Subject: [PATCH 060/122] heat vent: add bus --- scripts/prepare_sector_network.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index d5ca27a7..329560c7 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -1697,6 +1697,7 @@ def add_heat(n, costs): n.madd( "Generator", nodes[name] + f" {name} heat vent", + bus=nodes[name] + f" {name} heat", location=nodes[name], carrier=name + " heat vent", p_nom_extendable=True, From 30c1a1c857b01e944284d984c540a86e882a2258 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 16:31:48 +0100 Subject: [PATCH 061/122] address deprecation warnings --- config/config.default.yaml | 3 ++ scripts/build_biomass_potentials.py | 2 +- scripts/build_energy_totals.py | 56 ++++++++++++++--------------- scripts/build_line_rating.py | 2 +- scripts/build_retro_cost.py | 5 ++- scripts/build_ship_raster.py | 2 +- scripts/prepare_sector_network.py | 10 +++--- 7 files changed, 41 insertions(+), 39 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index a6df173b..37664ad6 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -158,6 +158,7 @@ renewable: resource: method: wind turbine: Vestas_V112_3MW + add_cutout_windspeed: true capacity_per_sqkm: 3 # correction_factor: 0.93 corine: @@ -173,6 +174,7 @@ renewable: resource: 
method: wind turbine: NREL_ReferenceTurbine_5MW_offshore + add_cutout_windspeed: true capacity_per_sqkm: 2 correction_factor: 0.8855 corine: [44, 255] @@ -188,6 +190,7 @@ renewable: resource: method: wind turbine: NREL_ReferenceTurbine_5MW_offshore + add_cutout_windspeed: true capacity_per_sqkm: 2 correction_factor: 0.8855 corine: [44, 255] diff --git a/scripts/build_biomass_potentials.py b/scripts/build_biomass_potentials.py index d7c467cf..aae1fb98 100644 --- a/scripts/build_biomass_potentials.py +++ b/scripts/build_biomass_potentials.py @@ -134,7 +134,7 @@ def disaggregate_nuts0(bio): # get population in nuts2 pop_nuts2 = pop.loc[pop.index.str.len() == 4] by_country = pop_nuts2.total.groupby(pop_nuts2.ct).sum() - pop_nuts2["fraction"] = pop_nuts2.total / pop_nuts2.ct.map(by_country) + pop_nuts2.loc[:, "fraction"] = pop_nuts2.total / pop_nuts2.ct.map(by_country) # distribute nuts0 data to nuts2 by population bio_nodal = bio.loc[pop_nuts2.ct] diff --git a/scripts/build_energy_totals.py b/scripts/build_energy_totals.py index 6f9585c1..67b86466 100644 --- a/scripts/build_energy_totals.py +++ b/scripts/build_energy_totals.py @@ -189,12 +189,12 @@ def idees_per_country(ct, year, base_dir): ct_totals["total residential water"] = df.at["Water heating"] assert df.index[23] == "Electricity" - ct_totals["electricity residential water"] = df[23] + ct_totals["electricity residential water"] = df.iloc[23] ct_totals["total residential cooking"] = df["Cooking"] assert df.index[30] == "Electricity" - ct_totals["electricity residential cooking"] = df[30] + ct_totals["electricity residential cooking"] = df.iloc[30] df = pd.read_excel(fn_residential, "RES_summary", index_col=0)[year] @@ -202,13 +202,13 @@ def idees_per_country(ct, year, base_dir): ct_totals["total residential"] = df[row] assert df.index[47] == "Electricity" - ct_totals["electricity residential"] = df[47] + ct_totals["electricity residential"] = df.iloc[47] assert df.index[46] == "Derived heat" - ct_totals["derived 
heat residential"] = df[46] + ct_totals["derived heat residential"] = df.iloc[46] assert df.index[50] == "Thermal uses" - ct_totals["thermal uses residential"] = df[50] + ct_totals["thermal uses residential"] = df.iloc[50] # services @@ -222,12 +222,12 @@ def idees_per_country(ct, year, base_dir): ct_totals["total services water"] = df["Hot water"] assert df.index[24] == "Electricity" - ct_totals["electricity services water"] = df[24] + ct_totals["electricity services water"] = df.iloc[24] ct_totals["total services cooking"] = df["Catering"] assert df.index[31] == "Electricity" - ct_totals["electricity services cooking"] = df[31] + ct_totals["electricity services cooking"] = df.iloc[31] df = pd.read_excel(fn_tertiary, "SER_summary", index_col=0)[year] @@ -235,13 +235,13 @@ def idees_per_country(ct, year, base_dir): ct_totals["total services"] = df[row] assert df.index[50] == "Electricity" - ct_totals["electricity services"] = df[50] + ct_totals["electricity services"] = df.iloc[50] assert df.index[49] == "Derived heat" - ct_totals["derived heat services"] = df[49] + ct_totals["derived heat services"] = df.iloc[49] assert df.index[53] == "Thermal uses" - ct_totals["thermal uses services"] = df[53] + ct_totals["thermal uses services"] = df.iloc[53] # agriculture, forestry and fishing @@ -282,28 +282,28 @@ def idees_per_country(ct, year, base_dir): ct_totals["total two-wheel"] = df["Powered 2-wheelers (Gasoline)"] assert df.index[19] == "Passenger cars" - ct_totals["total passenger cars"] = df[19] + ct_totals["total passenger cars"] = df.iloc[19] assert df.index[30] == "Battery electric vehicles" - ct_totals["electricity passenger cars"] = df[30] + ct_totals["electricity passenger cars"] = df.iloc[30] assert df.index[31] == "Motor coaches, buses and trolley buses" - ct_totals["total other road passenger"] = df[31] + ct_totals["total other road passenger"] = df.iloc[31] assert df.index[39] == "Battery electric vehicles" - ct_totals["electricity other road passenger"] = 
df[39] + ct_totals["electricity other road passenger"] = df.iloc[39] assert df.index[41] == "Light duty vehicles" - ct_totals["total light duty road freight"] = df[41] + ct_totals["total light duty road freight"] = df.iloc[41] assert df.index[49] == "Battery electric vehicles" - ct_totals["electricity light duty road freight"] = df[49] + ct_totals["electricity light duty road freight"] = df.iloc[49] row = "Heavy duty vehicles (Diesel oil incl. biofuels)" ct_totals["total heavy duty road freight"] = df[row] assert df.index[61] == "Passenger cars" - ct_totals["passenger car efficiency"] = df[61] + ct_totals["passenger car efficiency"] = df.iloc[61] df = pd.read_excel(fn_transport, "TrRail_ene", index_col=0)[year] @@ -312,39 +312,39 @@ def idees_per_country(ct, year, base_dir): ct_totals["electricity rail"] = df["Electricity"] assert df.index[15] == "Passenger transport" - ct_totals["total rail passenger"] = df[15] + ct_totals["total rail passenger"] = df.iloc[15] assert df.index[16] == "Metro and tram, urban light rail" assert df.index[19] == "Electric" assert df.index[20] == "High speed passenger trains" - ct_totals["electricity rail passenger"] = df[[16, 19, 20]].sum() + ct_totals["electricity rail passenger"] = df.iloc[[16, 19, 20]].sum() assert df.index[21] == "Freight transport" - ct_totals["total rail freight"] = df[21] + ct_totals["total rail freight"] = df.iloc[21] assert df.index[23] == "Electric" - ct_totals["electricity rail freight"] = df[23] + ct_totals["electricity rail freight"] = df.iloc[23] df = pd.read_excel(fn_transport, "TrAvia_ene", index_col=0)[year] assert df.index[6] == "Passenger transport" - ct_totals["total aviation passenger"] = df[6] + ct_totals["total aviation passenger"] = df.iloc[6] assert df.index[10] == "Freight transport" - ct_totals["total aviation freight"] = df[10] + ct_totals["total aviation freight"] = df.iloc[10] assert df.index[7] == "Domestic" - ct_totals["total domestic aviation passenger"] = df[7] + ct_totals["total 
domestic aviation passenger"] = df.iloc[7] assert df.index[8] == "International - Intra-EU" assert df.index[9] == "International - Extra-EU" - ct_totals["total international aviation passenger"] = df[[8, 9]].sum() + ct_totals["total international aviation passenger"] = df.iloc[[8, 9]].sum() assert df.index[11] == "Domestic and International - Intra-EU" - ct_totals["total domestic aviation freight"] = df[11] + ct_totals["total domestic aviation freight"] = df.iloc[11] assert df.index[12] == "International - Extra-EU" - ct_totals["total international aviation freight"] = df[12] + ct_totals["total international aviation freight"] = df.iloc[12] ct_totals["total domestic aviation"] = ( ct_totals["total domestic aviation freight"] @@ -364,7 +364,7 @@ def idees_per_country(ct, year, base_dir): df = pd.read_excel(fn_transport, "TrRoad_act", index_col=0)[year] assert df.index[85] == "Passenger cars" - ct_totals["passenger cars"] = df[85] + ct_totals["passenger cars"] = df.iloc[85] return pd.Series(ct_totals, name=ct) diff --git a/scripts/build_line_rating.py b/scripts/build_line_rating.py index 032ba39c..c53d2899 100755 --- a/scripts/build_line_rating.py +++ b/scripts/build_line_rating.py @@ -119,7 +119,7 @@ def calculate_line_rating(n, cutout): .apply(lambda x: int(re.findall(r"(\d+)-bundle", x)[0])) ) # Set default number of bundles per line - relevant_lines["n_bundle"].fillna(1, inplace=True) + relevant_lines["n_bundle"] = relevant_lines["n_bundle"].fillna(1) R *= relevant_lines["n_bundle"] R = calculate_resistance(T=353, R_ref=R) Imax = cutout.line_rating(shapes, R, D=0.0218, Ts=353, epsilon=0.8, alpha=0.8) diff --git a/scripts/build_retro_cost.py b/scripts/build_retro_cost.py index f5313c21..03c46651 100644 --- a/scripts/build_retro_cost.py +++ b/scripts/build_retro_cost.py @@ -836,8 +836,7 @@ def calculate_heat_losses(u_values, data_tabula, l_strength, temperature_factor) F_red_temp = map_to_lstrength(l_strength, F_red_temp) Q_ht = ( - 
heat_transfer_perm2.groupby(level=1, axis=1) - .sum() + heat_transfer_perm2.T.groupby(level=1).sum().T .mul(F_red_temp.droplevel(0, axis=1)) .mul(temperature_factor.reindex(heat_transfer_perm2.index, level=0), axis=0) ) @@ -878,7 +877,7 @@ def calculate_gain_utilisation_factor(heat_transfer_perm2, Q_ht, Q_gain): Calculates gain utilisation factor nu. """ # time constant of the building tau [h] = c_m [Wh/(m^2K)] * 1 /(H_tr_e+H_tb*H_ve) [m^2 K /W] - tau = c_m / heat_transfer_perm2.groupby(level=1, axis=1).sum() + tau = c_m / heat_transfer_perm2.T.groupby(axis=1).sum().T alpha = alpha_H_0 + (tau / tau_H_0) # heat balance ratio gamma = (1 / Q_ht).mul(Q_gain.sum(axis=1), axis=0) diff --git a/scripts/build_ship_raster.py b/scripts/build_ship_raster.py index 90e006b0..02f4d5d5 100644 --- a/scripts/build_ship_raster.py +++ b/scripts/build_ship_raster.py @@ -64,7 +64,7 @@ if __name__ == "__main__": with zipfile.ZipFile(snakemake.input.ship_density) as zip_f: zip_f.extract("shipdensity_global.tif") with rioxarray.open_rasterio("shipdensity_global.tif") as ship_density: - ship_density = ship_density.drop(["band"]).sel( + ship_density = ship_density.drop_vars(["band"]).sel( x=slice(min(xs), max(Xs)), y=slice(max(Ys), min(ys)) ) ship_density.rio.to_raster(snakemake.output[0]) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 329560c7..b3a706d8 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -1630,7 +1630,7 @@ def build_heat_demand(n): electric_nodes = n.loads.index[n.loads.carrier == "electricity"] n.loads_t.p_set[electric_nodes] = ( n.loads_t.p_set[electric_nodes] - - electric_heat_supply.groupby(level=1, axis=1).sum()[electric_nodes] + - electric_heat_supply.T.groupby(level=1).sum().T[electric_nodes] ) return heat_demand @@ -1724,15 +1724,15 @@ def add_heat(n, costs): if sector in name: heat_load = ( heat_demand[[sector + " water", sector + " space"]] - .groupby(level=1, axis=1) - 
.sum()[nodes[name]] + .T.groupby(level=1) + .sum().T[nodes[name]] .multiply(factor) ) if name == "urban central": heat_load = ( - heat_demand.groupby(level=1, axis=1) - .sum()[nodes[name]] + heat_demand.T.groupby(level=1) + .sum().T[nodes[name]] .multiply( factor * (1 + options["district_heating"]["district_heating_loss"]) ) From e580ac85d962e0ef9d24716125655a0e59712f8e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 15:33:08 +0000 Subject: [PATCH 062/122] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/build_retro_cost.py | 5 +++-- scripts/prepare_sector_network.py | 6 ++++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/scripts/build_retro_cost.py b/scripts/build_retro_cost.py index 03c46651..3ca2b174 100644 --- a/scripts/build_retro_cost.py +++ b/scripts/build_retro_cost.py @@ -836,8 +836,9 @@ def calculate_heat_losses(u_values, data_tabula, l_strength, temperature_factor) F_red_temp = map_to_lstrength(l_strength, F_red_temp) Q_ht = ( - heat_transfer_perm2.T.groupby(level=1).sum().T - .mul(F_red_temp.droplevel(0, axis=1)) + heat_transfer_perm2.T.groupby(level=1) + .sum() + .T.mul(F_red_temp.droplevel(0, axis=1)) .mul(temperature_factor.reindex(heat_transfer_perm2.index, level=0), axis=0) ) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index b3a706d8..2480754c 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -1725,14 +1725,16 @@ def add_heat(n, costs): heat_load = ( heat_demand[[sector + " water", sector + " space"]] .T.groupby(level=1) - .sum().T[nodes[name]] + .sum() + .T[nodes[name]] .multiply(factor) ) if name == "urban central": heat_load = ( heat_demand.T.groupby(level=1) - .sum().T[nodes[name]] + .sum() + .T[nodes[name]] .multiply( factor * (1 + options["district_heating"]["district_heating_loss"]) ) From 
f2a636c62cbc3dca93eaf7df7ad6686012f1e8da Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 18:12:49 +0100 Subject: [PATCH 063/122] bugfix: correct unit of capital_cost of Haber-Bosch --- scripts/prepare_sector_network.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 2480754c..ac0b618b 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -796,6 +796,8 @@ def add_ammonia(n, costs): "Bus", spatial.ammonia.nodes, location=spatial.ammonia.locations, carrier="NH3" ) + MWh_elec_per_MWh_NH3 = cf_industry["MWh_elec_per_tNH3_electrolysis"] / cf_industry["MWh_NH3_per_tNH3"] + n.madd( "Link", nodes, @@ -805,14 +807,10 @@ def add_ammonia(n, costs): bus2=nodes + " H2", p_nom_extendable=True, carrier="Haber-Bosch", - efficiency=1 - / ( - cf_industry["MWh_elec_per_tNH3_electrolysis"] - / cf_industry["MWh_NH3_per_tNH3"] - ), # output: MW_NH3 per MW_elec + efficiency=1 / MWh_elec_per_MWh_NH3, efficiency2=-cf_industry["MWh_H2_per_tNH3_electrolysis"] / cf_industry["MWh_elec_per_tNH3_electrolysis"], # input: MW_H2 per MW_elec - capital_cost=costs.at["Haber-Bosch", "fixed"], + capital_cost=costs.at["Haber-Bosch", "fixed"] / MWh_elec_per_MWh_NH3, lifetime=costs.at["Haber-Bosch", "lifetime"], ) From 2678fdef993eb2c0d1b325c4e260040ed2e19174 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 17:13:34 +0000 Subject: [PATCH 064/122] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/prepare_sector_network.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index ac0b618b..1dc2b3ef 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -796,7 +796,9 @@ def add_ammonia(n, 
costs): "Bus", spatial.ammonia.nodes, location=spatial.ammonia.locations, carrier="NH3" ) - MWh_elec_per_MWh_NH3 = cf_industry["MWh_elec_per_tNH3_electrolysis"] / cf_industry["MWh_NH3_per_tNH3"] + MWh_elec_per_MWh_NH3 = ( + cf_industry["MWh_elec_per_tNH3_electrolysis"] / cf_industry["MWh_NH3_per_tNH3"] + ) n.madd( "Link", From 0720ccb00d268c7f81fed419a313cb8e2c5d6924 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 18:14:11 +0100 Subject: [PATCH 065/122] add release note --- doc/release_notes.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index a3659b9b..c319bce9 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -38,6 +38,8 @@ Upcoming Release * Split configuration to enable SMR and SMR CC. +* Bugfix: The unit of the capital cost of Haber-Bosch plants was corrected. + * The configuration setting for country focus weights when clustering the network has been moved from ``focus_weights:`` to ``clustering: focus_weights:``. Backwards compatibility to old config files is maintained. From fd81058008b3532ecba0145e60541bea5e08f343 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 18:35:10 +0100 Subject: [PATCH 066/122] add VOM of PtX processes (closes #747) --- doc/release_notes.rst | 2 ++ scripts/prepare_sector_network.py | 20 +++++++++++++------- 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index c319bce9..494abde1 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -44,6 +44,8 @@ Upcoming Release network has been moved from ``focus_weights:`` to ``clustering: focus_weights:``. Backwards compatibility to old config files is maintained. +* Add VOM as marginal cost to PtX processes. + * The ``mock_snakemake`` function can now be used with a Snakefile from a different directory using the new ``root_dir`` argument. * Merged option to extend geographical scope to Ukraine and Moldova. 
These diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 1dc2b3ef..8620d240 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -813,6 +813,7 @@ def add_ammonia(n, costs): efficiency2=-cf_industry["MWh_H2_per_tNH3_electrolysis"] / cf_industry["MWh_elec_per_tNH3_electrolysis"], # input: MW_H2 per MW_elec capital_cost=costs.at["Haber-Bosch", "fixed"] / MWh_elec_per_MWh_NH3, + marginal_cost=costs.at["Haber-Bosch", "VOM"] / MWh_elec_per_MWh_NH3, lifetime=costs.at["Haber-Bosch", "lifetime"], ) @@ -1023,7 +1024,7 @@ def insert_gas_distribution_costs(n, costs): f"Inserting gas distribution grid with investment cost factor of {f_costs}" ) - capital_cost = costs.loc["electricity distribution grid"]["fixed"] * f_costs + capital_cost = costs.at["electricity distribution grid", "fixed"] * f_costs # gas boilers gas_b = n.links.index[ @@ -1100,6 +1101,7 @@ def add_storage_and_grids(n, costs): efficiency=costs.at["OCGT", "efficiency"], capital_cost=costs.at["OCGT", "fixed"] * costs.at["OCGT", "efficiency"], # NB: fixed cost is per MWel + marginal_cost=costs.at["OCGT", "VOM"], lifetime=costs.at["OCGT", "lifetime"], ) @@ -2168,8 +2170,8 @@ def add_biomass(n, costs): bus1=spatial.gas.nodes, bus2="co2 atmosphere", carrier="biogas to gas", - capital_cost=costs.loc["biogas upgrading", "fixed"], - marginal_cost=costs.loc["biogas upgrading", "VOM"], + capital_cost=costs.at["biogas upgrading", "fixed"], + marginal_cost=costs.at["biogas upgrading", "VOM"], efficiency2=-costs.at["gas", "CO2 intensity"], p_nom_extendable=True, ) @@ -2318,7 +2320,7 @@ def add_biomass(n, costs): + costs.at["BtL", "CO2 stored"], p_nom_extendable=True, capital_cost=costs.at["BtL", "fixed"], - marginal_cost=costs.at["BtL", "efficiency"] * costs.loc["BtL", "VOM"], + marginal_cost=costs.at["BtL", "efficiency"] * costs.at["BtL", "VOM"], ) # TODO: Update with energy penalty @@ -2339,7 +2341,7 @@ def add_biomass(n, costs): 
p_nom_extendable=True, capital_cost=costs.at["BtL", "fixed"] + costs.at["biomass CHP capture", "fixed"] * costs.at["BtL", "CO2 stored"], - marginal_cost=costs.at["BtL", "efficiency"] * costs.loc["BtL", "VOM"], + marginal_cost=costs.at["BtL", "efficiency"] * costs.at["BtL", "VOM"], ) # BioSNG from solid biomass @@ -2358,7 +2360,7 @@ def add_biomass(n, costs): + costs.at["BioSNG", "CO2 stored"], p_nom_extendable=True, capital_cost=costs.at["BioSNG", "fixed"], - marginal_cost=costs.at["BioSNG", "efficiency"] * costs.loc["BioSNG", "VOM"], + marginal_cost=costs.at["BioSNG", "efficiency"] * costs.at["BioSNG", "VOM"], ) # TODO: Update with energy penalty for CC @@ -2382,7 +2384,7 @@ def add_biomass(n, costs): capital_cost=costs.at["BioSNG", "fixed"] + costs.at["biomass CHP capture", "fixed"] * costs.at["BioSNG", "CO2 stored"], - marginal_cost=costs.at["BioSNG", "efficiency"] * costs.loc["BioSNG", "VOM"], + marginal_cost=costs.at["BioSNG", "efficiency"] * costs.at["BioSNG", "VOM"], ) @@ -2615,6 +2617,8 @@ def add_industry(n, costs): p_min_pu=options.get("min_part_load_methanolisation", 0), capital_cost=costs.at["methanolisation", "fixed"] * options["MWh_MeOH_per_MWh_H2"], # EUR/MW_H2/a + marginal_cost=options["MWh_MeOH_per_MWh_H2"] + * costs.at["methanolisation", "VOM"], lifetime=costs.at["methanolisation", "lifetime"], efficiency=options["MWh_MeOH_per_MWh_H2"], efficiency2=-options["MWh_MeOH_per_MWh_H2"] / options["MWh_MeOH_per_MWh_e"], @@ -2732,6 +2736,8 @@ def add_industry(n, costs): efficiency=costs.at["Fischer-Tropsch", "efficiency"], capital_cost=costs.at["Fischer-Tropsch", "fixed"] * costs.at["Fischer-Tropsch", "efficiency"], # EUR/MW_H2/a + marginal_cost=costs.at["Fischer-Tropsch", "efficiency"] + * costs.at["Fischer-Tropsch", "VOM"], efficiency2=-costs.at["oil", "CO2 intensity"] * costs.at["Fischer-Tropsch", "efficiency"], p_nom_extendable=True, From 6714858e177ca9a040862906e9b326ce22ecca6a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 17:47:46 +0000 Subject: [PATCH 067/122] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/prepare_sector_network.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 139e4836..62bca811 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2163,7 +2163,6 @@ def add_biomass(n, costs): e_initial=solid_biomass_potentials_spatial, ) - n.madd( "Link", spatial.gas.biogas_to_gas, @@ -2178,10 +2177,9 @@ def add_biomass(n, costs): efficiency=costs.at["biogas", "efficiency"], efficiency2=-costs.at["gas", "CO2 intensity"], efficiency3=costs.at["biogas", "CO2 stored"], - p_nom_extendable=True + p_nom_extendable=True, ) - if options.get("biomass_upgrading_cc"): # Assuming for costs that the CO2 from upgrading is pure, such as in amine scrubbing. I.e., with and without CC is # equivalent. 
Adding biomass CHP capture because biogas is often small-scale and decentral so further From 9884dee7a0737aa2eca00e60828659b02fa0cecc Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 18:48:52 +0100 Subject: [PATCH 068/122] remove biogas upgrading CC in normal link --- scripts/prepare_sector_network.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 139e4836..1d6e4ab0 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2170,14 +2170,12 @@ def add_biomass(n, costs): bus0=spatial.gas.biogas, bus1=spatial.gas.nodes, bus2="co2 atmosphere", - bus3="co2 stored", carrier="biogas to gas", capital_cost=costs.at["biogas", "fixed"] + costs.at["biogas upgrading", "fixed"], marginal_cost=costs.at["biogas upgrading", "VOM"], efficiency=costs.at["biogas", "efficiency"], efficiency2=-costs.at["gas", "CO2 intensity"], - efficiency3=costs.at["biogas", "CO2 stored"], p_nom_extendable=True ) From b0cca00b7088767e46806dcebd64800d73247d42 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 18:51:03 +0100 Subject: [PATCH 069/122] add documentation and release note for biogas upgrading CC --- config/config.default.yaml | 2 ++ doc/configtables/sector.csv | 1 + doc/release_notes.rst | 2 ++ 3 files changed, 5 insertions(+) diff --git a/config/config.default.yaml b/config/config.default.yaml index 37664ad6..dd36f1d8 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -497,6 +497,7 @@ sector: gas_distribution_grid_cost_factor: 1.0 biomass_spatial: false biomass_transport: false + biomass_upgrading_cc: false conventional_generation: OCGT: gas biomass_to_liquid: false @@ -778,6 +779,7 @@ plotting: fossil gas: '#e05b09' natural gas: '#e05b09' biogas to gas: '#e36311' + biogas to gas CC: '#e51245' CCGT: '#a85522' CCGT marginal: '#a85522' allam: '#B98F76' diff --git a/doc/configtables/sector.csv b/doc/configtables/sector.csv 
index 856ea074..890b448c 100644 --- a/doc/configtables/sector.csv +++ b/doc/configtables/sector.csv @@ -118,6 +118,7 @@ gas_distribution_grid _cost_factor,,,Multiplier for the investment cost of the g ,,, biomass_spatial,--,"{true, false}",Add option for resolving biomass demand regionally biomass_transport,--,"{true, false}",Add option for transporting solid biomass between nodes +biomass_upgrading_cc,--,"{true, false}",Add option to capture CO2 from biomass upgrading conventional_generation,,,Add a more detailed description of conventional carriers. Any power generation requires the consumption of fuel from nodes representing that fuel. biomass_to_liquid,--,"{true, false}",Add option for transforming solid biomass into liquid fuel with the same properties as oil biosng,--,"{true, false}",Add option for transforming solid biomass into synthesis gas with the same properties as natural gas diff --git a/doc/release_notes.rst b/doc/release_notes.rst index c319bce9..cfb67d77 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -46,6 +46,8 @@ Upcoming Release * The ``mock_snakemake`` function can now be used with a Snakefile from a different directory using the new ``root_dir`` argument. +* Add option to capture CO2 contained in biogas when upgrading (``sector: biogas_to_gas_cc``). + * Merged option to extend geographical scope to Ukraine and Moldova. These countries are excluded by default and is currently constrained to power-sector only parts of the workflow. 
A special config file From a10a60b95139f1cead8158607fc12cb0b5c5b069 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 18:52:22 +0100 Subject: [PATCH 070/122] rename setting from biomass_upgrading_cc to biogas_upgrading_cc --- config/config.default.yaml | 2 +- doc/configtables/sector.csv | 2 +- scripts/prepare_sector_network.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index dd36f1d8..7bfd3f01 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -497,7 +497,7 @@ sector: gas_distribution_grid_cost_factor: 1.0 biomass_spatial: false biomass_transport: false - biomass_upgrading_cc: false + biogas_upgrading_cc: false conventional_generation: OCGT: gas biomass_to_liquid: false diff --git a/doc/configtables/sector.csv b/doc/configtables/sector.csv index 890b448c..280c1906 100644 --- a/doc/configtables/sector.csv +++ b/doc/configtables/sector.csv @@ -118,7 +118,7 @@ gas_distribution_grid _cost_factor,,,Multiplier for the investment cost of the g ,,, biomass_spatial,--,"{true, false}",Add option for resolving biomass demand regionally biomass_transport,--,"{true, false}",Add option for transporting solid biomass between nodes -biomass_upgrading_cc,--,"{true, false}",Add option to capture CO2 from biomass upgrading +biogas_upgrading_cc,--,"{true, false}",Add option to capture CO2 from biomass upgrading conventional_generation,,,Add a more detailed description of conventional carriers. Any power generation requires the consumption of fuel from nodes representing that fuel. 
biomass_to_liquid,--,"{true, false}",Add option for transforming solid biomass into liquid fuel with the same properties as oil biosng,--,"{true, false}",Add option for transforming solid biomass into synthesis gas with the same properties as natural gas diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 2f4ce271..aaaf3773 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2178,7 +2178,7 @@ def add_biomass(n, costs): p_nom_extendable=True, ) - if options.get("biomass_upgrading_cc"): + if options.get("biogas_upgrading_cc"): # Assuming for costs that the CO2 from upgrading is pure, such as in amine scrubbing. I.e., with and without CC is # equivalent. Adding biomass CHP capture because biogas is often small-scale and decentral so further # from e.g. CO2 grid or buyers. This is a proxy for the added cost for e.g. a raw biogas pipeline to a central upgrading facility From 5e4a81f82896485dcaa850394449a19ec194e852 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 19:04:34 +0100 Subject: [PATCH 071/122] haber-bosch: use DECHEMA source for hydrogen input --- config/config.default.yaml | 2 +- scripts/prepare_sector_network.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index 37664ad6..97efa555 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -547,7 +547,7 @@ industry: MWh_NH3_per_tNH3: 5.166 MWh_CH4_per_tNH3_SMR: 10.8 MWh_elec_per_tNH3_SMR: 0.7 - MWh_H2_per_tNH3_electrolysis: 6.5 + MWh_H2_per_tNH3_electrolysis: 5.93 MWh_elec_per_tNH3_electrolysis: 1.17 MWh_NH3_per_MWh_H2_cracker: 1.46 # https://github.com/euronion/trace/blob/44a5ff8401762edbef80eff9cfe5a47c8d3c8be4/data/efficiencies.csv NH3_process_emissions: 24.5 diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 1dc2b3ef..442bc564 100644 --- a/scripts/prepare_sector_network.py +++ 
b/scripts/prepare_sector_network.py @@ -810,8 +810,7 @@ def add_ammonia(n, costs): p_nom_extendable=True, carrier="Haber-Bosch", efficiency=1 / MWh_elec_per_MWh_NH3, - efficiency2=-cf_industry["MWh_H2_per_tNH3_electrolysis"] - / cf_industry["MWh_elec_per_tNH3_electrolysis"], # input: MW_H2 per MW_elec + efficiency2=-costs.at["Haber-Bosch", "hydrogen-input"] / MWh_elec_per_MWh_NH3, capital_cost=costs.at["Haber-Bosch", "fixed"] / MWh_elec_per_MWh_NH3, lifetime=costs.at["Haber-Bosch", "lifetime"], ) From 438b40cdb1bcdfe484db3585364b10cf8e4faca7 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 19:09:46 +0100 Subject: [PATCH 072/122] haber-bosch: use DECHEMA source for electricity input --- config/config.default.yaml | 2 +- scripts/prepare_sector_network.py | 10 +++------- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index 97efa555..31858a5b 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -548,7 +548,7 @@ industry: MWh_CH4_per_tNH3_SMR: 10.8 MWh_elec_per_tNH3_SMR: 0.7 MWh_H2_per_tNH3_electrolysis: 5.93 - MWh_elec_per_tNH3_electrolysis: 1.17 + MWh_elec_per_tNH3_electrolysis: 0.2473 MWh_NH3_per_MWh_H2_cracker: 1.46 # https://github.com/euronion/trace/blob/44a5ff8401762edbef80eff9cfe5a47c8d3c8be4/data/efficiencies.csv NH3_process_emissions: 24.5 petrochemical_process_emissions: 25.5 diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 442bc564..d23143ff 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -796,10 +796,6 @@ def add_ammonia(n, costs): "Bus", spatial.ammonia.nodes, location=spatial.ammonia.locations, carrier="NH3" ) - MWh_elec_per_MWh_NH3 = ( - cf_industry["MWh_elec_per_tNH3_electrolysis"] / cf_industry["MWh_NH3_per_tNH3"] - ) - n.madd( "Link", nodes, @@ -809,9 +805,9 @@ def add_ammonia(n, costs): bus2=nodes + " H2", p_nom_extendable=True, carrier="Haber-Bosch", - 
efficiency=1 / MWh_elec_per_MWh_NH3, - efficiency2=-costs.at["Haber-Bosch", "hydrogen-input"] / MWh_elec_per_MWh_NH3, - capital_cost=costs.at["Haber-Bosch", "fixed"] / MWh_elec_per_MWh_NH3, + efficiency=1 / costs.at["Haber-Bosch", "electricity-input"], + efficiency2=-costs.at["Haber-Bosch", "hydrogen-input"] / costs.at["Haber-Bosch", "electricity-input"], + capital_cost=costs.at["Haber-Bosch", "fixed"] / costs.at["Haber-Bosch", "electricity-input"], lifetime=costs.at["Haber-Bosch", "lifetime"], ) From 815b8283115b427189f7fe364c6ca1070427e2d8 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 18:11:42 +0000 Subject: [PATCH 073/122] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/prepare_sector_network.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 49d26726..e9d97ade 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -808,9 +808,12 @@ def add_ammonia(n, costs): p_nom_extendable=True, carrier="Haber-Bosch", efficiency=1 / costs.at["Haber-Bosch", "electricity-input"], - efficiency2=-costs.at["Haber-Bosch", "hydrogen-input"] / costs.at["Haber-Bosch", "electricity-input"], - capital_cost=costs.at["Haber-Bosch", "fixed"] / costs.at["Haber-Bosch", "electricity-input"], - marginal_cost=costs.at["Haber-Bosch", "VOM"] / costs.at["Haber-Bosch", "electricity-input"], + efficiency2=-costs.at["Haber-Bosch", "hydrogen-input"] + / costs.at["Haber-Bosch", "electricity-input"], + capital_cost=costs.at["Haber-Bosch", "fixed"] + / costs.at["Haber-Bosch", "electricity-input"], + marginal_cost=costs.at["Haber-Bosch", "VOM"] + / costs.at["Haber-Bosch", "electricity-input"], lifetime=costs.at["Haber-Bosch", "lifetime"], ) From ebc25fbf61b469de27c3b0e69bed336d7f3b167e Mon Sep 17 00:00:00 2001 From: Fabian 
Neumann Date: Tue, 2 Jan 2024 19:12:41 +0100 Subject: [PATCH 074/122] add release note --- doc/release_notes.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index b7035974..634209c7 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -48,6 +48,8 @@ Upcoming Release * The ``mock_snakemake`` function can now be used with a Snakefile from a different directory using the new ``root_dir`` argument. +* Switch to using hydrogen and electricity inputs for Haber-Bosch from https://github.com/PyPSA/technology-data. + * Add option to capture CO2 contained in biogas when upgrading (``sector: biogas_to_gas_cc``). * Merged option to extend geographical scope to Ukraine and Moldova. These From c7790d7c60f93bac41e2bac423848efa696f1f3d Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 19:16:42 +0100 Subject: [PATCH 075/122] change default offshore turbine to NREL Reference 2020 ATB 5.5 MW --- config/config.default.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index 2a3be87b..a6c3c1d6 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -173,7 +173,7 @@ renewable: cutout: europe-2013-era5 resource: method: wind - turbine: NREL_ReferenceTurbine_5MW_offshore + turbine: NREL_ReferenceTurbine_2020ATB_5.5MW add_cutout_windspeed: true capacity_per_sqkm: 2 correction_factor: 0.8855 @@ -189,7 +189,7 @@ renewable: cutout: europe-2013-era5 resource: method: wind - turbine: NREL_ReferenceTurbine_5MW_offshore + turbine: NREL_ReferenceTurbine_2020ATB_5.5MW add_cutout_windspeed: true capacity_per_sqkm: 2 correction_factor: 0.8855 From 37df47110cee21c8dbf923462d7f60d2d414a7dd Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 13 Sep 2023 10:49:54 +0200 Subject: [PATCH 076/122] biomass_boiler: add pelletizing cost --- scripts/prepare_sector_network.py | 1 + 1 file changed, 1 insertion(+) diff --git 
a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 9c1a85d7..ae5d12df 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2333,6 +2333,7 @@ def add_biomass(n, costs): efficiency=costs.at["biomass boiler", "efficiency"], capital_cost=costs.at["biomass boiler", "efficiency"] * costs.at["biomass boiler", "fixed"], + marginal_cost=costs.at["biomass boiler", "pelletizing cost"], lifetime=costs.at["biomass boiler", "lifetime"], ) From 4988e77be5e2ae1ba9089deca3dba86a47925c62 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 19:33:02 +0100 Subject: [PATCH 077/122] add release note --- doc/release_notes.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index b7035974..782ebdee 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -46,6 +46,8 @@ Upcoming Release * Add VOM as marginal cost to PtX processes. +* Add pelletizing costs for biomass boilers. + * The ``mock_snakemake`` function can now be used with a Snakefile from a different directory using the new ``root_dir`` argument. * Add option to capture CO2 contained in biogas when upgrading (``sector: biogas_to_gas_cc``). 
From 872c92d1c047e056d0604bebd43bcf16f855d2b2 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 19:45:02 +0100 Subject: [PATCH 078/122] extended waste heat from PtX, revised minimum part loads --- config/config.default.yaml | 10 +++++--- doc/release_notes.rst | 8 +++++++ scripts/prepare_sector_network.py | 38 +++++++++++++++++++++++++++++++ 3 files changed, 53 insertions(+), 3 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index 2a3be87b..87349856 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -480,11 +480,15 @@ sector: - nearshore # within 50 km of sea # - offshore ammonia: false - min_part_load_fischer_tropsch: 0.9 - min_part_load_methanolisation: 0.5 + min_part_load_fischer_tropsch: 0.7 + min_part_load_methanolisation: 0.3 + min_part_load_methanation: 0.3 use_fischer_tropsch_waste_heat: true + use_haber_bosch_waste_heat: true + use_methanolisation_waste_heat: true + use_methanation_waste_heat: true use_fuel_cell_waste_heat: true - use_electrolysis_waste_heat: false + use_electrolysis_waste_heat: true electricity_distribution_grid: true electricity_distribution_grid_cost_factor: 1.0 electricity_grid_connection: true diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 634209c7..5e2c1a6b 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -44,6 +44,14 @@ Upcoming Release network has been moved from ``focus_weights:`` to ``clustering: focus_weights:``. Backwards compatibility to old config files is maintained. +* Extend options for waste heat usage from Haber-Bosch, methanolisation and methanation. + +* Use electrolysis waste heat by default. + +* Add new ``sector_opts`` wildcard option "nowasteheat" to disable all waste heat usage. + +* Set minimum part loads for PtX processes to 30% for methanolisation and methanation, and to 70% for Fischer-Tropsch synthesis. + +* Add VOM as marginal cost to PtX processes.
* The ``mock_snakemake`` function can now be used with a Snakefile from a different directory using the new ``root_dir`` argument. diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index e9d97ade..d797e30a 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -1345,6 +1345,7 @@ def add_storage_and_grids(n, costs): bus2=spatial.co2.nodes, p_nom_extendable=True, carrier="Sabatier", + p_min_pu=options.get("min_part_load_methanation", 0), efficiency=costs.at["methanation", "efficiency"], efficiency2=-costs.at["methanation", "efficiency"] * costs.at["gas", "CO2 intensity"], @@ -2982,6 +2983,34 @@ def add_waste_heat(n): 0.95 - n.links.loc[urban_central + " Fischer-Tropsch", "efficiency"] ) + if options["use_methanation_waste_heat"]: + n.links.loc[urban_central + " Sabatier", "bus3"] = ( + urban_central + " urban central heat" + ) + n.links.loc[urban_central + " Sabatier", "efficiency3"] = ( + 0.95 - n.links.loc[urban_central + " Sabatier", "efficiency"] + ) + + # DEA quotes 15% of total input (11% of which are high-value heat) + if options["use_haber_bosch_waste_heat"]: + n.links.loc[urban_central + " Haber-Bosch", "bus3"] = ( + urban_central + " urban central heat" + ) + total_energy_input = (cf_industry["MWh_H2_per_tNH3_electrolysis"] + cf_industry["MWh_elec_per_tNH3_electrolysis"]) / cf_industry["MWh_NH3_per_tNH3"] + electricity_input = cf_industry["MWh_elec_per_tNH3_electrolysis"] / cf_industry["MWh_NH3_per_tNH3"] + n.links.loc[urban_central + " Haber-Bosch", "efficiency3"] = ( + 0.15 * total_energy_input / electricity_input + ) + + if options["use_methanolisation_waste_heat"]: + n.links.loc[urban_central + " methanolisation", "bus4"] = ( + urban_central + " urban central heat" + ) + n.links.loc[urban_central + " methanolisation", "efficiency4"] = ( + costs.at["methanolisation", "heat-output"] + / costs.at["methanolisation", "hydrogen-input"] + ) + # TODO integrate usable waste heat efficiency into 
technology-data from DEA if options.get("use_electrolysis_waste_heat", False): n.links.loc[urban_central + " H2 Electrolysis", "bus2"] = ( @@ -3426,6 +3455,15 @@ if __name__ == "__main__": if "nodistrict" in opts: options["district_heating"]["progress"] = 0.0 + if "nowasteheat" in opts: + logger.info("Disabling waste heat.") + options["use_fischer_tropsch_waste_heat"] = False + options["use_methanolisation_waste_heat"] = False + options["use_haber_bosch_waste_heat"] = False + options["use_methanation_waste_heat"] = False + options["use_fuel_cell_waste_heat"] = False + options["use_electrolysis_waste_heat"] = False + if "T" in opts: add_land_transport(n, costs) From 777899f686b4641d9fc52c09b6c9db6a30be4e1d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 18:45:53 +0000 Subject: [PATCH 079/122] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/prepare_sector_network.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index d797e30a..c39ac9a0 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2996,8 +2996,14 @@ def add_waste_heat(n): n.links.loc[urban_central + " Haber-Bosch", "bus3"] = ( urban_central + " urban central heat" ) - total_energy_input = (cf_industry["MWh_H2_per_tNH3_electrolysis"] + cf_industry["MWh_elec_per_tNH3_electrolysis"]) / cf_industry["MWh_NH3_per_tNH3"] - electricity_input = cf_industry["MWh_elec_per_tNH3_electrolysis"] / cf_industry["MWh_NH3_per_tNH3"] + total_energy_input = ( + cf_industry["MWh_H2_per_tNH3_electrolysis"] + + cf_industry["MWh_elec_per_tNH3_electrolysis"] + ) / cf_industry["MWh_NH3_per_tNH3"] + electricity_input = ( + cf_industry["MWh_elec_per_tNH3_electrolysis"] + / cf_industry["MWh_NH3_per_tNH3"] + ) n.links.loc[urban_central + " Haber-Bosch", 
"efficiency3"] = ( 0.15 * total_energy_input / electricity_input ) From 71b27b524ead6afe24a2e065198b299389c4fda6 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 19:57:40 +0100 Subject: [PATCH 080/122] prevent failure if potential waste heat technologies not present --- scripts/prepare_sector_network.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index c39ac9a0..aa3e65fd 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2974,8 +2974,10 @@ def add_waste_heat(n): if not urban_central.empty: urban_central = urban_central.str[: -len(" urban central heat")] + link_carriers = n.links.carrier.unique() + # TODO what is the 0.95 and should it be a config option? - if options["use_fischer_tropsch_waste_heat"]: + if options["use_fischer_tropsch_waste_heat"] and "Fischer-Tropsch" in link_carriers: n.links.loc[urban_central + " Fischer-Tropsch", "bus3"] = ( urban_central + " urban central heat" ) @@ -2983,7 +2985,7 @@ def add_waste_heat(n): 0.95 - n.links.loc[urban_central + " Fischer-Tropsch", "efficiency"] ) - if options["use_methanation_waste_heat"]: + if options["use_methanation_waste_heat"] and "Sabatier" in link_carriers: n.links.loc[urban_central + " Sabatier", "bus3"] = ( urban_central + " urban central heat" ) @@ -2992,7 +2994,7 @@ def add_waste_heat(n): ) # DEA quotes 15% of total input (11% of which are high-value heat) - if options["use_haber_bosch_waste_heat"]: + if options["use_haber_bosch_waste_heat"] and "Haber-Bosch" in link_carriers: n.links.loc[urban_central + " Haber-Bosch", "bus3"] = ( urban_central + " urban central heat" ) @@ -3008,7 +3010,7 @@ def add_waste_heat(n): 0.15 * total_energy_input / electricity_input ) - if options["use_methanolisation_waste_heat"]: + if options["use_methanolisation_waste_heat"] and "methanolisation" in link_carriers: n.links.loc[urban_central + " 
methanolisation", "bus4"] = ( urban_central + " urban central heat" ) @@ -3018,7 +3020,7 @@ def add_waste_heat(n): ) # TODO integrate usable waste heat efficiency into technology-data from DEA - if options.get("use_electrolysis_waste_heat", False): + if options.get("use_electrolysis_waste_heat", False) and "H2 Electrolysis" in link_carriers: n.links.loc[urban_central + " H2 Electrolysis", "bus2"] = ( urban_central + " urban central heat" ) @@ -3026,7 +3028,7 @@ def add_waste_heat(n): 0.84 - n.links.loc[urban_central + " H2 Electrolysis", "efficiency"] ) - if options["use_fuel_cell_waste_heat"]: + if options["use_fuel_cell_waste_heat"] and "H2 Fuel Cell" in link_carriers: n.links.loc[urban_central + " H2 Fuel Cell", "bus2"] = ( urban_central + " urban central heat" ) From fb5b10780536f1c3f337f7cbba5da185876795ba Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 18:59:36 +0000 Subject: [PATCH 081/122] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/prepare_sector_network.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index aa3e65fd..4e7ef6c6 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2977,7 +2977,10 @@ def add_waste_heat(n): link_carriers = n.links.carrier.unique() # TODO what is the 0.95 and should it be a config option? 
- if options["use_fischer_tropsch_waste_heat"] and "Fischer-Tropsch" in link_carriers: + if ( + options["use_fischer_tropsch_waste_heat"] + and "Fischer-Tropsch" in link_carriers + ): n.links.loc[urban_central + " Fischer-Tropsch", "bus3"] = ( urban_central + " urban central heat" ) @@ -3010,7 +3013,10 @@ def add_waste_heat(n): 0.15 * total_energy_input / electricity_input ) - if options["use_methanolisation_waste_heat"] and "methanolisation" in link_carriers: + if ( + options["use_methanolisation_waste_heat"] + and "methanolisation" in link_carriers + ): n.links.loc[urban_central + " methanolisation", "bus4"] = ( urban_central + " urban central heat" ) @@ -3020,7 +3026,10 @@ def add_waste_heat(n): ) # TODO integrate usable waste heat efficiency into technology-data from DEA - if options.get("use_electrolysis_waste_heat", False) and "H2 Electrolysis" in link_carriers: + if ( + options.get("use_electrolysis_waste_heat", False) + and "H2 Electrolysis" in link_carriers + ): n.links.loc[urban_central + " H2 Electrolysis", "bus2"] = ( urban_central + " urban central heat" ) From fa03c61187a232c452714979515967910474f14b Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 31 Jul 2023 10:52:37 +0200 Subject: [PATCH 082/122] gas_input: switch production data from scigrid to gem --- rules/build_sector.smk | 3 +- scripts/build_gas_input_locations.py | 56 +++++++++++++++++++++------- 2 files changed, 44 insertions(+), 15 deletions(-) diff --git a/rules/build_sector.smk b/rules/build_sector.smk index dd49fc6f..1e8c70ba 100644 --- a/rules/build_sector.smk +++ b/rules/build_sector.smk @@ -85,12 +85,11 @@ if config["sector"]["gas_network"] or config["sector"]["H2_retrofit"]: rule build_gas_input_locations: input: - lng=HTTP.remote( + gem=HTTP.remote( "https://globalenergymonitor.org/wp-content/uploads/2023/07/Europe-Gas-Tracker-2023-03-v3.xlsx", keep_local=True, ), entry="data/gas_network/scigrid-gas/data/IGGIELGN_BorderPoints.geojson", - 
production="data/gas_network/scigrid-gas/data/IGGIELGN_Productions.geojson", regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", regions_offshore=RESOURCES diff --git a/scripts/build_gas_input_locations.py b/scripts/build_gas_input_locations.py index a3b945ab..07707658 100644 --- a/scripts/build_gas_input_locations.py +++ b/scripts/build_gas_input_locations.py @@ -23,11 +23,10 @@ def read_scigrid_gas(fn): return df -def build_gem_lng_data(lng_fn): - df = pd.read_excel(lng_fn[0], sheet_name="LNG terminals - data") +def build_gem_lng_data(fn): + df = pd.read_excel(fn[0], sheet_name="LNG terminals - data") df = df.set_index("ComboID") - remove_status = ["Cancelled"] remove_country = ["Cyprus", "Turkey"] remove_terminal = ["Puerto de la Luz LNG Terminal", "Gran Canaria LNG Terminal"] @@ -42,9 +41,43 @@ def build_gem_lng_data(lng_fn): return gpd.GeoDataFrame(df, geometry=geometry, crs="EPSG:4326") -def build_gas_input_locations(lng_fn, entry_fn, prod_fn, countries): +def build_gem_prod_data(fn): + df = pd.read_excel(fn[0], sheet_name="Gas extraction - main") + df = df.set_index("GEM Unit ID") + + remove_country = ["Cyprus", "Türkiye"] + remove_fuel_type = ["oil"] + + df = df.query( + "Status != 'shut in' \ + & `Fuel type` != @remove_fuel_type \ + & Country != @remove_country \ + & ~Latitude.isna() \ + & ~Longitude.isna()" + ).copy() + + p = pd.read_excel(fn[0], sheet_name="Gas extraction - production") + p = p.set_index("GEM Unit ID") + p = p[p["Fuel description"] == 'gas' ] + + capacities = pd.DataFrame(index=df.index) + for key in ["production", "production design capacity", "reserves"]: + cap = p.loc[p["Production/reserves"] == key, "Quantity (converted)"].groupby("GEM Unit ID").sum().reindex(df.index) + # assume capacity such that 3% of reserves can be extracted per year (25% quantile) + annualization_factor = 0.03 if key == "reserves" else 1.
+ capacities[key] = cap * annualization_factor + + df["mcm_per_year"] = capacities["production"] \ + .combine_first(capacities["production design capacity"]) \ + .combine_first(capacities["reserves"]) + + geometry = gpd.points_from_xy(df["Longitude"], df["Latitude"]) + return gpd.GeoDataFrame(df, geometry=geometry, crs="EPSG:4326") + + +def build_gas_input_locations(gem_fn, entry_fn, countries): # LNG terminals - lng = build_gem_lng_data(lng_fn) + lng = build_gem_lng_data(gem_fn) # Entry points from outside the model scope entry = read_scigrid_gas(entry_fn) @@ -56,16 +89,14 @@ def build_gas_input_locations(lng_fn, entry_fn, prod_fn, countries): ] # production sites inside the model scope - prod = read_scigrid_gas(prod_fn) - prod = prod.loc[ - (prod.geometry.y > 35) & (prod.geometry.x < 30) & (prod.country_code != "DE") - ] + prod = build_gem_prod_data(gem_fn) mcm_per_day_to_mw = 437.5 # MCM/day to MWh/h + mcm_per_year_to_mw = 1.199 # MCM/year to MWh/h mtpa_to_mw = 1649.224 # mtpa to MWh/h lng["p_nom"] = lng["CapacityInMtpa"] * mtpa_to_mw entry["p_nom"] = entry["max_cap_from_to_M_m3_per_d"] * mcm_per_day_to_mw - prod["p_nom"] = prod["max_supply_M_m3_per_d"] * mcm_per_day_to_mw + prod["p_nom"] = prod["mcm_per_year"] * mcm_per_year_to_mw lng["type"] = "lng" entry["type"] = "pipeline" @@ -83,7 +114,7 @@ if __name__ == "__main__": snakemake = mock_snakemake( "build_gas_input_locations", simpl="", - clusters="37", + clusters="128", ) logging.basicConfig(level=snakemake.config["logging"]["level"]) @@ -104,9 +135,8 @@ if __name__ == "__main__": countries = regions.index.str[:2].unique().str.replace("GB", "UK") gas_input_locations = build_gas_input_locations( - snakemake.input.lng, + snakemake.input.gem, snakemake.input.entry, - snakemake.input.production, countries, ) From 7c058f1ed333d41703e62d3d406d0d61a803da7d Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 31 Jul 2023 12:20:43 +0200 Subject: [PATCH 083/122] add locations, capacities and costs of existing gas 
storage --- rules/build_sector.smk | 1 + scripts/build_gas_input_locations.py | 24 ++++++++++++++++-------- scripts/prepare_sector_network.py | 17 ++++++++++++----- 3 files changed, 29 insertions(+), 13 deletions(-) diff --git a/rules/build_sector.smk b/rules/build_sector.smk index 1e8c70ba..ab8ff4ed 100644 --- a/rules/build_sector.smk +++ b/rules/build_sector.smk @@ -90,6 +90,7 @@ if config["sector"]["gas_network"] or config["sector"]["H2_retrofit"]: keep_local=True, ), entry="data/gas_network/scigrid-gas/data/IGGIELGN_BorderPoints.geojson", + storage="data/gas_network/scigrid-gas/data/IGGIELGN_Storages.geojson", regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", regions_offshore=RESOURCES diff --git a/scripts/build_gas_input_locations.py b/scripts/build_gas_input_locations.py index 07707658..ad449202 100644 --- a/scripts/build_gas_input_locations.py +++ b/scripts/build_gas_input_locations.py @@ -75,7 +75,7 @@ def build_gem_prod_data(fn): return gpd.GeoDataFrame(df, geometry=geometry, crs="EPSG:4326") -def build_gas_input_locations(gem_fn, entry_fn, countries): +def build_gas_input_locations(gem_fn, entry_fn, sto_fn, countries): # LNG terminals lng = build_gem_lng_data(gem_fn) @@ -88,23 +88,30 @@ def build_gas_input_locations(gem_fn, entry_fn, countries): | (entry.from_country == "NO") # malformed datapoint # entries from NO to GB ] + sto = read_scigrid_gas(sto_fn) + remove_country = ["RU", "UA", "TR", "BY"] + sto = sto.query("country_code != @remove_country") + # production sites inside the model scope prod = build_gem_prod_data(gem_fn) mcm_per_day_to_mw = 437.5 # MCM/day to MWh/h mcm_per_year_to_mw = 1.199 # MCM/year to MWh/h mtpa_to_mw = 1649.224 # mtpa to MWh/h - lng["p_nom"] = lng["CapacityInMtpa"] * mtpa_to_mw - entry["p_nom"] = entry["max_cap_from_to_M_m3_per_d"] * mcm_per_day_to_mw - prod["p_nom"] = prod["mcm_per_year"] * mcm_per_year_to_mw + mcm_to_gwh = 11.36 # MCM to GWh + lng["capacity"] = lng["CapacityInMtpa"] * mtpa_to_mw 
+ entry["capacity"] = entry["max_cap_from_to_M_m3_per_d"] * mcm_per_day_to_mw + prod["capacity"] = prod["mcm_per_year"] * mcm_per_year_to_mw + sto["capacity"] = sto["max_cushionGas_M_m3"] * mcm_to_gwh lng["type"] = "lng" entry["type"] = "pipeline" prod["type"] = "production" + sto["type"] = "storage" - sel = ["geometry", "p_nom", "type"] + sel = ["geometry", "capacity", "type"] - return pd.concat([prod[sel], entry[sel], lng[sel]], ignore_index=True) + return pd.concat([prod[sel], entry[sel], lng[sel], sto[sel]], ignore_index=True) if __name__ == "__main__": @@ -137,6 +144,7 @@ if __name__ == "__main__": gas_input_locations = build_gas_input_locations( snakemake.input.gem, snakemake.input.entry, + snakemake.input.storage, countries, ) @@ -147,8 +155,8 @@ if __name__ == "__main__": gas_input_nodes.to_file(snakemake.output.gas_input_nodes, driver="GeoJSON") gas_input_nodes_s = ( - gas_input_nodes.groupby(["bus", "type"])["p_nom"].sum().unstack() + gas_input_nodes.groupby(["bus", "type"])["capacity"].sum().unstack() ) - gas_input_nodes_s.columns.name = "p_nom" + gas_input_nodes_s.columns.name = "capacity" gas_input_nodes_s.to_csv(snakemake.output.gas_input_nodes_simplified) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index e9d97ade..9387d4b1 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -454,10 +454,11 @@ def add_carrier_buses(n, carrier, nodes=None): n.add("Carrier", carrier) unit = "MWh_LHV" if carrier == "gas" else "MWh_th" + # preliminary value for non-gas carriers to avoid zeros + capital_cost = costs.at["gas storage", "fixed"] if carrier == "gas" else 0.02 n.madd("Bus", nodes, location=location, carrier=carrier, unit=unit) - # capital cost could be corrected to e.g. 
0.2 EUR/kWh * annuity and O&M n.madd( "Store", nodes + " Store", @@ -465,8 +466,7 @@ def add_carrier_buses(n, carrier, nodes=None): e_nom_extendable=True, e_cyclic=True, carrier=carrier, - capital_cost=0.2 - * costs.at[carrier, "discount rate"], # preliminary value to avoid zeros + capital_cost=capital_cost, ) n.madd( @@ -1162,7 +1162,7 @@ def add_storage_and_grids(n, costs): if options["gas_network"]: logger.info( - "Add natural gas infrastructure, incl. LNG terminals, production and entry-points." + "Add natural gas infrastructure, incl. LNG terminals, production, storage and entry-points." ) if options["H2_retrofit"]: @@ -1207,10 +1207,17 @@ def add_storage_and_grids(n, costs): remove_i = n.generators[gas_i & internal_i].index n.generators.drop(remove_i, inplace=True) - p_nom = gas_input_nodes.sum(axis=1).rename(lambda x: x + " gas") + input_types = ["lng", "pipeline", "production"] + p_nom = gas_input_nodes[input_types].sum(axis=1).rename(lambda x: x + " gas") n.generators.loc[gas_i, "p_nom_extendable"] = False n.generators.loc[gas_i, "p_nom"] = p_nom + # add existing gas storage capacity + gas_i = n.stores.carrier == "gas" + e_nom = gas_input_nodes["storage"].rename(lambda x: x + " gas Store").reindex(n.stores.index).fillna(0.) 
* 1e3 # MWh_LHV + e_nom.clip(upper=e_nom.quantile(0.98), inplace=True) # limit extremely large storage + n.stores.loc[gas_i, "e_nom_min"] = e_nom + # add candidates for new gas pipelines to achieve full connectivity G = nx.Graph() From 252f6d2c15838dc17ded00271f4edc05b417bec8 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 08:13:01 +0100 Subject: [PATCH 084/122] pre-commit formatting --- .pre-commit-config.yaml | 2 +- scripts/build_gas_input_locations.py | 21 ++++++++++++++------- scripts/prepare_sector_network.py | 12 ++++++++++-- 3 files changed, 25 insertions(+), 10 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7b9009c3..78e70b57 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -50,7 +50,7 @@ repos: - id: blackdoc # Formatting with "black" coding style -- repo: https://github.com/psf/black +- repo: https://github.com/psf/black-pre-commit-mirror rev: 23.12.1 hooks: # Format Python files diff --git a/scripts/build_gas_input_locations.py b/scripts/build_gas_input_locations.py index ad449202..2f967c75 100644 --- a/scripts/build_gas_input_locations.py +++ b/scripts/build_gas_input_locations.py @@ -47,7 +47,7 @@ def build_gem_prod_data(fn): remove_country = ["Cyprus", "Türkiye"] remove_fuel_type = ["oil"] - + df = df.query( "Status != 'shut in' \ & 'Fuel type' != 'oil' \ @@ -58,18 +58,25 @@ def build_gem_prod_data(fn): p = pd.read_excel(fn[0], sheet_name="Gas extraction - production") p = p.set_index("GEM Unit ID") - p = p[p["Fuel description"] == 'gas' ] + p = p[p["Fuel description"] == "gas"] capacities = pd.DataFrame(index=df.index) for key in ["production", "production design capacity", "reserves"]: - cap = p.loc[p["Production/reserves"] == key, "Quantity (converted)"].groupby("GEM Unit ID").sum().reindex(df.index) + cap = ( + p.loc[p["Production/reserves"] == key, "Quantity (converted)"] + .groupby("GEM Unit ID") + .sum() + .reindex(df.index) + ) # assume capacity such that 3% of 
reserves can be extracted per year (25% quantile) - annualization_factor = 0.03 if key == "reserves" else 1. + annualization_factor = 0.03 if key == "reserves" else 1.0 capacities[key] = cap * annualization_factor - df["mcm_per_year"] = capacities["production"] \ - .combine_first(capacities["production design capacity"]) \ + df["mcm_per_year"] = ( + capacities["production"] + .combine_first(capacities["production design capacity"]) .combine_first(capacities["reserves"]) + ) geometry = gpd.points_from_xy(df["Longitude"], df["Latitude"]) return gpd.GeoDataFrame(df, geometry=geometry, crs="EPSG:4326") @@ -88,7 +95,7 @@ def build_gas_input_locations(gem_fn, entry_fn, sto_fn, countries): | (entry.from_country == "NO") # malformed datapoint # entries from NO to GB ] - sto = read_scigrid_gas(sto_fn) + sto = read_scigrid_gas(sto_fn) remove_country = ["RU", "UA", "TR", "BY"] sto = sto.query("country_code != @remove_country") diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 9387d4b1..d5c979fa 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -1214,8 +1214,16 @@ def add_storage_and_grids(n, costs): # add existing gas storage capacity gas_i = n.stores.carrier == "gas" - e_nom = gas_input_nodes["storage"].rename(lambda x: x + " gas Store").reindex(n.stores.index).fillna(0.) 
* 1e3 # MWh_LHV - e_nom.clip(upper=e_nom.quantile(0.98), inplace=True) # limit extremely large storage + e_nom = ( + gas_input_nodes["storage"] + .rename(lambda x: x + " gas Store") + .reindex(n.stores.index) + .fillna(0.0) + * 1e3 + ) # MWh_LHV + e_nom.clip( + upper=e_nom.quantile(0.98), inplace=True + ) # limit extremely large storage n.stores.loc[gas_i, "e_nom_min"] = e_nom # add candidates for new gas pipelines to achieve full connectivity From 4983a2e02178dfe501358ce24636f877ecd4f478 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 08:19:52 +0100 Subject: [PATCH 085/122] add release note --- doc/release_notes.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 634209c7..36823791 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -65,6 +65,9 @@ Upcoming Release * Validate downloads from Zenodo using MD5 checksums. This identifies corrupted or incomplete downloads. +* Add locations, capacities and costs of existing gas storage using Global + Energy Monitor's `Europe Gas Tracker + `_. 
**Bugs and Compatibility** From 19b503d7580faf75dba539923d255c37f4038fd7 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Fri, 11 Aug 2023 12:07:03 +0200 Subject: [PATCH 086/122] retrieve.smk: add scigrid storages to files of interest --- rules/retrieve.smk | 1 + 1 file changed, 1 insertion(+) diff --git a/rules/retrieve.smk b/rules/retrieve.smk index 4c9ca814..4ded2a46 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -169,6 +169,7 @@ if config["enable"]["retrieve"] and ( "IGGIELGN_LNGs.geojson", "IGGIELGN_BorderPoints.geojson", "IGGIELGN_Productions.geojson", + "IGGIELGN_Storages.geojson", "IGGIELGN_PipeSegments.geojson", ] From 0d03d384cc0ce27e681b76d14418b6d1b5cf9d1c Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 09:07:08 +0100 Subject: [PATCH 087/122] lossy_bidirectional_links: use original length for loss calculation --- scripts/prepare_sector_network.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 998f954e..09de541a 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3309,15 +3309,16 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): n.links.loc[carrier_i, "length"] / 1e3 ) rev_links = ( - n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) + n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0", "length": "length_original"}, axis=1) ) - rev_links.capital_cost = 0 - rev_links.length = 0 + rev_links["capital_cost"] = 0 + rev_links["length"] = 0 rev_links["reversed"] = True rev_links.index = rev_links.index.map(lambda x: x + "-reversed") n.links = pd.concat([n.links, rev_links], sort=False) n.links["reversed"] = n.links["reversed"].fillna(False) + n.links["length_original"] = n.links["length_original"].fillna(n.links.length) # do compression losses after concatenation to take electricity consumption at bus0 in either direction 
carrier_i = n.links.query("carrier == @carrier").index @@ -3326,7 +3327,7 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): n.buses.location ) # electricity n.links.loc[carrier_i, "efficiency2"] = ( - -compression_per_1000km * n.links.loc[carrier_i, "length"] / 1e3 + -compression_per_1000km * n.links.loc[carrier_i, "length_original"] / 1e3 ) From 2b2bad392f6c83771472d93ca2df597608ea6b26 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 08:08:21 +0000 Subject: [PATCH 088/122] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/prepare_sector_network.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 09de541a..bab8de7b 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3309,7 +3309,9 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): n.links.loc[carrier_i, "length"] / 1e3 ) rev_links = ( - n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0", "length": "length_original"}, axis=1) + n.links.loc[carrier_i] + .copy() + .rename({"bus0": "bus1", "bus1": "bus0", "length": "length_original"}, axis=1) ) rev_links["capital_cost"] = 0 rev_links["length"] = 0 From 075ffb5c043edf16b1a9b69c4be3ed31da7919b4 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 09:26:08 +0100 Subject: [PATCH 089/122] add release notes and documentation --- doc/release_notes.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 505c747e..82f63252 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -10,6 +10,13 @@ Release Notes Upcoming Release ================ +* Add option to specify losses for bidirectional links, e.g. 
pipelines or HVDC + links, in configuration file under ``sector: transmission_efficiency:``. Users + can specify static or length-dependent values as well as a length-dependent + electricity demand for compression, which is implemented as a multi-link to + the local electricity buses. The bidirectional links will then be split into + two unidirectional links with linked capacities. + * Updated Global Energy Monitor LNG terminal data to March 2023 version. * For industry distribution, use EPRTR as fallback if ETS data is not available. From d829d6fd3da28cc7103648132b07726deda1b9c8 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 09:28:24 +0100 Subject: [PATCH 090/122] add release notes and documentation --- doc/configtables/sector.csv | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/doc/configtables/sector.csv b/doc/configtables/sector.csv index d610c862..2767c603 100644 --- a/doc/configtables/sector.csv +++ b/doc/configtables/sector.csv @@ -107,6 +107,11 @@ electricity_distribution _grid,--,"{true, false}",Add a simplified representatio electricity_distribution _grid_cost_factor,,,Multiplies the investment cost of the electricity distribution grid ,,, electricity_grid _connection,--,"{true, false}",Add the cost of electricity grid connection for onshore wind and solar +transmission_efficiency,,,Section to specify transmission losses or compression energy demands of bidirectional links. Splits them into two capacity-linked unidirectional links. +-- {carrier},--,str,The carrier of the link. +-- -- efficiency_static,p.u.,float,Length-independent transmission efficiency. +-- -- efficiency_per_1000km,p.u. per 1000 km,float,Length-dependent transmission efficiency ($\eta^{\text{length}}$) +-- -- compression_per_1000km,p.u. per 1000 km,float,Length-dependent electricity demand for compression ($\eta \cdot \text{length}$) implemented as multi-link to local electricity bus. 
H2_network,--,"{true, false}",Add option for new hydrogen pipelines gas_network,--,"{true, false}","Add existing natural gas infrastructure, incl. LNG terminals, production and entry-points. The existing gas network is added with a lossless transport model. A length-weighted `k-edge augmentation algorithm `_ can be run to add new candidate gas pipelines such that all regions of the model can be connected to the gas network. When activated, all the gas demands are regionally disaggregated as well." H2_retrofit,--,"{true, false}",Add option for retrofiting existing pipelines to transport hydrogen. From 9d939fa635f8a0b55f7049dd23a29facfeda1471 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 10:12:43 +0100 Subject: [PATCH 091/122] remove helmeth option --- config/config.default.yaml | 2 -- doc/configtables/sector.csv | 1 - doc/release_notes.rst | 2 ++ scripts/plot_network.py | 4 ++-- scripts/plot_summary.py | 1 - scripts/prepare_sector_network.py | 17 ----------------- 6 files changed, 4 insertions(+), 23 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index d7704a27..e8ca22dc 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -451,7 +451,6 @@ sector: solar_cf_correction: 0.788457 # = >>> 1/1.2683 marginal_cost_storage: 0. 
#1e-4 methanation: true - helmeth: false coal_cc: false dac: true co2_vent: false @@ -954,7 +953,6 @@ plotting: Sabatier: '#9850ad' methanation: '#c44ce6' methane: '#c44ce6' - helmeth: '#e899ff' # synfuels Fischer-Tropsch: '#25c49a' liquid: '#25c49a' diff --git a/doc/configtables/sector.csv b/doc/configtables/sector.csv index 280c1906..57e6ce3d 100644 --- a/doc/configtables/sector.csv +++ b/doc/configtables/sector.csv @@ -71,7 +71,6 @@ solar_thermal,--,"{true, false}",Add option for using solar thermal to generate solar_cf_correction,--,float,The correction factor for the value provided by the solar thermal profile calculations marginal_cost_storage,currency/MWh ,float,The marginal cost of discharging batteries in distributed grids methanation,--,"{true, false}",Add option for transforming hydrogen and CO2 into methane using methanation. -helmeth,--,"{true, false}",Add option for transforming power into gas using HELMETH (Integrated High-Temperature ELectrolysis and METHanation for Effective Power to Gas Conversion) coal_cc,--,"{true, false}",Add option for coal CHPs with carbon capture dac,--,"{true, false}",Add option for Direct Air Capture (DAC) co2_vent,--,"{true, false}",Add option for vent out CO2 from storages to the atmosphere. diff --git a/doc/release_notes.rst b/doc/release_notes.rst index f84c0f83..5ac7925e 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -79,6 +79,8 @@ Upcoming Release Energy Monitor's `Europe Gas Tracker `_. +* Remove HELMETH option. + **Bugs and Compatibility** * A bug preventing custom powerplants specified in ``data/custom_powerplants.csv`` was fixed. 
(https://github.com/PyPSA/pypsa-eur/pull/732) diff --git a/scripts/plot_network.py b/scripts/plot_network.py index f44bb6de..67481120 100644 --- a/scripts/plot_network.py +++ b/scripts/plot_network.py @@ -31,7 +31,7 @@ def rename_techs_tyndp(tech): tech = rename_techs(tech) if "heat pump" in tech or "resistive heater" in tech: return "power-to-heat" - elif tech in ["H2 Electrolysis", "methanation", "helmeth", "H2 liquefaction"]: + elif tech in ["H2 Electrolysis", "methanation", "H2 liquefaction"]: return "power-to-gas" elif tech == "H2": return "H2 storage" @@ -495,7 +495,7 @@ def plot_ch4_map(network): # make a fake MultiIndex so that area is correct for legend fossil_gas.index = pd.MultiIndex.from_product([fossil_gas.index, ["fossil gas"]]) - methanation_i = n.links[n.links.carrier.isin(["helmeth", "Sabatier"])].index + methanation_i = n.links.query("carrier == 'Sabatier'").index methanation = ( abs( n.links_t.p1.loc[:, methanation_i].mul( diff --git a/scripts/plot_summary.py b/scripts/plot_summary.py index 5804e785..67ac9b55 100644 --- a/scripts/plot_summary.py +++ b/scripts/plot_summary.py @@ -121,7 +121,6 @@ preferred_order = pd.Index( "gas boiler", "gas", "natural gas", - "helmeth", "methanation", "ammonia", "hydrogen storage", diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index c4a67a38..f746fe9c 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -1369,23 +1369,6 @@ def add_storage_and_grids(n, costs): lifetime=costs.at["methanation", "lifetime"], ) - if options["helmeth"]: - n.madd( - "Link", - spatial.nodes, - suffix=" helmeth", - bus0=nodes, - bus1=spatial.gas.nodes, - bus2=spatial.co2.nodes, - carrier="helmeth", - p_nom_extendable=True, - efficiency=costs.at["helmeth", "efficiency"], - efficiency2=-costs.at["helmeth", "efficiency"] - * costs.at["gas", "CO2 intensity"], - capital_cost=costs.at["helmeth", "fixed"], - lifetime=costs.at["helmeth", "lifetime"], - ) - if 
options.get("coal_cc"): n.madd( "Link", From 92df7bbb9c786667364f7358f5ee90caad87ec1d Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 10:27:42 +0100 Subject: [PATCH 092/122] build_renewable_profiles: improve logging of time passed --- scripts/build_renewable_profiles.py | 29 +++++++++++++++++++++-------- 1 file changed, 21 insertions(+), 8 deletions(-) diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index 3a1c525e..ef8683cb 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -277,15 +277,14 @@ if __name__ == "__main__": snakemake.input.country_shapes, buffer=buffer, invert=True ) + logger.info("Calculate landuse availability...") + start = time.time() + kwargs = dict(nprocesses=nprocesses, disable_progressbar=noprogress) - if noprogress: - logger.info("Calculate landuse availabilities...") - start = time.time() - availability = cutout.availabilitymatrix(regions, excluder, **kwargs) - duration = time.time() - start - logger.info(f"Completed availability calculation ({duration:2.2f}s)") - else: - availability = cutout.availabilitymatrix(regions, excluder, **kwargs) + availability = cutout.availabilitymatrix(regions, excluder, **kwargs) + + duration = time.time() - start + logger.info(f"Completed landuse availability calculation ({duration:2.2f}s)") # For Moldova and Ukraine: Overwrite parts not covered by Corine with # externally determined available areas @@ -304,8 +303,19 @@ if __name__ == "__main__": func = getattr(cutout, resource.pop("method")) if client is not None: resource["dask_kwargs"] = {"scheduler": client} + + logger.info("Calculate average capacity factor...") + start = time.time() + capacity_factor = correction_factor * func(capacity_factor=True, **resource) layout = capacity_factor * area * capacity_per_sqkm + + duration = time.time() - start + logger.info(f"Completed average capacity factor calculation ({duration:2.2f}s)") + + logger.info("Calculate 
weighted capacity factor time series...") + start = time.time() + profile, capacities = func( matrix=availability.stack(spatial=["y", "x"]), layout=layout, @@ -315,6 +325,9 @@ if __name__ == "__main__": **resource, ) + duration = time.time() - start + logger.info(f"Completed weighted capacity factor time series calculation ({duration:2.2f}s)") + logger.info(f"Calculating maximal capacity per bus (method '{p_nom_max_meth}')") if p_nom_max_meth == "simple": p_nom_max = capacity_per_sqkm * availability @ area From fdb63bc6ca4c3aa332104d26bca1c0a5d5c546c1 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 09:29:08 +0000 Subject: [PATCH 093/122] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/build_renewable_profiles.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index ef8683cb..83c79482 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -326,7 +326,9 @@ if __name__ == "__main__": ) duration = time.time() - start - logger.info(f"Completed weighted capacity factor time series calculation ({duration:2.2f}s)") + logger.info( + f"Completed weighted capacity factor time series calculation ({duration:2.2f}s)" + ) logger.info(f"Calculating maximal capacity per bus (method '{p_nom_max_meth}')") if p_nom_max_meth == "simple": From 6b344c9901f7aa78d8714ad00cb9626b2773cb37 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 10:33:33 +0100 Subject: [PATCH 094/122] renewable_profiles: remove conservative potential estimation method --- config/config.default.yaml | 4 -- doc/configtables/offwind-ac.csv | 1 - doc/configtables/offwind-dc.csv | 1 - doc/configtables/onwind.csv | 1 - doc/configtables/solar.csv | 1 - scripts/build_renewable_profiles.py | 98 ++++++++++++----------------- 6 files 
changed, 40 insertions(+), 66 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index d7704a27..dc818e84 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -167,7 +167,6 @@ renewable: distance_grid_codes: [1, 2, 3, 4, 5, 6] natura: true excluder_resolution: 100 - potential: simple # or conservative clip_p_max_pu: 1.e-2 offwind-ac: cutout: europe-2013-era5 @@ -183,7 +182,6 @@ renewable: max_depth: 50 max_shore_distance: 30000 excluder_resolution: 200 - potential: simple # or conservative clip_p_max_pu: 1.e-2 offwind-dc: cutout: europe-2013-era5 @@ -199,7 +197,6 @@ renewable: max_depth: 50 min_shore_distance: 30000 excluder_resolution: 200 - potential: simple # or conservative clip_p_max_pu: 1.e-2 solar: cutout: europe-2013-sarah @@ -214,7 +211,6 @@ renewable: corine: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 26, 31, 32] natura: true excluder_resolution: 100 - potential: simple # or conservative clip_p_max_pu: 1.e-2 hydro: cutout: europe-2013-era5 diff --git a/doc/configtables/offwind-ac.csv b/doc/configtables/offwind-ac.csv index 6b756799..c3512a9e 100644 --- a/doc/configtables/offwind-ac.csv +++ b/doc/configtables/offwind-ac.csv @@ -12,5 +12,4 @@ ship_threshold,--,float,"Ship density threshold from which areas are excluded." max_depth,m,float,"Maximum sea water depth at which wind turbines can be build. Maritime areas with deeper waters are excluded in the process of calculating the AC-connected offshore wind potential." min_shore_distance,m,float,"Minimum distance to the shore below which wind turbines cannot be build. Such areas close to the shore are excluded in the process of calculating the AC-connected offshore wind potential." max_shore_distance,m,float,"Maximum distance to the shore above which wind turbines cannot be build. Such areas close to the shore are excluded in the process of calculating the AC-connected offshore wind potential." 
-potential,--,"One of {'simple', 'conservative'}","Method to compute the maximal installable potential for a node; confer :ref:`renewableprofiles`" clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero." diff --git a/doc/configtables/offwind-dc.csv b/doc/configtables/offwind-dc.csv index 1f72228a..35095597 100644 --- a/doc/configtables/offwind-dc.csv +++ b/doc/configtables/offwind-dc.csv @@ -12,5 +12,4 @@ ship_threshold,--,float,"Ship density threshold from which areas are excluded." max_depth,m,float,"Maximum sea water depth at which wind turbines can be build. Maritime areas with deeper waters are excluded in the process of calculating the AC-connected offshore wind potential." min_shore_distance,m,float,"Minimum distance to the shore below which wind turbines cannot be build." max_shore_distance,m,float,"Maximum distance to the shore above which wind turbines cannot be build." -potential,--,"One of {'simple', 'conservative'}","Method to compute the maximal installable potential for a node; confer :ref:`renewableprofiles`" clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero." diff --git a/doc/configtables/onwind.csv b/doc/configtables/onwind.csv index ba9482e5..b7e823b3 100644 --- a/doc/configtables/onwind.csv +++ b/doc/configtables/onwind.csv @@ -9,7 +9,6 @@ corine,,, -- distance,m,float,"Distance to keep from areas specified in ``distance_grid_codes``" -- distance_grid_codes,--,"Any subset of the `CORINE Land Cover code list `_","Specifies areas according to CORINE Land Cover codes to which wind turbines must maintain a distance specified in the setting ``distance``." natura,bool,"{true, false}","Switch to exclude `Natura 2000 `_ natural protection areas. Area is excluded if ``true``." 
-potential,--,"One of {'simple', 'conservative'}","Method to compute the maximal installable potential for a node; confer :ref:`renewableprofiles`" clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero." correction_factor,--,float,"Correction factor for capacity factor time series." excluder_resolution,m,float,"Resolution on which to perform geographical elibility analysis." diff --git a/doc/configtables/solar.csv b/doc/configtables/solar.csv index 803445d5..7da1281b 100644 --- a/doc/configtables/solar.csv +++ b/doc/configtables/solar.csv @@ -10,6 +10,5 @@ capacity_per_sqkm,:math:`MW/km^2`,float,"Allowable density of solar panel placem correction_factor,--,float,"A correction factor for the capacity factor (availability) time series." corine,--,"Any subset of the `CORINE Land Cover code list `_","Specifies areas according to CORINE Land Cover codes which are generally eligible for solar panel placement." natura,bool,"{true, false}","Switch to exclude `Natura 2000 `_ natural protection areas. Area is excluded if ``true``." -potential,--,"One of {'simple', 'conservative'}","Method to compute the maximal installable potential for a node; confer :ref:`renewableprofiles`" clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero." excluder_resolution,m,float,"Resolution on which to perform geographical elibility analysis." 
diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index 3a1c525e..c33bdf9b 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -7,10 +7,10 @@ """ Calculates for each network node the (i) installable capacity (based on land- use), (ii) the available generation time series (based on weather data), and -(iii) the average distance from the node for onshore wind, AC-connected -offshore wind, DC-connected offshore wind and solar PV generators. In addition -for offshore wind it calculates the fraction of the grid connection which is -under water. +(iii) the average distance from the node for onshore wind, AC-connected offshore +wind, DC-connected offshore wind and solar PV generators. In addition for +offshore wind it calculates the fraction of the grid connection which is under +water. .. note:: Hydroelectric profiles are built in script :mod:`build_hydro_profiles`. @@ -26,20 +26,9 @@ Relevant settings renewable: {technology}: - cutout: - corine: - grid_codes: - distance: - natura: - max_depth: - max_shore_distance: - min_shore_distance: - capacity_per_sqkm: - correction_factor: - potential: - min_p_max_pu: - clip_p_max_pu: - resource: + cutout: corine: grid_codes: distance: natura: max_depth: + max_shore_distance: min_shore_distance: capacity_per_sqkm: + correction_factor: min_p_max_pu: clip_p_max_pu: resource: .. seealso:: Documentation of the configuration file ``config/config.yaml`` at @@ -48,21 +37,30 @@ Relevant settings Inputs ------ -- ``data/bundle/corine/g250_clc06_V18_5.tif``: `CORINE Land Cover (CLC) `_ inventory on `44 classes `_ of land use (e.g. forests, arable land, industrial, urban areas). +- ``data/bundle/corine/g250_clc06_V18_5.tif``: `CORINE Land Cover (CLC) + `_ inventory on `44 + classes `_ of + land use (e.g. forests, arable land, industrial, urban areas). .. 
image:: img/corine.png :scale: 33 % -- ``data/bundle/GEBCO_2014_2D.nc``: A `bathymetric `_ data set with a global terrain model for ocean and land at 15 arc-second intervals by the `General Bathymetric Chart of the Oceans (GEBCO) `_. +- ``data/bundle/GEBCO_2014_2D.nc``: A `bathymetric + `_ data set with a global terrain + model for ocean and land at 15 arc-second intervals by the `General + Bathymetric Chart of the Oceans (GEBCO) + `_. .. image:: img/gebco_2019_grid_image.jpg :scale: 50 % - **Source:** `GEBCO `_ + **Source:** `GEBCO + `_ - ``resources/natura.tiff``: confer :ref:`natura` - ``resources/offshore_shapes.geojson``: confer :ref:`shapes` -- ``resources/regions_onshore.geojson``: (if not offshore wind), confer :ref:`busregions` +- ``resources/regions_onshore.geojson``: (if not offshore wind), confer + :ref:`busregions` - ``resources/regions_offshore.geojson``: (if offshore wind), :ref:`busregions` - ``"cutouts/" + params["renewable"][{technology}]['cutout']``: :ref:`cutout` - ``networks/base.nc``: :ref:`base` @@ -128,25 +126,25 @@ Description This script functions at two main spatial resolutions: the resolution of the network nodes and their `Voronoi cells `_, and the resolution of the -cutout grid cells for the weather data. Typically the weather data grid is -finer than the network nodes, so we have to work out the distribution of -generators across the grid cells within each Voronoi cell. This is done by -taking account of a combination of the available land at each grid cell and the -capacity factor there. +cutout grid cells for the weather data. Typically the weather data grid is finer +than the network nodes, so we have to work out the distribution of generators +across the grid cells within each Voronoi cell. This is done by taking account +of a combination of the available land at each grid cell and the capacity factor +there. 
First the script computes how much of the technology can be installed at each cutout grid cell and each node using the `GLAES -`_ library. This uses the CORINE land use data, -Natura2000 nature reserves and GEBCO bathymetry data. +`_ library. This uses the CORINE land use +data, Natura2000 nature reserves and GEBCO bathymetry data. .. image:: img/eligibility.png :scale: 50 % :align: center -To compute the layout of generators in each node's Voronoi cell, the -installable potential in each grid cell is multiplied with the capacity factor -at each grid cell. This is done since we assume more generators are installed -at cells with a higher capacity factor. +To compute the layout of generators in each node's Voronoi cell, the installable +potential in each grid cell is multiplied with the capacity factor at each grid +cell. This is done since we assume more generators are installed at cells with a +higher capacity factor. .. image:: img/offwinddc-gridcell.png :scale: 50 % @@ -164,20 +162,14 @@ at cells with a higher capacity factor. :scale: 50 % :align: center -This layout is then used to compute the generation availability time series -from the weather data cutout from ``atlite``. +This layout is then used to compute the generation availability time series from +the weather data cutout from ``atlite``. -Two methods are available to compute the maximal installable potential for the -node (`p_nom_max`): ``simple`` and ``conservative``: - -- ``simple`` adds up the installable potentials of the individual grid cells. - If the model comes close to this limit, then the time series may slightly - overestimate production since it is assumed the geographical distribution is - proportional to capacity factor. - -- ``conservative`` assertains the nodal limit by increasing capacities - proportional to the layout until the limit of an individual grid cell is - reached. 
+The maximal installable potential for the node (`p_nom_max`) is computed by +adding up the installable potentials of the individual grid cells. +If the model comes close to this limit, then the time series may slightly +overestimate production since it is assumed the geographical distribution is +proportional to capacity factor. """ import functools import logging @@ -210,7 +202,6 @@ if __name__ == "__main__": resource = params["resource"] # pv panel params / wind turbine params correction_factor = params.get("correction_factor", 1.0) capacity_per_sqkm = params["capacity_per_sqkm"] - p_nom_max_meth = params.get("potential", "conservative") if isinstance(params.get("corine", {}), list): params["corine"] = {"grid_codes": params["corine"]} @@ -315,17 +306,8 @@ if __name__ == "__main__": **resource, ) - logger.info(f"Calculating maximal capacity per bus (method '{p_nom_max_meth}')") - if p_nom_max_meth == "simple": - p_nom_max = capacity_per_sqkm * availability @ area - elif p_nom_max_meth == "conservative": - max_cap_factor = capacity_factor.where(availability != 0).max(["x", "y"]) - p_nom_max = capacities / max_cap_factor - else: - raise AssertionError( - 'Config key `potential` should be one of "simple" ' - f'(default) or "conservative", not "{p_nom_max_meth}"' - ) + logger.info(f"Calculating maximal capacity per bus") + p_nom_max = capacity_per_sqkm * availability @ area logger.info("Calculate average distances.") layoutmatrix = (layout * availability).stack(spatial=["y", "x"]) From 38d587944b8625cfb208f6cc0c5046b1a3ee97d6 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 09:35:05 +0000 Subject: [PATCH 095/122] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/build_renewable_profiles.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/build_renewable_profiles.py 
b/scripts/build_renewable_profiles.py index c33bdf9b..0ad840ba 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -7,10 +7,10 @@ """ Calculates for each network node the (i) installable capacity (based on land- use), (ii) the available generation time series (based on weather data), and -(iii) the average distance from the node for onshore wind, AC-connected offshore -wind, DC-connected offshore wind and solar PV generators. In addition for -offshore wind it calculates the fraction of the grid connection which is under -water. +(iii) the average distance from the node for onshore wind, AC-connected +offshore wind, DC-connected offshore wind and solar PV generators. In addition +for offshore wind it calculates the fraction of the grid connection which is +under water. .. note:: Hydroelectric profiles are built in script :mod:`build_hydro_profiles`. From e423945e7d709d1cb59d85caab4b306bd752c045 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 10:54:53 +0100 Subject: [PATCH 096/122] gas_input: ensure all columns exist even if column empty --- scripts/build_gas_input_locations.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/scripts/build_gas_input_locations.py b/scripts/build_gas_input_locations.py index 2f967c75..9ad3760d 100644 --- a/scripts/build_gas_input_locations.py +++ b/scripts/build_gas_input_locations.py @@ -161,8 +161,12 @@ if __name__ == "__main__": gas_input_nodes.to_file(snakemake.output.gas_input_nodes, driver="GeoJSON") + ensure_columns = ["lng", "pipeline", "production", "storage"] gas_input_nodes_s = ( - gas_input_nodes.groupby(["bus", "type"])["capacity"].sum().unstack() + gas_input_nodes.groupby(["bus", "type"])["capacity"] + .sum() + .unstack() + .reindex(columns=ensure_columns) ) gas_input_nodes_s.columns.name = "capacity" From 29afffb4ca1b8480d88580769960b9536c17ef26 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 11:31:56 +0100 Subject: 
[PATCH 097/122] fix potential duplicate renaming of length_original --- scripts/prepare_sector_network.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 2ba64e87..54d5d7c8 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3441,8 +3441,9 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): rev_links = ( n.links.loc[carrier_i] .copy() - .rename({"bus0": "bus1", "bus1": "bus0", "length": "length_original"}, axis=1) + .rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) ) + rev_links["length_original"] = rev_links["length"] rev_links["capital_cost"] = 0 rev_links["length"] = 0 rev_links["reversed"] = True From 4606cb131b292c02e95b2af3583e7df48561fcb9 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 10:44:14 +0000 Subject: [PATCH 098/122] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/prepare_sector_network.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 54d5d7c8..815bf6ff 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3439,9 +3439,7 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): n.links.loc[carrier_i, "length"] / 1e3 ) rev_links = ( - n.links.loc[carrier_i] - .copy() - .rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) + n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) ) rev_links["length_original"] = rev_links["length"] rev_links["capital_cost"] = 0 From 05495ce48413d2aee4c351da29b230cd62add824 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 12:46:42 +0100 Subject: [PATCH 099/122] fix lossy bidirectional link coupling constraint for myopic --- scripts/solve_network.py | 17
++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index a2125895..0bfc68ff 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -691,13 +691,24 @@ def add_lossy_bidirectional_link_constraints(n): if not n.links.p_nom_extendable.any() or not "reversed" in n.links.columns: return - reversed_links = n.links.reversed.fillna(0).astype(bool) - carriers = n.links.loc[reversed_links, "carrier"].unique() + n.links["reversed"] = n.links.reversed.fillna(0).astype(bool) + carriers = n.links.loc[n.links.reversed, "carrier"].unique() forward_i = n.links.query( "carrier in @carriers and ~reversed and p_nom_extendable" ).index - backward_i = forward_i + "-reversed" + + def get_backward_i(forward_i): + return pd.Index( + [ + re.sub(r"-(\d{4})$", r"-reversed-\1", s) + if re.search(r"-\d{4}$", s) + else s + "-reversed" + for s in forward_i + ] + ) + + backward_i = get_backward_i(forward_i) lhs = n.model["Link-p_nom"].loc[backward_i] rhs = n.model["Link-p_nom"].loc[forward_i] From 80f9259bac4742b0f819ddc6542da458a7690874 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 12:57:22 +0100 Subject: [PATCH 100/122] handle gas pipeline retrofitting with lossy links --- scripts/solve_network.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 0bfc68ff..98afd49d 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -774,9 +774,13 @@ def add_pipe_retrofit_constraint(n): """ Add constraint for retrofitting existing CH4 pipelines to H2 pipelines. 
""" - gas_pipes_i = n.links.query("carrier == 'gas pipeline' and p_nom_extendable").index + if "reversed" not in n.links.columns: + n.links["reversed"] = False + gas_pipes_i = n.links.query( + "carrier == 'gas pipeline' and p_nom_extendable and ~reversed" + ).index h2_retrofitted_i = n.links.query( - "carrier == 'H2 pipeline retrofitted' and p_nom_extendable" + "carrier == 'H2 pipeline retrofitted' and p_nom_extendable and ~reversed" ).index if h2_retrofitted_i.empty or gas_pipes_i.empty: From a3cfc8cde51a87ed2fe0babdd4f5c5b42cf993be Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 13:05:46 +0100 Subject: [PATCH 101/122] add heat vent to tech_colors --- config/config.default.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/config/config.default.yaml b/config/config.default.yaml index d7704a27..b9fb76f4 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -885,6 +885,7 @@ plotting: # heat demand Heat load: '#cc1f1f' heat: '#cc1f1f' + heat vent: '#aa3344' heat demand: '#cc1f1f' rural heat: '#ff5c5c' residential rural heat: '#ff7c7c' From bcafbb1e5459ac90eb3fbb65f9b3da22149a2f7a Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 13:15:43 +0100 Subject: [PATCH 102/122] compatibility for config with single node in single country --- scripts/build_clustered_population_layouts.py | 1 - scripts/build_heat_demand.py | 1 - scripts/build_solar_thermal_profiles.py | 1 - scripts/build_temperature_profiles.py | 1 - 4 files changed, 4 deletions(-) diff --git a/scripts/build_clustered_population_layouts.py b/scripts/build_clustered_population_layouts.py index 083f3de4..73972d3d 100644 --- a/scripts/build_clustered_population_layouts.py +++ b/scripts/build_clustered_population_layouts.py @@ -28,7 +28,6 @@ if __name__ == "__main__": gpd.read_file(snakemake.input.regions_onshore) .set_index("name") .buffer(0) - .squeeze() ) I = cutout.indicatormatrix(clustered_regions) diff --git a/scripts/build_heat_demand.py 
b/scripts/build_heat_demand.py index 73494260..da7c476e 100644 --- a/scripts/build_heat_demand.py +++ b/scripts/build_heat_demand.py @@ -34,7 +34,6 @@ if __name__ == "__main__": gpd.read_file(snakemake.input.regions_onshore) .set_index("name") .buffer(0) - .squeeze() ) I = cutout.indicatormatrix(clustered_regions) diff --git a/scripts/build_solar_thermal_profiles.py b/scripts/build_solar_thermal_profiles.py index d285691a..4e7a6cd4 100644 --- a/scripts/build_solar_thermal_profiles.py +++ b/scripts/build_solar_thermal_profiles.py @@ -36,7 +36,6 @@ if __name__ == "__main__": gpd.read_file(snakemake.input.regions_onshore) .set_index("name") .buffer(0) - .squeeze() ) I = cutout.indicatormatrix(clustered_regions) diff --git a/scripts/build_temperature_profiles.py b/scripts/build_temperature_profiles.py index 9db37c25..d8eaadce 100644 --- a/scripts/build_temperature_profiles.py +++ b/scripts/build_temperature_profiles.py @@ -34,7 +34,6 @@ if __name__ == "__main__": gpd.read_file(snakemake.input.regions_onshore) .set_index("name") .buffer(0) - .squeeze() ) I = cutout.indicatormatrix(clustered_regions) From d7051e7f66eb3bdbe0f790ea4513cbf01133b09a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 12:16:43 +0000 Subject: [PATCH 103/122] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/build_clustered_population_layouts.py | 4 +--- scripts/build_heat_demand.py | 4 +--- scripts/build_solar_thermal_profiles.py | 4 +--- scripts/build_temperature_profiles.py | 4 +--- 4 files changed, 4 insertions(+), 12 deletions(-) diff --git a/scripts/build_clustered_population_layouts.py b/scripts/build_clustered_population_layouts.py index 73972d3d..2f237656 100644 --- a/scripts/build_clustered_population_layouts.py +++ b/scripts/build_clustered_population_layouts.py @@ -25,9 +25,7 @@ if __name__ == "__main__": cutout = 
atlite.Cutout(snakemake.input.cutout) clustered_regions = ( - gpd.read_file(snakemake.input.regions_onshore) - .set_index("name") - .buffer(0) + gpd.read_file(snakemake.input.regions_onshore).set_index("name").buffer(0) ) I = cutout.indicatormatrix(clustered_regions) diff --git a/scripts/build_heat_demand.py b/scripts/build_heat_demand.py index da7c476e..77768404 100644 --- a/scripts/build_heat_demand.py +++ b/scripts/build_heat_demand.py @@ -31,9 +31,7 @@ if __name__ == "__main__": cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time) clustered_regions = ( - gpd.read_file(snakemake.input.regions_onshore) - .set_index("name") - .buffer(0) + gpd.read_file(snakemake.input.regions_onshore).set_index("name").buffer(0) ) I = cutout.indicatormatrix(clustered_regions) diff --git a/scripts/build_solar_thermal_profiles.py b/scripts/build_solar_thermal_profiles.py index 4e7a6cd4..ee6ed881 100644 --- a/scripts/build_solar_thermal_profiles.py +++ b/scripts/build_solar_thermal_profiles.py @@ -33,9 +33,7 @@ if __name__ == "__main__": cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time) clustered_regions = ( - gpd.read_file(snakemake.input.regions_onshore) - .set_index("name") - .buffer(0) + gpd.read_file(snakemake.input.regions_onshore).set_index("name").buffer(0) ) I = cutout.indicatormatrix(clustered_regions) diff --git a/scripts/build_temperature_profiles.py b/scripts/build_temperature_profiles.py index d8eaadce..a13ec3c2 100644 --- a/scripts/build_temperature_profiles.py +++ b/scripts/build_temperature_profiles.py @@ -31,9 +31,7 @@ if __name__ == "__main__": cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time) clustered_regions = ( - gpd.read_file(snakemake.input.regions_onshore) - .set_index("name") - .buffer(0) + gpd.read_file(snakemake.input.regions_onshore).set_index("name").buffer(0) ) I = cutout.indicatormatrix(clustered_regions) From 00aa07242a313755f8de1a2a6da7111f4cc1abf6 Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Fri, 8 Dec 2023 
17:53:28 +0100 Subject: [PATCH 104/122] add_brownfield: disable grid expansion if LV already hit Numerical problems were causing infeasibilities otherwise --- scripts/add_brownfield.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index 74102580..fb1453fd 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -119,6 +119,32 @@ def add_brownfield(n, n_p, year): n.links.loc[new_pipes, "p_nom"] = 0.0 n.links.loc[new_pipes, "p_nom_min"] = 0.0 +def disable_grid_expansion_if_LV_limit_hit(n): + if not "lv_limit" in n.global_constraints.index: + return + + #calculate minimum LV + attr = "nom_min" + dc = n.links.index[n.links.carrier == "DC"] + tot = (n.lines["s_" + attr]*n.lines["length"]).sum() + (n.links.loc[dc,"p_" + attr]*n.links.loc[dc,"length"]).sum() + + diff = n.global_constraints.at["lv_limit","constant"]-tot + + #allow small numerical differences + limit = 1 + + if diff < limit: + logger.info(f"LV is already reached (gap {diff}), disabling expansion and LV limit") + expandable_acs = n.lines.index[n.lines.s_nom_extendable] + n.lines.loc[expandable_acs,"s_nom_extendable"] = False + n.lines.loc[expandable_acs,"s_nom"] = n.lines.loc[expandable_acs,"s_nom_min"] + + expandable_dcs = n.links.index[n.links.p_nom_extendable & (n.links.carrier == "DC")] + n.links.loc[expandable_dcs,"p_nom_extendable"] = False + n.links.loc[expandable_dcs,"p_nom"] = n.links.loc[expandable_dcs,"p_nom_min"] + + n.global_constraints.drop("lv_limit", + inplace=True) if __name__ == "__main__": if "snakemake" not in globals(): @@ -150,5 +176,7 @@ if __name__ == "__main__": add_brownfield(n, n_p, year) + disable_grid_expansion_if_LV_limit_hit(n) + n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) n.export_to_netcdf(snakemake.output[0]) From 42f11752caa06a57f3b4bde2de24f0d5e5e95255 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 13:35:11 +0100 
Subject: [PATCH 105/122] standardise formatting --- scripts/add_brownfield.py | 31 +++++++++++++++++++------------ 1 file changed, 19 insertions(+), 12 deletions(-) diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index fb1453fd..ffdaf46b 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -119,32 +119,39 @@ def add_brownfield(n, n_p, year): n.links.loc[new_pipes, "p_nom"] = 0.0 n.links.loc[new_pipes, "p_nom_min"] = 0.0 + def disable_grid_expansion_if_LV_limit_hit(n): if not "lv_limit" in n.global_constraints.index: return - #calculate minimum LV + # calculate minimum LV attr = "nom_min" dc = n.links.index[n.links.carrier == "DC"] - tot = (n.lines["s_" + attr]*n.lines["length"]).sum() + (n.links.loc[dc,"p_" + attr]*n.links.loc[dc,"length"]).sum() + tot = (n.lines["s_" + attr] * n.lines["length"]).sum() + ( + n.links.loc[dc, "p_" + attr] * n.links.loc[dc, "length"] + ).sum() - diff = n.global_constraints.at["lv_limit","constant"]-tot + diff = n.global_constraints.at["lv_limit", "constant"] - tot - #allow small numerical differences + # allow small numerical differences limit = 1 if diff < limit: - logger.info(f"LV is already reached (gap {diff}), disabling expansion and LV limit") + logger.info( + f"LV is already reached (gap {diff}), disabling expansion and LV limit" + ) expandable_acs = n.lines.index[n.lines.s_nom_extendable] - n.lines.loc[expandable_acs,"s_nom_extendable"] = False - n.lines.loc[expandable_acs,"s_nom"] = n.lines.loc[expandable_acs,"s_nom_min"] + n.lines.loc[expandable_acs, "s_nom_extendable"] = False + n.lines.loc[expandable_acs, "s_nom"] = n.lines.loc[expandable_acs, "s_nom_min"] - expandable_dcs = n.links.index[n.links.p_nom_extendable & (n.links.carrier == "DC")] - n.links.loc[expandable_dcs,"p_nom_extendable"] = False - n.links.loc[expandable_dcs,"p_nom"] = n.links.loc[expandable_dcs,"p_nom_min"] + expandable_dcs = n.links.index[ + n.links.p_nom_extendable & (n.links.carrier == "DC") + ] + 
n.links.loc[expandable_dcs, "p_nom_extendable"] = False + n.links.loc[expandable_dcs, "p_nom"] = n.links.loc[expandable_dcs, "p_nom_min"] + + n.global_constraints.drop("lv_limit", inplace=True) - n.global_constraints.drop("lv_limit", - inplace=True) if __name__ == "__main__": if "snakemake" not in globals(): From deba2a4ed53163ade07d2ba7a64c4f928ae10c72 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 13:41:42 +0100 Subject: [PATCH 106/122] tidy code --- scripts/add_brownfield.py | 30 ++++++++++++------------------ 1 file changed, 12 insertions(+), 18 deletions(-) diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index ffdaf46b..9ddd3d99 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -124,31 +124,25 @@ def disable_grid_expansion_if_LV_limit_hit(n): if not "lv_limit" in n.global_constraints.index: return - # calculate minimum LV - attr = "nom_min" - dc = n.links.index[n.links.carrier == "DC"] - tot = (n.lines["s_" + attr] * n.lines["length"]).sum() + ( - n.links.loc[dc, "p_" + attr] * n.links.loc[dc, "length"] + total_expansion = ( + n.lines.eval("s_nom_min * length").sum() + + n.links.query("carrier == 'DC'").eval("p_nom_min * length").sum() ).sum() - diff = n.global_constraints.at["lv_limit", "constant"] - tot + lv_limit = n.global_constraints.at["lv_limit", "constant"] # allow small numerical differences - limit = 1 - - if diff < limit: + if lv_limit - total_expansion < 1: logger.info( - f"LV is already reached (gap {diff}), disabling expansion and LV limit" + f"LV is already reached (gap {lv_limit - total_expansion} MWkm), disabling expansion and LV limit" ) - expandable_acs = n.lines.index[n.lines.s_nom_extendable] - n.lines.loc[expandable_acs, "s_nom_extendable"] = False - n.lines.loc[expandable_acs, "s_nom"] = n.lines.loc[expandable_acs, "s_nom_min"] + extendable_acs = n.lines.query("s_nom_extendable").index + n.lines.loc[extendable_acs, "s_nom_extendable"] = False + n.lines.loc[extendable_acs, "s_nom"] = 
n.lines.loc[extendable_acs, "s_nom_min"] - expandable_dcs = n.links.index[ - n.links.p_nom_extendable & (n.links.carrier == "DC") - ] - n.links.loc[expandable_dcs, "p_nom_extendable"] = False - n.links.loc[expandable_dcs, "p_nom"] = n.links.loc[expandable_dcs, "p_nom_min"] + extendable_dcs = n.links.query("carrier == 'DC' and p_nom_extendable").index + n.links.loc[extendable_dcs, "p_nom_extendable"] = False + n.links.loc[extendable_dcs, "p_nom"] = n.links.loc[extendable_dcs, "p_nom_min"] n.global_constraints.drop("lv_limit", inplace=True) From 257b16efd8efae8848171083c1d4c04ab4af9579 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 13:47:13 +0100 Subject: [PATCH 107/122] print IIS if solver returns status infeasible --- scripts/solve_network.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index ff2a2f23..8c46e025 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -839,6 +839,9 @@ def solve_network(n, config, solving, opts="", **kwargs): f"Solving status '{status}' with termination condition '{condition}'" ) if "infeasible" in condition: + labels = n.model.compute_infeasibilities() + logger.info("Labels:\n" + labels) + n.model.print_infeasibilities() raise RuntimeError("Solving status 'infeasible'") return n From 2acddb6a7ccf1c6d30bcf8d452e7c2bd61a7a36c Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 13:48:34 +0100 Subject: [PATCH 108/122] add release note --- doc/release_notes.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 5ac7925e..31e492a8 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -81,6 +81,9 @@ Upcoming Release * Remove HELMETH option. +* Print Irreducible Infeasible Subset (IIS) if model is infeasible. Only for + solvers with IIS support. + **Bugs and Compatibility** * A bug preventing custom powerplants specified in ``data/custom_powerplants.csv`` was fixed. 
(https://github.com/PyPSA/pypsa-eur/pull/732) From ecd85d23d317e1acf106bf1a2b28c82fff77a275 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 15:01:43 +0100 Subject: [PATCH 109/122] add option to use LUISA land coverage data --- config/config.default.yaml | 7 ++++ doc/configtables/offwind-ac.csv | 1 + doc/configtables/offwind-dc.csv | 1 + doc/configtables/onwind.csv | 4 ++ doc/configtables/solar.csv | 1 + doc/release_notes.rst | 10 +++++ rules/build_electricity.smk | 5 +++ rules/retrieve.smk | 16 +++++++ scripts/build_renewable_profiles.py | 65 +++++++++++++++++------------ 9 files changed, 84 insertions(+), 26 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index 74844ec0..eddd0271 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -165,6 +165,10 @@ renewable: grid_codes: [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 31, 32] distance: 1000 distance_grid_codes: [1, 2, 3, 4, 5, 6] + luisa: false + # grid_codes: [1111, 1121, 1122, 1123, 1130, 1210, 1221, 1222, 1230, 1241, 1242] + # distance: 1000 + # distance_grid_codes: [1111, 1121, 1122, 1123, 1130, 1210, 1221, 1222, 1230, 1241, 1242] natura: true excluder_resolution: 100 clip_p_max_pu: 1.e-2 @@ -177,6 +181,7 @@ renewable: capacity_per_sqkm: 2 correction_factor: 0.8855 corine: [44, 255] + luisa: false # [0, 5230] natura: true ship_threshold: 400 max_depth: 50 @@ -192,6 +197,7 @@ renewable: capacity_per_sqkm: 2 correction_factor: 0.8855 corine: [44, 255] + luisa: false # [0, 5230] natura: true ship_threshold: 400 max_depth: 50 @@ -209,6 +215,7 @@ renewable: capacity_per_sqkm: 1.7 # correction_factor: 0.854337 corine: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 26, 31, 32] + luisa: false # [1111, 1121, 1122, 1123, 1130, 1210, 1221, 1222, 1230, 1241, 1242, 1310, 1320, 1330, 1410, 1421, 1422, 2110, 2120, 2130, 2210, 2220, 2230, 2310, 2410, 2420, 3210, 3320, 3330] natura: true 
excluder_resolution: 100 clip_p_max_pu: 1.e-2 diff --git a/doc/configtables/offwind-ac.csv b/doc/configtables/offwind-ac.csv index c3512a9e..9dc0614c 100644 --- a/doc/configtables/offwind-ac.csv +++ b/doc/configtables/offwind-ac.csv @@ -7,6 +7,7 @@ capacity_per_sqkm,:math:`MW/km^2`,float,"Allowable density of wind turbine place correction_factor,--,float,"Correction factor for capacity factor time series." excluder_resolution,m,float,"Resolution on which to perform geographical elibility analysis." corine,--,"Any *realistic* subset of the `CORINE Land Cover code list `_","Specifies areas according to CORINE Land Cover codes which are generally eligible for AC-connected offshore wind turbine placement." +luisa,--,"Any subset of the `LUISA Base Map codes in Annex 1 `_","Specifies areas according to the LUISA Base Map codes which are generally eligible for AC-connected offshore wind turbine placement." natura,bool,"{true, false}","Switch to exclude `Natura 2000 `_ natural protection areas. Area is excluded if ``true``." ship_threshold,--,float,"Ship density threshold from which areas are excluded." max_depth,m,float,"Maximum sea water depth at which wind turbines can be build. Maritime areas with deeper waters are excluded in the process of calculating the AC-connected offshore wind potential." diff --git a/doc/configtables/offwind-dc.csv b/doc/configtables/offwind-dc.csv index 35095597..c947f358 100644 --- a/doc/configtables/offwind-dc.csv +++ b/doc/configtables/offwind-dc.csv @@ -7,6 +7,7 @@ capacity_per_sqkm,:math:`MW/km^2`,float,"Allowable density of wind turbine place correction_factor,--,float,"Correction factor for capacity factor time series." excluder_resolution,m,float,"Resolution on which to perform geographical elibility analysis." corine,--,"Any *realistic* subset of the `CORINE Land Cover code list `_","Specifies areas according to CORINE Land Cover codes which are generally eligible for AC-connected offshore wind turbine placement." 
+luisa,--,"Any subset of the `LUISA Base Map codes in Annex 1 `_","Specifies areas according to the LUISA Base Map codes which are generally eligible for DC-connected offshore wind turbine placement." natura,bool,"{true, false}","Switch to exclude `Natura 2000 `_ natural protection areas. Area is excluded if ``true``." ship_threshold,--,float,"Ship density threshold from which areas are excluded." max_depth,m,float,"Maximum sea water depth at which wind turbines can be build. Maritime areas with deeper waters are excluded in the process of calculating the AC-connected offshore wind potential." diff --git a/doc/configtables/onwind.csv b/doc/configtables/onwind.csv index b7e823b3..f6b36e5d 100644 --- a/doc/configtables/onwind.csv +++ b/doc/configtables/onwind.csv @@ -8,6 +8,10 @@ corine,,, -- grid_codes,--,"Any subset of the `CORINE Land Cover code list `_","Specifies areas according to CORINE Land Cover codes which are generally eligible for wind turbine placement." -- distance,m,float,"Distance to keep from areas specified in ``distance_grid_codes``" -- distance_grid_codes,--,"Any subset of the `CORINE Land Cover code list `_","Specifies areas according to CORINE Land Cover codes to which wind turbines must maintain a distance specified in the setting ``distance``." +luisa,,, +-- grid_codes,--,"Any subset of the `LUISA Base Map codes in Annex 1 `_","Specifies areas according to the LUISA Base Map codes which are generally eligible for wind turbine placement." +-- distance,m,float,"Distance to keep from areas specified in ``distance_grid_codes``" +-- distance_grid_codes,--,"Any subset of the `LUISA Base Map codes in Annex 1 `_","Specifies areas according to the LUISA Base Map codes to which wind turbines must maintain a distance specified in the setting ``distance``." natura,bool,"{true, false}","Switch to exclude `Natura 2000 `_ natural protection areas. Area is excluded if ``true``." 
clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero." correction_factor,--,float,"Correction factor for capacity factor time series." diff --git a/doc/configtables/solar.csv b/doc/configtables/solar.csv index 7da1281b..8328d342 100644 --- a/doc/configtables/solar.csv +++ b/doc/configtables/solar.csv @@ -9,6 +9,7 @@ resource,,, capacity_per_sqkm,:math:`MW/km^2`,float,"Allowable density of solar panel placement." correction_factor,--,float,"A correction factor for the capacity factor (availability) time series." corine,--,"Any subset of the `CORINE Land Cover code list `_","Specifies areas according to CORINE Land Cover codes which are generally eligible for solar panel placement." +luisa,--,"Any subset of the `LUISA Base Map codes in Annex 1 `_","Specifies areas according to the LUISA Base Map codes which are generally eligible for solar panel placement." natura,bool,"{true, false}","Switch to exclude `Natura 2000 `_ natural protection areas. Area is excluded if ``true``." clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero." excluder_resolution,m,float,"Resolution on which to perform geographical elibility analysis." diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 31e492a8..a1eb644e 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -84,6 +84,16 @@ Upcoming Release * Print Irreducible Infeasible Subset (IIS) if model is infeasible. Only for solvers with IIS support. +* Add option to use `LUISA Base Map + `_ 50m land + coverage dataset for land eligibility analysis in + :mod:`build_renewable_profiles`. Settings are analogous to the CORINE dataset + but with the key ``luisa:`` in the configuration file. To leverage the + dataset's full advantages, set the excluder resolution to 50m + (``excluder_resolution: 50``). 
For land category codes, see `Annex 1 of the + technical documentation + `_. + **Bugs and Compatibility** * A bug preventing custom powerplants specified in ``data/custom_powerplants.csv`` was fixed. (https://github.com/PyPSA/pypsa-eur/pull/732) diff --git a/rules/build_electricity.smk b/rules/build_electricity.smk index 6308552f..055cffca 100644 --- a/rules/build_electricity.smk +++ b/rules/build_electricity.smk @@ -268,6 +268,11 @@ rule build_renewable_profiles: if config["renewable"][w.technology]["natura"] else [] ), + luisa=lambda w: ( + "data/LUISA_basemap_020321_50m.tif" + if config["renewable"][w.technology].get("luisa") + else [] + ), gebco=ancient( lambda w: ( "data/bundle/GEBCO_2014_2D.nc" diff --git a/rules/retrieve.smk b/rules/retrieve.smk index 4ded2a46..99ce344e 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -249,6 +249,22 @@ if config["enable"]["retrieve"]: validate_checksum(output[0], input[0]) +if config["enable"]["retrieve"]: + + # Downloading LUISA Base Map for land cover and land use: + # Website: https://ec.europa.eu/jrc/en/luisa + rule retrieve_luisa_land_cover: + input: + HTTP.remote( + "jeodpp.jrc.ec.europa.eu/ftp/jrc-opendata/LUISA/EUROPE/Basemaps/LandUse/2018/LATEST/LUISA_basemap_020321_50m.tif", + static=True + ) + output: + "data/LUISA_basemap_020321_50m.tif" + run: + move(input[0], output[0]) + + if config["enable"]["retrieve"]: # Some logic to find the correct file URL # Sometimes files are released delayed or ahead of schedule, check which file is currently available diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index c579f588..60c11921 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -7,10 +7,10 @@ """ Calculates for each network node the (i) installable capacity (based on land- use), (ii) the available generation time series (based on weather data), and -(iii) the average distance from the node for onshore wind, AC-connected -offshore 
wind, DC-connected offshore wind and solar PV generators. In addition -for offshore wind it calculates the fraction of the grid connection which is -under water. +(iii) the average distance from the node for onshore wind, AC-connected offshore +wind, DC-connected offshore wind and solar PV generators. In addition for +offshore wind it calculates the fraction of the grid connection which is under +water. .. note:: Hydroelectric profiles are built in script :mod:`build_hydro_profiles`. @@ -26,7 +26,7 @@ Relevant settings renewable: {technology}: - cutout: corine: grid_codes: distance: natura: max_depth: + cutout: corine: luisa: grid_codes: distance: natura: max_depth: max_shore_distance: min_shore_distance: capacity_per_sqkm: correction_factor: min_p_max_pu: clip_p_max_pu: resource: @@ -40,11 +40,18 @@ Inputs - ``data/bundle/corine/g250_clc06_V18_5.tif``: `CORINE Land Cover (CLC) `_ inventory on `44 classes `_ of - land use (e.g. forests, arable land, industrial, urban areas). + land use (e.g. forests, arable land, industrial, urban areas) at 100m + resolution. .. image:: img/corine.png :scale: 33 % +- ``data/LUISA_basemap_020321_50m.tif``: `LUISA Base Map + `_ land + coverage dataset at 50m resolution similar to CORINE. For codes in relation to + CORINE land cover, see `Annex 1 of the technical documentation + `_. + - ``data/bundle/GEBCO_2014_2D.nc``: A `bathymetric `_ data set with a global terrain model for ocean and land at 15 arc-second intervals by the `General @@ -133,9 +140,10 @@ of a combination of the available land at each grid cell and the capacity factor there. First the script computes how much of the technology can be installed at each -cutout grid cell and each node using the `GLAES -`_ library. This uses the CORINE land use -data, Natura2000 nature reserves and GEBCO bathymetry data. +cutout grid cell and each node using the `atlite +`_ library. 
This uses the CORINE land use data, +LUISA land use data, Natura2000 nature reserves, GEBCO bathymetry data, and +shipping lanes. .. image:: img/eligibility.png :scale: 50 % @@ -166,10 +174,10 @@ This layout is then used to compute the generation availability time series from the weather data cutout from ``atlite``. The maximal installable potential for the node (`p_nom_max`) is computed by -adding up the installable potentials of the individual grid cells. -If the model comes close to this limit, then the time series may slightly -overestimate production since it is assumed the geographical distribution is -proportional to capacity factor. +adding up the installable potentials of the individual grid cells. If the model +comes close to this limit, then the time series may slightly overestimate +production since it is assumed the geographical distribution is proportional to +capacity factor. """ import functools import logging @@ -203,9 +211,6 @@ if __name__ == "__main__": correction_factor = params.get("correction_factor", 1.0) capacity_per_sqkm = params["capacity_per_sqkm"] - if isinstance(params.get("corine", {}), list): - params["corine"] = {"grid_codes": params["corine"]} - if correction_factor != 1.0: logger.info(f"correction_factor is set as {correction_factor}") @@ -231,16 +236,24 @@ if __name__ == "__main__": if params["natura"]: excluder.add_raster(snakemake.input.natura, nodata=0, allow_no_overlap=True) - corine = params.get("corine", {}) - if "grid_codes" in corine: - codes = corine["grid_codes"] - excluder.add_raster(snakemake.input.corine, codes=codes, invert=True, crs=3035) - if corine.get("distance", 0.0) > 0.0: - codes = corine["distance_grid_codes"] - buffer = corine["distance"] - excluder.add_raster( - snakemake.input.corine, codes=codes, buffer=buffer, crs=3035 - ) + for landuse in ["corine", "luisa"]: + kwargs = {"nodata": 0} if landuse == "luisa" else {} + landuse = params.get(landuse, {}) + if not landuse: + continue + if isinstance(landuse, 
list): + landuse = {"grid_codes": landuse} + if "grid_codes" in landuse: + codes = landuse["grid_codes"] + excluder.add_raster( + snakemake.input[landuse], codes=codes, invert=True, crs=3035, **kwargs + ) + if landuse.get("distance", 0.0) > 0.0: + codes = landuse["distance_grid_codes"] + buffer = landuse["distance"] + excluder.add_raster( + snakemake.input[landuse], codes=codes, buffer=buffer, crs=3035, **kwargs + ) if params.get("ship_threshold"): shipping_threshold = ( From 045eeba4cfc17500c9740706c60c9f61fc4a3a68 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 14:02:24 +0000 Subject: [PATCH 110/122] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- rules/retrieve.smk | 6 +++--- scripts/build_renewable_profiles.py | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/rules/retrieve.smk b/rules/retrieve.smk index 99ce344e..e062091e 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -257,10 +257,10 @@ if config["enable"]["retrieve"]: input: HTTP.remote( "jeodpp.jrc.ec.europa.eu/ftp/jrc-opendata/LUISA/EUROPE/Basemaps/LandUse/2018/LATEST/LUISA_basemap_020321_50m.tif", - static=True - ) + static=True, + ), output: - "data/LUISA_basemap_020321_50m.tif" + "data/LUISA_basemap_020321_50m.tif", run: move(input[0], output[0]) diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index 60c11921..d4cab19d 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -7,10 +7,10 @@ """ Calculates for each network node the (i) installable capacity (based on land- use), (ii) the available generation time series (based on weather data), and -(iii) the average distance from the node for onshore wind, AC-connected offshore -wind, DC-connected offshore wind and solar PV generators. 
In addition for -offshore wind it calculates the fraction of the grid connection which is under -water. +(iii) the average distance from the node for onshore wind, AC-connected +offshore wind, DC-connected offshore wind and solar PV generators. In addition +for offshore wind it calculates the fraction of the grid connection which is +under water. .. note:: Hydroelectric profiles are built in script :mod:`build_hydro_profiles`. From 94f4383e0289e2b81e9edc20e21f18910156f0e6 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 15:22:13 +0100 Subject: [PATCH 111/122] distinguish dataset name and dataset settings --- scripts/build_renewable_profiles.py | 35 ++++++++++++++++------------- 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index d4cab19d..b58482ae 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -236,23 +236,28 @@ if __name__ == "__main__": if params["natura"]: excluder.add_raster(snakemake.input.natura, nodata=0, allow_no_overlap=True) - for landuse in ["corine", "luisa"]: - kwargs = {"nodata": 0} if landuse == "luisa" else {} - landuse = params.get(landuse, {}) - if not landuse: - continue - if isinstance(landuse, list): - landuse = {"grid_codes": landuse} - if "grid_codes" in landuse: - codes = landuse["grid_codes"] - excluder.add_raster( - snakemake.input[landuse], codes=codes, invert=True, crs=3035, **kwargs + for dataset in ["corine", "luisa"]: + kwargs = {"nodata": 0} if dataset == "luisa" else {} + if dataset == "luisa" and res > 50: + logger.info( + "LUISA data is available at 50m resolution, " + f"but coarser {res}m resolution is used." 
) - if landuse.get("distance", 0.0) > 0.0: - codes = landuse["distance_grid_codes"] - buffer = landuse["distance"] + settings = params.get(dataset, {}) + if not settings: + continue + if isinstance(settings, list): + settings = {"grid_codes": settings} + if "grid_codes" in settings: + codes = settings["grid_codes"] excluder.add_raster( - snakemake.input[landuse], codes=codes, buffer=buffer, crs=3035, **kwargs + snakemake.input[dataset], codes=codes, invert=True, crs=3035, **kwargs + ) + if settings.get("distance", 0.0) > 0.0: + codes = settings["distance_grid_codes"] + buffer = settings["distance"] + excluder.add_raster( + snakemake.input[dataset], codes=codes, buffer=buffer, crs=3035, **kwargs ) if params.get("ship_threshold"): From 1a6031f318aab522d3356c2bb4ef314b3eed76d2 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 15:25:27 +0100 Subject: [PATCH 112/122] only copy config.default.yaml if it exists --- Snakefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Snakefile b/Snakefile index 83530df7..14ce0e40 100644 --- a/Snakefile +++ b/Snakefile @@ -14,7 +14,7 @@ from snakemake.utils import min_version min_version("7.7") -if not exists("config/config.yaml"): +if not exists("config/config.yaml") and exists("config/config.default.yaml"): copyfile("config/config.default.yaml", "config/config.yaml") From d145758fb7ff4bf126ddada8eec6d8f942c93f4f Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 09:00:31 +0100 Subject: [PATCH 113/122] gracefully handle absent extra_functionality file; add file to path --- rules/common.smk | 7 +++++++ rules/solve_electricity.smk | 4 +--- rules/solve_myopic.smk | 4 +--- rules/solve_overnight.smk | 5 +---- rules/solve_perfect.smk | 4 +--- scripts/solve_network.py | 2 ++ 6 files changed, 13 insertions(+), 13 deletions(-) diff --git a/rules/common.smk b/rules/common.smk index 2c8cf69c..44e3a807 100644 --- a/rules/common.smk +++ b/rules/common.smk @@ -28,6 +28,13 @@ def memory(w): 
return int(factor * (10000 + 195 * int(w.clusters))) +def input_custom_extra_functionality(w): + path = config["solving"]["options"].get("custom_extra_functionality", False) + if path: + return workflow.source_path(path) + return [] + + # Check if the workflow has access to the internet by trying to access the HEAD of specified url def has_internet_access(url="www.zenodo.org") -> bool: import http.client as http_client diff --git a/rules/solve_electricity.smk b/rules/solve_electricity.smk index 2c956097..7f6092be 100644 --- a/rules/solve_electricity.smk +++ b/rules/solve_electricity.smk @@ -11,9 +11,7 @@ rule solve_network: co2_sequestration_potential=config["sector"].get( "co2_sequestration_potential", 200 ), - custom_extra_functionality=workflow.source_path( - config["solving"]["options"].get("custom_extra_functionality", "") - ), + custom_extra_functionality=input_custom_extra_functionality, input: network=RESOURCES + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", config=RESULTS + "config.yaml", diff --git a/rules/solve_myopic.smk b/rules/solve_myopic.smk index afa8ad2c..7ca8857d 100644 --- a/rules/solve_myopic.smk +++ b/rules/solve_myopic.smk @@ -88,9 +88,7 @@ rule solve_sector_network_myopic: co2_sequestration_potential=config["sector"].get( "co2_sequestration_potential", 200 ), - custom_extra_functionality=workflow.source_path( - config["solving"]["options"].get("custom_extra_functionality", "") - ), + custom_extra_functionality=input_custom_extra_functionality, input: network=RESULTS + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", diff --git a/rules/solve_overnight.smk b/rules/solve_overnight.smk index fc2f74df..8686b205 100644 --- a/rules/solve_overnight.smk +++ b/rules/solve_overnight.smk @@ -2,7 +2,6 @@ # # SPDX-License-Identifier: MIT - rule solve_sector_network: params: solving=config["solving"], @@ -11,9 +10,7 @@ rule solve_sector_network: co2_sequestration_potential=config["sector"].get( 
"co2_sequestration_potential", 200 ), - custom_extra_functionality=workflow.source_path( - config["solving"]["options"].get("custom_extra_functionality", "") - ), + custom_extra_functionality=input_custom_extra_functionality, input: network=RESULTS + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", diff --git a/rules/solve_perfect.smk b/rules/solve_perfect.smk index 63be5cc1..a7856fa9 100644 --- a/rules/solve_perfect.smk +++ b/rules/solve_perfect.smk @@ -118,9 +118,7 @@ rule solve_sector_network_perfect: co2_sequestration_potential=config["sector"].get( "co2_sequestration_potential", 200 ), - custom_extra_functionality=workflow.source_path( - config["solving"]["options"].get("custom_extra_functionality", "") - ), + custom_extra_functionality=input_custom_extra_functionality, input: network=RESULTS + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc", diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 5a045577..2f170dff 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -30,6 +30,7 @@ import importlib import logging import os import re +import sys import numpy as np import pandas as pd @@ -831,6 +832,7 @@ def extra_functionality(n, snapshots): if snakemake.params.custom_extra_functionality: source_path = snakemake.params.custom_extra_functionality assert os.path.exists(source_path), f"{source_path} does not exist" + sys.path.append(os.path.dirname(source_path)) module_name = os.path.splitext(os.path.basename(source_path))[0] module = importlib.import_module(module_name) module.custom_extra_functionality(n, snapshots) From 60493fc55829ddc95bd8d55d35b0f505cef5f624 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 4 Jan 2024 08:01:00 +0000 Subject: [PATCH 114/122] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- 
rules/solve_overnight.smk | 1 + 1 file changed, 1 insertion(+) diff --git a/rules/solve_overnight.smk b/rules/solve_overnight.smk index 8686b205..a3fed042 100644 --- a/rules/solve_overnight.smk +++ b/rules/solve_overnight.smk @@ -2,6 +2,7 @@ # # SPDX-License-Identifier: MIT + rule solve_sector_network: params: solving=config["solving"], From ab1d93279a76fe2608183ca0ed2968514ce0b3fb Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 09:19:29 +0100 Subject: [PATCH 115/122] move LUISA resolution info to a later point --- scripts/build_renewable_profiles.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index b58482ae..b736f68a 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -238,14 +238,14 @@ if __name__ == "__main__": for dataset in ["corine", "luisa"]: kwargs = {"nodata": 0} if dataset == "luisa" else {} + settings = params.get(dataset, {}) + if not settings: + continue if dataset == "luisa" and res > 50: logger.info( "LUISA data is available at 50m resolution, " f"but coarser {res}m resolution is used." 
) - settings = params.get(dataset, {}) - if not settings: - continue if isinstance(settings, list): settings = {"grid_codes": settings} if "grid_codes" in settings: From f451e28f582f970a5b4f3a8336d99c89c67429bb Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 11:42:24 +0100 Subject: [PATCH 116/122] add release notes and documentation --- doc/configtables/sector.csv | 2 ++ doc/release_notes.rst | 17 +++++++++++++++++ 2 files changed, 19 insertions(+) diff --git a/doc/configtables/sector.csv b/doc/configtables/sector.csv index 938c120a..5e2514e4 100644 --- a/doc/configtables/sector.csv +++ b/doc/configtables/sector.csv @@ -79,6 +79,8 @@ hydrogen_fuel_cell,--,"{true, false}",Add option to include hydrogen fuel cell f hydrogen_turbine,--,"{true, false}",Add option to include hydrogen turbine for re-electrification. Assuming OCGT technology costs SMR,--,"{true, false}",Add option for transforming natural gas into hydrogen and CO2 using Steam Methane Reforming (SMR) SMR CC,--,"{true, false}",Add option for transforming natural gas into hydrogen and CO2 using Steam Methane Reforming (SMR) and Carbon Capture (CC) +regional_methanol_demand,--,"{true, false}",Spatially resolve methanol demand. Set to true if regional CO2 constraints needed. +regional_oil_demand,--,"{true, false}",Spatially resolve oil demand. Set to true if regional CO2 constraints needed. regional_co2 _sequestration_potential,,, -- enable,--,"{true, false}",Add option for regionally-resolved geological carbon dioxide sequestration potentials based on `CO2StoP `_. -- attribute,--,string,Name of the attribute for the sequestration potential diff --git a/doc/release_notes.rst b/doc/release_notes.rst index bb9732de..03d9318d 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -10,6 +10,23 @@ Release Notes Upcoming Release ================ +* Remove all negative loads on the ``co2 atmosphere`` bus representing emissions + for e.g. fixed fossil demands for transport oil. 
Instead these are handled + more transparently with a fixed transport oil demand and a link taking care of + the emissions to the ``co2 atmosphere`` bus. This is also a preparation for + endogenous transport optimisation, where demand will be subject to + optimisation (e.g. fuel switching in the transport sector). + +* Allow possibility to go from copperplated to regionally resolved methanol and + oil demand with switches ``sector: regional_methanol_demand: true`` and + ``sector: regional_oil_demand: true``. This allows nodal/regional CO2 + constraints to be applied. + +* Process emissions from steam crackers (i.e. naphtha processing for HVC) are now + piped from the consumption link to the process emissions bus where the model + can decide about carbon capture. Previously the process emissions for naphtha + were a fixed load. + * Add option to specify losses for bidirectional links, e.g. pipelines or HVDC links, in configuration file under ``sector: transmission_efficiency:``. Users can specify static or length-dependent values as well as a length-dependent From addaecf77a7048955f49905c2b2f54371d3fd3a3 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 12:43:55 +0100 Subject: [PATCH 117/122] move comments to documentation --- config/config.default.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index 57416cc7..9e8f57a6 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -463,8 +463,8 @@ sector: hydrogen_turbine: false SMR: true SMR_cc: true - regional_methanol_demand: false #set to true if regional CO2 constraints needed - regional_oil_demand: false #set to true if regional CO2 constraints needed + regional_methanol_demand: false + regional_oil_demand: false regional_co2_sequestration_potential: enable: false attribute: 'conservative estimate Mt' From 593995675e43d860c120b568ab19f8ffade31b32 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: 
Thu, 4 Jan 2024 12:44:47 +0100 Subject: [PATCH 118/122] gurobi: avoid double-logging to console --- scripts/solve_network.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 2f170dff..ecf56a24 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -854,6 +854,9 @@ def solve_network(n, config, solving, opts="", **kwargs): ) kwargs["assign_all_duals"] = cf_solving.get("assign_all_duals", False) + if kwargs["solver_name"] == "gurobi": + logging.getLogger('gurobipy').setLevel(logging.CRITICAL) + rolling_horizon = cf_solving.pop("rolling_horizon", False) skip_iterations = cf_solving.pop("skip_iterations", False) if not n.lines.s_nom_extendable.any(): From 8054ad382c3cc3806c7ec808137206358f77d888 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 12:47:26 +0100 Subject: [PATCH 119/122] prepare_sector_network: simplify process emissions with outsourced feedstock emissions --- scripts/prepare_sector_network.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index e211be15..ea0c4f3f 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2996,16 +2996,14 @@ def add_industry(n, costs): unit="t_co2", ) - sel = ["process emission"] if options["co2_spatial"] or options["co2network"]: p_set = ( - -industrial_demand.loc[nodes, sel] - .sum(axis=1) + -industrial_demand.loc[nodes, "process emission"] .rename(index=lambda x: x + " process emissions") / nhours ) else: - p_set = -industrial_demand.loc[nodes, sel].sum(axis=1).sum() / nhours + p_set = -industrial_demand.loc[nodes, "process emission"].sum() / nhours n.madd( "Load", From cfd689bbec29fad829cef8d2e6b1fdf89fd2ebea Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 13:03:03 +0100 Subject: [PATCH 120/122] add snakemake object to custom_extra_functionality arguments --- 
data/custom_extra_functionality.py | 2 +- scripts/solve_network.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/data/custom_extra_functionality.py b/data/custom_extra_functionality.py index 0ac24cea..e7a9df0f 100644 --- a/data/custom_extra_functionality.py +++ b/data/custom_extra_functionality.py @@ -4,7 +4,7 @@ # SPDX-License-Identifier: MIT -def custom_extra_functionality(n, snapshots): +def custom_extra_functionality(n, snapshots, snakemake): """ Add custom extra functionality constraints. """ diff --git a/scripts/solve_network.py b/scripts/solve_network.py index ecf56a24..4b988666 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -835,7 +835,7 @@ def extra_functionality(n, snapshots): sys.path.append(os.path.dirname(source_path)) module_name = os.path.splitext(os.path.basename(source_path))[0] module = importlib.import_module(module_name) - module.custom_extra_functionality(n, snapshots) + module.custom_extra_functionality(n, snapshots, snakemake) def solve_network(n, config, solving, opts="", **kwargs): From 34535bcbffd5506faeaba7c8ada546e29f00c7eb Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 13:08:14 +0100 Subject: [PATCH 121/122] custom_extra_functionality: assume same function name as file name --- scripts/solve_network.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 4b988666..203d8b0f 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -835,7 +835,8 @@ def extra_functionality(n, snapshots): sys.path.append(os.path.dirname(source_path)) module_name = os.path.splitext(os.path.basename(source_path))[0] module = importlib.import_module(module_name) - module.custom_extra_functionality(n, snapshots, snakemake) + custom_extra_functionality = getattr(module, module_name) + custom_extra_functionality(n, snapshots, snakemake) def solve_network(n, config, solving, opts="", **kwargs): From 
c3bcaee1a22a888d2ba1147ed643fbf7607ffa86 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 13:37:35 +0100 Subject: [PATCH 122/122] common.smk: find _helpers.py also if pypsa-eur is used as module --- rules/common.smk | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/rules/common.smk b/rules/common.smk index 44e3a807..0e85b620 100644 --- a/rules/common.smk +++ b/rules/common.smk @@ -2,9 +2,14 @@ # # SPDX-License-Identifier: MIT -import os, sys +import os, sys, glob + +helper_source_path = [match for match in glob.glob('**/_helpers.py', recursive=True)] + +for path in helper_source_path: + path = os.path.dirname(os.path.abspath(path)) + sys.path.insert(0, os.path.abspath(path)) -sys.path.insert(0, os.path.abspath("scripts")) from _helpers import validate_checksum