From cc162a9e028fb7a2bac5289e27b90ab57e46f10b Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 31 Jul 2023 17:09:59 +0200 Subject: [PATCH 001/101] option for losses on bidirectional links via link splitting --- config/config.default.yaml | 5 ++++ scripts/prepare_sector_network.py | 40 +++++++++++++++++++++++++++++++ scripts/solve_network.py | 22 +++++++++++++++++ 3 files changed, 67 insertions(+) diff --git a/config/config.default.yaml b/config/config.default.yaml index b162b75d..4413b8f5 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -478,6 +478,11 @@ sector: electricity_distribution_grid: true electricity_distribution_grid_cost_factor: 1.0 electricity_grid_connection: true + transmission_losses: + # per 1000 km + DC: 0 + H2 pipeline: 0 + gas pipeline: 0 H2_network: true gas_network: false H2_retrofit: false diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 11406bff..8719c281 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3280,6 +3280,34 @@ def set_temporal_aggregation(n, opts, solver_name): return n +def lossy_bidirectional_links(n, carrier, losses_per_thousand_km=0.0): + "Split bidirectional links into two unidirectional links to include transmission losses." + + carrier_i = n.links.query("carrier == @carrier").index + + if not losses_per_thousand_km or carrier_i.empty: + return + + logger.info( + f"Specified losses for {carrier} transmission. Splitting bidirectional links." + ) + + carrier_i = n.links.query("carrier == @carrier").index + n.links.loc[carrier_i, "p_min_pu"] = 0 + n.links["reversed"] = False + n.links.loc[carrier_i, "efficiency"] = ( + 1 - n.links.loc[carrier_i, "length"] * losses_per_thousand_km / 1e3 + ) + rev_links = ( + n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) + ) + rev_links.capital_cost = 0 + rev_links.reversed = True + rev_links.index = rev_links.index.map(lambda x: x + "-reversed") + + n.links = pd.concat([n.links, rev_links], sort=False) + + if __name__ == "__main__": if "snakemake" not in globals(): from _helpers import mock_snakemake @@ -3446,6 +3474,18 @@ if __name__ == "__main__": if options["electricity_grid_connection"]: add_electricity_grid_connection(n, costs) + for k, v in options["transmission_losses"].items(): + lossy_bidirectional_links(n, k, v) + + # Workaround: Remove lines with conflicting (and unrealistic) properties + # cf. https://github.com/PyPSA/pypsa-eur/issues/444 + if snakemake.config["solving"]["options"]["transmission_losses"]: + idx = n.lines.query("num_parallel == 0").index + logger.info( + f"Removing {len(idx)} line(s) with properties conflicting with transmission losses functionality." 
+ ) + n.mremove("Line", idx) + first_year_myopic = (snakemake.params.foresight == "myopic") and ( snakemake.params.planning_horizons[0] == investment_year ) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 836544b4..a68ca074 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -494,6 +494,27 @@ def add_battery_constraints(n): n.model.add_constraints(lhs == 0, name="Link-charger_ratio") +def add_lossy_bidirectional_link_constraints(n): + if not n.links.p_nom_extendable.any() or not "reversed" in n.links.columns: + return + + carriers = n.links.loc[n.links.reversed, "carrier"].unique() + + backward_i = n.links.query( + "carrier in @carriers and reversed and p_nom_extendable" + ).index + forward_i = n.links.query( + "carrier in @carriers and ~reversed and p_nom_extendable" + ).index + + assert len(forward_i) == len(backward_i) + + lhs = n.model["Link-p_nom"].loc[backward_i] + rhs = n.model["Link-p_nom"].loc[forward_i] + + n.model.add_constraints(lhs == rhs, name="Link-bidirectional_sync") + + def add_chp_constraints(n): electric = ( n.links.index.str.contains("urban central") @@ -593,6 +614,7 @@ def extra_functionality(n, snapshots): if "EQ" in o: add_EQ_constraints(n, o) add_battery_constraints(n) + add_lossy_bidirectional_link_constraints(n) add_pipe_retrofit_constraint(n) From e4eff27e508406055284ba77f4727df7e2dcbc6c Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 3 Aug 2023 13:09:12 +0200 Subject: [PATCH 002/101] fix capacity synchronisation between forward and backward lossy links --- scripts/prepare_sector_network.py | 4 ++-- scripts/solve_network.py | 6 +----- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 8719c281..b8eb8bc1 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3294,7 +3294,6 @@ def lossy_bidirectional_links(n, carrier, losses_per_thousand_km=0.0): carrier_i = n.links.query("carrier == @carrier").index n.links.loc[carrier_i, "p_min_pu"] = 0 - n.links["reversed"] = False n.links.loc[carrier_i, "efficiency"] = ( 1 - n.links.loc[carrier_i, "length"] * losses_per_thousand_km / 1e3 ) @@ -3302,10 +3301,11 @@ def lossy_bidirectional_links(n, carrier, losses_per_thousand_km=0.0): n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) ) rev_links.capital_cost = 0 - rev_links.reversed = True + rev_links["reversed"] = True rev_links.index = rev_links.index.map(lambda x: x + "-reversed") n.links = pd.concat([n.links, rev_links], sort=False) + n.links["reversed"] = n.links["reversed"].fillna(False) if __name__ == "__main__": diff --git a/scripts/solve_network.py b/scripts/solve_network.py index a68ca074..5e8c0356 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -500,14 +500,10 @@ def add_lossy_bidirectional_link_constraints(n): carriers = n.links.loc[n.links.reversed, "carrier"].unique() - backward_i = n.links.query( - "carrier in @carriers and reversed and p_nom_extendable" - ).index forward_i = n.links.query( "carrier in @carriers and ~reversed and p_nom_extendable" ).index - - assert len(forward_i) == len(backward_i) + backward_i = forward_i + "-reversed" lhs = n.model["Link-p_nom"].loc[backward_i] rhs = n.model["Link-p_nom"].loc[forward_i] From d7cb13246b807e7907c49ad1214559be92d2f363 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 7 Aug 2023 14:31:19 +0200 Subject: [PATCH 003/101] link losses: exponential rather than linear model --- 
config/config.default.yaml | 13 ++++++++----- scripts/prepare_sector_network.py | 15 ++++++++++----- 2 files changed, 18 insertions(+), 10 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index 4413b8f5..1b0a2260 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -478,11 +478,14 @@ sector: electricity_distribution_grid: true electricity_distribution_grid_cost_factor: 1.0 electricity_grid_connection: true - transmission_losses: - # per 1000 km - DC: 0 - H2 pipeline: 0 - gas pipeline: 0 + transmission_efficiency: + DC: + efficiency_static: 0.98 + efficiency_per_1000km: 0.977 + H2 pipeline: + efficiency_per_1000km: 0.979 + gas pipeline: + efficiency_per_1000km: 0.977 H2_network: true gas_network: false H2_retrofit: false diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index b8eb8bc1..48f5f41f 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3280,22 +3280,27 @@ def set_temporal_aggregation(n, opts, solver_name): return n -def lossy_bidirectional_links(n, carrier, losses_per_thousand_km=0.0): +def lossy_bidirectional_links(n, carrier, efficiencies={}): "Split bidirectional links into two unidirectional links to include transmission losses." carrier_i = n.links.query("carrier == @carrier").index - if not losses_per_thousand_km or carrier_i.empty: + if not any(v != 1. for v in efficiencies.values()) or carrier_i.empty: return + efficiency_static = efficiencies.get("efficiency_static", 1) + efficiency_per_1000km = efficiencies.get("efficiency_per_1000km", 1) + logger.info( - f"Specified losses for {carrier} transmission. Splitting bidirectional links." + f"Specified losses for {carrier} transmission" + f"(static: {efficiency_static}, per 1000km: {efficiency_per_1000km})." + "Splitting bidirectional links." 
) carrier_i = n.links.query("carrier == @carrier").index n.links.loc[carrier_i, "p_min_pu"] = 0 n.links.loc[carrier_i, "efficiency"] = ( - 1 - n.links.loc[carrier_i, "length"] * losses_per_thousand_km / 1e3 + efficiency_static * efficiency_per_1000km ** (n.links.loc[carrier_i, "length"] / 1e3) ) rev_links = ( n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) @@ -3474,7 +3479,7 @@ if __name__ == "__main__": if options["electricity_grid_connection"]: add_electricity_grid_connection(n, costs) - for k, v in options["transmission_losses"].items(): + for k, v in options["transmission_efficiency"].items(): lossy_bidirectional_links(n, k, v) # Workaround: Remove lines with conflicting (and unrealistic) properties From 118cabe8a60b238ef11aafc980a406011ea9f0fb Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 8 Aug 2023 17:56:22 +0200 Subject: [PATCH 004/101] add option to consider compression losses in pipelines as electricity demand --- config/config.default.yaml | 6 ++++-- scripts/prepare_sector_network.py | 11 ++++++++++- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index 1b0a2260..81a26a0b 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -483,9 +483,11 @@ sector: efficiency_static: 0.98 efficiency_per_1000km: 0.977 H2 pipeline: - efficiency_per_1000km: 0.979 + efficiency_per_1000km: 1 # 0.979 + compression_per_1000km: 0.019 gas pipeline: - efficiency_per_1000km: 0.977 + efficiency_per_1000km: 1 #0.977 + compression_per_1000km: 0.01 H2_network: true gas_network: false H2_retrofit: false diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 48f5f41f..7b58329c 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3285,11 +3285,16 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): carrier_i = n.links.query("carrier == @carrier").index +<<<<<<< HEAD if not any(v != 1. for v in efficiencies.values()) or carrier_i.empty: +======= + if not any((v != 1.0) or (v >= 0) for v in efficiencies.values()) or carrier_i.empty: +>>>>>>> 5822adb0 (add option to consider compression losses in pipelines as electricity demand) return efficiency_static = efficiencies.get("efficiency_static", 1) efficiency_per_1000km = efficiencies.get("efficiency_per_1000km", 1) + compression_per_1000km = efficiencies.get("compression_per_1000km", 0) logger.info( f"Specified losses for {carrier} transmission" @@ -3297,7 +3302,6 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): "Splitting bidirectional links." 
) - carrier_i = n.links.query("carrier == @carrier").index n.links.loc[carrier_i, "p_min_pu"] = 0 n.links.loc[carrier_i, "efficiency"] = ( efficiency_static * efficiency_per_1000km ** (n.links.loc[carrier_i, "length"] / 1e3) @@ -3312,6 +3316,11 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): n.links = pd.concat([n.links, rev_links], sort=False) n.links["reversed"] = n.links["reversed"].fillna(False) + # do compression losses after concatenation to take electricity consumption at bus0 in either direction + carrier_i = n.links.query("carrier == @carrier").index + if compression_per_1000km > 0: + n.links.loc[carrier_i, "bus2"] = n.links.loc[carrier_i, "bus0"].map(n.buses.location) # electricity + n.links.loc[carrier_i, "efficiency2"] = - compression_per_1000km * n.links.loc[carrier_i, "length"] / 1e3 if __name__ == "__main__": if "snakemake" not in globals(): From 592bc4eee7f57ef93e104f266595cb6d8ded754d Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 12 Sep 2023 17:28:42 +0200 Subject: [PATCH 005/101] cherry-pick --- scripts/prepare_sector_network.py | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 7b58329c..de02095d 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3285,11 +3285,10 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): carrier_i = n.links.query("carrier == @carrier").index -<<<<<<< HEAD - if not any(v != 1. for v in efficiencies.values()) or carrier_i.empty: -======= - if not any((v != 1.0) or (v >= 0) for v in efficiencies.values()) or carrier_i.empty: ->>>>>>> 5822adb0 (add option to consider compression losses in pipelines as electricity demand) + if ( + not any((v != 1.0) or (v >= 0) for v in efficiencies.values()) + or carrier_i.empty + ): return efficiency_static = efficiencies.get("efficiency_static", 1) @@ -3303,8 +3302,10 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): ) n.links.loc[carrier_i, "p_min_pu"] = 0 - n.links.loc[carrier_i, "efficiency"] = ( - efficiency_static * efficiency_per_1000km ** (n.links.loc[carrier_i, "length"] / 1e3) + n.links.loc[ + carrier_i, "efficiency" + ] = efficiency_static * efficiency_per_1000km ** ( + n.links.loc[carrier_i, "length"] / 1e3 ) rev_links = ( n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) @@ -3319,8 +3320,13 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): # do compression losses after concatenation to take electricity consumption at bus0 in either direction carrier_i = n.links.query("carrier == @carrier").index if compression_per_1000km > 0: - n.links.loc[carrier_i, "bus2"] = n.links.loc[carrier_i, "bus0"].map(n.buses.location) # electricity - n.links.loc[carrier_i, "efficiency2"] = - compression_per_1000km * n.links.loc[carrier_i, "length"] / 1e3 + n.links.loc[carrier_i, "bus2"] = n.links.loc[carrier_i, "bus0"].map( + n.buses.location + ) # electricity + n.links.loc[carrier_i, "efficiency2"] = ( + -compression_per_1000km * n.links.loc[carrier_i, "length"] / 1e3 + ) + if __name__ == "__main__": if "snakemake" not in globals(): From 666e79e2fdb7b86348a81e097a0c6e200872b661 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 10 Aug 2023 17:13:19 +0200 Subject: [PATCH 006/101] improve logging for lossy bidirectional links --- scripts/prepare_sector_network.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/prepare_sector_network.py 
b/scripts/prepare_sector_network.py index de02095d..6355f603 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3296,8 +3296,8 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): compression_per_1000km = efficiencies.get("compression_per_1000km", 0) logger.info( - f"Specified losses for {carrier} transmission" - f"(static: {efficiency_static}, per 1000km: {efficiency_per_1000km})." + f"Specified losses for {carrier} transmission " + f"(static: {efficiency_static}, per 1000km: {efficiency_per_1000km}, compression per 1000km: {compression_per_1000km}). " "Splitting bidirectional links." ) From bde04eeac9dad86b9d05ce6d23f48d98a728ba7f Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 29 Aug 2023 16:32:01 +0200 Subject: [PATCH 007/101] lossy_bidirectional_links: set length of reversed lines to 0 to avoid double counting in line volume limit --- scripts/prepare_sector_network.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 6355f603..cd5d9570 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3311,6 +3311,7 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) ) rev_links.capital_cost = 0 + rev_links.length = 0 rev_links["reversed"] = True rev_links.index = rev_links.index.map(lambda x: x + "-reversed") From 9b9090c76cb1fbd601626342ce569d87e490d9d0 Mon Sep 17 00:00:00 2001 From: chrstphtrs Date: Wed, 18 Oct 2023 16:59:49 +0200 Subject: [PATCH 008/101] add option for additional national carbon budget constraints --- config/config.default.yaml | 17 +++++++ rules/solve_myopic.smk | 2 + scripts/solve_network.py | 99 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 118 insertions(+) diff --git a/config/config.default.yaml b/config/config.default.yaml index 7dc0cf76..325bbbaa 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -84,6 +84,22 @@ co2_budget: 2045: 0.032 2050: 0.000 +co2_budget_national: + 2030: + 'DE': 0.350 + 'AT': 0.450 + 'BE': 0.450 + 'CH': 0.450 + 'CZ': 0.450 + 'DK': 0.450 + 'FR': 0.450 + 'GB': 0.450 + 'LU': 0.450 + 'NL': 0.450 + 'NO': 0.450 + 'PL': 0.450 + 'SE': 0.450 + # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#electricity electricity: voltages: [220., 300., 380.] 
@@ -454,6 +470,7 @@ sector: hydrogen_turbine: false SMR: true SMR_cc: true + co2_budget_national: false regional_co2_sequestration_potential: enable: false attribute: 'conservative estimate Mt' diff --git a/rules/solve_myopic.smk b/rules/solve_myopic.smk index 8a93d24a..06fd9b79 100644 --- a/rules/solve_myopic.smk +++ b/rules/solve_myopic.smk @@ -88,11 +88,13 @@ rule solve_sector_network_myopic: co2_sequestration_potential=config["sector"].get( "co2_sequestration_potential", 200 ), + countries=config["countries"], input: network=RESULTS + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", costs="data/costs_{planning_horizons}.csv", config=RESULTS + "config.yaml", + co2_totals_name=RESOURCES + "co2_totals.csv", output: RESULTS + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 224d4714..f5dd79e0 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -41,6 +41,8 @@ logger = logging.getLogger(__name__) pypsa.pf.logger.setLevel(logging.WARNING) from pypsa.descriptors import get_switchable_as_dense as get_as_dense +from prepare_sector_network import emission_sectors_from_opts + def add_land_use_constraint(n, planning_horizons, config): if "m" in snakemake.wildcards.clusters: @@ -762,6 +764,92 @@ def add_pipe_retrofit_constraint(n): n.model.add_constraints(lhs == rhs, name="Link-pipe_retrofit") +def add_co2limit_country(n, config, limit_countries, nyears=1.0): + """ + Add a set of emissions limit constraints for specified countries. + + The countries and emissions limits are specified in the config file entry 'co2_budget_country_{investment_year}'. + + Parameters + ---------- + n : pypsa.Network + config : dict + limit_countries : dict + nyears: float, optional + Used to scale the emissions constraint to the number of snapshots of the base network. + """ + logger.info(f"Adding CO2 budget limit for each country as per unit of 1990 levels") + + # TODO: n.config (submodule) vs snakemake.config (main module, overwrite/overwritten config)? + # countries = config.countries + # print(config) + countries = ['AT', 'BE', 'CH', 'CZ', 'DE', 'DK', 'FR', 'GB', 'LU', 'NL', 'NO', 'PL', 'SE'] + + # TODO: import function from prepare_sector_network? Move to common place? 
+ sectors = emission_sectors_from_opts(opts) + + # convert Mt to tCO2 + co2_totals = 1e6 * pd.read_csv(snakemake.input.co2_totals_name, index_col=0) + + co2_limit_countries = co2_totals.loc[countries, sectors].sum(axis=1) + co2_limit_countries = co2_limit_countries.loc[co2_limit_countries.index.isin(limit_countries.keys())] + + co2_limit_countries *= co2_limit_countries.index.map(limit_countries) * nyears + + p = n.model["Link-p"] # dimension: (time, component) + + # NB: Most country-specific links retain their locational information in bus1 (except for DAC, where it is in bus2) + country = n.links.bus1.map(n.buses.location).map(n.buses.country) + country_DAC = ( + n.links[n.links.carrier == "DAC"] + .bus2.map(n.buses.location) + .map(n.buses.country) + ) + country[country_DAC.index] = country_DAC + + lhs = [] + for port in [col[3:] for col in n.links if col.startswith("bus")]: + if port == str(0): + efficiency = ( + n.links["efficiency"].apply(lambda x: 1.0).rename("efficiency0") + ) + elif port == str(1): + efficiency = n.links["efficiency"].rename("efficiency1") + else: + efficiency = n.links[f"efficiency{port}"] + mask = n.links[f"bus{port}"].map(n.buses.carrier).eq("co2") + + idx = n.links[mask].index + + grouping = country.loc[idx] + + if not grouping.isnull().all(): + expr = ( + (p.loc[:, idx] * efficiency[idx]) + .groupby(grouping, axis=1) + .sum() + .sum(dims="snapshot") + ) + lhs.append(expr) + + lhs = sum(lhs) # dimension: (country) + lhs = lhs.rename({list(lhs.dims.keys())[0]: "country"}) + rhs = pd.Series(co2_limit_countries) # dimension: (country) + + for ct in lhs.indexes["country"]: + n.model.add_constraints( + lhs.loc[ct] <= rhs[ct], + name=f"GlobalConstraint-co2_limit_per_country{ct}", + ) + n.add( + "GlobalConstraint", + f"co2_limit_per_country{ct}", + constant=rhs[ct], + sense="<=", + type="", + ) + + def extra_functionality(n, snapshots): """ Collects supplementary constraints which will be passed to @@ -792,6 +880,17 @@ def extra_functionality(n, snapshots): add_carbon_budget_constraint(n, snapshots) add_retrofit_gas_boiler_constraint(n, snapshots) + if n.config["sector"]["co2_budget_national"]: + # prepare co2 constraint + nhours = n.snapshot_weightings.generators.sum() + nyears = nhours / 8760 + investment_year = int(snakemake.wildcards.planning_horizons[-4:]) + limit_countries = snakemake.config["co2_budget_national"][investment_year] + + # add co2 constraint for each country + logger.info(f"Add CO2 limit for each country") + add_co2limit_country(n, config, limit_countries, nyears) + def solve_network(n, config, solving, opts="", **kwargs): set_of_options = solving["solver"]["options"] From a35f5479aedd933773634a931e407d0535a6da64 Mon Sep 17 00:00:00 2001 From: chrstphtrs Date: Tue, 24 Oct 2023 14:06:17 +0200 Subject: [PATCH 009/101] add links instead of equal-and-opposite fuel/emissions load pairs for land transport oil (ICEs), naphtha for industry and kerosene for aviation (before summed as 'oil'), shipping oil, shipping methanol, agriculture machinery oil --- scripts/prepare_sector_network.py | 256 +++++++++++++++++++----------- 1 file changed, 165 insertions(+), 91 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index ee2f0e3c..989bdb78 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -135,6 +135,7 @@ def define_spatial(nodes, options): spatial.oil = SimpleNamespace() spatial.oil.nodes = ["EU oil"] spatial.oil.locations = ["EU"] + spatial.oil.land_transport = nodes + " land 
transport oil" # uranium spatial.uranium = SimpleNamespace() @@ -1467,8 +1468,8 @@ def add_land_transport(n, costs): n.madd( "Bus", nodes, - location=nodes, suffix=" EV battery", + location=nodes, carrier="Li ion", unit="MWh_el", ) @@ -1568,28 +1569,31 @@ def add_land_transport(n, costs): ice_efficiency = options["transport_internal_combustion_efficiency"] n.madd( - "Load", - nodes, - suffix=" land transport oil", - bus=spatial.oil.nodes, + "Bus", + spatial.oil.land_transport, + location=nodes, carrier="land transport oil", - p_set=ice_share / ice_efficiency * transport[nodes], + unit="land transport", ) - co2 = ( - ice_share - / ice_efficiency - * transport[nodes].sum().sum() - / nhours - * costs.at["oil", "CO2 intensity"] - ) - - n.add( + n.madd( "Load", - "land transport oil emissions", - bus="co2 atmosphere", - carrier="land transport oil emissions", - p_set=-co2, + spatial.oil.land_transport, + bus=spatial.oil.land_transport, + carrier="land transport oil", + p_set=ice_share / ice_efficiency * transport[nodes].rename(columns=lambda x: x + " land transport oil"), + ) + + n.madd( + "Link", + spatial.oil.land_transport, + bus0=spatial.oil.nodes, + bus1=spatial.oil.land_transport, + bus2="co2 atmosphere", + carrier="land transport oil", + efficiency=ice_efficiency, + efficiency2=costs.at["oil", "CO2 intensity"], + p_nom_extendable=True, ) @@ -2611,46 +2615,36 @@ def add_industry(n, costs): ) p_set_methanol = shipping_methanol_share * p_set.sum() * efficiency - n.madd( + n.add( + "Bus", + "EU shipping methanol", + location="EU", + carrier="shipping methanol", + unit="MWh_LHV", + ) + + n.add( "Load", - spatial.methanol.nodes, - suffix=" shipping methanol", - bus=spatial.methanol.nodes, + "shipping methanol", + bus="EU shipping methanol", carrier="shipping methanol", p_set=p_set_methanol, ) - # CO2 intensity methanol based on stoichiometric calculation with 22.7 GJ/t methanol (32 g/mol), CO2 (44 g/mol), 277.78 MWh/TJ = 0.218 t/MWh - co2 = p_set_methanol / options["MWh_MeOH_per_tCO2"] - - n.add( - "Load", - "shipping methanol emissions", - bus="co2 atmosphere", - carrier="shipping methanol emissions", - p_set=-co2, - ) - - if shipping_oil_share: - p_set_oil = shipping_oil_share * p_set.sum() + if len(spatial.methanol.nodes) == 1: + link_names = ["EU shipping methanol"] + else: + link_names = nodes + " shipping methanol" n.madd( - "Load", - spatial.oil.nodes, - suffix=" shipping oil", - bus=spatial.oil.nodes, - carrier="shipping oil", - p_set=p_set_oil, - ) - - co2 = p_set_oil * costs.at["oil", "CO2 intensity"] - - n.add( - "Load", - "shipping oil emissions", - bus="co2 atmosphere", - carrier="shipping oil emissions", - p_set=-co2, + "Link", + link_names, + bus0=spatial.methanol.nodes, + bus1="EU shipping methanol", + bus2="co2 atmosphere", + carrier="shipping methanol", + p_nom_extendable=True, + efficiency2=1 / options["MWh_MeOH_per_tCO2"], # CO2 intensity methanol based on stoichiometric calculation with 22.7 GJ/t methanol (32 g/mol), CO2 (44 g/mol), 277.78 MWh/TJ = 0.218 t/MWh ) if "oil" not in n.buses.carrier.unique(): @@ -2683,6 +2677,41 @@ def add_industry(n, costs): marginal_cost=costs.at["oil", "fuel"], ) + if shipping_oil_share: + p_set_oil = shipping_oil_share * p_set.sum() + + n.add( + "Bus", + "EU shipping oil", + location="EU", + carrier="shipping oil", + unit="MWh_LHV", + ) + + n.add( + "Load", + "shipping oil", + bus="EU shipping oil", + carrier="shipping oil", + p_set=p_set_oil, + ) + + if len(spatial.oil.nodes) == 1: + link_names = ["EU shipping oil"] + else: + link_names = 
nodes + " shipping oil" + + n.madd( + "Link", + link_names, + bus0=spatial.oil.nodes, + bus1="EU shipping oil", + bus2="co2 atmosphere", + carrier="shipping oil", + p_nom_extendable=True, + efficiency2=costs.at["oil", "CO2 intensity"], + ) + if options["oil_boilers"]: nodes_heat = create_nodes_for_heat_sector()[0] @@ -2724,19 +2753,49 @@ def add_industry(n, costs): lifetime=costs.at["Fischer-Tropsch", "lifetime"], ) + # naphtha demand_factor = options.get("HVC_demand_factor", 1) - p_set = demand_factor * industrial_demand.loc[nodes, "naphtha"].sum() / nhours if demand_factor != 1: logger.warning(f"Changing HVC demand by {demand_factor*100-100:+.2f}%.") - n.madd( + # NB: CO2 gets released again to atmosphere when plastics decay + # except for the process emissions when naphtha is used for petrochemicals, which can be captured with other industry process emissions + # convert process emissions from feedstock from MtCO2 to energy demand + p_set = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]).sum() / nhours + + n.add( + "Bus", + "EU naphtha for industry", + location="EU", + carrier="naphtha for industry", + unit="MWh_LHV", + ) + + n.add( "Load", - ["naphtha for industry"], - bus=spatial.oil.nodes, + "naphtha for industry", + bus="EU naphtha for industry", carrier="naphtha for industry", p_set=p_set, ) + if len(spatial.oil.nodes) == 1: + link_names = ["EU naphtha for industry"] + else: + link_names = nodes + " naphtha for industry" + + n.madd( + "Link", + link_names, + bus0=spatial.oil.nodes, + bus1="EU naphtha for industry", + bus2="co2 atmosphere", + carrier="naphtha for industry", + p_nom_extendable=True, + efficiency2=costs.at["oil", "CO2 intensity"], + ) + + # aviation demand_factor = options.get("aviation_demand_factor", 1) all_aviation = ["total international aviation", "total domestic aviation"] p_set = ( @@ -2748,29 +2807,36 @@ def add_industry(n, costs): if demand_factor != 1: logger.warning(f"Changing aviation demand by {demand_factor*100-100:+.2f}%.") - n.madd( - "Load", - ["kerosene for aviation"], - bus=spatial.oil.nodes, + n.add( + "Bus", + "EU kerosene for aviation", + location="EU", carrier="kerosene for aviation", - p_set=p_set, - ) - - # NB: CO2 gets released again to atmosphere when plastics decay or kerosene is burned - # except for the process emissions when naphtha is used for petrochemicals, which can be captured with other industry process emissions - # tco2 per hour - co2_release = ["naphtha for industry", "kerosene for aviation"] - co2 = ( - n.loads.loc[co2_release, "p_set"].sum() * costs.at["oil", "CO2 intensity"] - - industrial_demand.loc[nodes, "process emission from feedstock"].sum() / nhours + unit="MWh_LHV", ) n.add( "Load", - "oil emissions", - bus="co2 atmosphere", - carrier="oil emissions", - p_set=-co2, + "kerosene for aviation", + bus="EU kerosene for aviation", + carrier="kerosene for aviation", + p_set=p_set, + ) + + if len(spatial.oil.nodes) == 1: + link_names = ["EU kerosene for aviation"] + else: + link_names = nodes + " kerosene for aviation" + + n.madd( + "Link", + link_names, + bus0=spatial.oil.nodes, + bus1="EU kerosene for aviation", + bus2="co2 atmosphere", + carrier="kerosene for aviation", + p_nom_extendable=True, + efficiency2=costs.at["oil", "CO2 intensity"], ) # TODO simplify bus expression @@ -3018,28 +3084,36 @@ def add_agriculture(n, costs): ) if oil_share > 0: - n.madd( - "Load", - ["agriculture machinery oil"], - 
bus=spatial.oil.nodes, + n.add( + "Bus", + "EU agriculture machinery oil", + location="EU", carrier="agriculture machinery oil", - p_set=oil_share * machinery_nodal_energy.sum() * 1e6 / nhours, - ) - - co2 = ( - oil_share - * machinery_nodal_energy.sum() - * 1e6 - / nhours - * costs.at["oil", "CO2 intensity"] + unit="MWh_LHV", ) n.add( "Load", - "agriculture machinery oil emissions", - bus="co2 atmosphere", - carrier="agriculture machinery oil emissions", - p_set=-co2, + "agriculture machinery oil", + bus="EU agriculture machinery oil", + carrier="agriculture machinery oil", + p_set=oil_share * machinery_nodal_energy.sum() * 1e6 / nhours, + ) + + if len(spatial.oil.nodes) == 1: + link_names = ["EU agriculture machinery oil"] + else: + link_names = nodes + " agriculture machinery oil" + + n.madd( + "Link", + link_names, + bus0=spatial.oil.nodes, + bus1="EU agriculture machinery oil", + bus2="co2 atmosphere", + carrier="agriculture machinery oil", + p_nom_extendable=True, + efficiency2=costs.at["oil", "CO2 intensity"], ) From 94afba7c5d195b2cf6d7d17016e78040c4659440 Mon Sep 17 00:00:00 2001 From: chrstphtrs Date: Tue, 24 Oct 2023 16:39:33 +0200 Subject: [PATCH 010/101] add coal tech_color to config --- config/config.default.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/config/config.default.yaml b/config/config.default.yaml index 325bbbaa..cafb9d1d 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -816,6 +816,7 @@ plotting: Coal: '#545454' coal: '#545454' Coal marginal: '#545454' + coal for industry: '#343434' solid: '#545454' Lignite: '#826837' lignite: '#826837' From 7cb677d0e6056f52560e6ddb53e88f76861d8fc2 Mon Sep 17 00:00:00 2001 From: chrstphtrs Date: Tue, 24 Oct 2023 16:39:58 +0200 Subject: [PATCH 011/101] clean up function add_co2limit_country --- scripts/solve_network.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index f5dd79e0..b372b366 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -764,7 +764,7 @@ def add_pipe_retrofit_constraint(n): n.model.add_constraints(lhs == rhs, name="Link-pipe_retrofit") -def add_co2limit_country(n, config, limit_countries, nyears=1.0): +def add_co2limit_country(n, limit_countries, nyears=1.0): """ Add a set of emissions limit constraints for specified countries. @@ -780,10 +780,7 @@ def add_co2limit_country(n, config, limit_countries, nyears=1.0): """ logger.info(f"Adding CO2 budget limit for each country as per unit of 1990 levels") - # TODO: n.config (submodule) vs snakemake.config (main module, overwrite/overwritten config)? - # countries = config.countries - # print(config) - countries = ['AT', 'BE', 'CH', 'CZ', 'DE', 'DK', 'FR', 'GB', 'LU', 'NL', 'NO', 'PL', 'SE'] + countries = n.config["countries"] # TODO: import function from prepare_sector_network? Move to common place? 
sectors = emission_sectors_from_opts(opts) @@ -814,7 +811,7 @@ def add_co2limit_country(n, config, limit_countries, nyears=1.0): n.links["efficiency"].apply(lambda x: 1.0).rename("efficiency0") ) elif port == str(1): - efficiency = n.links["efficiency"].rename("efficiency1") + efficiency = n.links["efficiency"] else: efficiency = n.links[f"efficiency{port}"] mask = n.links[f"bus{port}"].map(n.buses.carrier).eq("co2") @@ -889,7 +886,7 @@ def extra_functionality(n, snapshots): # add co2 constraint for each country logger.info(f"Add CO2 limit for each country") - add_co2limit_country(n, config, limit_countries, nyears) + add_co2limit_country(n, limit_countries, nyears) def solve_network(n, config, solving, opts="", **kwargs): From e2b2eafbc12e17254a0a517c87ae76bf08962585 Mon Sep 17 00:00:00 2001 From: chrstphtrs Date: Tue, 24 Oct 2023 16:46:58 +0200 Subject: [PATCH 012/101] add geographical resolution to oil and methanol for options['co2_budget_national'] to include all necessary links in national co2 budget constraints --- scripts/add_existing_baseyear.py | 2 +- scripts/prepare_sector_network.py | 187 +++++++++++++++++------------- 2 files changed, 106 insertions(+), 83 deletions(-) diff --git a/scripts/add_existing_baseyear.py b/scripts/add_existing_baseyear.py index 1474b004..7ddc6b1d 100644 --- a/scripts/add_existing_baseyear.py +++ b/scripts/add_existing_baseyear.py @@ -303,7 +303,7 @@ def add_power_capacities_installed_before_baseyear(n, grouping_years, costs, bas else: bus0 = vars(spatial)[carrier[generator]].nodes if "EU" not in vars(spatial)[carrier[generator]].locations: - bus0 = bus0.intersection(capacity.index + " gas") + bus0 = bus0.intersection(capacity.index + " " + carrier[generator]) # check for missing bus missing_bus = pd.Index(bus0).difference(n.buses.index) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 989bdb78..34bfdce7 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -128,13 +128,33 @@ def define_spatial(nodes, options): # methanol spatial.methanol = SimpleNamespace() - spatial.methanol.nodes = ["EU methanol"] - spatial.methanol.locations = ["EU"] + + if options["co2_budget_national"]: + spatial.methanol.nodes = nodes + " methanol" + spatial.methanol.locations = nodes + spatial.methanol.shipping = nodes + " shipping methanol" + else: + spatial.methanol.nodes = ["EU methanol"] + spatial.methanol.locations = ["EU"] + spatial.methanol.shipping = ["EU shipping methanol"] # oil spatial.oil = SimpleNamespace() - spatial.oil.nodes = ["EU oil"] - spatial.oil.locations = ["EU"] + + if options["co2_budget_national"]: + spatial.oil.nodes = nodes + " oil" + spatial.oil.locations = nodes + spatial.oil.naphtha = nodes + " naphtha for industry" + spatial.oil.kerosene = nodes + " kerosene for aviation" + spatial.oil.shipping = nodes + " shipping oil" + spatial.oil.agriculture_machinery = nodes + " agriculture machinery oil" + else: + spatial.oil.nodes = ["EU oil"] + spatial.oil.locations = ["EU"] + spatial.oil.naphtha = ["EU naphtha for industry"] + spatial.oil.kerosene = ["EU kerosene for aviation"] + spatial.oil.shipping = ["EU shipping oil"] + spatial.oil.agriculture_machinery = ["EU agriculture machinery oil"] spatial.oil.land_transport = nodes + " land transport oil" # uranium @@ -2613,34 +2633,34 @@ def add_industry(n, costs): efficiency = ( options["shipping_oil_efficiency"] / options["shipping_methanol_efficiency"] ) - p_set_methanol = shipping_methanol_share * p_set.sum() * efficiency - n.add( 
+ # need to aggregate potentials if methanol not nodally resolved + if options["co2_budget_national"]: + p_set_methanol = shipping_methanol_share * p_set * efficiency + else: + p_set_methanol = shipping_methanol_share * p_set.sum() * efficiency + + n.madd( "Bus", - "EU shipping methanol", - location="EU", + spatial.methanol.shipping, + location=spatial.methanol.locations, carrier="shipping methanol", unit="MWh_LHV", ) - n.add( + n.madd( "Load", - "shipping methanol", - bus="EU shipping methanol", + spatial.methanol.shipping, + bus=spatial.methanol.shipping, carrier="shipping methanol", p_set=p_set_methanol, ) - if len(spatial.methanol.nodes) == 1: - link_names = ["EU shipping methanol"] - else: - link_names = nodes + " shipping methanol" - n.madd( "Link", - link_names, + spatial.methanol.shipping, bus0=spatial.methanol.nodes, - bus1="EU shipping methanol", + bus1=spatial.methanol.shipping, bus2="co2 atmosphere", carrier="shipping methanol", p_nom_extendable=True, @@ -2678,34 +2698,33 @@ def add_industry(n, costs): ) if shipping_oil_share: - p_set_oil = shipping_oil_share * p_set.sum() + # need to aggregate potentials if oil not nodally resolved + if options["co2_budget_national"]: + p_set_oil = shipping_oil_share * p_set + else: + p_set_oil = shipping_oil_share * p_set.sum() - n.add( + n.madd( "Bus", - "EU shipping oil", - location="EU", + spatial.oil.shipping, + location=spatial.oil.locations, carrier="shipping oil", unit="MWh_LHV", ) - n.add( + n.madd( "Load", - "shipping oil", - bus="EU shipping oil", + spatial.oil.shipping, + bus=spatial.oil.shipping, carrier="shipping oil", p_set=p_set_oil, ) - if len(spatial.oil.nodes) == 1: - link_names = ["EU shipping oil"] - else: - link_names = nodes + " shipping oil" - n.madd( "Link", - link_names, + spatial.oil.shipping, bus0=spatial.oil.nodes, - bus1="EU shipping oil", + bus1=spatial.oil.shipping, bus2="co2 atmosphere", carrier="shipping oil", p_nom_extendable=True, @@ -2761,34 +2780,33 @@ def add_industry(n, costs): # NB: CO2 gets released again to atmosphere when plastics decay # except for the process emissions when naphtha is used for petrochemicals, which can be captured with other industry process emissions # convert process emissions from feedstock from MtCO2 to energy demand - p_set = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]).sum() / nhours + # need to aggregate potentials if oil not nodally resolved + if options["co2_budget_national"]: + p_set = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]) / nhours + else: + p_set = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]).sum() / nhours - n.add( + n.madd( "Bus", - "EU naphtha for industry", - location="EU", + spatial.oil.naphtha, + location=spatial.oil.locations, carrier="naphtha for industry", unit="MWh_LHV", ) - n.add( + n.madd( "Load", - "naphtha for industry", - bus="EU naphtha for industry", + spatial.oil.naphtha, + bus=spatial.oil.naphtha, carrier="naphtha for industry", p_set=p_set, ) - if len(spatial.oil.nodes) == 1: - link_names = ["EU naphtha for industry"] - else: - link_names = nodes + " naphtha for industry" - n.madd( "Link", - link_names, + spatial.oil.naphtha, bus0=spatial.oil.nodes, - bus1="EU naphtha for industry", + 
bus1=spatial.oil.naphtha, bus2="co2 atmosphere", carrier="naphtha for industry", p_nom_extendable=True, @@ -2797,42 +2815,47 @@ def add_industry(n, costs): # aviation demand_factor = options.get("aviation_demand_factor", 1) - all_aviation = ["total international aviation", "total domestic aviation"] - p_set = ( - demand_factor - * pop_weighted_energy_totals.loc[nodes, all_aviation].sum(axis=1).sum() - * 1e6 - / nhours - ) if demand_factor != 1: logger.warning(f"Changing aviation demand by {demand_factor*100-100:+.2f}%.") - n.add( + all_aviation = ["total international aviation", "total domestic aviation"] + # need to aggregate potentials if oil not nodally resolved + if options["co2_budget_national"]: + p_set = ( + demand_factor + * pop_weighted_energy_totals.loc[nodes, all_aviation].sum(axis=1) + * 1e6 + / nhours + ) + else: + p_set = ( + demand_factor + * pop_weighted_energy_totals.loc[nodes, all_aviation].sum(axis=1).sum() + * 1e6 + / nhours + ) + + n.madd( "Bus", - "EU kerosene for aviation", - location="EU", + spatial.oil.kerosene, + location=spatial.oil.locations, carrier="kerosene for aviation", unit="MWh_LHV", ) - n.add( + n.madd( "Load", - "kerosene for aviation", - bus="EU kerosene for aviation", + spatial.oil.kerosene, + bus=spatial.oil.kerosene, carrier="kerosene for aviation", p_set=p_set, ) - if len(spatial.oil.nodes) == 1: - link_names = ["EU kerosene for aviation"] - else: - link_names = nodes + " kerosene for aviation" - n.madd( "Link", - link_names, + spatial.oil.kerosene, bus0=spatial.oil.nodes, - bus1="EU kerosene for aviation", + bus1=spatial.oil.kerosene, bus2="co2 atmosphere", carrier="kerosene for aviation", p_nom_extendable=True, @@ -3062,7 +3085,7 @@ def add_agriculture(n, costs): machinery_nodal_energy = pop_weighted_energy_totals.loc[ nodes, "total agriculture machinery" - ] + ] * 1e6 if electric_share > 0: efficiency_gain = ( @@ -3079,37 +3102,37 @@ def add_agriculture(n, costs): p_set=electric_share / efficiency_gain * machinery_nodal_energy - * 1e6 / nhours, ) if oil_share > 0: - n.add( + # need to aggregate potentials if oil not nodally resolved + if options["co2_budget_national"]: + p_set = oil_share * machinery_nodal_energy / nhours + else: + p_set = oil_share * machinery_nodal_energy.sum() / nhours + + n.madd( "Bus", - "EU agriculture machinery oil", - location="EU", + spatial.oil.agriculture_machinery, + location=spatial.oil.locations, carrier="agriculture machinery oil", unit="MWh_LHV", ) - n.add( + n.madd( "Load", - "agriculture machinery oil", - bus="EU agriculture machinery oil", + spatial.oil.agriculture_machinery, + bus=spatial.oil.agriculture_machinery, carrier="agriculture machinery oil", - p_set=oil_share * machinery_nodal_energy.sum() * 1e6 / nhours, + p_set=p_set, ) - if len(spatial.oil.nodes) == 1: - link_names = ["EU agriculture machinery oil"] - else: - link_names = nodes + " agriculture machinery oil" - n.madd( "Link", - link_names, + spatial.oil.agriculture_machinery, bus0=spatial.oil.nodes, - bus1="EU agriculture machinery oil", + bus1=spatial.oil.agriculture_machinery, bus2="co2 atmosphere", carrier="agriculture machinery oil", p_nom_extendable=True, From 2ad9ca8f7b10155d2b8f738d11a4948ac5f17fb1 Mon Sep 17 00:00:00 2001 From: chrstphtrs Date: Thu, 26 Oct 2023 11:17:57 +0200 Subject: [PATCH 013/101] add regionalised oil load for process emissions from naphtha as feedstock --- scripts/prepare_sector_network.py | 30 +++++++++++++++++++++++++++--- 1 file changed, 27 insertions(+), 3 deletions(-) diff --git 
a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 34bfdce7..54830106 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2782,9 +2782,25 @@ def add_industry(n, costs): # convert process emissions from feedstock from MtCO2 to energy demand # need to aggregate potentials if oil not nodally resolved if options["co2_budget_national"]: - p_set = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]) / nhours + p_set_plastics = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]) / nhours else: - p_set = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]).sum() / nhours + p_set_plastics = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]).sum() / nhours + + if options["co2_budget_national"]: + p_set_process_emissions = ( + demand_factor + * (industrial_demand.loc[nodes, "process emission from feedstock"] + / costs.at["oil", "CO2 intensity"]) + / nhours + ) + else: + p_set_process_emissions = ( + demand_factor + * (industrial_demand.loc[nodes, "process emission from feedstock"] + / costs.at["oil", "CO2 intensity"] + ).sum() + / nhours + ) n.madd( "Bus", @@ -2799,7 +2815,15 @@ def add_industry(n, costs): spatial.oil.naphtha, bus=spatial.oil.naphtha, carrier="naphtha for industry", - p_set=p_set, + p_set=p_set_plastics, + ) + + n.madd( + "Load", + ["naphtha for industry into process emissions from feedstock"], + bus=spatial.oil.nodes, + carrier="naphtha for industry", + p_set=p_set_process_emissions, ) n.madd( From 82ac430fd92f2724918ff0e25568fdd57b75a9a5 Mon Sep 17 00:00:00 2001 From: chrstphtrs Date: Wed, 8 Nov 2023 09:57:24 +0100 Subject: [PATCH 014/101] fix spatial resolution for solid biomass links and naphtha oil loads under 'co2_spatial: true' flag --- scripts/prepare_sector_network.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 54830106..a5ca8941 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -145,6 +145,7 @@ def define_spatial(nodes, options): spatial.oil.nodes = nodes + " oil" spatial.oil.locations = nodes spatial.oil.naphtha = nodes + " naphtha for industry" + spatial.oil.naphtha_process_emissions = nodes + " naphtha process emissions" spatial.oil.kerosene = nodes + " kerosene for aviation" spatial.oil.shipping = nodes + " shipping oil" spatial.oil.agriculture_machinery = nodes + " agriculture machinery oil" @@ -152,6 +153,7 @@ def define_spatial(nodes, options): spatial.oil.nodes = ["EU oil"] spatial.oil.locations = ["EU"] spatial.oil.naphtha = ["EU naphtha for industry"] + spatial.oil.naphtha_process_emissions = "EU naphtha process emissions" spatial.oil.kerosene = ["EU kerosene for aviation"] spatial.oil.shipping = ["EU shipping oil"] spatial.oil.agriculture_machinery = ["EU agriculture machinery oil"] @@ -2443,9 +2445,14 @@ def add_industry(n, costs): efficiency=1.0, ) + if len(spatial.biomass.industry_cc)<=1 and len(spatial.co2.nodes)>1: + link_names = nodes + " " + spatial.biomass.industry_cc + else: + link_names = spatial.biomass.industry_cc + 
n.madd( "Link", - spatial.biomass.industry_cc, + link_names, bus0=spatial.biomass.nodes, bus1=spatial.biomass.industry, bus2="co2 atmosphere", @@ -2820,7 +2827,7 @@ def add_industry(n, costs): n.madd( "Load", - ["naphtha for industry into process emissions from feedstock"], + spatial.oil.naphtha_process_emissions, bus=spatial.oil.nodes, carrier="naphtha for industry", p_set=p_set_process_emissions, From 014a4cd62e3bc2f41e9e0ccd8e04ff6c169e9a60 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sun, 12 Nov 2023 18:42:53 +0100 Subject: [PATCH 015/101] fix for losses with multi-period investment --- scripts/solve_network.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 83281284..fa59f7a3 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -697,7 +697,8 @@ def add_lossy_bidirectional_link_constraints(n): if not n.links.p_nom_extendable.any() or not "reversed" in n.links.columns: return - carriers = n.links.loc[n.links.reversed, "carrier"].unique() + reversed_links = n.links.reversed.fillna(0).astype(bool) + carriers = n.links.loc[reversed_links, "carrier"].unique() forward_i = n.links.query( "carrier in @carriers and ~reversed and p_nom_extendable" From d9ec127f996f854cc775cdfcc6db18cd26cf3ea5 Mon Sep 17 00:00:00 2001 From: chrstphtrs Date: Tue, 21 Nov 2023 14:55:32 +0100 Subject: [PATCH 016/101] Add process emissions to country emissions constraint, fix snapshot weighting --- scripts/solve_network.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index b372b366..e2edb2eb 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -795,7 +795,7 @@ def add_co2limit_country(n, limit_countries, nyears=1.0): p = n.model["Link-p"] # dimension: (time, component) - # NB: Most country-specific links retain their locational information in bus1 (except for DAC, where it is in bus2) + # NB: Most country-specific links retain their locational information in bus1 (except for DAC, where it is in bus2, and process emissions, where it is in bus0) country = n.links.bus1.map(n.buses.location).map(n.buses.country) country_DAC = ( n.links[n.links.carrier == "DAC"] @@ -803,6 +803,12 @@ def add_co2limit_country(n, limit_countries, nyears=1.0): .map(n.buses.country) ) country[country_DAC.index] = country_DAC + country_process_emissions = ( + n.links[n.links.carrier.str.contains("process emissions")] + .bus0.map(n.buses.location) + .map(n.buses.country) + ) + country[country_process_emissions.index] = country_process_emissions lhs = [] for port in [col[3:] for col in n.links if col.startswith("bus")]: @@ -818,13 +824,18 @@ def add_co2limit_country(n, limit_countries, nyears=1.0): idx = n.links[mask].index + international = n.links.carrier.map( + lambda x: 0.4 if x in ["kerosene for aviation", "shipping oil"] else 1.0 + ) grouping = country.loc[idx] if not grouping.isnull().all(): expr = ( - (p.loc[:, idx] * efficiency[idx]) + ((p.loc[:, idx] * efficiency[idx] * international[idx]) .groupby(grouping, axis=1) .sum() + *n.snapshot_weightings.generators + ) .sum(dims="snapshot") ) lhs.append(expr) @@ -935,6 +946,10 @@ def solve_network(n, config, solving, opts="", **kwargs): f"Solving status '{status}' with termination condition '{condition}'" ) if "infeasible" in condition: + m = n.model + labels = m.compute_infeasibilities() + print(labels) + m.print_infeasibilities() raise RuntimeError("Solving status 'infeasible'") 
 return n

From e8324b9c2788339837caf8718898b14f879ea4d8 Mon Sep 17 00:00:00 2001
From: lisazeyen <35347358+lisazeyen@users.noreply.github.com>
Date: Fri, 24 Nov 2023 09:58:24 +0100
Subject: [PATCH 017/101] fix bug when oil copper plated

---
 scripts/prepare_sector_network.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py
index a5ca8941..81e4d6e3 100644
--- a/scripts/prepare_sector_network.py
+++ b/scripts/prepare_sector_network.py
@@ -153,7 +153,7 @@ def define_spatial(nodes, options):
 spatial.oil.nodes = ["EU oil"]
 spatial.oil.locations = ["EU"]
 spatial.oil.naphtha = ["EU naphtha for industry"]
- spatial.oil.naphtha_process_emissions = "EU naphtha process emissions"
+ spatial.oil.naphtha_process_emissions = ["EU naphtha process emissions"]
 spatial.oil.kerosene = ["EU kerosene for aviation"]
 spatial.oil.shipping = ["EU shipping oil"]
 spatial.oil.agriculture_machinery = ["EU agriculture machinery oil"]

From 3ff925e797574afc11193d7f63316fdbdde03e12 Mon Sep 17 00:00:00 2001
From: lisazeyen <35347358+lisazeyen@users.noreply.github.com>
Date: Fri, 24 Nov 2023 10:00:07 +0100
Subject: [PATCH 018/101] add load shedding for all energy carriers

---
 scripts/solve_network.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/solve_network.py b/scripts/solve_network.py
index e2edb2eb..97c78dad 100644
--- a/scripts/solve_network.py
+++ b/scripts/solve_network.py
@@ -352,7 +352,7 @@ def prepare_network(
 # http://journal.frontiersin.org/article/10.3389/fenrg.2015.00055/full
 # TODO: retrieve color and nice name from config
 n.add("Carrier", "load", color="#dd2e23", nice_name="Load shedding")
- buses_i = n.buses.query("carrier == 'AC'").index
+ buses_i = n.buses.index
 if not np.isscalar(load_shedding):
 # TODO: do not scale via sign attribute (use Eur/MWh instead of Eur/kWh)
 load_shedding = 1e2 # Eur/kWh

From cea62de438b7c358bf23ad306dccb300b13beab7 Mon Sep 17 00:00:00 2001
From: Tom Brown
Date: Mon, 4 Dec 2023 16:46:11 +0100
Subject: [PATCH 019/101] solve_network: quick fix so duals can be read from CO2 constraint

---
 scripts/solve_network.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/scripts/solve_network.py b/scripts/solve_network.py
index 97c78dad..2413f4c9 100644
--- a/scripts/solve_network.py
+++ b/scripts/solve_network.py
@@ -841,10 +841,10 @@ def add_co2limit_country(n, limit_countries, nyears=1.0):
 lhs.append(expr)
 lhs = sum(lhs) # dimension: (country)
- lhs = lhs.rename({list(lhs.dims.keys())[0]: "country"})
+ lhs = lhs.rename({list(lhs.dims.keys())[0]: "snapshot"})
 rhs = pd.Series(co2_limit_countries) # dimension: (country)
- for ct in lhs.indexes["country"]:
+ for ct in lhs.indexes["snapshot"]:
 n.model.add_constraints(
 lhs.loc[ct] <= rhs[ct],
 name=f"GlobalConstraint-co2_limit_per_country{ct}",
 )
 n.add(
 "GlobalConstraint",
 f"co2_limit_per_country{ct}",
 constant=rhs[ct],
 sense="<=",
 type="",
 )

From 66178a5a27625b7055d029403c66bd7ac6df1da5 Mon Sep 17 00:00:00 2001
From: Tom Brown
Date: Mon, 4 Dec 2023 16:46:45 +0100
Subject: [PATCH 020/101] solve_network: fix sign for country CO2 when bus0=atmosphere

So that DAC extracts CO2 rather than pumping into air; for p>0, the link withdraws from bus0 but injects into bus1/2/3, so you have to take account of this sign difference.
---
 scripts/solve_network.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/solve_network.py b/scripts/solve_network.py
index 2413f4c9..53170da9 100644
--- a/scripts/solve_network.py
+++ b/scripts/solve_network.py
@@ -814,7 +814,7 @@ def add_co2limit_country(n,
limit_countries, nyears=1.0): for port in [col[3:] for col in n.links if col.startswith("bus")]: if port == str(0): efficiency = ( - n.links["efficiency"].apply(lambda x: 1.0).rename("efficiency0") + n.links["efficiency"].apply(lambda x: -1.0).rename("efficiency0") ) elif port == str(1): efficiency = n.links["efficiency"] From bbf9ca2d9be0af6fe80ffcc667556405ab0bddbc Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Fri, 8 Dec 2023 11:58:28 +0100 Subject: [PATCH 021/101] bug fix: naming of p_set when co2_national is True Without this naming fix, the p_set is a NaN once added --- scripts/prepare_sector_network.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 81e4d6e3..606e17b3 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2643,7 +2643,7 @@ def add_industry(n, costs): # need to aggregate potentials if methanol not nodally resolved if options["co2_budget_national"]: - p_set_methanol = shipping_methanol_share * p_set * efficiency + p_set_methanol = shipping_methanol_share * p_set.rename(lambda x : x + " shipping methanol") * efficiency else: p_set_methanol = shipping_methanol_share * p_set.sum() * efficiency @@ -2707,7 +2707,7 @@ def add_industry(n, costs): if shipping_oil_share: # need to aggregate potentials if oil not nodally resolved if options["co2_budget_national"]: - p_set_oil = shipping_oil_share * p_set + p_set_oil = shipping_oil_share * p_set.rename(lambda x: x + " shipping oil") else: p_set_oil = shipping_oil_share * p_set.sum() @@ -2789,15 +2789,16 @@ def add_industry(n, costs): # convert process emissions from feedstock from MtCO2 to energy demand # need to aggregate potentials if oil not nodally resolved if options["co2_budget_national"]: - p_set_plastics = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]) / nhours + p_set_plastics = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]).rename(lambda x: x + " naphtha for industry") / nhours else: p_set_plastics = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]).sum() / nhours + if options["co2_budget_national"]: p_set_process_emissions = ( demand_factor * (industrial_demand.loc[nodes, "process emission from feedstock"] - / costs.at["oil", "CO2 intensity"]) + / costs.at["oil", "CO2 intensity"]).rename(lambda x: x + " naphtha process emissions") / nhours ) else: @@ -2857,7 +2858,7 @@ def add_industry(n, costs): * pop_weighted_energy_totals.loc[nodes, all_aviation].sum(axis=1) * 1e6 / nhours - ) + ).rename(lambda x: x + " kerosene for aviation") else: p_set = ( demand_factor @@ -3139,7 +3140,7 @@ def add_agriculture(n, costs): if oil_share > 0: # need to aggregate potentials if oil not nodally resolved if options["co2_budget_national"]: - p_set = oil_share * machinery_nodal_energy / nhours + p_set = oil_share * machinery_nodal_energy.rename(lambda x: x + " agriculture machinery oil") / nhours else: p_set = oil_share * machinery_nodal_energy.sum() / nhours From 2d323d1b879751bc96303bd6c6a54fda2c90eccb Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Fri, 8 Dec 2023 12:27:07 +0100 Subject: [PATCH 022/101] bug fix: ICE efficiency for land transport was applied twice This 
was overestimating ICE oil demand by factor 1/0.3. --- scripts/prepare_sector_network.py | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 606e17b3..342a6b15 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -1613,7 +1613,6 @@ def add_land_transport(n, costs): bus1=spatial.oil.land_transport, bus2="co2 atmosphere", carrier="land transport oil", - efficiency=ice_efficiency, efficiency2=costs.at["oil", "CO2 intensity"], p_nom_extendable=True, ) From 00e86e6435816fe007fb25d62de90cf58fbc01c4 Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Fri, 8 Dec 2023 13:28:08 +0100 Subject: [PATCH 023/101] bug fix: route process emissions from steam cracker to correct bus Now naphtha demand causes process emissions from steam crackers to route to the process emissions bus; the rest of the CO2 goes to the atmosphere. --- scripts/prepare_sector_network.py | 46 +++++++------------------------ 1 file changed, 10 insertions(+), 36 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 342a6b15..8e995dd6 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -145,7 -145,6 @@ def define_spatial(nodes, options): spatial.oil.nodes = nodes + " oil" spatial.oil.locations = nodes spatial.oil.naphtha = nodes + " naphtha for industry" - spatial.oil.naphtha_process_emissions = nodes + " naphtha process emissions" spatial.oil.kerosene = nodes + " kerosene for aviation" spatial.oil.shipping = nodes + " shipping oil" spatial.oil.agriculture_machinery = nodes + " agriculture machinery oil" @@ -153,7 +152,6 @@ def define_spatial(nodes, options): spatial.oil.nodes = ["EU oil"] spatial.oil.locations = ["EU"] spatial.oil.naphtha = ["EU naphtha for industry"] - spatial.oil.naphtha_process_emissions = ["EU naphtha process emissions"] spatial.oil.kerosene = ["EU kerosene for aviation"] spatial.oil.shipping = ["EU shipping oil"] spatial.oil.agriculture_machinery = ["EU agriculture machinery oil"] @@ -2783,31 +2781,10 @@ def add_industry(n, costs): if demand_factor != 1: logger.warning(f"Changing HVC demand by {demand_factor*100-100:+.2f}%.") - # NB: CO2 gets released again to atmosphere when plastics decay - except for the process emissions when naphtha is used for petrochemicals, which can be captured with other industry process emissions - # convert process emissions from feedstock from MtCO2 to energy demand - # need to aggregate potentials if oil not nodally resolved if options["co2_budget_national"]: - p_set_plastics = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]).rename(lambda x: x + " naphtha for industry") / nhours + p_set_plastics = demand_factor * industrial_demand.loc[nodes, "naphtha"].rename(lambda x: x + " naphtha for industry") / nhours else: - p_set_plastics = demand_factor * (industrial_demand.loc[nodes, "naphtha"] - industrial_demand.loc[nodes, "process emission from feedstock"] / costs.at["oil", "CO2 intensity"]).sum() / nhours - - - if options["co2_budget_national"]: - p_set_process_emissions = ( - demand_factor - * (industrial_demand.loc[nodes, "process emission from feedstock"] - / costs.at["oil", "CO2 intensity"]) - / nhours - ) - else: - p_set_process_emissions = ( - demand_factor - * (industrial_demand.loc[nodes, "process emission from feedstock"] - /
costs.at["oil", "CO2 intensity"] - ).sum() - / nhours - ) + p_set_plastics = demand_factor * industrial_demand.loc[nodes, "naphtha"].sum() / nhours n.madd( "Bus", @@ -2825,13 +2802,10 @@ def add_industry(n, costs): p_set=p_set_plastics, ) - n.madd( - "Load", - spatial.oil.naphtha_process_emissions, - bus=spatial.oil.nodes, - carrier="naphtha for industry", - p_set=p_set_process_emissions, - ) + # some CO2 from naphtha are process emissions from steam cracker + # rest of CO2 released to atmosphere either in waste-to-energy or decay + process_co2_per_naphtha = industrial_demand.loc[nodes, "process emission from feedstock"].sum() / industrial_demand.loc[nodes, "naphtha"].sum() + emitted_co2_per_naphtha = costs.at["oil", "CO2 intensity"] - process_co2_per_naphtha n.madd( "Link", @@ -2839,9 +2813,11 @@ def add_industry(n, costs): bus0=spatial.oil.nodes, bus1=spatial.oil.naphtha, bus2="co2 atmosphere", + bus3=spatial.co2.process_emissions, carrier="naphtha for industry", p_nom_extendable=True, - efficiency2=costs.at["oil", "CO2 intensity"], + efficiency2=emitted_co2_per_naphtha, + efficiency3=process_co2_per_naphtha, ) # aviation @@ -2941,7 +2917,7 @@ def add_industry(n, costs): unit="t_co2", ) - sel = ["process emission", "process emission from feedstock"] + sel = ["process emission"] if options["co2_spatial"] or options["co2network"]: p_set = ( -industrial_demand.loc[nodes, sel] @@ -2952,8 +2928,6 @@ def add_industry(n, costs): else: p_set = -industrial_demand.loc[nodes, sel].sum(axis=1).sum() / nhours - # this should be process emissions fossil+feedstock - # then need load on atmosphere for feedstock emissions that are currently going to atmosphere via Link Fischer-Tropsch demand n.madd( "Load", spatial.co2.process_emissions, From 326ed63329d55d5a84f9230840161cdb3673e27a Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Fri, 8 Dec 2023 17:53:28 +0100 Subject: [PATCH 024/101] add_brownfield: disable grid expansion if LV already hit Numerical problems were causing infeasibilities otherwise --- scripts/add_brownfield.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index 74102580..fb1453fd 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -119,6 +119,32 @@ def add_brownfield(n, n_p, year): n.links.loc[new_pipes, "p_nom"] = 0.0 n.links.loc[new_pipes, "p_nom_min"] = 0.0 +def disable_grid_expansion_if_LV_limit_hit(n): + if not "lv_limit" in n.global_constraints.index: + return + + #calculate minimum LV + attr = "nom_min" + dc = n.links.index[n.links.carrier == "DC"] + tot = (n.lines["s_" + attr]*n.lines["length"]).sum() + (n.links.loc[dc,"p_" + attr]*n.links.loc[dc,"length"]).sum() + + diff = n.global_constraints.at["lv_limit","constant"]-tot + + #allow small numerical differences + limit = 1 + + if diff < limit: + logger.info(f"LV is already reached (gap {diff}), disabling expansion and LV limit") + expandable_acs = n.lines.index[n.lines.s_nom_extendable] + n.lines.loc[expandable_acs,"s_nom_extendable"] = False + n.lines.loc[expandable_acs,"s_nom"] = n.lines.loc[expandable_acs,"s_nom_min"] + + expandable_dcs = n.links.index[n.links.p_nom_extendable & (n.links.carrier == "DC")] + n.links.loc[expandable_dcs,"p_nom_extendable"] = False + n.links.loc[expandable_dcs,"p_nom"] = n.links.loc[expandable_dcs,"p_nom_min"] + + n.global_constraints.drop("lv_limit", + inplace=True) if __name__ == "__main__": if "snakemake" not in globals(): @@ -150,5 +176,7 @@ if __name__ == "__main__": add_brownfield(n, 
n_p, year) + disable_grid_expansion_if_LV_limit_hit(n) + n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) n.export_to_netcdf(snakemake.output[0]) From 830019a6e5d5ced3403bd4d5d9e28e2d66fe621b Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Fri, 15 Dec 2023 09:50:47 +0100 Subject: [PATCH 025/101] add rule that allows cost data to be modified --- rules/build_sector.smk | 4 ++-- rules/retrieve.smk | 14 ++++++++++++++ scripts/modify_cost_data.py | 12 ++++++++++++ 3 files changed, 28 insertions(+), 2 deletions(-) create mode 100644 scripts/modify_cost_data.py diff --git a/rules/build_sector.smk b/rules/build_sector.smk index 5a9e8646..596c0305 100644 --- a/rules/build_sector.smk +++ b/rules/build_sector.smk @@ -743,9 +743,9 @@ rule prepare_sector_network: else RESOURCES + "biomass_potentials_s{simpl}_{clusters}_{planning_horizons}.csv", heat_profile="data/heat_load_profile_BDEW.csv", - costs="data/costs_{}.csv".format(config["costs"]["year"]) + costs="data/costs_{}-modified.csv".format(config["costs"]["year"]) if config["foresight"] == "overnight" - else "data/costs_{planning_horizons}.csv", + else "data/costs_{planning_horizons}-modified.csv", profile_offwind_ac=RESOURCES + "profile_offwind-ac.nc", profile_offwind_dc=RESOURCES + "profile_offwind-dc.nc", h2_cavern=RESOURCES + "salt_cavern_potentials_s{simpl}_{clusters}.csv", diff --git a/rules/retrieve.smk b/rules/retrieve.smk index b830be25..18b424ff 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -259,3 +259,17 @@ if config["enable"]["retrieve"]: "../envs/environment.yaml" script: "../scripts/retrieve_monthly_fuel_prices.py" + + +rule modify_cost_data: + input: + costs="data/costs_{year}.csv", + output: + "data/costs_{year}-modified.csv" + log: + LOGS + "modify_cost_data_{year}.log", + resources: + mem_mb=1000, + retries: 2 + script: + "../scripts/modify_cost_data.py" diff --git a/scripts/modify_cost_data.py b/scripts/modify_cost_data.py new file mode 100644 index 00000000..3e1f12f4 --- /dev/null +++ b/scripts/modify_cost_data.py @@ -0,0 +1,12 @@ + +import pandas as pd + +costs = pd.read_csv(snakemake.input.costs, index_col=[0, 1]).sort_index() + +if "modifications" in snakemake.input.keys(): + modifications = pd.read_csv(snakemake.input.modifications, index_col=[0, 1]).sort_index() + costs.loc[modifications.index] = modifications + print(modifications) + print( costs.loc[modifications.index]) + +costs.to_csv(snakemake.output[0]) From c5a123b4f443a29e8f0f7446ed45ab48230ed1a9 Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Fri, 15 Dec 2023 14:57:03 +0100 Subject: [PATCH 026/101] allow additional functionality for solving to be added by file To add this, overwrite the rule with a new argument: snakemake.input.additional_functionality --- scripts/solve_network.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 53170da9..dce63efe 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -44,6 +44,7 @@ from pypsa.descriptors import get_switchable_as_dense as get_as_dense from prepare_sector_network import emission_sectors_from_opts + def add_land_use_constraint(n, planning_horizons, config): if "m" in snakemake.wildcards.clusters: _add_land_use_constraint_m(n, planning_horizons, config) @@ -899,6 +900,13 @@ def extra_functionality(n, snapshots): logger.info(f"Add CO2 limit for each country") add_co2limit_country(n, limit_countries, nyears) + if "additional_functionality" in snakemake.input.keys(): + import importlib, os, sys + 
sys.path.append(os.path.dirname(snakemake.input.additional_functionality)) + additional_functionality = importlib.import_module(os.path.splitext(os.path.basename(snakemake.input.additional_functionality))[0]) + + additional_functionality.additional_functionality(n, snapshots, snakemake.wildcards.planning_horizons) + def solve_network(n, config, solving, opts="", **kwargs): set_of_options = solving["solver"]["options"] From 1a7f093e037ed177468c163863a7bbd929d322c3 Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Fri, 15 Dec 2023 17:18:36 +0100 Subject: [PATCH 027/101] solve: pass wildcards and config to additional_functionality --- scripts/solve_network.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index dce63efe..6f88b904 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -905,7 +905,7 @@ def extra_functionality(n, snapshots): sys.path.append(os.path.dirname(snakemake.input.additional_functionality)) additional_functionality = importlib.import_module(os.path.splitext(os.path.basename(snakemake.input.additional_functionality))[0]) - additional_functionality.additional_functionality(n, snapshots, snakemake.wildcards.planning_horizons) + additional_functionality.additional_functionality(n, snapshots, snakemake.wildcards, config) def solve_network(n, config, solving, opts="", **kwargs): From b3753d73d75eedaddb8da9f6dcfc4bcf8793831a Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Wed, 20 Dec 2023 09:22:40 +0100 Subject: [PATCH 028/101] undo addition of script to allow cost modifications This undoes commit 830019a6e5d5ced3403bd4d5d9e28e2d66fe621b. Reason: this was introduced for the PyPSA-Ariadne derivative, but can be handled more elegantly within the derivative repository. 
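For reference, a derivative repository that still wants this behaviour can reproduce it with a few lines of its own, e.g. along the lines of the reverted scripts/modify_cost_data.py (sketch only; the cost file names below are placeholders, not files shipped with this workflow):

    import pandas as pd

    # technology cost assumptions indexed by (technology, parameter)
    costs = pd.read_csv("data/costs_2030.csv", index_col=[0, 1]).sort_index()
    # user-supplied overrides in the same two-level format
    modifications = pd.read_csv("costs_2030-modifications.csv", index_col=[0, 1]).sort_index()
    # overwrite only the (technology, parameter) rows that are modified
    costs.loc[modifications.index] = modifications
    costs.to_csv("data/costs_2030-modified.csv")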
--- rules/build_sector.smk | 4 ++-- rules/retrieve.smk | 14 -------------- scripts/modify_cost_data.py | 12 ------------ 3 files changed, 2 insertions(+), 28 deletions(-) delete mode 100644 scripts/modify_cost_data.py diff --git a/rules/build_sector.smk b/rules/build_sector.smk index 596c0305..5a9e8646 100644 --- a/rules/build_sector.smk +++ b/rules/build_sector.smk @@ -743,9 +743,9 @@ rule prepare_sector_network: else RESOURCES + "biomass_potentials_s{simpl}_{clusters}_{planning_horizons}.csv", heat_profile="data/heat_load_profile_BDEW.csv", - costs="data/costs_{}-modified.csv".format(config["costs"]["year"]) + costs="data/costs_{}.csv".format(config["costs"]["year"]) if config["foresight"] == "overnight" - else "data/costs_{planning_horizons}-modified.csv", + else "data/costs_{planning_horizons}.csv", profile_offwind_ac=RESOURCES + "profile_offwind-ac.nc", profile_offwind_dc=RESOURCES + "profile_offwind-dc.nc", h2_cavern=RESOURCES + "salt_cavern_potentials_s{simpl}_{clusters}.csv", diff --git a/rules/retrieve.smk b/rules/retrieve.smk index 18b424ff..b830be25 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -259,17 +259,3 @@ if config["enable"]["retrieve"]: "../envs/environment.yaml" script: "../scripts/retrieve_monthly_fuel_prices.py" - - -rule modify_cost_data: - input: - costs="data/costs_{year}.csv", - output: - "data/costs_{year}-modified.csv" - log: - LOGS + "modify_cost_data_{year}.log", - resources: - mem_mb=1000, - retries: 2 - script: - "../scripts/modify_cost_data.py" diff --git a/scripts/modify_cost_data.py b/scripts/modify_cost_data.py deleted file mode 100644 index 3e1f12f4..00000000 --- a/scripts/modify_cost_data.py +++ /dev/null @@ -1,12 +0,0 @@ - -import pandas as pd - -costs = pd.read_csv(snakemake.input.costs, index_col=[0, 1]).sort_index() - -if "modifications" in snakemake.input.keys(): - modifications = pd.read_csv(snakemake.input.modifications, index_col=[0, 1]).sort_index() - costs.loc[modifications.index] = modifications - print(modifications) - print( costs.loc[modifications.index]) - -costs.to_csv(snakemake.output[0]) From 8a55a55d20215dfe32cc49bae5de3e0f05411f1f Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Thu, 21 Dec 2023 16:08:43 +0100 Subject: [PATCH 029/101] copperplate oil/methanol supply; allow demand to be regional Force a single supply bus for oil/methanol (until we allow oil/methanol transport). Introduce new config switches "regional_oil/methanol_demand" that allow demand to be regionalised. This is important if regional CO2 budgets need to be enforced. 
--- config/config.default.yaml | 2 + scripts/prepare_sector_network.py | 103 ++++++++++++++++-------------- 2 files changed, 57 insertions(+), 48 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index cafb9d1d..c1e7ed0f 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -471,6 +471,8 @@ sector: SMR: true SMR_cc: true co2_budget_national: false + regional_methanol_demand: false #set to true if regional CO2 constraints needed + regional_oil_demand: false #set to true if regional CO2 constraints needed regional_co2_sequestration_potential: enable: false attribute: 'conservative estimate Mt' diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 8e995dd6..b5a0c0d5 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -127,35 +127,42 @@ def define_spatial(nodes, options): spatial.h2.locations = nodes # methanol + + #beware: unlike other carriers, uses locations rather than locations+carriername + #this allows to avoid separation between nodes and locations + spatial.methanol = SimpleNamespace() - if options["co2_budget_national"]: - spatial.methanol.nodes = nodes + " methanol" - spatial.methanol.locations = nodes + spatial.methanol.nodes = ["EU methanol"] + spatial.methanol.locations = ["EU"] + + if options["regional_methanol_demand"]: + spatial.methanol.demand_locations = nodes spatial.methanol.shipping = nodes + " shipping methanol" else: - spatial.methanol.nodes = ["EU methanol"] - spatial.methanol.locations = ["EU"] + spatial.methanol.demand_locations = ["EU"] spatial.methanol.shipping = ["EU shipping methanol"] # oil spatial.oil = SimpleNamespace() - if options["co2_budget_national"]: - spatial.oil.nodes = nodes + " oil" - spatial.oil.locations = nodes + spatial.oil.nodes = ["EU oil"] + spatial.oil.locations = ["EU"] + + if options["regional_oil_demand"]: + spatial.oil.demand_locations = nodes spatial.oil.naphtha = nodes + " naphtha for industry" spatial.oil.kerosene = nodes + " kerosene for aviation" spatial.oil.shipping = nodes + " shipping oil" spatial.oil.agriculture_machinery = nodes + " agriculture machinery oil" + spatial.oil.land_transport = nodes + " land transport oil" else: - spatial.oil.nodes = ["EU oil"] - spatial.oil.locations = ["EU"] + spatial.oil.demand_locations = ["EU"] spatial.oil.naphtha = ["EU naphtha for industry"] spatial.oil.kerosene = ["EU kerosene for aviation"] spatial.oil.shipping = ["EU shipping oil"] spatial.oil.agriculture_machinery = ["EU agriculture machinery oil"] - spatial.oil.land_transport = nodes + " land transport oil" + spatial.oil.land_transport = ["EU land transport oil"] # uranium spatial.uranium = SimpleNamespace() @@ -1588,10 +1595,15 @@ def add_land_transport(n, costs): ice_efficiency = options["transport_internal_combustion_efficiency"] + p_set_land_transport_oil = ice_share / ice_efficiency * transport[nodes].rename(columns=lambda x: x + " land transport oil") + + if not options["regional_oil_demand"]: + p_set_land_transport_oil = p_set_land_transport_oil.sum(axis=1).to_frame(name="EU land transport oil") + n.madd( "Bus", spatial.oil.land_transport, - location=nodes, + location=spatial.oil.demand_locations, carrier="land transport oil", unit="land transport", ) @@ -1601,7 +1613,7 @@ def add_land_transport(n, costs): spatial.oil.land_transport, bus=spatial.oil.land_transport, carrier="land transport oil", - p_set=ice_share / ice_efficiency * transport[nodes].rename(columns=lambda x: x + " land transport oil"), + 
p_set=p_set_land_transport_oil, ) n.madd( @@ -2638,16 +2650,15 @@ def add_industry(n, costs): options["shipping_oil_efficiency"] / options["shipping_methanol_efficiency"] ) - # need to aggregate potentials if methanol not nodally resolved - if options["co2_budget_national"]: - p_set_methanol = shipping_methanol_share * p_set.rename(lambda x : x + " shipping methanol") * efficiency - else: - p_set_methanol = shipping_methanol_share * p_set.sum() * efficiency + p_set_methanol = shipping_methanol_share * p_set.rename(lambda x : x + " shipping methanol") * efficiency + + if not options["regional_methanol_demand"]: + p_set_methanol = p_set_methanol.sum() n.madd( "Bus", spatial.methanol.shipping, - location=spatial.methanol.locations, + location=spatial.methanol.demand_locations, carrier="shipping methanol", unit="MWh_LHV", ) @@ -2684,7 +2695,8 @@ def add_industry(n, costs): # could correct to e.g. 0.001 EUR/kWh * annuity and O&M n.madd( "Store", - [oil_bus + " Store" for oil_bus in spatial.oil.nodes], + spatial.oil.nodes, + suffix=" Store", bus=spatial.oil.nodes, e_nom_extendable=True, e_cyclic=True, @@ -2702,16 +2714,16 @@ def add_industry(n, costs): ) if shipping_oil_share: - # need to aggregate potentials if oil not nodally resolved - if options["co2_budget_national"]: - p_set_oil = shipping_oil_share * p_set.rename(lambda x: x + " shipping oil") - else: - p_set_oil = shipping_oil_share * p_set.sum() + + p_set_oil = shipping_oil_share * p_set.rename(lambda x: x + " shipping oil") + + if not options["regional_oil_demand"]: + p_set_oil = p_set_oil.sum() n.madd( "Bus", spatial.oil.shipping, - location=spatial.oil.locations, + location=spatial.oil.demand_locations, carrier="shipping oil", unit="MWh_LHV", ) @@ -2781,15 +2793,15 @@ def add_industry(n, costs): if demand_factor != 1: logger.warning(f"Changing HVC demand by {demand_factor*100-100:+.2f}%.") - if options["co2_budget_national"]: - p_set_plastics = demand_factor * industrial_demand.loc[nodes, "naphtha"].rename(lambda x: x + " naphtha for industry") / nhours - else: - p_set_plastics = demand_factor * industrial_demand.loc[nodes, "naphtha"].sum() / nhours + p_set_plastics = demand_factor * industrial_demand.loc[nodes, "naphtha"].rename(lambda x: x + " naphtha for industry") / nhours + + if not options["regional_oil_demand"]: + p_set_plastics = p_set_plastics.sum() n.madd( "Bus", spatial.oil.naphtha, - location=spatial.oil.locations, + location=spatial.oil.demand_locations, carrier="naphtha for industry", unit="MWh_LHV", ) @@ -2826,26 +2838,21 @@ def add_industry(n, costs): logger.warning(f"Changing aviation demand by {demand_factor*100-100:+.2f}%.") all_aviation = ["total international aviation", "total domestic aviation"] - # need to aggregate potentials if oil not nodally resolved - if options["co2_budget_national"]: - p_set = ( + + p_set = ( demand_factor * pop_weighted_energy_totals.loc[nodes, all_aviation].sum(axis=1) * 1e6 / nhours ).rename(lambda x: x + " kerosene for aviation") - else: - p_set = ( - demand_factor - * pop_weighted_energy_totals.loc[nodes, all_aviation].sum(axis=1).sum() - * 1e6 - / nhours - ) + + if not options["regional_oil_demand"]: + p_set = p_set.sum() n.madd( "Bus", spatial.oil.kerosene, - location=spatial.oil.locations, + location=spatial.oil.demand_locations, carrier="kerosene for aviation", unit="MWh_LHV", ) @@ -3111,16 +3118,16 @@ def add_agriculture(n, costs): ) if oil_share > 0: - # need to aggregate potentials if oil not nodally resolved - if options["co2_budget_national"]: - p_set = oil_share * 
machinery_nodal_energy.rename(lambda x: x + " agriculture machinery oil") / nhours - else: - p_set = oil_share * machinery_nodal_energy.sum() / nhours + + p_set = oil_share * machinery_nodal_energy.rename(lambda x: x + " agriculture machinery oil") / nhours + + if not options["regional_oil_demand"]: + p_set = p_set.sum() n.madd( "Bus", spatial.oil.agriculture_machinery, - location=spatial.oil.locations, + location=spatial.oil.demand_locations, carrier="agriculture machinery oil", unit="MWh_LHV", ) From 8a11bdb4b132f6803485c01c2d222cad8e9d3c66 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Fri, 29 Dec 2023 17:19:19 +0100 Subject: [PATCH 030/101] solve_network: option to inject custom extra functionalities from source file --- config/config.default.yaml | 1 + doc/configtables/solving.csv | 1 + doc/release_notes.rst | 5 +++++ rules/solve_myopic.smk | 3 +++ rules/solve_overnight.smk | 3 +++ rules/solve_perfect.smk | 3 +++ scripts/solve_network.py | 7 +++++++ 7 files changed, 23 insertions(+) diff --git a/config/config.default.yaml b/config/config.default.yaml index a6df173b..0ff742e7 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -627,6 +627,7 @@ solving: skip_iterations: true rolling_horizon: false seed: 123 + custom_extra_functionality: "data/custom_extra_functionality.py" # options that go into the optimize function track_iterations: false min_iterations: 4 diff --git a/doc/configtables/solving.csv b/doc/configtables/solving.csv index 45d50d84..9d47c043 100644 --- a/doc/configtables/solving.csv +++ b/doc/configtables/solving.csv @@ -6,6 +6,7 @@ options,,, -- skip_iterations,bool,"{'true','false'}","Skip iterating, do not update impedances of branches. Defaults to true." -- rolling_horizon,bool,"{'true','false'}","Whether to optimize the network in a rolling horizon manner, where the snapshot range is split into slices of size `horizon` which are solved consecutively." -- seed,--,int,Random seed for increased deterministic behaviour. +-- custom_extra_functionality,--,str,Path to a Python file with custom extra functionality code to be injected into the solving rules of the workflow. -- track_iterations,bool,"{'true','false'}",Flag whether to store the intermediate branch capacities and objective function values are recorded for each iteration in ``network.lines['s_nom_opt_X']`` (where ``X`` labels the iteration) -- min_iterations,--,int,Minimum number of solving iterations in between which resistance and reactence (``x/r``) are updated for branches according to ``s_nom_opt`` of the previous run. -- max_iterations,--,int,Maximum number of solving iterations in between which resistance and reactence (``x/r``) are updated for branches according to ``s_nom_opt`` of the previous run. diff --git a/doc/release_notes.rst b/doc/release_notes.rst index d7931f0e..a7644682 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -54,6 +54,11 @@ Upcoming Release reconnected to the main Ukrainian grid with the configuration option `reconnect_crimea`. +* Add option to reference an additional source file where users can specify + custom ``extra_functionality`` constraints in the configuration file. The + default setting points to an empty hull at + ``data/custom_extra_functionality.py``. 
+ **Bugs and Compatibility** diff --git a/rules/solve_myopic.smk b/rules/solve_myopic.smk index 8a93d24a..217547b9 100644 --- a/rules/solve_myopic.smk +++ b/rules/solve_myopic.smk @@ -88,6 +88,9 @@ rule solve_sector_network_myopic: co2_sequestration_potential=config["sector"].get( "co2_sequestration_potential", 200 ), + custom_extra_functionality=workflow.source_path( + config["solver"]["options"].get("custom_extra_functionality", "") + ), input: network=RESULTS + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", diff --git a/rules/solve_overnight.smk b/rules/solve_overnight.smk index c7700760..8ac56db8 100644 --- a/rules/solve_overnight.smk +++ b/rules/solve_overnight.smk @@ -11,6 +11,9 @@ rule solve_sector_network: co2_sequestration_potential=config["sector"].get( "co2_sequestration_potential", 200 ), + custom_extra_functionality=workflow.source_path( + config["solver"]["options"].get("custom_extra_functionality", "") + ), input: network=RESULTS + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", diff --git a/rules/solve_perfect.smk b/rules/solve_perfect.smk index ef4e367d..322ced8d 100644 --- a/rules/solve_perfect.smk +++ b/rules/solve_perfect.smk @@ -118,6 +118,9 @@ rule solve_sector_network_perfect: co2_sequestration_potential=config["sector"].get( "co2_sequestration_potential", 200 ), + custom_extra_functionality=workflow.source_path( + config["solver"]["options"].get("custom_extra_functionality", "") + ), input: network=RESULTS + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc", diff --git a/scripts/solve_network.py b/scripts/solve_network.py index ff2a2f23..539c4e72 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -26,7 +26,9 @@ Additionally, some extra constraints specified in :mod:`solve_network` are added the workflow for all scenarios in the configuration file (``scenario:``) based on the rule :mod:`solve_network`. 
""" +import importlib import logging +import os import re import numpy as np @@ -792,6 +794,11 @@ def extra_functionality(n, snapshots): add_carbon_budget_constraint(n, snapshots) add_retrofit_gas_boiler_constraint(n, snapshots) + if snakemake.params.custom_extra_functionality: + source_path = snakemake.params.custom_extra_functionality + module_name = os.path.splitext(os.path.basename(source_path))[0] + module = importlib.import_module(module_name) + module.custom_extra_functionality(n, snapshots) def solve_network(n, config, solving, opts="", **kwargs): set_of_options = solving["solver"]["options"] From fba320bfa7ae05a86e567426848577d42a25b337 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 29 Dec 2023 16:20:24 +0000 Subject: [PATCH 031/101] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/solve_network.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 539c4e72..d79a6342 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -800,6 +800,7 @@ def extra_functionality(n, snapshots): module = importlib.import_module(module_name) module.custom_extra_functionality(n, snapshots) + def solve_network(n, config, solving, opts="", **kwargs): set_of_options = solving["solver"]["options"] cf_solving = solving["options"] From 4b6dd2908324eda4c1722cb9ba41d330df6da443 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Fri, 29 Dec 2023 17:23:11 +0100 Subject: [PATCH 032/101] add dummy file and assert path exists --- data/custom_extra_functionality.py | 9 +++++++++ scripts/solve_network.py | 1 + 2 files changed, 10 insertions(+) create mode 100644 data/custom_extra_functionality.py diff --git a/data/custom_extra_functionality.py b/data/custom_extra_functionality.py new file mode 100644 index 00000000..98b0c026 --- /dev/null +++ b/data/custom_extra_functionality.py @@ -0,0 +1,9 @@ +# SPDX-FileCopyrightText: : 2023- The PyPSA-Eur Authors +# +# SPDX-License-Identifier: MIT + +def custom_extra_functionality(n, snapshots): + """ + Add custom extra functionality constraints. 
+ """ + pass diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 539c4e72..ed28c51c 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -796,6 +796,7 @@ def extra_functionality(n, snapshots): if snakemake.params.custom_extra_functionality: source_path = snakemake.params.custom_extra_functionality + assert os.path.exists(source_path), f"{source_path} does not exist" module_name = os.path.splitext(os.path.basename(source_path))[0] module = importlib.import_module(module_name) module.custom_extra_functionality(n, snapshots) From 876a28b688719273cc8fd9531845ae78c0e7edbe Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 29 Dec 2023 16:23:52 +0000 Subject: [PATCH 033/101] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- data/custom_extra_functionality.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/data/custom_extra_functionality.py b/data/custom_extra_functionality.py index 98b0c026..0ac24cea 100644 --- a/data/custom_extra_functionality.py +++ b/data/custom_extra_functionality.py @@ -1,7 +1,9 @@ +# -*- coding: utf-8 -*- # SPDX-FileCopyrightText: : 2023- The PyPSA-Eur Authors # # SPDX-License-Identifier: MIT + def custom_extra_functionality(n, snapshots): """ Add custom extra functionality constraints. From a5ba2565a0abfcbe84d25c75a66fd2639bf08ca1 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Fri, 29 Dec 2023 17:30:03 +0100 Subject: [PATCH 034/101] correct config location --- rules/solve_myopic.smk | 2 +- rules/solve_overnight.smk | 2 +- rules/solve_perfect.smk | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/rules/solve_myopic.smk b/rules/solve_myopic.smk index 217547b9..afa8ad2c 100644 --- a/rules/solve_myopic.smk +++ b/rules/solve_myopic.smk @@ -89,7 +89,7 @@ rule solve_sector_network_myopic: "co2_sequestration_potential", 200 ), custom_extra_functionality=workflow.source_path( - config["solver"]["options"].get("custom_extra_functionality", "") + config["solving"]["options"].get("custom_extra_functionality", "") ), input: network=RESULTS diff --git a/rules/solve_overnight.smk b/rules/solve_overnight.smk index 8ac56db8..fc2f74df 100644 --- a/rules/solve_overnight.smk +++ b/rules/solve_overnight.smk @@ -12,7 +12,7 @@ rule solve_sector_network: "co2_sequestration_potential", 200 ), custom_extra_functionality=workflow.source_path( - config["solver"]["options"].get("custom_extra_functionality", "") + config["solving"]["options"].get("custom_extra_functionality", "") ), input: network=RESULTS diff --git a/rules/solve_perfect.smk b/rules/solve_perfect.smk index 322ced8d..63be5cc1 100644 --- a/rules/solve_perfect.smk +++ b/rules/solve_perfect.smk @@ -119,7 +119,7 @@ rule solve_sector_network_perfect: "co2_sequestration_potential", 200 ), custom_extra_functionality=workflow.source_path( - config["solver"]["options"].get("custom_extra_functionality", "") + config["solving"]["options"].get("custom_extra_functionality", "") ), input: network=RESULTS From 79ca64382b56e69315e641c9441e7a036a97313a Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sun, 31 Dec 2023 14:15:37 +0100 Subject: [PATCH 035/101] correct path for custom_extra_functionality --- config/config.default.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index 0ff742e7..b8945f75 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -627,7 +627,7 @@ 
solving: skip_iterations: true rolling_horizon: false seed: 123 - custom_extra_functionality: "data/custom_extra_functionality.py" + custom_extra_functionality: "../data/custom_extra_functionality.py" # options that go into the optimize function track_iterations: false min_iterations: 4 From f28e9b47d26cc108c99103c3ce63b34d0ed2011d Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 16:00:41 +0100 Subject: [PATCH 036/101] add custom_extra_functionality param to solve_electricity rule --- rules/solve_electricity.smk | 3 +++ 1 file changed, 3 insertions(+) diff --git a/rules/solve_electricity.smk b/rules/solve_electricity.smk index c396ebd5..2c956097 100644 --- a/rules/solve_electricity.smk +++ b/rules/solve_electricity.smk @@ -11,6 +11,9 @@ rule solve_network: co2_sequestration_potential=config["sector"].get( "co2_sequestration_potential", 200 ), + custom_extra_functionality=workflow.source_path( + config["solving"]["options"].get("custom_extra_functionality", "") + ), input: network=RESOURCES + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", config=RESULTS + "config.yaml", From 340bf778498a4a6ceffe246b8ab3245a9ccc84f7 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 16:01:28 +0100 Subject: [PATCH 037/101] clarify that source_path is relative to directory --- doc/configtables/solving.csv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/configtables/solving.csv b/doc/configtables/solving.csv index 9d47c043..dcff54e4 100644 --- a/doc/configtables/solving.csv +++ b/doc/configtables/solving.csv @@ -6,7 +6,7 @@ options,,, -- skip_iterations,bool,"{'true','false'}","Skip iterating, do not update impedances of branches. Defaults to true." -- rolling_horizon,bool,"{'true','false'}","Whether to optimize the network in a rolling horizon manner, where the snapshot range is split into slices of size `horizon` which are solved consecutively." -- seed,--,int,Random seed for increased deterministic behaviour. --- custom_extra_functionality,--,str,Path to a Python file with custom extra functionality code to be injected into the solving rules of the workflow. +-- custom_extra_functionality,--,str,Path to a Python file with custom extra functionality code to be injected into the solving rules of the workflow relative to ``rules`` directory. -- track_iterations,bool,"{'true','false'}",Flag whether to store the intermediate branch capacities and objective function values are recorded for each iteration in ``network.lines['s_nom_opt_X']`` (where ``X`` labels the iteration) -- min_iterations,--,int,Minimum number of solving iterations in between which resistance and reactence (``x/r``) are updated for branches according to ``s_nom_opt`` of the previous run. -- max_iterations,--,int,Maximum number of solving iterations in between which resistance and reactence (``x/r``) are updated for branches according to ``s_nom_opt`` of the previous run. From 1b569dde1bcbcd32175d41b0ba3ed265e76a1aad Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Tue, 2 Jan 2024 16:02:10 +0100 Subject: [PATCH 038/101] move code for national CO2 budgets out of extra_functionality This can be added by derived workflows like PyPSA-Eur via additional_functionality. Changed additional_functionality to pass snakemake rather than wildcards and config separately. This gives maximal flexibility. 
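A file passed as snakemake.input.additional_functionality is then expected to expose a hook of roughly this shape (sketch; the body is left entirely to the derived workflow):

    def additional_functionality(n, snapshots, snakemake):
        # wildcards, params and config are all reachable via the snakemake object,
        # e.g. snakemake.wildcards.planning_horizons or snakemake.config["countries"]
        pass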
--- config/config.default.yaml | 17 ------ scripts/solve_network.py | 110 +------------------------------------ 2 files changed, 1 insertion(+), 126 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index c1e7ed0f..6d2ebd9f 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -84,22 +84,6 @@ co2_budget: 2045: 0.032 2050: 0.000 -co2_budget_national: - 2030: - 'DE': 0.350 - 'AT': 0.450 - 'BE': 0.450 - 'CH': 0.450 - 'CZ': 0.450 - 'DK': 0.450 - 'FR': 0.450 - 'GB': 0.450 - 'LU': 0.450 - 'NL': 0.450 - 'NO': 0.450 - 'PL': 0.450 - 'SE': 0.450 - # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#electricity electricity: voltages: [220., 300., 380.] @@ -470,7 +454,6 @@ sector: hydrogen_turbine: false SMR: true SMR_cc: true - co2_budget_national: false regional_methanol_demand: false #set to true if regional CO2 constraints needed regional_oil_demand: false #set to true if regional CO2 constraints needed regional_co2_sequestration_potential: diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 6f88b904..433b175b 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -41,9 +41,6 @@ logger = logging.getLogger(__name__) pypsa.pf.logger.setLevel(logging.WARNING) from pypsa.descriptors import get_switchable_as_dense as get_as_dense -from prepare_sector_network import emission_sectors_from_opts - - def add_land_use_constraint(n, planning_horizons, config): if "m" in snakemake.wildcards.clusters: @@ -765,100 +762,6 @@ def add_pipe_retrofit_constraint(n): n.model.add_constraints(lhs == rhs, name="Link-pipe_retrofit") -def add_co2limit_country(n, limit_countries, nyears=1.0): - """ - Add a set of emissions limit constraints for specified countries. - - The countries and emissions limits are specified in the config file entry 'co2_budget_country_{investment_year}'. - - Parameters - ---------- - n : pypsa.Network - config : dict - limit_countries : dict - nyears: float, optional - Used to scale the emissions constraint to the number of snapshots of the base network. - """ - logger.info(f"Adding CO2 budget limit for each country as per unit of 1990 levels") - - countries = n.config["countries"] - - # TODO: import function from prepare_sector_network? Move to common place? 
- sectors = emission_sectors_from_opts(opts) - - # convert Mt to tCO2 - co2_totals = 1e6 * pd.read_csv(snakemake.input.co2_totals_name, index_col=0) - - co2_limit_countries = co2_totals.loc[countries, sectors].sum(axis=1) - co2_limit_countries = co2_limit_countries.loc[co2_limit_countries.index.isin(limit_countries.keys())] - - co2_limit_countries *= co2_limit_countries.index.map(limit_countries) * nyears - - p = n.model["Link-p"] # dimension: (time, component) - - # NB: Most country-specific links retain their locational information in bus1 (except for DAC, where it is in bus2, and process emissions, where it is in bus0) - country = n.links.bus1.map(n.buses.location).map(n.buses.country) - country_DAC = ( - n.links[n.links.carrier == "DAC"] - .bus2.map(n.buses.location) - .map(n.buses.country) - ) - country[country_DAC.index] = country_DAC - country_process_emissions = ( - n.links[n.links.carrier.str.contains("process emissions")] - .bus0.map(n.buses.location) - .map(n.buses.country) - ) - country[country_process_emissions.index] = country_process_emissions - - lhs = [] - for port in [col[3:] for col in n.links if col.startswith("bus")]: - if port == str(0): - efficiency = ( - n.links["efficiency"].apply(lambda x: -1.0).rename("efficiency0") - ) - elif port == str(1): - efficiency = n.links["efficiency"] - else: - efficiency = n.links[f"efficiency{port}"] - mask = n.links[f"bus{port}"].map(n.buses.carrier).eq("co2") - - idx = n.links[mask].index - - international = n.links.carrier.map( - lambda x: 0.4 if x in ["kerosene for aviation", "shipping oil"] else 1.0 - ) - grouping = country.loc[idx] - - if not grouping.isnull().all(): - expr = ( - ((p.loc[:, idx] * efficiency[idx] * international[idx]) - .groupby(grouping, axis=1) - .sum() - *n.snapshot_weightings.generators - ) - .sum(dims="snapshot") - ) - lhs.append(expr) - - lhs = sum(lhs) # dimension: (country) - lhs = lhs.rename({list(lhs.dims.keys())[0]: "snapshot"}) - rhs = pd.Series(co2_limit_countries) # dimension: (country) - - for ct in lhs.indexes["snapshot"]: - n.model.add_constraints( - lhs.loc[ct] <= rhs[ct], - name=f"GlobalConstraint-co2_limit_per_country{ct}", - ) - n.add( - "GlobalConstraint", - f"co2_limit_per_country{ct}", - constant=rhs[ct], - sense="<=", - type="", - ) - - def extra_functionality(n, snapshots): """ Collects supplementary constraints which will be passed to @@ -889,23 +792,12 @@ def extra_functionality(n, snapshots): add_carbon_budget_constraint(n, snapshots) add_retrofit_gas_boiler_constraint(n, snapshots) - if n.config["sector"]["co2_budget_national"]: - # prepare co2 constraint - nhours = n.snapshot_weightings.generators.sum() - nyears = nhours / 8760 - investment_year = int(snakemake.wildcards.planning_horizons[-4:]) - limit_countries = snakemake.config["co2_budget_national"][investment_year] - - # add co2 constraint for each country - logger.info(f"Add CO2 limit for each country") - add_co2limit_country(n, limit_countries, nyears) - if "additional_functionality" in snakemake.input.keys(): import importlib, os, sys sys.path.append(os.path.dirname(snakemake.input.additional_functionality)) additional_functionality = importlib.import_module(os.path.splitext(os.path.basename(snakemake.input.additional_functionality))[0]) - additional_functionality.additional_functionality(n, snapshots, snakemake.wildcards, config) + additional_functionality.additional_functionality(n, snapshots, snakemake) def solve_network(n, config, solving, opts="", **kwargs): From f494dd85b969f9491a2d9bf81ea98008452440a7 Mon Sep 17 
00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 15:21:49 +0000 Subject: [PATCH 039/101] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/add_brownfield.py | 31 ++++++++------ scripts/prepare_sector_network.py | 69 ++++++++++++++++++++----------- scripts/solve_network.py | 11 ++++- 3 files changed, 73 insertions(+), 38 deletions(-) diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index fb1453fd..ffdaf46b 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -119,32 +119,39 @@ def add_brownfield(n, n_p, year): n.links.loc[new_pipes, "p_nom"] = 0.0 n.links.loc[new_pipes, "p_nom_min"] = 0.0 + def disable_grid_expansion_if_LV_limit_hit(n): if not "lv_limit" in n.global_constraints.index: return - #calculate minimum LV + # calculate minimum LV attr = "nom_min" dc = n.links.index[n.links.carrier == "DC"] - tot = (n.lines["s_" + attr]*n.lines["length"]).sum() + (n.links.loc[dc,"p_" + attr]*n.links.loc[dc,"length"]).sum() + tot = (n.lines["s_" + attr] * n.lines["length"]).sum() + ( + n.links.loc[dc, "p_" + attr] * n.links.loc[dc, "length"] + ).sum() - diff = n.global_constraints.at["lv_limit","constant"]-tot + diff = n.global_constraints.at["lv_limit", "constant"] - tot - #allow small numerical differences + # allow small numerical differences limit = 1 if diff < limit: - logger.info(f"LV is already reached (gap {diff}), disabling expansion and LV limit") + logger.info( + f"LV is already reached (gap {diff}), disabling expansion and LV limit" + ) expandable_acs = n.lines.index[n.lines.s_nom_extendable] - n.lines.loc[expandable_acs,"s_nom_extendable"] = False - n.lines.loc[expandable_acs,"s_nom"] = n.lines.loc[expandable_acs,"s_nom_min"] + n.lines.loc[expandable_acs, "s_nom_extendable"] = False + n.lines.loc[expandable_acs, "s_nom"] = n.lines.loc[expandable_acs, "s_nom_min"] - expandable_dcs = n.links.index[n.links.p_nom_extendable & (n.links.carrier == "DC")] - n.links.loc[expandable_dcs,"p_nom_extendable"] = False - n.links.loc[expandable_dcs,"p_nom"] = n.links.loc[expandable_dcs,"p_nom_min"] + expandable_dcs = n.links.index[ + n.links.p_nom_extendable & (n.links.carrier == "DC") + ] + n.links.loc[expandable_dcs, "p_nom_extendable"] = False + n.links.loc[expandable_dcs, "p_nom"] = n.links.loc[expandable_dcs, "p_nom_min"] + + n.global_constraints.drop("lv_limit", inplace=True) - n.global_constraints.drop("lv_limit", - inplace=True) if __name__ == "__main__": if "snakemake" not in globals(): diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index b5a0c0d5..f1ddce2d 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -128,8 +128,8 @@ def define_spatial(nodes, options): # methanol - #beware: unlike other carriers, uses locations rather than locations+carriername - #this allows to avoid separation between nodes and locations + # beware: unlike other carriers, uses locations rather than locations+carriername + # this allows to avoid separation between nodes and locations spatial.methanol = SimpleNamespace() @@ -1595,10 +1595,16 @@ def add_land_transport(n, costs): ice_efficiency = options["transport_internal_combustion_efficiency"] - p_set_land_transport_oil = ice_share / ice_efficiency * transport[nodes].rename(columns=lambda x: x + " land transport oil") + p_set_land_transport_oil = ( + ice_share + / ice_efficiency + * transport[nodes].rename(columns=lambda x: x 
+ " land transport oil") + ) if not options["regional_oil_demand"]: - p_set_land_transport_oil = p_set_land_transport_oil.sum(axis=1).to_frame(name="EU land transport oil") + p_set_land_transport_oil = p_set_land_transport_oil.sum(axis=1).to_frame( + name="EU land transport oil" + ) n.madd( "Bus", @@ -2454,7 +2460,7 @@ def add_industry(n, costs): efficiency=1.0, ) - if len(spatial.biomass.industry_cc)<=1 and len(spatial.co2.nodes)>1: + if len(spatial.biomass.industry_cc) <= 1 and len(spatial.co2.nodes) > 1: link_names = nodes + " " + spatial.biomass.industry_cc else: link_names = spatial.biomass.industry_cc @@ -2650,7 +2656,11 @@ def add_industry(n, costs): options["shipping_oil_efficiency"] / options["shipping_methanol_efficiency"] ) - p_set_methanol = shipping_methanol_share * p_set.rename(lambda x : x + " shipping methanol") * efficiency + p_set_methanol = ( + shipping_methanol_share + * p_set.rename(lambda x: x + " shipping methanol") + * efficiency + ) if not options["regional_methanol_demand"]: p_set_methanol = p_set_methanol.sum() @@ -2679,7 +2689,10 @@ def add_industry(n, costs): bus2="co2 atmosphere", carrier="shipping methanol", p_nom_extendable=True, - efficiency2=1 / options["MWh_MeOH_per_tCO2"], # CO2 intensity methanol based on stoichiometric calculation with 22.7 GJ/t methanol (32 g/mol), CO2 (44 g/mol), 277.78 MWh/TJ = 0.218 t/MWh + efficiency2=1 + / options[ + "MWh_MeOH_per_tCO2" + ], # CO2 intensity methanol based on stoichiometric calculation with 22.7 GJ/t methanol (32 g/mol), CO2 (44 g/mol), 277.78 MWh/TJ = 0.218 t/MWh ) if "oil" not in n.buses.carrier.unique(): @@ -2714,7 +2727,6 @@ def add_industry(n, costs): ) if shipping_oil_share: - p_set_oil = shipping_oil_share * p_set.rename(lambda x: x + " shipping oil") if not options["regional_oil_demand"]: @@ -2793,7 +2805,13 @@ def add_industry(n, costs): if demand_factor != 1: logger.warning(f"Changing HVC demand by {demand_factor*100-100:+.2f}%.") - p_set_plastics = demand_factor * industrial_demand.loc[nodes, "naphtha"].rename(lambda x: x + " naphtha for industry") / nhours + p_set_plastics = ( + demand_factor + * industrial_demand.loc[nodes, "naphtha"].rename( + lambda x: x + " naphtha for industry" + ) + / nhours + ) if not options["regional_oil_demand"]: p_set_plastics = p_set_plastics.sum() @@ -2816,7 +2834,10 @@ def add_industry(n, costs): # some CO2 from naphtha are process emissions from steam cracker # rest of CO2 released to atmosphere either in waste-to-energy or decay - process_co2_per_naphtha = industrial_demand.loc[nodes, "process emission from feedstock"].sum() / industrial_demand.loc[nodes, "naphtha"].sum() + process_co2_per_naphtha = ( + industrial_demand.loc[nodes, "process emission from feedstock"].sum() + / industrial_demand.loc[nodes, "naphtha"].sum() + ) emitted_co2_per_naphtha = costs.at["oil", "CO2 intensity"] - process_co2_per_naphtha n.madd( @@ -2840,11 +2861,11 @@ def add_industry(n, costs): all_aviation = ["total international aviation", "total domestic aviation"] p_set = ( - demand_factor - * pop_weighted_energy_totals.loc[nodes, all_aviation].sum(axis=1) - * 1e6 - / nhours - ).rename(lambda x: x + " kerosene for aviation") + demand_factor + * pop_weighted_energy_totals.loc[nodes, all_aviation].sum(axis=1) + * 1e6 + / nhours + ).rename(lambda x: x + " kerosene for aviation") if not options["regional_oil_demand"]: p_set = p_set.sum() @@ -3095,9 +3116,9 @@ def add_agriculture(n, costs): f"Total agriculture machinery shares sum up to {total_share:.2%}, corresponding to increased or decreased 
demand assumptions." ) - machinery_nodal_energy = pop_weighted_energy_totals.loc[ - nodes, "total agriculture machinery" - ] * 1e6 + machinery_nodal_energy = ( + pop_weighted_energy_totals.loc[nodes, "total agriculture machinery"] * 1e6 + ) if electric_share > 0: efficiency_gain = ( @@ -3111,15 +3132,15 @@ def add_agriculture(n, costs): suffix=" agriculture machinery electric", bus=nodes, carrier="agriculture machinery electric", - p_set=electric_share - / efficiency_gain - * machinery_nodal_energy - / nhours, + p_set=electric_share / efficiency_gain * machinery_nodal_energy / nhours, ) if oil_share > 0: - - p_set = oil_share * machinery_nodal_energy.rename(lambda x: x + " agriculture machinery oil") / nhours + p_set = ( + oil_share + * machinery_nodal_energy.rename(lambda x: x + " agriculture machinery oil") + / nhours + ) if not options["regional_oil_demand"]: p_set = p_set.sum() diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 433b175b..4bdbb543 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -793,9 +793,16 @@ def extra_functionality(n, snapshots): add_retrofit_gas_boiler_constraint(n, snapshots) if "additional_functionality" in snakemake.input.keys(): - import importlib, os, sys + import importlib + import os + import sys + sys.path.append(os.path.dirname(snakemake.input.additional_functionality)) - additional_functionality = importlib.import_module(os.path.splitext(os.path.basename(snakemake.input.additional_functionality))[0]) + additional_functionality = importlib.import_module( + os.path.splitext( + os.path.basename(snakemake.input.additional_functionality) + )[0] + ) additional_functionality.additional_functionality(n, snapshots, snakemake) From 0d03d384cc0ce27e681b76d14418b6d1b5cf9d1c Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 09:07:08 +0100 Subject: [PATCH 040/101] lossy_bidirectional_links: use original length for loss calculation --- scripts/prepare_sector_network.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 998f954e..09de541a 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3309,15 +3309,16 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): n.links.loc[carrier_i, "length"] / 1e3 ) rev_links = ( - n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) + n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0", "length": "length_original"}, axis=1) ) - rev_links.capital_cost = 0 - rev_links.length = 0 + rev_links["capital_cost"] = 0 + rev_links["length"] = 0 rev_links["reversed"] = True rev_links.index = rev_links.index.map(lambda x: x + "-reversed") n.links = pd.concat([n.links, rev_links], sort=False) n.links["reversed"] = n.links["reversed"].fillna(False) + n.links["length_original"] = n.links["length_original"].fillna(n.links.length) # do compression losses after concatenation to take electricity consumption at bus0 in either direction carrier_i = n.links.query("carrier == @carrier").index @@ -3326,7 +3327,7 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): n.buses.location ) # electricity n.links.loc[carrier_i, "efficiency2"] = ( - -compression_per_1000km * n.links.loc[carrier_i, "length"] / 1e3 + -compression_per_1000km * n.links.loc[carrier_i, "length_original"] / 1e3 ) From 2b2bad392f6c83771472d93ca2df597608ea6b26 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 08:08:21 +0000 Subject: [PATCH 041/101] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/prepare_sector_network.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 09de541a..bab8de7b 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3309,7 +3309,9 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): n.links.loc[carrier_i, "length"] / 1e3 ) rev_links = ( - n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0", "length": "length_original"}, axis=1) + n.links.loc[carrier_i] + .copy() + .rename({"bus0": "bus1", "bus1": "bus0", "length": "length_original"}, axis=1) ) rev_links["capital_cost"] = 0 rev_links["length"] = 0 From 075ffb5c043edf16b1a9b69c4be3ed31da7919b4 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 09:26:08 +0100 Subject: [PATCH 042/101] add release notes and documentation --- doc/release_notes.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 505c747e..82f63252 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -10,6 +10,13 @@ Release Notes Upcoming Release ================ +* Add option to specify losses for bidirectional links, e.g. pipelines or HVDC + links, in configuration file under ``sector: transmission_efficiency:``. Users + can specify static or length-dependent values as well as a length-dependent + electricity demand for compression, which is implemented as a multi-link to + the local electricity buses. The bidirectional links will then be split into + two unidirectional links with linked capacities. + * Updated Global Energy Monitor LNG terminal data to March 2023 version. * For industry distribution, use EPRTR as fallback if ETS data is not available. From d829d6fd3da28cc7103648132b07726deda1b9c8 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 09:28:24 +0100 Subject: [PATCH 043/101] add release notes and documentation --- doc/configtables/sector.csv | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/doc/configtables/sector.csv b/doc/configtables/sector.csv index d610c862..2767c603 100644 --- a/doc/configtables/sector.csv +++ b/doc/configtables/sector.csv @@ -107,6 +107,11 @@ electricity_distribution _grid,--,"{true, false}",Add a simplified representatio electricity_distribution _grid_cost_factor,,,Multiplies the investment cost of the electricity distribution grid ,,, electricity_grid _connection,--,"{true, false}",Add the cost of electricity grid connection for onshore wind and solar +transmission_efficiency,,,Section to specify transmission losses or compression energy demands of bidirectional links. Splits them into two capacity-linked unidirectional links. +-- {carrier},--,str,The carrier of the link. +-- -- efficiency_static,p.u.,float,Length-independent transmission efficiency. +-- -- efficiency_per_1000km,p.u. per 1000 km,float,Length-dependent transmission efficiency ($\eta^{\text{length}}$) +-- -- compression_per_1000km,p.u. per 1000 km,float,Length-dependent electricity demand for compression ($\eta \cdot \text{length}$) implemented as multi-link to local electricity bus. H2_network,--,"{true, false}",Add option for new hydrogen pipelines gas_network,--,"{true, false}","Add existing natural gas infrastructure, incl. 
LNG terminals, production and entry-points. The existing gas network is added with a lossless transport model. A length-weighted `k-edge augmentation algorithm `_ can be run to add new candidate gas pipelines such that all regions of the model can be connected to the gas network. When activated, all the gas demands are regionally disaggregated as well." H2_retrofit,--,"{true, false}",Add option for retrofiting existing pipelines to transport hydrogen. From 9d939fa635f8a0b55f7049dd23a29facfeda1471 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 10:12:43 +0100 Subject: [PATCH 044/101] remove helmeth option --- config/config.default.yaml | 2 -- doc/configtables/sector.csv | 1 - doc/release_notes.rst | 2 ++ scripts/plot_network.py | 4 ++-- scripts/plot_summary.py | 1 - scripts/prepare_sector_network.py | 17 ----------------- 6 files changed, 4 insertions(+), 23 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index d7704a27..e8ca22dc 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -451,7 +451,6 @@ sector: solar_cf_correction: 0.788457 # = >>> 1/1.2683 marginal_cost_storage: 0. #1e-4 methanation: true - helmeth: false coal_cc: false dac: true co2_vent: false @@ -954,7 +953,6 @@ plotting: Sabatier: '#9850ad' methanation: '#c44ce6' methane: '#c44ce6' - helmeth: '#e899ff' # synfuels Fischer-Tropsch: '#25c49a' liquid: '#25c49a' diff --git a/doc/configtables/sector.csv b/doc/configtables/sector.csv index 280c1906..57e6ce3d 100644 --- a/doc/configtables/sector.csv +++ b/doc/configtables/sector.csv @@ -71,7 +71,6 @@ solar_thermal,--,"{true, false}",Add option for using solar thermal to generate solar_cf_correction,--,float,The correction factor for the value provided by the solar thermal profile calculations marginal_cost_storage,currency/MWh ,float,The marginal cost of discharging batteries in distributed grids methanation,--,"{true, false}",Add option for transforming hydrogen and CO2 into methane using methanation. -helmeth,--,"{true, false}",Add option for transforming power into gas using HELMETH (Integrated High-Temperature ELectrolysis and METHanation for Effective Power to Gas Conversion) coal_cc,--,"{true, false}",Add option for coal CHPs with carbon capture dac,--,"{true, false}",Add option for Direct Air Capture (DAC) co2_vent,--,"{true, false}",Add option for vent out CO2 from storages to the atmosphere. diff --git a/doc/release_notes.rst b/doc/release_notes.rst index f84c0f83..5ac7925e 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -79,6 +79,8 @@ Upcoming Release Energy Monitor's `Europe Gas Tracker `_. +* Remove HELMETH option. + **Bugs and Compatibility** * A bug preventing custom powerplants specified in ``data/custom_powerplants.csv`` was fixed. 
(https://github.com/PyPSA/pypsa-eur/pull/732) diff --git a/scripts/plot_network.py b/scripts/plot_network.py index f44bb6de..67481120 100644 --- a/scripts/plot_network.py +++ b/scripts/plot_network.py @@ -31,7 +31,7 @@ def rename_techs_tyndp(tech): tech = rename_techs(tech) if "heat pump" in tech or "resistive heater" in tech: return "power-to-heat" - elif tech in ["H2 Electrolysis", "methanation", "helmeth", "H2 liquefaction"]: + elif tech in ["H2 Electrolysis", "methanation", "H2 liquefaction"]: return "power-to-gas" elif tech == "H2": return "H2 storage" @@ -495,7 +495,7 @@ def plot_ch4_map(network): # make a fake MultiIndex so that area is correct for legend fossil_gas.index = pd.MultiIndex.from_product([fossil_gas.index, ["fossil gas"]]) - methanation_i = n.links[n.links.carrier.isin(["helmeth", "Sabatier"])].index + methanation_i = n.links.query("carrier == 'Sabatier'").index methanation = ( abs( n.links_t.p1.loc[:, methanation_i].mul( diff --git a/scripts/plot_summary.py b/scripts/plot_summary.py index 5804e785..67ac9b55 100644 --- a/scripts/plot_summary.py +++ b/scripts/plot_summary.py @@ -121,7 +121,6 @@ preferred_order = pd.Index( "gas boiler", "gas", "natural gas", - "helmeth", "methanation", "ammonia", "hydrogen storage", diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index c4a67a38..f746fe9c 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -1369,23 +1369,6 @@ def add_storage_and_grids(n, costs): lifetime=costs.at["methanation", "lifetime"], ) - if options["helmeth"]: - n.madd( - "Link", - spatial.nodes, - suffix=" helmeth", - bus0=nodes, - bus1=spatial.gas.nodes, - bus2=spatial.co2.nodes, - carrier="helmeth", - p_nom_extendable=True, - efficiency=costs.at["helmeth", "efficiency"], - efficiency2=-costs.at["helmeth", "efficiency"] - * costs.at["gas", "CO2 intensity"], - capital_cost=costs.at["helmeth", "fixed"], - lifetime=costs.at["helmeth", "lifetime"], - ) - if options.get("coal_cc"): n.madd( "Link", From 92df7bbb9c786667364f7358f5ee90caad87ec1d Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 10:27:42 +0100 Subject: [PATCH 045/101] build_renewable_profiles: improve logging of time passed --- scripts/build_renewable_profiles.py | 29 +++++++++++++++++++++-------- 1 file changed, 21 insertions(+), 8 deletions(-) diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index 3a1c525e..ef8683cb 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -277,15 +277,14 @@ if __name__ == "__main__": snakemake.input.country_shapes, buffer=buffer, invert=True ) + logger.info("Calculate landuse availability...") + start = time.time() + kwargs = dict(nprocesses=nprocesses, disable_progressbar=noprogress) - if noprogress: - logger.info("Calculate landuse availabilities...") - start = time.time() - availability = cutout.availabilitymatrix(regions, excluder, **kwargs) - duration = time.time() - start - logger.info(f"Completed availability calculation ({duration:2.2f}s)") - else: - availability = cutout.availabilitymatrix(regions, excluder, **kwargs) + availability = cutout.availabilitymatrix(regions, excluder, **kwargs) + + duration = time.time() - start + logger.info(f"Completed landuse availability calculation ({duration:2.2f}s)") # For Moldova and Ukraine: Overwrite parts not covered by Corine with # externally determined available areas @@ -304,8 +303,19 @@ if __name__ == "__main__": func = getattr(cutout, 
resource.pop("method")) if client is not None: resource["dask_kwargs"] = {"scheduler": client} + + logger.info("Calculate average capacity factor...") + start = time.time() + capacity_factor = correction_factor * func(capacity_factor=True, **resource) layout = capacity_factor * area * capacity_per_sqkm + + duration = time.time() - start + logger.info(f"Completed average capacity factor calculation ({duration:2.2f}s)") + + logger.info("Calculate weighted capacity factor time series...") + start = time.time() + profile, capacities = func( matrix=availability.stack(spatial=["y", "x"]), layout=layout, @@ -315,6 +325,9 @@ if __name__ == "__main__": **resource, ) + duration = time.time() - start + logger.info(f"Completed weighted capacity factor time series calculation ({duration:2.2f}s)") + logger.info(f"Calculating maximal capacity per bus (method '{p_nom_max_meth}')") if p_nom_max_meth == "simple": p_nom_max = capacity_per_sqkm * availability @ area From fdb63bc6ca4c3aa332104d26bca1c0a5d5c546c1 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 09:29:08 +0000 Subject: [PATCH 046/101] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/build_renewable_profiles.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index ef8683cb..83c79482 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -326,7 +326,9 @@ if __name__ == "__main__": ) duration = time.time() - start - logger.info(f"Completed weighted capacity factor time series calculation ({duration:2.2f}s)") + logger.info( + f"Completed weighted capacity factor time series calculation ({duration:2.2f}s)" + ) logger.info(f"Calculating maximal capacity per bus (method '{p_nom_max_meth}')") if p_nom_max_meth == "simple": From 6b344c9901f7aa78d8714ad00cb9626b2773cb37 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 10:33:33 +0100 Subject: [PATCH 047/101] renewable_profiles: remove conservative potential estimation method --- config/config.default.yaml | 4 -- doc/configtables/offwind-ac.csv | 1 - doc/configtables/offwind-dc.csv | 1 - doc/configtables/onwind.csv | 1 - doc/configtables/solar.csv | 1 - scripts/build_renewable_profiles.py | 98 ++++++++++++----------------- 6 files changed, 40 insertions(+), 66 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index d7704a27..dc818e84 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -167,7 +167,6 @@ renewable: distance_grid_codes: [1, 2, 3, 4, 5, 6] natura: true excluder_resolution: 100 - potential: simple # or conservative clip_p_max_pu: 1.e-2 offwind-ac: cutout: europe-2013-era5 @@ -183,7 +182,6 @@ renewable: max_depth: 50 max_shore_distance: 30000 excluder_resolution: 200 - potential: simple # or conservative clip_p_max_pu: 1.e-2 offwind-dc: cutout: europe-2013-era5 @@ -199,7 +197,6 @@ renewable: max_depth: 50 min_shore_distance: 30000 excluder_resolution: 200 - potential: simple # or conservative clip_p_max_pu: 1.e-2 solar: cutout: europe-2013-sarah @@ -214,7 +211,6 @@ renewable: corine: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 26, 31, 32] natura: true excluder_resolution: 100 - potential: simple # or conservative clip_p_max_pu: 1.e-2 hydro: cutout: europe-2013-era5 diff --git a/doc/configtables/offwind-ac.csv 
b/doc/configtables/offwind-ac.csv index 6b756799..c3512a9e 100644 --- a/doc/configtables/offwind-ac.csv +++ b/doc/configtables/offwind-ac.csv @@ -12,5 +12,4 @@ ship_threshold,--,float,"Ship density threshold from which areas are excluded." max_depth,m,float,"Maximum sea water depth at which wind turbines can be build. Maritime areas with deeper waters are excluded in the process of calculating the AC-connected offshore wind potential." min_shore_distance,m,float,"Minimum distance to the shore below which wind turbines cannot be build. Such areas close to the shore are excluded in the process of calculating the AC-connected offshore wind potential." max_shore_distance,m,float,"Maximum distance to the shore above which wind turbines cannot be build. Such areas close to the shore are excluded in the process of calculating the AC-connected offshore wind potential." -potential,--,"One of {'simple', 'conservative'}","Method to compute the maximal installable potential for a node; confer :ref:`renewableprofiles`" clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero." diff --git a/doc/configtables/offwind-dc.csv b/doc/configtables/offwind-dc.csv index 1f72228a..35095597 100644 --- a/doc/configtables/offwind-dc.csv +++ b/doc/configtables/offwind-dc.csv @@ -12,5 +12,4 @@ ship_threshold,--,float,"Ship density threshold from which areas are excluded." max_depth,m,float,"Maximum sea water depth at which wind turbines can be build. Maritime areas with deeper waters are excluded in the process of calculating the AC-connected offshore wind potential." min_shore_distance,m,float,"Minimum distance to the shore below which wind turbines cannot be build." max_shore_distance,m,float,"Maximum distance to the shore above which wind turbines cannot be build." -potential,--,"One of {'simple', 'conservative'}","Method to compute the maximal installable potential for a node; confer :ref:`renewableprofiles`" clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero." diff --git a/doc/configtables/onwind.csv b/doc/configtables/onwind.csv index ba9482e5..b7e823b3 100644 --- a/doc/configtables/onwind.csv +++ b/doc/configtables/onwind.csv @@ -9,7 +9,6 @@ corine,,, -- distance,m,float,"Distance to keep from areas specified in ``distance_grid_codes``" -- distance_grid_codes,--,"Any subset of the `CORINE Land Cover code list `_","Specifies areas according to CORINE Land Cover codes to which wind turbines must maintain a distance specified in the setting ``distance``." natura,bool,"{true, false}","Switch to exclude `Natura 2000 `_ natural protection areas. Area is excluded if ``true``." -potential,--,"One of {'simple', 'conservative'}","Method to compute the maximal installable potential for a node; confer :ref:`renewableprofiles`" clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero." correction_factor,--,float,"Correction factor for capacity factor time series." excluder_resolution,m,float,"Resolution on which to perform geographical elibility analysis." 
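For reference, the two aggregation methods that the removed ``potential`` setting used to switch between differ only in how per-cell potentials are rolled up to a node. A minimal numeric sketch with made-up numbers (not part of the patch), mirroring the formulas deleted from ``build_renewable_profiles.py`` further down:

    import numpy as np

    # Hypothetical toy values for one bus with three weather-grid cells.
    capacity_per_sqkm = 3.0                         # MW/km2
    availability = np.array([[0.2, 0.5, 1.0]])      # eligible land share, (bus, cell)
    area = np.array([100.0, 100.0, 100.0])          # km2 per weather cell
    capacity_factor = np.array([0.12, 0.25, 0.32])  # mean capacity factor per cell

    # "simple" (kept): add up the installable potential of all eligible cells
    p_nom_max_simple = capacity_per_sqkm * availability @ area

    # "conservative" (removed): scale the layout-weighted capacities by the best
    # cell's capacity factor (the real code masks unavailable cells first)
    layout = capacity_factor * area * capacity_per_sqkm
    capacities = (availability * layout).sum(axis=1)
    p_nom_max_conservative = capacities / capacity_factor.max()

    print(p_nom_max_simple, p_nom_max_conservative)  # ~510 MW vs ~440 MW here

Since every cell's capacity factor is at most the maximum, the conservative estimate can never exceed the simple one, so removing it can only relax nodal limits.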
diff --git a/doc/configtables/solar.csv b/doc/configtables/solar.csv index 803445d5..7da1281b 100644 --- a/doc/configtables/solar.csv +++ b/doc/configtables/solar.csv @@ -10,6 +10,5 @@ capacity_per_sqkm,:math:`MW/km^2`,float,"Allowable density of solar panel placem correction_factor,--,float,"A correction factor for the capacity factor (availability) time series." corine,--,"Any subset of the `CORINE Land Cover code list `_","Specifies areas according to CORINE Land Cover codes which are generally eligible for solar panel placement." natura,bool,"{true, false}","Switch to exclude `Natura 2000 `_ natural protection areas. Area is excluded if ``true``." -potential,--,"One of {'simple', 'conservative'}","Method to compute the maximal installable potential for a node; confer :ref:`renewableprofiles`" clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero." excluder_resolution,m,float,"Resolution on which to perform geographical elibility analysis." diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index 3a1c525e..c33bdf9b 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -7,10 +7,10 @@ """ Calculates for each network node the (i) installable capacity (based on land- use), (ii) the available generation time series (based on weather data), and -(iii) the average distance from the node for onshore wind, AC-connected -offshore wind, DC-connected offshore wind and solar PV generators. In addition -for offshore wind it calculates the fraction of the grid connection which is -under water. +(iii) the average distance from the node for onshore wind, AC-connected offshore +wind, DC-connected offshore wind and solar PV generators. In addition for +offshore wind it calculates the fraction of the grid connection which is under +water. .. note:: Hydroelectric profiles are built in script :mod:`build_hydro_profiles`. @@ -26,20 +26,9 @@ Relevant settings renewable: {technology}: - cutout: - corine: - grid_codes: - distance: - natura: - max_depth: - max_shore_distance: - min_shore_distance: - capacity_per_sqkm: - correction_factor: - potential: - min_p_max_pu: - clip_p_max_pu: - resource: + cutout: corine: grid_codes: distance: natura: max_depth: + max_shore_distance: min_shore_distance: capacity_per_sqkm: + correction_factor: min_p_max_pu: clip_p_max_pu: resource: .. seealso:: Documentation of the configuration file ``config/config.yaml`` at @@ -48,21 +37,30 @@ Relevant settings Inputs ------ -- ``data/bundle/corine/g250_clc06_V18_5.tif``: `CORINE Land Cover (CLC) `_ inventory on `44 classes `_ of land use (e.g. forests, arable land, industrial, urban areas). +- ``data/bundle/corine/g250_clc06_V18_5.tif``: `CORINE Land Cover (CLC) + `_ inventory on `44 + classes `_ of + land use (e.g. forests, arable land, industrial, urban areas). .. image:: img/corine.png :scale: 33 % -- ``data/bundle/GEBCO_2014_2D.nc``: A `bathymetric `_ data set with a global terrain model for ocean and land at 15 arc-second intervals by the `General Bathymetric Chart of the Oceans (GEBCO) `_. +- ``data/bundle/GEBCO_2014_2D.nc``: A `bathymetric + `_ data set with a global terrain + model for ocean and land at 15 arc-second intervals by the `General + Bathymetric Chart of the Oceans (GEBCO) + `_. .. 
image:: img/gebco_2019_grid_image.jpg :scale: 50 % - **Source:** `GEBCO `_ + **Source:** `GEBCO + `_ - ``resources/natura.tiff``: confer :ref:`natura` - ``resources/offshore_shapes.geojson``: confer :ref:`shapes` -- ``resources/regions_onshore.geojson``: (if not offshore wind), confer :ref:`busregions` +- ``resources/regions_onshore.geojson``: (if not offshore wind), confer + :ref:`busregions` - ``resources/regions_offshore.geojson``: (if offshore wind), :ref:`busregions` - ``"cutouts/" + params["renewable"][{technology}]['cutout']``: :ref:`cutout` - ``networks/base.nc``: :ref:`base` @@ -128,25 +126,25 @@ Description This script functions at two main spatial resolutions: the resolution of the network nodes and their `Voronoi cells `_, and the resolution of the -cutout grid cells for the weather data. Typically the weather data grid is -finer than the network nodes, so we have to work out the distribution of -generators across the grid cells within each Voronoi cell. This is done by -taking account of a combination of the available land at each grid cell and the -capacity factor there. +cutout grid cells for the weather data. Typically the weather data grid is finer +than the network nodes, so we have to work out the distribution of generators +across the grid cells within each Voronoi cell. This is done by taking account +of a combination of the available land at each grid cell and the capacity factor +there. First the script computes how much of the technology can be installed at each cutout grid cell and each node using the `GLAES -`_ library. This uses the CORINE land use data, -Natura2000 nature reserves and GEBCO bathymetry data. +`_ library. This uses the CORINE land use +data, Natura2000 nature reserves and GEBCO bathymetry data. .. image:: img/eligibility.png :scale: 50 % :align: center -To compute the layout of generators in each node's Voronoi cell, the -installable potential in each grid cell is multiplied with the capacity factor -at each grid cell. This is done since we assume more generators are installed -at cells with a higher capacity factor. +To compute the layout of generators in each node's Voronoi cell, the installable +potential in each grid cell is multiplied with the capacity factor at each grid +cell. This is done since we assume more generators are installed at cells with a +higher capacity factor. .. image:: img/offwinddc-gridcell.png :scale: 50 % @@ -164,20 +162,14 @@ at cells with a higher capacity factor. :scale: 50 % :align: center -This layout is then used to compute the generation availability time series -from the weather data cutout from ``atlite``. +This layout is then used to compute the generation availability time series from +the weather data cutout from ``atlite``. -Two methods are available to compute the maximal installable potential for the -node (`p_nom_max`): ``simple`` and ``conservative``: - -- ``simple`` adds up the installable potentials of the individual grid cells. - If the model comes close to this limit, then the time series may slightly - overestimate production since it is assumed the geographical distribution is - proportional to capacity factor. - -- ``conservative`` assertains the nodal limit by increasing capacities - proportional to the layout until the limit of an individual grid cell is - reached. +The maximal installable potential for the node (`p_nom_max`) is computed by +adding up the installable potentials of the individual grid cells. 
+If the model comes close to this limit, then the time series may slightly +overestimate production since it is assumed the geographical distribution is +proportional to capacity factor. """ import functools import logging @@ -210,7 +202,6 @@ if __name__ == "__main__": resource = params["resource"] # pv panel params / wind turbine params correction_factor = params.get("correction_factor", 1.0) capacity_per_sqkm = params["capacity_per_sqkm"] - p_nom_max_meth = params.get("potential", "conservative") if isinstance(params.get("corine", {}), list): params["corine"] = {"grid_codes": params["corine"]} @@ -315,17 +306,8 @@ if __name__ == "__main__": **resource, ) - logger.info(f"Calculating maximal capacity per bus (method '{p_nom_max_meth}')") - if p_nom_max_meth == "simple": - p_nom_max = capacity_per_sqkm * availability @ area - elif p_nom_max_meth == "conservative": - max_cap_factor = capacity_factor.where(availability != 0).max(["x", "y"]) - p_nom_max = capacities / max_cap_factor - else: - raise AssertionError( - 'Config key `potential` should be one of "simple" ' - f'(default) or "conservative", not "{p_nom_max_meth}"' - ) + logger.info(f"Calculating maximal capacity per bus") + p_nom_max = capacity_per_sqkm * availability @ area logger.info("Calculate average distances.") layoutmatrix = (layout * availability).stack(spatial=["y", "x"]) From 38d587944b8625cfb208f6cc0c5046b1a3ee97d6 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 09:35:05 +0000 Subject: [PATCH 048/101] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/build_renewable_profiles.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index c33bdf9b..0ad840ba 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -7,10 +7,10 @@ """ Calculates for each network node the (i) installable capacity (based on land- use), (ii) the available generation time series (based on weather data), and -(iii) the average distance from the node for onshore wind, AC-connected offshore -wind, DC-connected offshore wind and solar PV generators. In addition for -offshore wind it calculates the fraction of the grid connection which is under -water. +(iii) the average distance from the node for onshore wind, AC-connected +offshore wind, DC-connected offshore wind and solar PV generators. In addition +for offshore wind it calculates the fraction of the grid connection which is +under water. .. note:: Hydroelectric profiles are built in script :mod:`build_hydro_profiles`. 
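The layout logic described in the docstring above can be condensed into a few lines. A rough sketch with hypothetical arrays (the real script operates on atlite/xarray objects; this is not the workflow code):

    import numpy as np

    cf_series = np.array([[0.10, 0.30, 0.50],
                          [0.20, 0.60, 0.40]])      # (time, cell) capacity factors
    availability = np.array([[1.0, 0.5, 0.0]])      # (bus, cell) eligible land share
    area = np.array([100.0, 100.0, 100.0])          # km2 per cell
    capacity_per_sqkm = 3.0                         # MW/km2

    # proxy layout: more capacity is placed where the mean capacity factor is higher
    layout = cf_series.mean(axis=0) * area * capacity_per_sqkm
    placed = availability * layout                  # MW available per (bus, cell)
    capacity = placed.sum(axis=1)                   # layout capacity per bus

    generation = cf_series @ placed.T               # (time, bus) generation in MW
    profile = generation / capacity                 # per-unit availability time series
    print(profile.round(3))                         # [[0.22], [0.44]] for these numbers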
From 29afffb4ca1b8480d88580769960b9536c17ef26 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 11:31:56 +0100 Subject: [PATCH 049/101] fix potential duplicate renaming of length_original --- scripts/prepare_sector_network.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 2ba64e87..54d5d7c8 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3441,8 +3441,9 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): rev_links = ( n.links.loc[carrier_i] .copy() - .rename({"bus0": "bus1", "bus1": "bus0", "length": "length_original"}, axis=1) + .rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) ) + rev_links["length_original"] = rev_links["length"] rev_links["capital_cost"] = 0 rev_links["length"] = 0 rev_links["reversed"] = True From 4606cb131b292c02e95b2af3583e7df48561fcb9 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 10:44:14 +0000 Subject: [PATCH 050/101] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/prepare_sector_network.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 54d5d7c8..815bf6ff 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3439,9 +3439,7 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): n.links.loc[carrier_i, "length"] / 1e3 ) rev_links = ( - n.links.loc[carrier_i] - .copy() - .rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) + n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) ) rev_links["length_original"] = rev_links["length"] rev_links["capital_cost"] = 0 From 05495ce48413d2aee4c351da29b230cd62add824 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 12:46:42 +0100 Subject: [PATCH 051/101] fix lossy bidirectional link coupling countraint for myopic --- scripts/solve_network.py | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index a2125895..0bfc68ff 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -691,13 +691,24 @@ def add_lossy_bidirectional_link_constraints(n): if not n.links.p_nom_extendable.any() or not "reversed" in n.links.columns: return - reversed_links = n.links.reversed.fillna(0).astype(bool) - carriers = n.links.loc[reversed_links, "carrier"].unique() + n.links["reversed"] = n.links.reversed.fillna(0).astype(bool) + carriers = n.links.loc[n.links.reversed, "carrier"].unique() forward_i = n.links.query( "carrier in @carriers and ~reversed and p_nom_extendable" ).index - backward_i = forward_i + "-reversed" + + def get_backward_i(forward_i): + return pd.Index( + [ + re.sub(r"-(\d{4})$", r"-reversed-\1", s) + if re.search(r"-\d{4}$", s) + else s + "-reversed" + for s in forward_i + ] + ) + + backward_i = get_backward_i(forward_i) lhs = n.model["Link-p_nom"].loc[backward_i] rhs = n.model["Link-p_nom"].loc[forward_i] From 80f9259bac4742b0f819ddc6542da458a7690874 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 12:57:22 +0100 Subject: [PATCH 052/101] handle gas pipeline retrofitting with lossy links --- scripts/solve_network.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/scripts/solve_network.py 
b/scripts/solve_network.py index 0bfc68ff..98afd49d 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -774,9 +774,13 @@ def add_pipe_retrofit_constraint(n): """ Add constraint for retrofitting existing CH4 pipelines to H2 pipelines. """ - gas_pipes_i = n.links.query("carrier == 'gas pipeline' and p_nom_extendable").index + if "reversed" not in n.links.columns: + n.links["reversed"] = False + gas_pipes_i = n.links.query( + "carrier == 'gas pipeline' and p_nom_extendable and ~reversed" + ).index h2_retrofitted_i = n.links.query( - "carrier == 'H2 pipeline retrofitted' and p_nom_extendable" + "carrier == 'H2 pipeline retrofitted' and p_nom_extendable and ~reversed" ).index if h2_retrofitted_i.empty or gas_pipes_i.empty: From bcafbb1e5459ac90eb3fbb65f9b3da22149a2f7a Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 13:15:43 +0100 Subject: [PATCH 053/101] compatibility for config with single node in single country --- scripts/build_clustered_population_layouts.py | 1 - scripts/build_heat_demand.py | 1 - scripts/build_solar_thermal_profiles.py | 1 - scripts/build_temperature_profiles.py | 1 - 4 files changed, 4 deletions(-) diff --git a/scripts/build_clustered_population_layouts.py b/scripts/build_clustered_population_layouts.py index 083f3de4..73972d3d 100644 --- a/scripts/build_clustered_population_layouts.py +++ b/scripts/build_clustered_population_layouts.py @@ -28,7 +28,6 @@ if __name__ == "__main__": gpd.read_file(snakemake.input.regions_onshore) .set_index("name") .buffer(0) - .squeeze() ) I = cutout.indicatormatrix(clustered_regions) diff --git a/scripts/build_heat_demand.py b/scripts/build_heat_demand.py index 73494260..da7c476e 100644 --- a/scripts/build_heat_demand.py +++ b/scripts/build_heat_demand.py @@ -34,7 +34,6 @@ if __name__ == "__main__": gpd.read_file(snakemake.input.regions_onshore) .set_index("name") .buffer(0) - .squeeze() ) I = cutout.indicatormatrix(clustered_regions) diff --git a/scripts/build_solar_thermal_profiles.py b/scripts/build_solar_thermal_profiles.py index d285691a..4e7a6cd4 100644 --- a/scripts/build_solar_thermal_profiles.py +++ b/scripts/build_solar_thermal_profiles.py @@ -36,7 +36,6 @@ if __name__ == "__main__": gpd.read_file(snakemake.input.regions_onshore) .set_index("name") .buffer(0) - .squeeze() ) I = cutout.indicatormatrix(clustered_regions) diff --git a/scripts/build_temperature_profiles.py b/scripts/build_temperature_profiles.py index 9db37c25..d8eaadce 100644 --- a/scripts/build_temperature_profiles.py +++ b/scripts/build_temperature_profiles.py @@ -34,7 +34,6 @@ if __name__ == "__main__": gpd.read_file(snakemake.input.regions_onshore) .set_index("name") .buffer(0) - .squeeze() ) I = cutout.indicatormatrix(clustered_regions) From d7051e7f66eb3bdbe0f790ea4513cbf01133b09a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 12:16:43 +0000 Subject: [PATCH 054/101] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/build_clustered_population_layouts.py | 4 +--- scripts/build_heat_demand.py | 4 +--- scripts/build_solar_thermal_profiles.py | 4 +--- scripts/build_temperature_profiles.py | 4 +--- 4 files changed, 4 insertions(+), 12 deletions(-) diff --git a/scripts/build_clustered_population_layouts.py b/scripts/build_clustered_population_layouts.py index 73972d3d..2f237656 100644 --- a/scripts/build_clustered_population_layouts.py +++ 
b/scripts/build_clustered_population_layouts.py @@ -25,9 +25,7 @@ if __name__ == "__main__": cutout = atlite.Cutout(snakemake.input.cutout) clustered_regions = ( - gpd.read_file(snakemake.input.regions_onshore) - .set_index("name") - .buffer(0) + gpd.read_file(snakemake.input.regions_onshore).set_index("name").buffer(0) ) I = cutout.indicatormatrix(clustered_regions) diff --git a/scripts/build_heat_demand.py b/scripts/build_heat_demand.py index da7c476e..77768404 100644 --- a/scripts/build_heat_demand.py +++ b/scripts/build_heat_demand.py @@ -31,9 +31,7 @@ if __name__ == "__main__": cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time) clustered_regions = ( - gpd.read_file(snakemake.input.regions_onshore) - .set_index("name") - .buffer(0) + gpd.read_file(snakemake.input.regions_onshore).set_index("name").buffer(0) ) I = cutout.indicatormatrix(clustered_regions) diff --git a/scripts/build_solar_thermal_profiles.py b/scripts/build_solar_thermal_profiles.py index 4e7a6cd4..ee6ed881 100644 --- a/scripts/build_solar_thermal_profiles.py +++ b/scripts/build_solar_thermal_profiles.py @@ -33,9 +33,7 @@ if __name__ == "__main__": cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time) clustered_regions = ( - gpd.read_file(snakemake.input.regions_onshore) - .set_index("name") - .buffer(0) + gpd.read_file(snakemake.input.regions_onshore).set_index("name").buffer(0) ) I = cutout.indicatormatrix(clustered_regions) diff --git a/scripts/build_temperature_profiles.py b/scripts/build_temperature_profiles.py index d8eaadce..a13ec3c2 100644 --- a/scripts/build_temperature_profiles.py +++ b/scripts/build_temperature_profiles.py @@ -31,9 +31,7 @@ if __name__ == "__main__": cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time) clustered_regions = ( - gpd.read_file(snakemake.input.regions_onshore) - .set_index("name") - .buffer(0) + gpd.read_file(snakemake.input.regions_onshore).set_index("name").buffer(0) ) I = cutout.indicatormatrix(clustered_regions) From 00aa07242a313755f8de1a2a6da7111f4cc1abf6 Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Fri, 8 Dec 2023 17:53:28 +0100 Subject: [PATCH 055/101] add_brownfield: disable grid expansion if LV already hit Numerical problems were causing infeasibilities otherwise --- scripts/add_brownfield.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index 74102580..fb1453fd 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -119,6 +119,32 @@ def add_brownfield(n, n_p, year): n.links.loc[new_pipes, "p_nom"] = 0.0 n.links.loc[new_pipes, "p_nom_min"] = 0.0 +def disable_grid_expansion_if_LV_limit_hit(n): + if not "lv_limit" in n.global_constraints.index: + return + + #calculate minimum LV + attr = "nom_min" + dc = n.links.index[n.links.carrier == "DC"] + tot = (n.lines["s_" + attr]*n.lines["length"]).sum() + (n.links.loc[dc,"p_" + attr]*n.links.loc[dc,"length"]).sum() + + diff = n.global_constraints.at["lv_limit","constant"]-tot + + #allow small numerical differences + limit = 1 + + if diff < limit: + logger.info(f"LV is already reached (gap {diff}), disabling expansion and LV limit") + expandable_acs = n.lines.index[n.lines.s_nom_extendable] + n.lines.loc[expandable_acs,"s_nom_extendable"] = False + n.lines.loc[expandable_acs,"s_nom"] = n.lines.loc[expandable_acs,"s_nom_min"] + + expandable_dcs = n.links.index[n.links.p_nom_extendable & (n.links.carrier == "DC")] + n.links.loc[expandable_dcs,"p_nom_extendable"] = False + 
n.links.loc[expandable_dcs,"p_nom"] = n.links.loc[expandable_dcs,"p_nom_min"] + + n.global_constraints.drop("lv_limit", + inplace=True) if __name__ == "__main__": if "snakemake" not in globals(): @@ -150,5 +176,7 @@ if __name__ == "__main__": add_brownfield(n, n_p, year) + disable_grid_expansion_if_LV_limit_hit(n) + n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) n.export_to_netcdf(snakemake.output[0]) From 42f11752caa06a57f3b4bde2de24f0d5e5e95255 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 13:35:11 +0100 Subject: [PATCH 056/101] standardise formatting --- scripts/add_brownfield.py | 31 +++++++++++++++++++------------ 1 file changed, 19 insertions(+), 12 deletions(-) diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index fb1453fd..ffdaf46b 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -119,32 +119,39 @@ def add_brownfield(n, n_p, year): n.links.loc[new_pipes, "p_nom"] = 0.0 n.links.loc[new_pipes, "p_nom_min"] = 0.0 + def disable_grid_expansion_if_LV_limit_hit(n): if not "lv_limit" in n.global_constraints.index: return - #calculate minimum LV + # calculate minimum LV attr = "nom_min" dc = n.links.index[n.links.carrier == "DC"] - tot = (n.lines["s_" + attr]*n.lines["length"]).sum() + (n.links.loc[dc,"p_" + attr]*n.links.loc[dc,"length"]).sum() + tot = (n.lines["s_" + attr] * n.lines["length"]).sum() + ( + n.links.loc[dc, "p_" + attr] * n.links.loc[dc, "length"] + ).sum() - diff = n.global_constraints.at["lv_limit","constant"]-tot + diff = n.global_constraints.at["lv_limit", "constant"] - tot - #allow small numerical differences + # allow small numerical differences limit = 1 if diff < limit: - logger.info(f"LV is already reached (gap {diff}), disabling expansion and LV limit") + logger.info( + f"LV is already reached (gap {diff}), disabling expansion and LV limit" + ) expandable_acs = n.lines.index[n.lines.s_nom_extendable] - n.lines.loc[expandable_acs,"s_nom_extendable"] = False - n.lines.loc[expandable_acs,"s_nom"] = n.lines.loc[expandable_acs,"s_nom_min"] + n.lines.loc[expandable_acs, "s_nom_extendable"] = False + n.lines.loc[expandable_acs, "s_nom"] = n.lines.loc[expandable_acs, "s_nom_min"] - expandable_dcs = n.links.index[n.links.p_nom_extendable & (n.links.carrier == "DC")] - n.links.loc[expandable_dcs,"p_nom_extendable"] = False - n.links.loc[expandable_dcs,"p_nom"] = n.links.loc[expandable_dcs,"p_nom_min"] + expandable_dcs = n.links.index[ + n.links.p_nom_extendable & (n.links.carrier == "DC") + ] + n.links.loc[expandable_dcs, "p_nom_extendable"] = False + n.links.loc[expandable_dcs, "p_nom"] = n.links.loc[expandable_dcs, "p_nom_min"] + + n.global_constraints.drop("lv_limit", inplace=True) - n.global_constraints.drop("lv_limit", - inplace=True) if __name__ == "__main__": if "snakemake" not in globals(): From deba2a4ed53163ade07d2ba7a64c4f928ae10c72 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 13:41:42 +0100 Subject: [PATCH 057/101] tidy code --- scripts/add_brownfield.py | 30 ++++++++++++------------------ 1 file changed, 12 insertions(+), 18 deletions(-) diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index ffdaf46b..9ddd3d99 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -124,31 +124,25 @@ def disable_grid_expansion_if_LV_limit_hit(n): if not "lv_limit" in n.global_constraints.index: return - # calculate minimum LV - attr = "nom_min" - dc = n.links.index[n.links.carrier == "DC"] - tot = (n.lines["s_" + attr] * 
n.lines["length"]).sum() + ( - n.links.loc[dc, "p_" + attr] * n.links.loc[dc, "length"] + total_expansion = ( + n.lines.eval("s_nom_min * length").sum() + + n.links.query("carrier == 'DC'").eval("p_nom_min * length").sum() ).sum() - diff = n.global_constraints.at["lv_limit", "constant"] - tot + lv_limit = n.global_constraints.at["lv_limit", "constant"] # allow small numerical differences - limit = 1 - - if diff < limit: + if lv_limit - total_expansion < 1: logger.info( - f"LV is already reached (gap {diff}), disabling expansion and LV limit" + f"LV is already reached (gap {diff} MWkm), disabling expansion and LV limit" ) - expandable_acs = n.lines.index[n.lines.s_nom_extendable] - n.lines.loc[expandable_acs, "s_nom_extendable"] = False - n.lines.loc[expandable_acs, "s_nom"] = n.lines.loc[expandable_acs, "s_nom_min"] + extendable_acs = n.lines.query("s_nom_extendable").index + n.lines.loc[extendable_acs, "s_nom_extendable"] = False + n.lines.loc[extendable_acs, "s_nom"] = n.lines.loc[extendable_acs, "s_nom_min"] - expandable_dcs = n.links.index[ - n.links.p_nom_extendable & (n.links.carrier == "DC") - ] - n.links.loc[expandable_dcs, "p_nom_extendable"] = False - n.links.loc[expandable_dcs, "p_nom"] = n.links.loc[expandable_dcs, "p_nom_min"] + extendable_dcs = n.links.query("carrier == 'DC' and p_nom_extendable").index + n.links.loc[extendable_dcs, "p_nom_extendable"] = False + n.links.loc[extendable_dcs, "p_nom"] = n.links.loc[extendable_dcs, "p_nom_min"] n.global_constraints.drop("lv_limit", inplace=True) From 257b16efd8efae8848171083c1d4c04ab4af9579 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 13:47:13 +0100 Subject: [PATCH 058/101] print IIS if solver returns status infeasible --- scripts/solve_network.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index ff2a2f23..8c46e025 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -839,6 +839,9 @@ def solve_network(n, config, solving, opts="", **kwargs): f"Solving status '{status}' with termination condition '{condition}'" ) if "infeasible" in condition: + labels = n.model.compute_infeasibilities() + logger.info("Labels:\n" + labels) + n.model.print_infeasibilities() raise RuntimeError("Solving status 'infeasible'") return n From 2acddb6a7ccf1c6d30bcf8d452e7c2bd61a7a36c Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 13:48:34 +0100 Subject: [PATCH 059/101] add release note --- doc/release_notes.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 5ac7925e..31e492a8 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -81,6 +81,9 @@ Upcoming Release * Remove HELMETH option. +* Print Irreducible Infeasible Subset (IIS) if model is infeasible. Only for + solvers with IIS support. + **Bugs and Compatibility** * A bug preventing custom powerplants specified in ``data/custom_powerplants.csv`` was fixed. 
(https://github.com/PyPSA/pypsa-eur/pull/732) From ecd85d23d317e1acf106bf1a2b28c82fff77a275 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 15:01:43 +0100 Subject: [PATCH 060/101] add option to use LUISA land coverage data --- config/config.default.yaml | 7 ++++ doc/configtables/offwind-ac.csv | 1 + doc/configtables/offwind-dc.csv | 1 + doc/configtables/onwind.csv | 4 ++ doc/configtables/solar.csv | 1 + doc/release_notes.rst | 10 +++++ rules/build_electricity.smk | 5 +++ rules/retrieve.smk | 16 +++++++ scripts/build_renewable_profiles.py | 65 +++++++++++++++++------------ 9 files changed, 84 insertions(+), 26 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index 74844ec0..eddd0271 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -165,6 +165,10 @@ renewable: grid_codes: [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 31, 32] distance: 1000 distance_grid_codes: [1, 2, 3, 4, 5, 6] + luisa: false + # grid_codes: [1111, 1121, 1122, 1123, 1130, 1210, 1221, 1222, 1230, 1241, 1242] + # distance: 1000 + # distance_grid_codes: [1111, 1121, 1122, 1123, 1130, 1210, 1221, 1222, 1230, 1241, 1242] natura: true excluder_resolution: 100 clip_p_max_pu: 1.e-2 @@ -177,6 +181,7 @@ renewable: capacity_per_sqkm: 2 correction_factor: 0.8855 corine: [44, 255] + luisa: false # [0, 5230] natura: true ship_threshold: 400 max_depth: 50 @@ -192,6 +197,7 @@ renewable: capacity_per_sqkm: 2 correction_factor: 0.8855 corine: [44, 255] + luisa: false # [0, 5230] natura: true ship_threshold: 400 max_depth: 50 @@ -209,6 +215,7 @@ renewable: capacity_per_sqkm: 1.7 # correction_factor: 0.854337 corine: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 26, 31, 32] + luisa: false # [1111, 1121, 1122, 1123, 1130, 1210, 1221, 1222, 1230, 1241, 1242, 1310, 1320, 1330, 1410, 1421, 1422, 2110, 2120, 2130, 2210, 2220, 2230, 2310, 2410, 2420, 3210, 3320, 3330] natura: true excluder_resolution: 100 clip_p_max_pu: 1.e-2 diff --git a/doc/configtables/offwind-ac.csv b/doc/configtables/offwind-ac.csv index c3512a9e..9dc0614c 100644 --- a/doc/configtables/offwind-ac.csv +++ b/doc/configtables/offwind-ac.csv @@ -7,6 +7,7 @@ capacity_per_sqkm,:math:`MW/km^2`,float,"Allowable density of wind turbine place correction_factor,--,float,"Correction factor for capacity factor time series." excluder_resolution,m,float,"Resolution on which to perform geographical elibility analysis." corine,--,"Any *realistic* subset of the `CORINE Land Cover code list `_","Specifies areas according to CORINE Land Cover codes which are generally eligible for AC-connected offshore wind turbine placement." +luisa,--,"Any subset of the `LUISA Base Map codes in Annex 1 `_","Specifies areas according to the LUISA Base Map codes which are generally eligible for AC-connected offshore wind turbine placement." natura,bool,"{true, false}","Switch to exclude `Natura 2000 `_ natural protection areas. Area is excluded if ``true``." ship_threshold,--,float,"Ship density threshold from which areas are excluded." max_depth,m,float,"Maximum sea water depth at which wind turbines can be build. Maritime areas with deeper waters are excluded in the process of calculating the AC-connected offshore wind potential." 
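As a usage illustration only: a user configuration enabling LUISA for onshore wind might look as follows, reusing the land-use codes from the commented defaults above and the 50 m resolution recommended in the release note. Whether CORINE can be switched off simply by setting ``corine: false`` is an assumption of this sketch, not something the patch states:

    # config.yaml (hypothetical user override, not part of the patch)
    renewable:
      onwind:
        corine: false        # assumption: disable CORINE in favour of LUISA
        luisa:
          grid_codes: [1111, 1121, 1122, 1123, 1130, 1210, 1221, 1222, 1230, 1241, 1242]
          distance: 1000
          distance_grid_codes: [1111, 1121, 1122, 1123, 1130, 1210, 1221, 1222, 1230, 1241, 1242]
        excluder_resolution: 50   # LUISA's native 50 m resolution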
diff --git a/doc/configtables/offwind-dc.csv b/doc/configtables/offwind-dc.csv index 35095597..c947f358 100644 --- a/doc/configtables/offwind-dc.csv +++ b/doc/configtables/offwind-dc.csv @@ -7,6 +7,7 @@ capacity_per_sqkm,:math:`MW/km^2`,float,"Allowable density of wind turbine place correction_factor,--,float,"Correction factor for capacity factor time series." excluder_resolution,m,float,"Resolution on which to perform geographical elibility analysis." corine,--,"Any *realistic* subset of the `CORINE Land Cover code list `_","Specifies areas according to CORINE Land Cover codes which are generally eligible for AC-connected offshore wind turbine placement." +luisa,--,"Any subset of the `LUISA Base Map codes in Annex 1 `_","Specifies areas according to the LUISA Base Map codes which are generally eligible for DC-connected offshore wind turbine placement." natura,bool,"{true, false}","Switch to exclude `Natura 2000 `_ natural protection areas. Area is excluded if ``true``." ship_threshold,--,float,"Ship density threshold from which areas are excluded." max_depth,m,float,"Maximum sea water depth at which wind turbines can be build. Maritime areas with deeper waters are excluded in the process of calculating the AC-connected offshore wind potential." diff --git a/doc/configtables/onwind.csv b/doc/configtables/onwind.csv index b7e823b3..f6b36e5d 100644 --- a/doc/configtables/onwind.csv +++ b/doc/configtables/onwind.csv @@ -8,6 +8,10 @@ corine,,, -- grid_codes,--,"Any subset of the `CORINE Land Cover code list `_","Specifies areas according to CORINE Land Cover codes which are generally eligible for wind turbine placement." -- distance,m,float,"Distance to keep from areas specified in ``distance_grid_codes``" -- distance_grid_codes,--,"Any subset of the `CORINE Land Cover code list `_","Specifies areas according to CORINE Land Cover codes to which wind turbines must maintain a distance specified in the setting ``distance``." +luisa,,, +-- grid_codes,--,"Any subset of the `LUISA Base Map codes in Annex 1 `_","Specifies areas according to the LUISA Base Map codes which are generally eligible for wind turbine placement." +-- distance,m,float,"Distance to keep from areas specified in ``distance_grid_codes``" +-- distance_grid_codes,--,"Any subset of the `LUISA Base Map codes in Annex 1 `_","Specifies areas according to the LUISA Base Map codes to which wind turbines must maintain a distance specified in the setting ``distance``." natura,bool,"{true, false}","Switch to exclude `Natura 2000 `_ natural protection areas. Area is excluded if ``true``." clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero." correction_factor,--,float,"Correction factor for capacity factor time series." diff --git a/doc/configtables/solar.csv b/doc/configtables/solar.csv index 7da1281b..8328d342 100644 --- a/doc/configtables/solar.csv +++ b/doc/configtables/solar.csv @@ -9,6 +9,7 @@ resource,,, capacity_per_sqkm,:math:`MW/km^2`,float,"Allowable density of solar panel placement." correction_factor,--,float,"A correction factor for the capacity factor (availability) time series." corine,--,"Any subset of the `CORINE Land Cover code list `_","Specifies areas according to CORINE Land Cover codes which are generally eligible for solar panel placement." +luisa,--,"Any subset of the `LUISA Base Map codes in Annex 1 `_","Specifies areas according to the LUISA Base Map codes which are generally eligible for solar panel placement." 
natura,bool,"{true, false}","Switch to exclude `Natura 2000 `_ natural protection areas. Area is excluded if ``true``." clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero." excluder_resolution,m,float,"Resolution on which to perform geographical elibility analysis." diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 31e492a8..a1eb644e 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -84,6 +84,16 @@ Upcoming Release * Print Irreducible Infeasible Subset (IIS) if model is infeasible. Only for solvers with IIS support. +* Add option to use `LUISA Base Map + `_ 50m land + coverage dataset for land eligibility analysis in + :mod:`build_renewable_profiles`. Settings are analogous to the CORINE dataset + but with the key ``luisa:`` in the configuration file. To leverage the + dataset's full advantages, set the excluder resolution to 50m + (``excluder_resolution: 50``). For land category codes, see `Annex 1 of the + technical documentation + `_. + **Bugs and Compatibility** * A bug preventing custom powerplants specified in ``data/custom_powerplants.csv`` was fixed. (https://github.com/PyPSA/pypsa-eur/pull/732) diff --git a/rules/build_electricity.smk b/rules/build_electricity.smk index 6308552f..055cffca 100644 --- a/rules/build_electricity.smk +++ b/rules/build_electricity.smk @@ -268,6 +268,11 @@ rule build_renewable_profiles: if config["renewable"][w.technology]["natura"] else [] ), + luisa=lambda w: ( + "data/LUISA_basemap_020321_50m.tif" + if config["renewable"][w.technology].get("luisa") + else [] + ), gebco=ancient( lambda w: ( "data/bundle/GEBCO_2014_2D.nc" diff --git a/rules/retrieve.smk b/rules/retrieve.smk index 4ded2a46..99ce344e 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -249,6 +249,22 @@ if config["enable"]["retrieve"]: validate_checksum(output[0], input[0]) +if config["enable"]["retrieve"]: + + # Downloading LUISA Base Map for land cover and land use: + # Website: https://ec.europa.eu/jrc/en/luisa + rule retrieve_luisa_land_cover: + input: + HTTP.remote( + "jeodpp.jrc.ec.europa.eu/ftp/jrc-opendata/LUISA/EUROPE/Basemaps/LandUse/2018/LATEST/LUISA_basemap_020321_50m.tif", + static=True + ) + output: + "data/LUISA_basemap_020321_50m.tif" + run: + move(input[0], output[0]) + + if config["enable"]["retrieve"]: # Some logic to find the correct file URL # Sometimes files are released delayed or ahead of schedule, check which file is currently available diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index c579f588..60c11921 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -7,10 +7,10 @@ """ Calculates for each network node the (i) installable capacity (based on land- use), (ii) the available generation time series (based on weather data), and -(iii) the average distance from the node for onshore wind, AC-connected -offshore wind, DC-connected offshore wind and solar PV generators. In addition -for offshore wind it calculates the fraction of the grid connection which is -under water. +(iii) the average distance from the node for onshore wind, AC-connected offshore +wind, DC-connected offshore wind and solar PV generators. In addition for +offshore wind it calculates the fraction of the grid connection which is under +water. .. note:: Hydroelectric profiles are built in script :mod:`build_hydro_profiles`. 
@@ -26,7 +26,7 @@ Relevant settings renewable: {technology}: - cutout: corine: grid_codes: distance: natura: max_depth: + cutout: corine: luisa: grid_codes: distance: natura: max_depth: max_shore_distance: min_shore_distance: capacity_per_sqkm: correction_factor: min_p_max_pu: clip_p_max_pu: resource: @@ -40,11 +40,18 @@ Inputs - ``data/bundle/corine/g250_clc06_V18_5.tif``: `CORINE Land Cover (CLC) `_ inventory on `44 classes `_ of - land use (e.g. forests, arable land, industrial, urban areas). + land use (e.g. forests, arable land, industrial, urban areas) at 100m + resolution. .. image:: img/corine.png :scale: 33 % +- ``data/LUISA_basemap_020321_50m.tif``: `LUISA Base Map + `_ land + coverage dataset at 50m resolution similar to CORINE. For codes in relation to + CORINE land cover, see `Annex 1 of the technical documentation + `_. + - ``data/bundle/GEBCO_2014_2D.nc``: A `bathymetric `_ data set with a global terrain model for ocean and land at 15 arc-second intervals by the `General @@ -133,9 +140,10 @@ of a combination of the available land at each grid cell and the capacity factor there. First the script computes how much of the technology can be installed at each -cutout grid cell and each node using the `GLAES -`_ library. This uses the CORINE land use -data, Natura2000 nature reserves and GEBCO bathymetry data. +cutout grid cell and each node using the `atlite +`_ library. This uses the CORINE land use data, +LUISA land use data, Natura2000 nature reserves, GEBCO bathymetry data, and +shipping lanes. .. image:: img/eligibility.png :scale: 50 % @@ -166,10 +174,10 @@ This layout is then used to compute the generation availability time series from the weather data cutout from ``atlite``. The maximal installable potential for the node (`p_nom_max`) is computed by -adding up the installable potentials of the individual grid cells. -If the model comes close to this limit, then the time series may slightly -overestimate production since it is assumed the geographical distribution is -proportional to capacity factor. +adding up the installable potentials of the individual grid cells. If the model +comes close to this limit, then the time series may slightly overestimate +production since it is assumed the geographical distribution is proportional to +capacity factor. 
""" import functools import logging @@ -203,9 +211,6 @@ if __name__ == "__main__": correction_factor = params.get("correction_factor", 1.0) capacity_per_sqkm = params["capacity_per_sqkm"] - if isinstance(params.get("corine", {}), list): - params["corine"] = {"grid_codes": params["corine"]} - if correction_factor != 1.0: logger.info(f"correction_factor is set as {correction_factor}") @@ -231,16 +236,24 @@ if __name__ == "__main__": if params["natura"]: excluder.add_raster(snakemake.input.natura, nodata=0, allow_no_overlap=True) - corine = params.get("corine", {}) - if "grid_codes" in corine: - codes = corine["grid_codes"] - excluder.add_raster(snakemake.input.corine, codes=codes, invert=True, crs=3035) - if corine.get("distance", 0.0) > 0.0: - codes = corine["distance_grid_codes"] - buffer = corine["distance"] - excluder.add_raster( - snakemake.input.corine, codes=codes, buffer=buffer, crs=3035 - ) + for landuse in ["corine", "luisa"]: + kwargs = {"nodata": 0} if landuse == "luisa" else {} + landuse = params.get(landuse, {}) + if not landuse: + continue + if isinstance(landuse, list): + landuse = {"grid_codes": landuse} + if "grid_codes" in landuse: + codes = landuse["grid_codes"] + excluder.add_raster( + snakemake.input[landuse], codes=codes, invert=True, crs=3035, **kwargs + ) + if landuse.get("distance", 0.0) > 0.0: + codes = landuse["distance_grid_codes"] + buffer = landuse["distance"] + excluder.add_raster( + snakemake.input[landuse], codes=codes, buffer=buffer, crs=3035, **kwargs + ) if params.get("ship_threshold"): shipping_threshold = ( From 045eeba4cfc17500c9740706c60c9f61fc4a3a68 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 14:02:24 +0000 Subject: [PATCH 061/101] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- rules/retrieve.smk | 6 +++--- scripts/build_renewable_profiles.py | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/rules/retrieve.smk b/rules/retrieve.smk index 99ce344e..e062091e 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -257,10 +257,10 @@ if config["enable"]["retrieve"]: input: HTTP.remote( "jeodpp.jrc.ec.europa.eu/ftp/jrc-opendata/LUISA/EUROPE/Basemaps/LandUse/2018/LATEST/LUISA_basemap_020321_50m.tif", - static=True - ) + static=True, + ), output: - "data/LUISA_basemap_020321_50m.tif" + "data/LUISA_basemap_020321_50m.tif", run: move(input[0], output[0]) diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index 60c11921..d4cab19d 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -7,10 +7,10 @@ """ Calculates for each network node the (i) installable capacity (based on land- use), (ii) the available generation time series (based on weather data), and -(iii) the average distance from the node for onshore wind, AC-connected offshore -wind, DC-connected offshore wind and solar PV generators. In addition for -offshore wind it calculates the fraction of the grid connection which is under -water. +(iii) the average distance from the node for onshore wind, AC-connected +offshore wind, DC-connected offshore wind and solar PV generators. In addition +for offshore wind it calculates the fraction of the grid connection which is +under water. .. note:: Hydroelectric profiles are built in script :mod:`build_hydro_profiles`. 
From 94f4383e0289e2b81e9edc20e21f18910156f0e6 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 15:22:13 +0100 Subject: [PATCH 062/101] distinguish dataset name and dataset settings --- scripts/build_renewable_profiles.py | 35 ++++++++++++++++------------- 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index d4cab19d..b58482ae 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -236,23 +236,28 @@ if __name__ == "__main__": if params["natura"]: excluder.add_raster(snakemake.input.natura, nodata=0, allow_no_overlap=True) - for landuse in ["corine", "luisa"]: - kwargs = {"nodata": 0} if landuse == "luisa" else {} - landuse = params.get(landuse, {}) - if not landuse: - continue - if isinstance(landuse, list): - landuse = {"grid_codes": landuse} - if "grid_codes" in landuse: - codes = landuse["grid_codes"] - excluder.add_raster( - snakemake.input[landuse], codes=codes, invert=True, crs=3035, **kwargs + for dataset in ["corine", "luisa"]: + kwargs = {"nodata": 0} if dataset == "luisa" else {} + if dataset == "luisa" and res > 50: + logger.info( + "LUISA data is available at 50m resolution, " + f"but coarser {res}m resolution is used." ) - if landuse.get("distance", 0.0) > 0.0: - codes = landuse["distance_grid_codes"] - buffer = landuse["distance"] + settings = params.get(dataset, {}) + if not settings: + continue + if isinstance(settings, list): + settings = {"grid_codes": settings} + if "grid_codes" in settings: + codes = settings["grid_codes"] excluder.add_raster( - snakemake.input[landuse], codes=codes, buffer=buffer, crs=3035, **kwargs + snakemake.input[dataset], codes=codes, invert=True, crs=3035, **kwargs + ) + if settings.get("distance", 0.0) > 0.0: + codes = settings["distance_grid_codes"] + buffer = settings["distance"] + excluder.add_raster( + snakemake.input[dataset], codes=codes, buffer=buffer, crs=3035, **kwargs ) if params.get("ship_threshold"): From 1a6031f318aab522d3356c2bb4ef314b3eed76d2 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 15:25:27 +0100 Subject: [PATCH 063/101] only copy config.default.yaml if it exists --- Snakefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Snakefile b/Snakefile index 83530df7..14ce0e40 100644 --- a/Snakefile +++ b/Snakefile @@ -14,7 +14,7 @@ from snakemake.utils import min_version min_version("7.7") -if not exists("config/config.yaml"): +if not exists("config/config.yaml") and exists("config/config.default.yaml"): copyfile("config/config.default.yaml", "config/config.yaml") From d145758fb7ff4bf126ddada8eec6d8f942c93f4f Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 09:00:31 +0100 Subject: [PATCH 064/101] gracefully handle absent extra_functionality file; add file to path --- rules/common.smk | 7 +++++++ rules/solve_electricity.smk | 4 +--- rules/solve_myopic.smk | 4 +--- rules/solve_overnight.smk | 5 +---- rules/solve_perfect.smk | 4 +--- scripts/solve_network.py | 2 ++ 6 files changed, 13 insertions(+), 13 deletions(-) diff --git a/rules/common.smk b/rules/common.smk index 2c8cf69c..44e3a807 100644 --- a/rules/common.smk +++ b/rules/common.smk @@ -28,6 +28,13 @@ def memory(w): return int(factor * (10000 + 195 * int(w.clusters))) +def input_custom_extra_functionality(w): + path = config["solving"]["options"].get("custom_extra_functionality", False) + if path: + return workflow.source_path(path) + return [] + + # Check if the workflow 
has access to the internet by trying to access the HEAD of specified url def has_internet_access(url="www.zenodo.org") -> bool: import http.client as http_client diff --git a/rules/solve_electricity.smk b/rules/solve_electricity.smk index 2c956097..7f6092be 100644 --- a/rules/solve_electricity.smk +++ b/rules/solve_electricity.smk @@ -11,9 +11,7 @@ rule solve_network: co2_sequestration_potential=config["sector"].get( "co2_sequestration_potential", 200 ), - custom_extra_functionality=workflow.source_path( - config["solving"]["options"].get("custom_extra_functionality", "") - ), + custom_extra_functionality=input_custom_extra_functionality, input: network=RESOURCES + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", config=RESULTS + "config.yaml", diff --git a/rules/solve_myopic.smk b/rules/solve_myopic.smk index afa8ad2c..7ca8857d 100644 --- a/rules/solve_myopic.smk +++ b/rules/solve_myopic.smk @@ -88,9 +88,7 @@ rule solve_sector_network_myopic: co2_sequestration_potential=config["sector"].get( "co2_sequestration_potential", 200 ), - custom_extra_functionality=workflow.source_path( - config["solving"]["options"].get("custom_extra_functionality", "") - ), + custom_extra_functionality=input_custom_extra_functionality, input: network=RESULTS + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", diff --git a/rules/solve_overnight.smk b/rules/solve_overnight.smk index fc2f74df..8686b205 100644 --- a/rules/solve_overnight.smk +++ b/rules/solve_overnight.smk @@ -2,7 +2,6 @@ # # SPDX-License-Identifier: MIT - rule solve_sector_network: params: solving=config["solving"], @@ -11,9 +10,7 @@ rule solve_sector_network: co2_sequestration_potential=config["sector"].get( "co2_sequestration_potential", 200 ), - custom_extra_functionality=workflow.source_path( - config["solving"]["options"].get("custom_extra_functionality", "") - ), + custom_extra_functionality=input_custom_extra_functionality, input: network=RESULTS + "prenetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc", diff --git a/rules/solve_perfect.smk b/rules/solve_perfect.smk index 63be5cc1..a7856fa9 100644 --- a/rules/solve_perfect.smk +++ b/rules/solve_perfect.smk @@ -118,9 +118,7 @@ rule solve_sector_network_perfect: co2_sequestration_potential=config["sector"].get( "co2_sequestration_potential", 200 ), - custom_extra_functionality=workflow.source_path( - config["solving"]["options"].get("custom_extra_functionality", "") - ), + custom_extra_functionality=input_custom_extra_functionality, input: network=RESULTS + "prenetworks-brownfield/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_brownfield_all_years.nc", diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 5a045577..2f170dff 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -30,6 +30,7 @@ import importlib import logging import os import re +import sys import numpy as np import pandas as pd @@ -831,6 +832,7 @@ def extra_functionality(n, snapshots): if snakemake.params.custom_extra_functionality: source_path = snakemake.params.custom_extra_functionality assert os.path.exists(source_path), f"{source_path} does not exist" + sys.path.append(os.path.dirname(source_path)) module_name = os.path.splitext(os.path.basename(source_path))[0] module = importlib.import_module(module_name) module.custom_extra_functionality(n, snapshots) From 60493fc55829ddc95bd8d55d35b0f505cef5f624 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 4 Jan 2024 08:01:00 +0000 Subject: [PATCH 065/101] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- rules/solve_overnight.smk | 1 + 1 file changed, 1 insertion(+) diff --git a/rules/solve_overnight.smk b/rules/solve_overnight.smk index 8686b205..a3fed042 100644 --- a/rules/solve_overnight.smk +++ b/rules/solve_overnight.smk @@ -2,6 +2,7 @@ # # SPDX-License-Identifier: MIT + rule solve_sector_network: params: solving=config["solving"], From ab1d93279a76fe2608183ca0ed2968514ce0b3fb Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 09:19:29 +0100 Subject: [PATCH 066/101] move LUISA resolution info to a later point --- scripts/build_renewable_profiles.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index b58482ae..b736f68a 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -238,14 +238,14 @@ if __name__ == "__main__": for dataset in ["corine", "luisa"]: kwargs = {"nodata": 0} if dataset == "luisa" else {} + settings = params.get(dataset, {}) + if not settings: + continue if dataset == "luisa" and res > 50: logger.info( "LUISA data is available at 50m resolution, " f"but coarser {res}m resolution is used." ) - settings = params.get(dataset, {}) - if not settings: - continue if isinstance(settings, list): settings = {"grid_codes": settings} if "grid_codes" in settings: From f451e28f582f970a5b4f3a8336d99c89c67429bb Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 11:42:24 +0100 Subject: [PATCH 067/101] add release notes and documentation --- doc/configtables/sector.csv | 2 ++ doc/release_notes.rst | 17 +++++++++++++++++ 2 files changed, 19 insertions(+) diff --git a/doc/configtables/sector.csv b/doc/configtables/sector.csv index 938c120a..5e2514e4 100644 --- a/doc/configtables/sector.csv +++ b/doc/configtables/sector.csv @@ -79,6 +79,8 @@ hydrogen_fuel_cell,--,"{true, false}",Add option to include hydrogen fuel cell f hydrogen_turbine,--,"{true, false}",Add option to include hydrogen turbine for re-electrification. Assuming OCGT technology costs SMR,--,"{true, false}",Add option for transforming natural gas into hydrogen and CO2 using Steam Methane Reforming (SMR) SMR CC,--,"{true, false}",Add option for transforming natural gas into hydrogen and CO2 using Steam Methane Reforming (SMR) and Carbon Capture (CC) +regional_methanol_demand,--,"{true, false}",Spatially resolve methanol demand. Set to true if regional CO2 constraints needed. +regional_oil_demand,--,"{true, false}",Spatially resolve oil demand. Set to true if regional CO2 constraints needed. regional_co2 _sequestration_potential,,, -- enable,--,"{true, false}",Add option for regionally-resolved geological carbon dioxide sequestration potentials based on `CO2StoP `_. -- attribute,--,string,Name of the attribute for the sequestration potential diff --git a/doc/release_notes.rst b/doc/release_notes.rst index bb9732de..03d9318d 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -10,6 +10,23 @@ Release Notes Upcoming Release ================ +* Remove all negative loads on the ``co2 atmosphere`` bus representing emissions + for e.g. fixed fossil demands for transport oil. Instead these are handled + more transparently with a fixed transport oil demand and a link taking care of + the emissions to the ``co2 atmosphere`` bus. 
This is also a preparation for + endogenous transport optimisation, where demand will be subject to + optimisation (e.g. fuel switching in the transport sector). + +* Allow possibility to go from copperplated to regionally resolved methanol and + oil demand with switches ``sector: regional_methanol_demand: true`` and + ``sector: regional_oil_demand: true``. This allows nodal/regional CO2 + constraints to be applied. + +* Process emissions from steam crackers (i.e. naphtha processing for HVC) are now + piped from the consumption link to the process emissions bus where the model + can decide about carbon capture. Previously the process emissions for naphtha + were a fixed load. + * Add option to specify losses for bidirectional links, e.g. pipelines or HVDC links, in configuration file under ``sector: transmission_efficiency:``. Users can specify static or length-dependent values as well as a length-dependent From addaecf77a7048955f49905c2b2f54371d3fd3a3 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 12:43:55 +0100 Subject: [PATCH 068/101] move comments to documentation --- config/config.default.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index 57416cc7..9e8f57a6 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -463,8 +463,8 @@ sector: hydrogen_turbine: false SMR: true SMR_cc: true - regional_methanol_demand: false #set to true if regional CO2 constraints needed - regional_oil_demand: false #set to true if regional CO2 constraints needed + regional_methanol_demand: false + regional_oil_demand: false regional_co2_sequestration_potential: enable: false attribute: 'conservative estimate Mt' From 593995675e43d860c120b568ab19f8ffade31b32 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 12:44:47 +0100 Subject: [PATCH 069/101] gurobi: avoid double-logging to console --- scripts/solve_network.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 2f170dff..ecf56a24 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -854,6 +854,9 @@ def solve_network(n, config, solving, opts="", **kwargs): ) kwargs["assign_all_duals"] = cf_solving.get("assign_all_duals", False) + if kwargs["solver_name"] == "gurobi": + logging.getLogger('gurobipy').setLevel(logging.CRITICAL) + rolling_horizon = cf_solving.pop("rolling_horizon", False) skip_iterations = cf_solving.pop("skip_iterations", False) if not n.lines.s_nom_extendable.any(): From 8054ad382c3cc3806c7ec808137206358f77d888 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 12:47:26 +0100 Subject: [PATCH 070/101] prepare_sector_network: simplify process emissions with outsourced feedstock emissions --- scripts/prepare_sector_network.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index e211be15..ea0c4f3f 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2996,16 +2996,14 @@ def add_industry(n, costs): unit="t_co2", ) - sel = ["process emission"] if options["co2_spatial"] or options["co2network"]: p_set = ( - -industrial_demand.loc[nodes, sel] - .sum(axis=1) + -industrial_demand.loc[nodes, "process emission"] .rename(index=lambda x: x + " process emissions") / nhours ) else: - p_set = -industrial_demand.loc[nodes, sel].sum(axis=1).sum() / nhours + p_set = -industrial_demand.loc[nodes, 
"process emission"].sum() / nhours n.madd( "Load", From cfd689bbec29fad829cef8d2e6b1fdf89fd2ebea Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 13:03:03 +0100 Subject: [PATCH 071/101] add snakemake object to custom_extra_functionality arguments --- data/custom_extra_functionality.py | 2 +- scripts/solve_network.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/data/custom_extra_functionality.py b/data/custom_extra_functionality.py index 0ac24cea..e7a9df0f 100644 --- a/data/custom_extra_functionality.py +++ b/data/custom_extra_functionality.py @@ -4,7 +4,7 @@ # SPDX-License-Identifier: MIT -def custom_extra_functionality(n, snapshots): +def custom_extra_functionality(n, snapshots, snakemake): """ Add custom extra functionality constraints. """ diff --git a/scripts/solve_network.py b/scripts/solve_network.py index ecf56a24..4b988666 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -835,7 +835,7 @@ def extra_functionality(n, snapshots): sys.path.append(os.path.dirname(source_path)) module_name = os.path.splitext(os.path.basename(source_path))[0] module = importlib.import_module(module_name) - module.custom_extra_functionality(n, snapshots) + module.custom_extra_functionality(n, snapshots, snakemake) def solve_network(n, config, solving, opts="", **kwargs): From 34535bcbffd5506faeaba7c8ada546e29f00c7eb Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 13:08:14 +0100 Subject: [PATCH 072/101] custom_extra_functionality: assume same function name as file name --- scripts/solve_network.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 4b988666..203d8b0f 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -835,7 +835,8 @@ def extra_functionality(n, snapshots): sys.path.append(os.path.dirname(source_path)) module_name = os.path.splitext(os.path.basename(source_path))[0] module = importlib.import_module(module_name) - module.custom_extra_functionality(n, snapshots, snakemake) + custom_extra_functionality = getattr(module, module_name) + custom_extra_functionality(n, snapshots, snakemake) def solve_network(n, config, solving, opts="", **kwargs): From c3bcaee1a22a888d2ba1147ed643fbf7607ffa86 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 13:37:35 +0100 Subject: [PATCH 073/101] common.smk: find _helpers.py also if pypsa-eur is used as module --- rules/common.smk | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/rules/common.smk b/rules/common.smk index 44e3a807..0e85b620 100644 --- a/rules/common.smk +++ b/rules/common.smk @@ -2,9 +2,14 @@ # # SPDX-License-Identifier: MIT -import os, sys +import os, sys, glob + +helper_source_path = [match for match in glob.glob('**/_helpers.py', recursive=True)] + +for path in helper_source_path: + path = os.path.dirname(os.path.abspath(path)) + sys.path.insert(0, os.path.abspath(path)) -sys.path.insert(0, os.path.abspath("scripts")) from _helpers import validate_checksum From 29a95dc997efd4f4e381b3ee800c5f98208917d5 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 16:02:02 +0100 Subject: [PATCH 074/101] default to approximating transmission losses in HVAC lines --- config/config.default.yaml | 2 +- doc/release_notes.rst | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index 9e8f57a6..7f1f2034 100644 --- a/config/config.default.yaml +++ 
b/config/config.default.yaml @@ -654,7 +654,7 @@ solving: track_iterations: false min_iterations: 4 max_iterations: 6 - transmission_losses: 0 + transmission_losses: 2 linearized_unit_commitment: true horizon: 365 diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 03d9318d..09417727 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -10,6 +10,9 @@ Release Notes Upcoming Release ================ +* Default to approximating transmission losses in HVAC lines + (``transmission_losses: 2``). + * Remove all negative loads on the ``co2 atmosphere`` bus representing emissions for e.g. fixed fossil demands for transport oil. Instead these are handled more transparently with a fixed transport oil demand and a link taking care of From 73abb6196796ae0d741050f5cfe333b8c02cd768 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 17:05:54 +0100 Subject: [PATCH 075/101] add_brownfield: fix lv_limit reached --- scripts/add_brownfield.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index 9ddd3d99..e151c441 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -134,7 +134,7 @@ def disable_grid_expansion_if_LV_limit_hit(n): # allow small numerical differences if lv_limit - total_expansion < 1: logger.info( - f"LV is already reached (gap {diff} MWkm), disabling expansion and LV limit" + f"LV is already reached, disabling expansion and LV limit" ) extendable_acs = n.lines.query("s_nom_extendable").index n.lines.loc[extendable_acs, "s_nom_extendable"] = False From 29b24b1b414b33685e91840bfd5b12da5b13ec3f Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 18:42:47 +0100 Subject: [PATCH 076/101] segmentation_clustering: log distribution of snapshot durations --- scripts/prepare_sector_network.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index ea0c4f3f..8c93ba5a 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3481,6 +3481,7 @@ def apply_time_segmentation( sn_weightings = pd.Series( weightings, index=snapshots, name="weightings", dtype="float64" ) + logger.info("Distribution of snapshot durations:\n", weightings.value_counts()) n.set_snapshots(sn_weightings.index) n.snapshot_weightings = n.snapshot_weightings.mul(sn_weightings, axis=0) From cf5f3cbd88dc90da3767bdbb7e84840fb5a76290 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 18:54:49 +0100 Subject: [PATCH 077/101] segmentation_clustering: log distribution of snapshot durations (fix) --- scripts/prepare_sector_network.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 8c93ba5a..62bfa80a 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3481,7 +3481,7 @@ def apply_time_segmentation( sn_weightings = pd.Series( weightings, index=snapshots, name="weightings", dtype="float64" ) - logger.info("Distribution of snapshot durations:\n", weightings.value_counts()) + logger.info(f"Distribution of snapshot durations:\n{weightings.value_counts()}") n.set_snapshots(sn_weightings.index) n.snapshot_weightings = n.snapshot_weightings.mul(sn_weightings, axis=0) From 9530d63e551f1cc19dd6847f10902fec3d59da0f Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 18:57:09 +0100 Subject: [PATCH 078/101] cluster_heat_buses: performance boost and tidy 
code --- scripts/prepare_sector_network.py | 29 +++++++++++++---------------- 1 file changed, 13 insertions(+), 16 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 62bfa80a..26aa50c9 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3332,24 +3332,24 @@ def limit_individual_line_extension(n, maxext): aggregate_dict = { - "p_nom": "sum", - "s_nom": "sum", + "p_nom": pd.Series.sum, + "s_nom": pd.Series.sum, "v_nom": "max", "v_mag_pu_max": "min", "v_mag_pu_min": "max", - "p_nom_max": "sum", - "s_nom_max": "sum", - "p_nom_min": "sum", - "s_nom_min": "sum", + "p_nom_max": pd.Series.sum, + "s_nom_max": pd.Series.sum, + "p_nom_min": pd.Series.sum, + "s_nom_min": pd.Series.sum, "v_ang_min": "max", "v_ang_max": "min", "terrain_factor": "mean", "num_parallel": "sum", "p_set": "sum", "e_initial": "sum", - "e_nom": "sum", - "e_nom_max": "sum", - "e_nom_min": "sum", + "e_nom": pd.Series.sum, + "e_nom_max": pd.Series.sum, + "e_nom_min": pd.Series.sum, "state_of_charge_initial": "sum", "state_of_charge_set": "sum", "inflow": "sum", @@ -3411,13 +3411,10 @@ def cluster_heat_buses(n): pnl = c.pnl agg = define_clustering(pd.Index(pnl.keys()), aggregate_dict) for k in pnl.keys(): - pnl[k].rename( - columns=lambda x: x.replace("residential ", "").replace( - "services ", "" - ), - inplace=True, - ) - pnl[k] = pnl[k].groupby(level=0, axis=1).agg(agg[k], **agg_group_kwargs) + def renamer(s): + return s.replace("residential ", "").replace("services ", "") + + pnl[k] = pnl[k].groupby(renamer, axis=1).agg(agg[k], **agg_group_kwargs) # remove unclustered assets of service/residential to_drop = c.df.index.difference(df.index) From f9fec1a4be62d72ca52934bb9e6be9ca4308cf7f Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 18:58:04 +0100 Subject: [PATCH 079/101] more precise restrictive handling of opts cost adjustment flags --- scripts/prepare_sector_network.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 26aa50c9..17eb832d 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3286,7 +3286,8 @@ def remove_h2_network(n): def maybe_adjust_costs_and_potentials(n, opts): for o in opts: - if "+" not in o: + flags = ["+e", "+p", "+m"] + if all(flag not in o for flag in flags): continue oo = o.split("+") carrier_list = np.hstack( From a67c5ea3b71139ed90130767dcf6c3d6fb7eec65 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 19:03:34 +0100 Subject: [PATCH 080/101] default to full land transport electrification by 2050 --- config/config.default.yaml | 12 ++++++------ doc/release_notes.rst | 2 ++ 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index 7f1f2034..1e26781d 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -381,14 +381,14 @@ sector: v2g: true land_transport_fuel_cell_share: 2020: 0 - 2030: 0.05 - 2040: 0.1 - 2050: 0.15 + 2030: 0 + 2040: 0 + 2050: 0 land_transport_electric_share: 2020: 0 - 2030: 0.25 - 2040: 0.6 - 2050: 0.85 + 2030: 0.3 + 2040: 0.7 + 2050: 1 land_transport_ice_share: 2020: 1 2030: 0.7 diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 09417727..01833ccc 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -10,6 +10,8 @@ Release Notes Upcoming Release ================ +* Default to full electrification of land transport by 
2050. + * Default to approximating transmission losses in HVAC lines (``transmission_losses: 2``). From 62bcded8001f2a3ff31b218ab0225594b1adedb3 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 19:12:04 +0100 Subject: [PATCH 081/101] build_sector.smk: simplify gas infrastructure rules --- rules/build_sector.smk | 126 +++++++++++++++++++---------------------- 1 file changed, 58 insertions(+), 68 deletions(-) diff --git a/rules/build_sector.smk b/rules/build_sector.smk index ab8ff4ed..23ea604c 100644 --- a/rules/build_sector.smk +++ b/rules/build_sector.smk @@ -67,76 +67,65 @@ rule build_simplified_population_layouts: "../scripts/build_clustered_population_layouts.py" -if config["sector"]["gas_network"] or config["sector"]["H2_retrofit"]: - - rule build_gas_network: - input: - gas_network="data/gas_network/scigrid-gas/data/IGGIELGN_PipeSegments.geojson", - output: - cleaned_gas_network=RESOURCES + "gas_network.csv", - resources: - mem_mb=4000, - log: - LOGS + "build_gas_network.log", - conda: - "../envs/environment.yaml" - script: - "../scripts/build_gas_network.py" - - rule build_gas_input_locations: - input: - gem=HTTP.remote( - "https://globalenergymonitor.org/wp-content/uploads/2023/07/Europe-Gas-Tracker-2023-03-v3.xlsx", - keep_local=True, - ), - entry="data/gas_network/scigrid-gas/data/IGGIELGN_BorderPoints.geojson", - storage="data/gas_network/scigrid-gas/data/IGGIELGN_Storages.geojson", - regions_onshore=RESOURCES - + "regions_onshore_elec_s{simpl}_{clusters}.geojson", - regions_offshore=RESOURCES - + "regions_offshore_elec_s{simpl}_{clusters}.geojson", - output: - gas_input_nodes=RESOURCES - + "gas_input_locations_s{simpl}_{clusters}.geojson", - gas_input_nodes_simplified=RESOURCES - + "gas_input_locations_s{simpl}_{clusters}_simplified.csv", - resources: - mem_mb=2000, - log: - LOGS + "build_gas_input_locations_s{simpl}_{clusters}.log", - conda: - "../envs/environment.yaml" - script: - "../scripts/build_gas_input_locations.py" - - rule cluster_gas_network: - input: - cleaned_gas_network=RESOURCES + "gas_network.csv", - regions_onshore=RESOURCES - + "regions_onshore_elec_s{simpl}_{clusters}.geojson", - regions_offshore=RESOURCES - + "regions_offshore_elec_s{simpl}_{clusters}.geojson", - output: - clustered_gas_network=RESOURCES + "gas_network_elec_s{simpl}_{clusters}.csv", - resources: - mem_mb=4000, - log: - LOGS + "cluster_gas_network_s{simpl}_{clusters}.log", - conda: - "../envs/environment.yaml" - script: - "../scripts/cluster_gas_network.py" - - gas_infrastructure = { - **rules.cluster_gas_network.output, - **rules.build_gas_input_locations.output, - } +rule build_gas_network: + input: + gas_network="data/gas_network/scigrid-gas/data/IGGIELGN_PipeSegments.geojson", + output: + cleaned_gas_network=RESOURCES + "gas_network.csv", + resources: + mem_mb=4000, + log: + LOGS + "build_gas_network.log", + conda: + "../envs/environment.yaml" + script: + "../scripts/build_gas_network.py" -if not (config["sector"]["gas_network"] or config["sector"]["H2_retrofit"]): - # this is effecively an `else` statement which is however not liked by snakefmt +rule build_gas_input_locations: + input: + gem=HTTP.remote( + "https://globalenergymonitor.org/wp-content/uploads/2023/07/Europe-Gas-Tracker-2023-03-v3.xlsx", + keep_local=True, + ), + entry="data/gas_network/scigrid-gas/data/IGGIELGN_BorderPoints.geojson", + storage="data/gas_network/scigrid-gas/data/IGGIELGN_Storages.geojson", + regions_onshore=RESOURCES + + "regions_onshore_elec_s{simpl}_{clusters}.geojson", + 
regions_offshore=RESOURCES + + "regions_offshore_elec_s{simpl}_{clusters}.geojson", + output: + gas_input_nodes=RESOURCES + + "gas_input_locations_s{simpl}_{clusters}.geojson", + gas_input_nodes_simplified=RESOURCES + + "gas_input_locations_s{simpl}_{clusters}_simplified.csv", + resources: + mem_mb=2000, + log: + LOGS + "build_gas_input_locations_s{simpl}_{clusters}.log", + conda: + "../envs/environment.yaml" + script: + "../scripts/build_gas_input_locations.py" - gas_infrastructure = {} + +rule cluster_gas_network: + input: + cleaned_gas_network=RESOURCES + "gas_network.csv", + regions_onshore=RESOURCES + + "regions_onshore_elec_s{simpl}_{clusters}.geojson", + regions_offshore=RESOURCES + + "regions_offshore_elec_s{simpl}_{clusters}.geojson", + output: + clustered_gas_network=RESOURCES + "gas_network_elec_s{simpl}_{clusters}.csv", + resources: + mem_mb=4000, + log: + LOGS + "cluster_gas_network_s{simpl}_{clusters}.log", + conda: + "../envs/environment.yaml" + script: + "../scripts/cluster_gas_network.py" rule build_heat_demands: @@ -722,7 +711,8 @@ rule prepare_sector_network: input: **build_retro_cost_output, **build_biomass_transport_costs_output, - **gas_infrastructure, + **rules.cluster_gas_network.output, + **rules.build_gas_input_locations.output, **build_sequestration_potentials_output, network=RESOURCES + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", energy_totals_name=RESOURCES + "energy_totals.csv", From 9d0040912b8b50ca2d69ae4c2720a486d7b98502 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 19:14:11 +0100 Subject: [PATCH 082/101] retrieve.smk: simplify gas infrastructure retrieve rules --- rules/retrieve.smk | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/rules/retrieve.smk b/rules/retrieve.smk index e062091e..198edf97 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -162,9 +162,8 @@ if config["enable"]["retrieve"] and config["enable"].get( "../scripts/retrieve_sector_databundle.py" -if config["enable"]["retrieve"] and ( - config["sector"]["gas_network"] or config["sector"]["H2_retrofit"] -): +if config["enable"]["retrieve"]: + datafiles = [ "IGGIELGN_LNGs.geojson", "IGGIELGN_BorderPoints.geojson", From 1e5f9e867da7719a27fa547942916fa4fb6ad281 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 19:17:49 +0100 Subject: [PATCH 083/101] build_industrial_distribution_key.py: add dropna to EPRTR emissions --- scripts/build_industrial_distribution_key.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/build_industrial_distribution_key.py b/scripts/build_industrial_distribution_key.py index b86d47c2..e6d515b0 100644 --- a/scripts/build_industrial_distribution_key.py +++ b/scripts/build_industrial_distribution_key.py @@ -130,7 +130,7 @@ def build_nodal_distribution_key(hotmaps, regions, countries): if not facilities.empty: emissions = facilities["Emissions_ETS_2014"].fillna( - hotmaps["Emissions_EPRTR_2014"] + hotmaps["Emissions_EPRTR_2014"].dropna() ) if emissions.sum() == 0: key = pd.Series(1 / len(facilities), facilities.index) From 0418af6071841606532ba7366c8acab7ae3957b2 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 19:20:26 +0100 Subject: [PATCH 084/101] config: keep nuclear if created in prepare_network.py --- config/config.default.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/config/config.default.yaml b/config/config.default.yaml index 1e26781d..95ae738e 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -295,6 +295,7 
@@ pypsa_eur: - offwind-dc - solar - ror + - nuclear StorageUnit: - PHS - hydro From a0f43aeeb77937db24223c4dc1e85d5cc0625706 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 19:22:26 +0100 Subject: [PATCH 085/101] config: increase default solar deployment density to 5.1 MW/km2 --- config/config.default.yaml | 2 +- doc/release_notes.rst | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index 95ae738e..ba41fe5a 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -212,7 +212,7 @@ renewable: orientation: slope: 35. azimuth: 180. - capacity_per_sqkm: 1.7 + capacity_per_sqkm: 5.1 # correction_factor: 0.854337 corine: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 26, 31, 32] luisa: false # [1111, 1121, 1122, 1123, 1130, 1210, 1221, 1222, 1230, 1241, 1242, 1310, 1320, 1330, 1410, 1421, 1422, 2110, 2120, 2130, 2210, 2220, 2230, 2310, 2410, 2420, 3210, 3320, 3330] diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 01833ccc..33857780 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -10,6 +10,8 @@ Release Notes Upcoming Release ================ +* Increase deployment density of solar to 5.1 MW/sqkm by default. + * Default to full electrification of land transport by 2050. * Default to approximating transmission losses in HVAC lines From 608a12fed26ad22e64c2dfc341163830000ae62c Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 27 Sep 2023 11:53:00 +0200 Subject: [PATCH 086/101] prepare_sector: allow planning_horizons wc to be a scalar value --- scripts/prepare_sector_network.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 17eb832d..79bc67e9 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -286,6 +286,8 @@ def build_carbon_budget(o, input_eurostat, fn, emissions_scope, report_year): ) planning_horizons = snakemake.params.planning_horizons + if not isinstance(planning_horizons, list): + planning_horizons = [planning_horizons] t_0 = planning_horizons[0] if "be" in o: From b135a0cc01a76dbe81ffcdc3ca5cdc63891ab846 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 19:42:22 +0100 Subject: [PATCH 087/101] build_ship_raster: performance improvement --- scripts/build_ship_raster.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/scripts/build_ship_raster.py b/scripts/build_ship_raster.py index 02f4d5d5..7025e85d 100644 --- a/scripts/build_ship_raster.py +++ b/scripts/build_ship_raster.py @@ -62,11 +62,13 @@ if __name__ == "__main__": xs, Xs, ys, Ys = zip(*(determine_cutout_xXyY(cutout) for cutout in cutouts)) with zipfile.ZipFile(snakemake.input.ship_density) as zip_f: - zip_f.extract("shipdensity_global.tif") - with rioxarray.open_rasterio("shipdensity_global.tif") as ship_density: - ship_density = ship_density.drop_vars(["band"]).sel( - x=slice(min(xs), max(Xs)), y=slice(max(Ys), min(ys)) - ) - ship_density.rio.to_raster(snakemake.output[0]) + resources = Path(snakemake.output[0]).parent + fn = "shipdensity_global.tif" + zip_f.extract(fn, resources) + with rioxarray.open_rasterio(resources / fn) as ship_density: + ship_density = ship_density.drop_vars(["band"]).sel( + x=slice(min(xs), max(Xs)), y=slice(max(Ys), min(ys)) + ) + ship_density.rio.to_raster(snakemake.output[0]) - os.remove("shipdensity_global.tif") + (resources / fn).unlink() From 
c1f527b663b5e9f11f283f2052ab23eaba3c9788 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 19:44:29 +0100 Subject: [PATCH 088/101] build_ship_raster: adjust imports --- scripts/build_ship_raster.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/build_ship_raster.py b/scripts/build_ship_raster.py index 7025e85d..da8c8b28 100644 --- a/scripts/build_ship_raster.py +++ b/scripts/build_ship_raster.py @@ -42,8 +42,8 @@ Description """ import logging -import os import zipfile +from pathlib import Path import rioxarray from _helpers import configure_logging From 254d50b1b4dc35d7db3b8cdc56ba99b798df3546 Mon Sep 17 00:00:00 2001 From: Fabian Date: Mon, 18 Sep 2023 12:25:04 +0200 Subject: [PATCH 089/101] prepare sectors: allow for updating co2 network costs --- Snakefile | 1 + config/config.default.yaml | 2 ++ scripts/prepare_sector_network.py | 47 ++++++++++++++++++++++++++----- scripts/solve_network.py | 8 +++--- 4 files changed, 47 insertions(+), 11 deletions(-) diff --git a/Snakefile b/Snakefile index 14ce0e40..f6e581a4 100644 --- a/Snakefile +++ b/Snakefile @@ -125,6 +125,7 @@ rule sync: shell: """ rsync -uvarh --ignore-missing-args --files-from=.sync-send . {params.cluster} + rsync -uvarh --no-g {params.cluster}/resources . || echo "No resources directory, skipping rsync rsync -uvarh --no-g {params.cluster}/results . || echo "No results directory, skipping rsync" rsync -uvarh --no-g {params.cluster}/logs . || echo "No logs directory, skipping rsync" """ diff --git a/config/config.default.yaml b/config/config.default.yaml index ba41fe5a..331f7382 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -478,6 +478,7 @@ sector: co2_sequestration_lifetime: 50 co2_spatial: false co2network: false + co2_network_cost_factor: 1 cc_fraction: 0.9 hydrogen_underground_storage: true hydrogen_underground_storage_locations: @@ -985,6 +986,7 @@ plotting: CO2 sequestration: '#f29dae' DAC: '#ff5270' co2 stored: '#f2385a' + co2 sequestered: '#f2682f' co2: '#f29dae' co2 vent: '#ffd4dc' CO2 pipeline: '#f5627f' diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 79bc67e9..b0d0a4e6 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -549,7 +549,7 @@ def patch_electricity_network(n): n.loads_t.p_set.rename(lambda x: x.strip(), axis=1, inplace=True) -def add_co2_tracking(n, options): +def add_co2_tracking(n, costs, options): # minus sign because opposite to how fossil fuels used: # CH4 burning puts CH4 down, atmosphere up n.add("Carrier", "co2", co2_emissions=-1.0) @@ -576,6 +576,37 @@ def add_co2_tracking(n, options): unit="t_co2", ) + # add CO2 tanks + n.madd( + "Store", + spatial.co2.nodes, + e_nom_extendable=True, + capital_cost=costs.loc["CO2 storage tank"], + carrier="co2 stored", + bus=spatial.co2.nodes, + ) + n.add("Carrier", "co2 stored") + + # this tracks CO2 stored, e.g. 
underground + sequestration_buses = spatial.co2.nodes.str.replace(" stored", " sequestered") + n.madd( + "Bus", + sequestration_buses, + location=spatial.co2.locations, + carrier="co2 sequestered", + unit="t_co2", + ) + + n.madd( + "Link", + sequestration_buses, + bus0=spatial.co2.nodes, + bus1=sequestration_buses, + carrier="co2 sequestered", + efficiency=1.0, + p_nom_extendable=True, + ) + if options["regional_co2_sequestration_potential"]["enable"]: upper_limit = ( options["regional_co2_sequestration_potential"]["max_size"] * 1e3 @@ -591,22 +622,22 @@ def add_co2_tracking(n, options): .mul(1e6) / annualiser ) # t - e_nom_max = e_nom_max.rename(index=lambda x: x + " co2 stored") + e_nom_max = e_nom_max.rename(index=lambda x: x + " co2 sequestered") else: e_nom_max = np.inf n.madd( "Store", - spatial.co2.nodes, + sequestration_buses, e_nom_extendable=True, e_nom_max=e_nom_max, capital_cost=options["co2_sequestration_cost"], - carrier="co2 stored", - bus=spatial.co2.nodes, + bus=sequestration_buses, lifetime=options["co2_sequestration_lifetime"], + carrier="co2 sequestered", ) - n.add("Carrier", "co2 stored") + n.add("Carrier", "co2 sequestered") if options["co2_vent"]: n.madd( @@ -635,6 +666,8 @@ def add_co2_network(n, costs): * co2_links.length ) capital_cost = cost_onshore + cost_submarine + cost_factor = snakemake.config["sector"]["co2_network_cost_factor"] + capital_cost *= cost_factor n.madd( "Link", @@ -3626,7 +3659,7 @@ if __name__ == "__main__": for carrier in conventional: add_carrier_buses(n, carrier) - add_co2_tracking(n, options) + add_co2_tracking(n, costs, options) add_generation(n, costs) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 203d8b0f..2bbd0164 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -202,10 +202,10 @@ def add_co2_sequestration_limit(n, config, limit=200): n.madd( "GlobalConstraint", names, - sense="<=", - constant=limit, - type="primary_energy", - carrier_attribute="co2_absorptions", + sense=">=", + constant=-limit, + type="operational_limit", + carrier_attribute="co2 sequestered", investment_period=periods, ) From 2d027e80c3561f60ac293edffce3ab3c69981842 Mon Sep 17 00:00:00 2001 From: Fabian Date: Tue, 19 Sep 2023 12:22:16 +0200 Subject: [PATCH 090/101] fix capital costs of co2 tanks --- Snakefile | 2 +- scripts/prepare_sector_network.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Snakefile b/Snakefile index f6e581a4..7c16ff9f 100644 --- a/Snakefile +++ b/Snakefile @@ -125,7 +125,7 @@ rule sync: shell: """ rsync -uvarh --ignore-missing-args --files-from=.sync-send . {params.cluster} - rsync -uvarh --no-g {params.cluster}/resources . || echo "No resources directory, skipping rsync + rsync -uvarh --no-g {params.cluster}/resources . || echo "No resources directory, skipping rsync" rsync -uvarh --no-g {params.cluster}/results . || echo "No results directory, skipping rsync" rsync -uvarh --no-g {params.cluster}/logs . || echo "No logs directory, skipping rsync" """ diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index b0d0a4e6..4cfaa95f 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -567,7 +567,7 @@ def add_co2_tracking(n, costs, options): bus="co2 atmosphere", ) - # this tracks CO2 stored, e.g. 
underground + # add CO2 tanks n.madd( "Bus", spatial.co2.nodes, @@ -576,13 +576,13 @@ def add_co2_tracking(n, costs, options): unit="t_co2", ) - # add CO2 tanks n.madd( "Store", spatial.co2.nodes, e_nom_extendable=True, - capital_cost=costs.loc["CO2 storage tank"], + capital_cost=costs.at["CO2 storage tank", "fixed"], carrier="co2 stored", + e_cyclic=True, bus=spatial.co2.nodes, ) n.add("Carrier", "co2 stored") From c71c4e75675638e55a56336d13ac3dbb3e0cbf6c Mon Sep 17 00:00:00 2001 From: Fabian Date: Wed, 20 Sep 2023 15:27:09 +0200 Subject: [PATCH 091/101] add biomass constraint for biomass spatial enabled --- scripts/prepare_sector_network.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 4cfaa95f..feb3faef 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2330,6 +2330,14 @@ def add_biomass(n, costs): marginal_cost=costs.at["solid biomass", "fuel"] + bus_transport_costs * average_distance, ) + n.add( + "GlobalConstraint", + "biomass limit", + carrier_attribute="solid biomass", + sense="<=", + constant=biomass_potentials["solid biomass"].sum(), + type="operational_limit", + ) # AC buses with district heating urban_central = n.buses.index[n.buses.carrier == "urban central heat"] From be5331c89c8aeabd64108d1c15827da47519c43a Mon Sep 17 00:00:00 2001 From: Fabian Date: Fri, 22 Sep 2023 10:51:36 +0200 Subject: [PATCH 092/101] formulate sequestration limit constraint as operational_limit constraint --- scripts/prepare_sector_network.py | 2 +- scripts/solve_network.py | 5 +---- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index feb3faef..0caf45dd 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -587,7 +587,7 @@ def add_co2_tracking(n, costs, options): ) n.add("Carrier", "co2 stored") - # this tracks CO2 stored, e.g. underground + # this tracks CO2 sequestered, e.g. underground sequestration_buses = spatial.co2.nodes.str.replace(" stored", " sequestered") n.madd( "Bus", diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 2bbd0164..e76d4004 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -182,9 +182,6 @@ def add_co2_sequestration_limit(n, config, limit=200): """ Add a global constraint on the amount of Mt CO2 that can be sequestered. 
""" - n.carriers.loc["co2 stored", "co2_absorptions"] = -1 - n.carriers.co2_absorptions = n.carriers.co2_absorptions.fillna(0) - limit = limit * 1e6 for o in opts: if "seq" not in o: @@ -396,7 +393,7 @@ def prepare_network( if snakemake.params["sector"]["limit_max_growth"]["enable"]: n = add_max_growth(n, config) - if n.stores.carrier.eq("co2 stored").any(): + if n.stores.carrier.eq("co2 sequestered").any(): limit = co2_sequestration_potential add_co2_sequestration_limit(n, config, limit=limit) From 4e03e5a7ecb8165306087bad6413d4e9eb2f8ce3 Mon Sep 17 00:00:00 2001 From: Fabian Date: Thu, 5 Oct 2023 16:07:04 +0200 Subject: [PATCH 093/101] prepare_sector: add VOM for FT and methanolization, always use `.at` accessor for costs --- scripts/prepare_sector_network.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 0caf45dd..3a3c9992 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -102,7 +102,10 @@ def define_spatial(nodes, options): spatial.gas.biogas = ["EU biogas"] spatial.gas.industry = ["gas for industry"] spatial.gas.biogas_to_gas = ["EU biogas to gas"] - spatial.gas.biogas_to_gas_cc = ["EU biogas to gas CC"] + if options.get("biomass_spatial", options["biomass_transport"]): + spatial.gas.biogas_to_gas_cc = nodes + " biogas to gas CC" + else: + spatial.gas.biogas_to_gas_cc = ["EU biogas to gas CC"] if options.get("co2_spatial", options["co2network"]): spatial.gas.industry_cc = nodes + " gas for industry CC" else: @@ -2257,13 +2260,12 @@ def add_biomass(n, costs): # Assuming for costs that the CO2 from upgrading is pure, such as in amine scrubbing. I.e., with and without CC is # equivalent. Adding biomass CHP capture because biogas is often small-scale and decentral so further # from e.g. CO2 grid or buyers. This is a proxy for the added cost for e.g. 
a raw biogas pipeline to a central upgrading facility - n.madd( "Link", spatial.gas.biogas_to_gas_cc, bus0=spatial.gas.biogas, bus1=spatial.gas.nodes, - bus2="co2 stored", + bus2=spatial.co2.nodes, bus3="co2 atmosphere", carrier="biogas to gas CC", capital_cost=costs.at["biogas CC", "fixed"] @@ -2734,6 +2736,7 @@ def add_industry(n, costs): carrier="methanolisation", p_nom_extendable=True, p_min_pu=options.get("min_part_load_methanolisation", 0), + marginal_cost=options["MWh_MeOH_per_MWh_H2"] * costs.at["fuel cell", "VOM"], capital_cost=costs.at["methanolisation", "fixed"] * options["MWh_MeOH_per_MWh_H2"], # EUR/MW_H2/a marginal_cost=options["MWh_MeOH_per_MWh_H2"] From 6078b4626239ef94984a08bc16ac19b5b2e9244b Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 20:04:56 +0100 Subject: [PATCH 094/101] remove duplicate marginal_cost for methanolisation from merge --- scripts/prepare_sector_network.py | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 3a3c9992..0cb9759c 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2736,7 +2736,6 @@ def add_industry(n, costs): carrier="methanolisation", p_nom_extendable=True, p_min_pu=options.get("min_part_load_methanolisation", 0), - marginal_cost=options["MWh_MeOH_per_MWh_H2"] * costs.at["fuel cell", "VOM"], capital_cost=costs.at["methanolisation", "fixed"] * options["MWh_MeOH_per_MWh_H2"], # EUR/MW_H2/a marginal_cost=options["MWh_MeOH_per_MWh_H2"] From af7c1b15e6b9268a2948cfd9bd8f7c8752920e26 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 20:10:05 +0100 Subject: [PATCH 095/101] add documentation --- doc/configtables/sector.csv | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/configtables/sector.csv b/doc/configtables/sector.csv index 5e2514e4..90979180 100644 --- a/doc/configtables/sector.csv +++ b/doc/configtables/sector.csv @@ -93,6 +93,7 @@ co2_sequestration_cost,currency/tCO2,float,The cost of sequestering a ton of CO2 co2_spatial,--,"{true, false}","Add option to spatially resolve carrier representing stored carbon dioxide. This allows for more detailed modelling of CCUTS, e.g. regarding the capturing of industrial process emissions, usage as feedstock for electrofuels, transport of carbon dioxide, and geological sequestration sites." ,,, co2network,--,"{true, false}",Add option for planning a new carbon dioxide transmission network +co2_network_cost_factor,p.u.,float,The cost factor for the capital cost of the carbon dioxide transmission network ,,, cc_fraction,--,float,The default fraction of CO2 captured with post-combustion capture hydrogen_underground _storage,--,"{true, false}",Add options for storing hydrogen underground. Storage potential depends regionally. 
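
The patches above separate captured CO2 into an overground "co2 stored" carrier (tanks, still available for utilisation such as methanolisation) and an underground "co2 sequestered" carrier that is reached through a one-way link and can no longer be used. Below is a minimal, self-contained PyPSA sketch of that bus/store/link topology for a single hypothetical node "DE"; the node name, snapshot count and all numeric values are placeholders rather than values taken from the patches.

    import pypsa

    n = pypsa.Network()
    n.set_snapshots(range(3))

    n.add("Carrier", "co2 stored")
    n.add("Carrier", "co2 sequestered")

    n.add("Bus", "DE co2 stored", carrier="co2 stored", unit="t_co2")
    n.add("Bus", "DE co2 sequestered", carrier="co2 sequestered", unit="t_co2")

    # overground tank storage: cyclic over the horizon, pays a tank investment
    # cost, and stays connected to any CCU process drawing from this bus
    n.add(
        "Store",
        "DE co2 stored",
        bus="DE co2 stored",
        carrier="co2 stored",
        e_nom_extendable=True,
        e_cyclic=True,
        capital_cost=2000.0,  # placeholder for costs.at["CO2 storage tank", "fixed"]
    )

    # one-way transfer into geological storage; no reverse link is added, so
    # sequestered CO2 cannot be withdrawn again for utilisation
    n.add(
        "Link",
        "DE co2 sequestered",
        bus0="DE co2 stored",
        bus1="DE co2 sequestered",
        carrier="co2 sequestered",
        efficiency=1.0,
        p_nom_extendable=True,
    )

    # underground store: cheaper per tonne, bounded by a sequestration potential
    n.add(
        "Store",
        "DE co2 sequestered",
        bus="DE co2 sequestered",
        carrier="co2 sequestered",
        e_nom_extendable=True,
        e_nom_max=25e6,     # placeholder potential in tCO2
        capital_cost=10.0,  # placeholder for options["co2_sequestration_cost"]
        lifetime=50,
    )

In the patches themselves the same pattern is built with n.madd over spatial.co2.nodes, with e_nom_max of the sequestered stores derived from the regionally resolved sequestration potentials when that option is enabled.
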
From 48832874171601e2eec0bf2b7512ae5a4ce718e1 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 4 Jan 2024 19:10:04 +0000 Subject: [PATCH 096/101] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- rules/common.smk | 2 +- scripts/add_brownfield.py | 4 +--- scripts/prepare_sector_network.py | 6 ++++-- scripts/solve_network.py | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/rules/common.smk b/rules/common.smk index 0e85b620..2298ff91 100644 --- a/rules/common.smk +++ b/rules/common.smk @@ -4,7 +4,7 @@ import os, sys, glob -helper_source_path = [match for match in glob.glob('**/_helpers.py', recursive=True)] +helper_source_path = [match for match in glob.glob("**/_helpers.py", recursive=True)] for path in helper_source_path: path = os.path.dirname(os.path.abspath(path)) diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index e151c441..cb1f51c8 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -133,9 +133,7 @@ def disable_grid_expansion_if_LV_limit_hit(n): # allow small numerical differences if lv_limit - total_expansion < 1: - logger.info( - f"LV is already reached, disabling expansion and LV limit" - ) + logger.info(f"LV is already reached, disabling expansion and LV limit") extendable_acs = n.lines.query("s_nom_extendable").index n.lines.loc[extendable_acs, "s_nom_extendable"] = False n.lines.loc[extendable_acs, "s_nom"] = n.lines.loc[extendable_acs, "s_nom_min"] diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 3a3c9992..ec76399d 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3044,8 +3044,9 @@ def add_industry(n, costs): if options["co2_spatial"] or options["co2network"]: p_set = ( - -industrial_demand.loc[nodes, "process emission"] - .rename(index=lambda x: x + " process emissions") + -industrial_demand.loc[nodes, "process emission"].rename( + index=lambda x: x + " process emissions" + ) / nhours ) else: @@ -3458,6 +3459,7 @@ def cluster_heat_buses(n): pnl = c.pnl agg = define_clustering(pd.Index(pnl.keys()), aggregate_dict) for k in pnl.keys(): + def renamer(s): return s.replace("residential ", "").replace("services ", "") diff --git a/scripts/solve_network.py b/scripts/solve_network.py index e76d4004..36b53086 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -853,7 +853,7 @@ def solve_network(n, config, solving, opts="", **kwargs): kwargs["assign_all_duals"] = cf_solving.get("assign_all_duals", False) if kwargs["solver_name"] == "gurobi": - logging.getLogger('gurobipy').setLevel(logging.CRITICAL) + logging.getLogger("gurobipy").setLevel(logging.CRITICAL) rolling_horizon = cf_solving.pop("rolling_horizon", False) skip_iterations = cf_solving.pop("skip_iterations", False) From becba42a88669c5e59fbb127336a67cf5498da49 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 20:15:00 +0100 Subject: [PATCH 097/101] add release notes --- doc/release_notes.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 33857780..88f05854 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -10,6 +10,13 @@ Release Notes Upcoming Release ================ +* Distinguish between stored and sequestered CO2. Stored CO2 is stored + overground in tanks and can be used for CCU (e.g. methanolisation). 
+ Sequestered CO2 is stored underground and can no longer be used for CCU. This + distinction is made because storage in tanks is more expensive than + underground storage. The link that connects stored and sequestered CO2 is + unidirectional. + * Increase deployment density of solar to 5.1 MW/sqkm by default. * Default to full electrification of land transport by 2050. From 0534f574e9ef15b59d475991080fd2c47c8cd2c0 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 4 Jan 2024 19:18:07 +0000 Subject: [PATCH 098/101] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- rules/common.smk | 2 +- scripts/add_brownfield.py | 4 +--- scripts/prepare_sector_network.py | 6 ++++-- scripts/solve_network.py | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/rules/common.smk b/rules/common.smk index 0e85b620..2298ff91 100644 --- a/rules/common.smk +++ b/rules/common.smk @@ -4,7 +4,7 @@ import os, sys, glob -helper_source_path = [match for match in glob.glob('**/_helpers.py', recursive=True)] +helper_source_path = [match for match in glob.glob("**/_helpers.py", recursive=True)] for path in helper_source_path: path = os.path.dirname(os.path.abspath(path)) diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index e151c441..cb1f51c8 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -133,9 +133,7 @@ def disable_grid_expansion_if_LV_limit_hit(n): # allow small numerical differences if lv_limit - total_expansion < 1: - logger.info( - f"LV is already reached, disabling expansion and LV limit" - ) + logger.info(f"LV is already reached, disabling expansion and LV limit") extendable_acs = n.lines.query("s_nom_extendable").index n.lines.loc[extendable_acs, "s_nom_extendable"] = False n.lines.loc[extendable_acs, "s_nom"] = n.lines.loc[extendable_acs, "s_nom_min"] diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 79bc67e9..8a45da2e 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3000,8 +3000,9 @@ def add_industry(n, costs): if options["co2_spatial"] or options["co2network"]: p_set = ( - -industrial_demand.loc[nodes, "process emission"] - .rename(index=lambda x: x + " process emissions") + -industrial_demand.loc[nodes, "process emission"].rename( + index=lambda x: x + " process emissions" + ) / nhours ) else: @@ -3414,6 +3415,7 @@ def cluster_heat_buses(n): pnl = c.pnl agg = define_clustering(pd.Index(pnl.keys()), aggregate_dict) for k in pnl.keys(): + def renamer(s): return s.replace("residential ", "").replace("services ", "") diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 203d8b0f..aa802ea8 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -856,7 +856,7 @@ def solve_network(n, config, solving, opts="", **kwargs): kwargs["assign_all_duals"] = cf_solving.get("assign_all_duals", False) if kwargs["solver_name"] == "gurobi": - logging.getLogger('gurobipy').setLevel(logging.CRITICAL) + logging.getLogger("gurobipy").setLevel(logging.CRITICAL) rolling_horizon = cf_solving.pop("rolling_horizon", False) skip_iterations = cf_solving.pop("skip_iterations", False) From 7b1600164fa292eb1ba2288340340deaf1cb3f59 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 4 Jan 2024 19:18:59 +0000 Subject: [PATCH 099/101] [pre-commit.ci] auto fixes from pre-commit.com hooks for 
more information, see https://pre-commit.ci --- rules/build_sector.smk | 15 +++++---------- rules/common.smk | 2 +- rules/retrieve.smk | 1 - scripts/add_brownfield.py | 4 +--- scripts/prepare_sector_network.py | 6 ++++-- scripts/solve_network.py | 2 +- 6 files changed, 12 insertions(+), 18 deletions(-) diff --git a/rules/build_sector.smk b/rules/build_sector.smk index 23ea604c..ef2fc6c8 100644 --- a/rules/build_sector.smk +++ b/rules/build_sector.smk @@ -90,13 +90,10 @@ rule build_gas_input_locations: ), entry="data/gas_network/scigrid-gas/data/IGGIELGN_BorderPoints.geojson", storage="data/gas_network/scigrid-gas/data/IGGIELGN_Storages.geojson", - regions_onshore=RESOURCES - + "regions_onshore_elec_s{simpl}_{clusters}.geojson", - regions_offshore=RESOURCES - + "regions_offshore_elec_s{simpl}_{clusters}.geojson", + regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", + regions_offshore=RESOURCES + "regions_offshore_elec_s{simpl}_{clusters}.geojson", output: - gas_input_nodes=RESOURCES - + "gas_input_locations_s{simpl}_{clusters}.geojson", + gas_input_nodes=RESOURCES + "gas_input_locations_s{simpl}_{clusters}.geojson", gas_input_nodes_simplified=RESOURCES + "gas_input_locations_s{simpl}_{clusters}_simplified.csv", resources: @@ -112,10 +109,8 @@ rule build_gas_input_locations: rule cluster_gas_network: input: cleaned_gas_network=RESOURCES + "gas_network.csv", - regions_onshore=RESOURCES - + "regions_onshore_elec_s{simpl}_{clusters}.geojson", - regions_offshore=RESOURCES - + "regions_offshore_elec_s{simpl}_{clusters}.geojson", + regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", + regions_offshore=RESOURCES + "regions_offshore_elec_s{simpl}_{clusters}.geojson", output: clustered_gas_network=RESOURCES + "gas_network_elec_s{simpl}_{clusters}.csv", resources: diff --git a/rules/common.smk b/rules/common.smk index 0e85b620..2298ff91 100644 --- a/rules/common.smk +++ b/rules/common.smk @@ -4,7 +4,7 @@ import os, sys, glob -helper_source_path = [match for match in glob.glob('**/_helpers.py', recursive=True)] +helper_source_path = [match for match in glob.glob("**/_helpers.py", recursive=True)] for path in helper_source_path: path = os.path.dirname(os.path.abspath(path)) diff --git a/rules/retrieve.smk b/rules/retrieve.smk index 198edf97..7a180e22 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -163,7 +163,6 @@ if config["enable"]["retrieve"] and config["enable"].get( if config["enable"]["retrieve"]: - datafiles = [ "IGGIELGN_LNGs.geojson", "IGGIELGN_BorderPoints.geojson", diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index e151c441..cb1f51c8 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -133,9 +133,7 @@ def disable_grid_expansion_if_LV_limit_hit(n): # allow small numerical differences if lv_limit - total_expansion < 1: - logger.info( - f"LV is already reached, disabling expansion and LV limit" - ) + logger.info(f"LV is already reached, disabling expansion and LV limit") extendable_acs = n.lines.query("s_nom_extendable").index n.lines.loc[extendable_acs, "s_nom_extendable"] = False n.lines.loc[extendable_acs, "s_nom"] = n.lines.loc[extendable_acs, "s_nom_min"] diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 17eb832d..70f57953 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2998,8 +2998,9 @@ def add_industry(n, costs): if options["co2_spatial"] or options["co2network"]: p_set = ( - 
-industrial_demand.loc[nodes, "process emission"] - .rename(index=lambda x: x + " process emissions") + -industrial_demand.loc[nodes, "process emission"].rename( + index=lambda x: x + " process emissions" + ) / nhours ) else: @@ -3412,6 +3413,7 @@ def cluster_heat_buses(n): pnl = c.pnl agg = define_clustering(pd.Index(pnl.keys()), aggregate_dict) for k in pnl.keys(): + def renamer(s): return s.replace("residential ", "").replace("services ", "") diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 203d8b0f..aa802ea8 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -856,7 +856,7 @@ def solve_network(n, config, solving, opts="", **kwargs): kwargs["assign_all_duals"] = cf_solving.get("assign_all_duals", False) if kwargs["solver_name"] == "gurobi": - logging.getLogger('gurobipy').setLevel(logging.CRITICAL) + logging.getLogger("gurobipy").setLevel(logging.CRITICAL) rolling_horizon = cf_solving.pop("rolling_horizon", False) skip_iterations = cf_solving.pop("skip_iterations", False) From e594f34e0029ce73a8083e6b28b60315b23b91c0 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 4 Jan 2024 20:30:00 +0100 Subject: [PATCH 100/101] fix sequestration buses string replacement with pd.Index --- scripts/prepare_sector_network.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 9fe4c95a..e35d3a64 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -591,7 +591,7 @@ def add_co2_tracking(n, costs, options): n.add("Carrier", "co2 stored") # this tracks CO2 sequestered, e.g. underground - sequestration_buses = spatial.co2.nodes.str.replace(" stored", " sequestered") + sequestration_buses = pd.Index(spatial.co2.nodes).str.replace(" stored", " sequestered") n.madd( "Bus", sequestration_buses, From 29cda7042b2b506001d2955059ca0d6ce75f1d0c Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 4 Jan 2024 19:30:29 +0000 Subject: [PATCH 101/101] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/prepare_sector_network.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index e35d3a64..51ab52d9 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -591,7 +591,9 @@ def add_co2_tracking(n, costs, options): n.add("Carrier", "co2 stored") # this tracks CO2 sequestered, e.g. underground - sequestration_buses = pd.Index(spatial.co2.nodes).str.replace(" stored", " sequestered") + sequestration_buses = pd.Index(spatial.co2.nodes).str.replace( + " stored", " sequestered" + ) n.madd( "Bus", sequestration_buses,
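
Taken together, patches 064, 071 and 072 establish the convention for user-supplied solver hooks: the path configured under solving: options: custom_extra_functionality is passed to the solve rules as an optional input, its directory is appended to sys.path, the file is imported as a module named after the file, and a function carrying the same name as the file is called with (n, snapshots, snakemake). The sketch below condenses that convention into a standalone helper; the helper name and the example file name my_constraints.py are hypothetical, chosen only for illustration.

    import importlib
    import os
    import sys


    def run_custom_extra_functionality(n, snapshots, snakemake, source_path):
        # hypothetical wrapper around the loader logic in scripts/solve_network.py:
        # make the file's directory importable, import the file by its name, and
        # call the function that shares the module's name
        sys.path.append(os.path.dirname(source_path))
        module_name = os.path.splitext(os.path.basename(source_path))[0]
        module = importlib.import_module(module_name)
        getattr(module, module_name)(n, snapshots, snakemake)


    # a matching user file, e.g. my_constraints.py, would then define:
    #
    # def my_constraints(n, snapshots, snakemake):
    #     """Add custom constraints, e.g. via n.model.add_constraints(...)."""
    #     ...

The default data/custom_extra_functionality.py shipped with the repository follows the same rule, since its function is named custom_extra_functionality like the file.
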