From 4fbb3c81c4ddb589614f67dc01c885c9b3806bbe Mon Sep 17 00:00:00 2001
From: Fabian Neumann
Date: Thu, 14 Mar 2024 16:48:32 +0100
Subject: [PATCH] fix build_district_heat_share and build_transport_demand for multiyear

---
 rules/build_sector.smk                        |  4 ++++
 scripts/build_district_heat_share.py          |  7 +++----
 scripts/build_energy_totals.py                | 19 +++++++++++++++++--
 ...build_population_weighted_energy_totals.py |  2 +-
 scripts/build_transport_demand.py             | 10 ++++++----
 scripts/prepare_network.py                    |  2 +-
 scripts/prepare_sector_network.py             |  2 +-
 7 files changed, 33 insertions(+), 13 deletions(-)

diff --git a/rules/build_sector.smk b/rules/build_sector.smk
index 14226ef8..e1575a78 100644
--- a/rules/build_sector.smk
+++ b/rules/build_sector.smk
@@ -729,6 +729,8 @@ rule build_retro_cost:
 
 
 rule build_population_weighted_energy_totals:
+    params:
+        snapshots=config_provider("snapshots"),
     input:
         energy_totals=resources("{kind}_totals.csv"),
         clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"),
@@ -771,6 +773,7 @@ rule build_transport_demand:
         snapshots=config_provider("snapshots"),
         drop_leap_day=config_provider("enable", "drop_leap_day"),
         sector=config_provider("sector"),
+        energy_totals_year=config_provider("energy", "energy_totals_year"),
     input:
         clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"),
         pop_weighted_energy_totals=resources(
@@ -799,6 +802,7 @@ rule build_transport_demand:
 rule build_district_heat_share:
     params:
         sector=config_provider("sector"),
+        energy_totals_year=config_provider("energy", "energy_totals_year"),
     input:
         district_heat_share=resources("district_heat_share.csv"),
         clustered_pop_layout=resources("pop_layout_elec_s{simpl}_{clusters}.csv"),
diff --git a/scripts/build_district_heat_share.py b/scripts/build_district_heat_share.py
index 3cf88466..10a30915 100644
--- a/scripts/build_district_heat_share.py
+++ b/scripts/build_district_heat_share.py
@@ -22,7 +22,7 @@ if __name__ == "__main__":
         snakemake = mock_snakemake(
             "build_district_heat_share",
             simpl="",
-            clusters=48,
+            clusters=60,
             planning_horizons="2050",
         )
     configure_logging(snakemake)
@@ -32,9 +32,8 @@ if __name__ == "__main__":
 
     pop_layout = pd.read_csv(snakemake.input.clustered_pop_layout, index_col=0)
 
-    district_heat_share = pd.read_csv(snakemake.input.district_heat_share, index_col=0)[
-        "district heat share"
-    ]
+    year = str(snakemake.params.energy_totals_year)
+    district_heat_share = pd.read_csv(snakemake.input.district_heat_share, index_col=0)[year]
 
     # make ct-based share nodal
     district_heat_share = district_heat_share.reindex(pop_layout.ct).fillna(0)
diff --git a/scripts/build_energy_totals.py b/scripts/build_energy_totals.py
index 01532872..a53c6208 100644
--- a/scripts/build_energy_totals.py
+++ b/scripts/build_energy_totals.py
@@ -721,10 +721,18 @@ def build_transport_data(countries, population, idees):
 
     transport_data = pd.DataFrame(idees["passenger cars"])
 
+    countries_without_ch = set(countries) - {"CH"}
+    new_index = pd.MultiIndex.from_product(
+        [countries_without_ch, transport_data.index.levels[1]],
+        names=["country", "year"]
+    )
+
+    transport_data = transport_data.reindex(index=new_index)
+
     # https://www.bfs.admin.ch/bfs/en/home/statistics/mobility-transport/transport-infrastructure-vehicles/vehicles/road-vehicles-stock-level-motorisation.html
     if "CH" in countries:
         fn = snakemake.input.swiss_transport
-        swiss_cars = pd.read_csv(fn, index_col=0).loc[1990:2021, ["passenger cars"]]
+        swiss_cars = pd.read_csv(fn, index_col=0).loc[2000:2015, ["passenger cars"]]
 
         swiss_cars.index = pd.MultiIndex.from_product(
             [["CH"], swiss_cars.index], names=["country", "year"]
@@ -741,7 +749,14 @@ def build_transport_data(countries, population, idees):
         )
 
         cars_pp = transport_data["number cars"] / population
-        transport_data.loc[missing, "number cars"] = cars_pp.mean() * population
+
+        fill_values = {year: cars_pp.mean() * population for year in transport_data.index.levels[1]}
+        fill_values = pd.DataFrame(fill_values).stack()
+        fill_values = pd.DataFrame(fill_values, columns=["number cars"])
+        fill_values.index.names = ["country", "year"]
+        fill_values = fill_values.reindex(transport_data.index)
+
+        transport_data = transport_data.combine_first(fill_values)
 
     # collect average fuel efficiency in kWh/km
 
diff --git a/scripts/build_population_weighted_energy_totals.py b/scripts/build_population_weighted_energy_totals.py
index 70105a06..69d4c681 100644
--- a/scripts/build_population_weighted_energy_totals.py
+++ b/scripts/build_population_weighted_energy_totals.py
@@ -15,7 +15,7 @@ if __name__ == "__main__":
 
         snakemake = mock_snakemake(
             "build_population_weighted_energy_totals",
-            kind="energy",
+            kind="heat",
             simpl="",
             clusters=60,
         )
diff --git a/scripts/build_transport_demand.py b/scripts/build_transport_demand.py
index e39774fa..085a0954 100644
--- a/scripts/build_transport_demand.py
+++ b/scripts/build_transport_demand.py
@@ -23,8 +23,9 @@ from _helpers import (
 logger = logging.getLogger(__name__)
 
 
-def build_nodal_transport_data(fn, pop_layout):
-    transport_data = pd.read_csv(fn, index_col=0)
+def build_nodal_transport_data(fn, pop_layout, year):
+    transport_data = pd.read_csv(fn, index_col=[0, 1])
+    transport_data = transport_data.xs(min(2015, year), level="year")
 
     nodal_transport_data = transport_data.loc[pop_layout.ct].fillna(0.0)
     nodal_transport_data.index = pop_layout.index
@@ -173,7 +174,7 @@ if __name__ == "__main__":
         snakemake = mock_snakemake(
             "build_transport_demand",
             simpl="",
-            clusters=48,
+            clusters=60,
         )
     configure_logging(snakemake)
     set_scenario_config(snakemake)
@@ -194,8 +195,9 @@ if __name__ == "__main__":
 
     nyears = len(snapshots) / 8760
 
+    energy_totals_year = snakemake.params.energy_totals_year
     nodal_transport_data = build_nodal_transport_data(
-        snakemake.input.transport_data, pop_layout
+        snakemake.input.transport_data, pop_layout, energy_totals_year
    )
 
     transport_demand = build_transport_demand(
diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py
index f7a3f6b0..00cb00bf 100755
--- a/scripts/prepare_network.py
+++ b/scripts/prepare_network.py
@@ -205,7 +205,7 @@ def average_every_nhours(n, offset):
 
     snapshot_weightings = n.snapshot_weightings.resample(offset).sum()
     sns = snapshot_weightings.index
-    if snakemake.params.drop_leap_days:
+    if snakemake.params.drop_leap_day:
         sns = sns[~((sns.month == 2) & (sns.day == 29))]
     m.set_snapshots(snapshot_weightings.index)
     m.snapshot_weightings = snapshot_weightings
diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py
index e5f0dac8..ceb7764e 100755
--- a/scripts/prepare_sector_network.py
+++ b/scripts/prepare_sector_network.py
@@ -813,7 +813,7 @@ def average_every_nhours(n, offset):
 
     snapshot_weightings = n.snapshot_weightings.resample(offset).sum()
     sns = snapshot_weightings.index
-    if snakemake.params.drop_leap_days:
+    if snakemake.params.drop_leap_day:
         sns = sns[~((sns.month == 2) & (sns.day == 29))]
     snapshot_weightings = snapshot_weightings.loc[sns]
     m.set_snapshots(snapshot_weightings.index)