diff --git a/doc/configtables/load.csv b/doc/configtables/load.csv index b245268f..02e7b97b 100644 --- a/doc/configtables/load.csv +++ b/doc/configtables/load.csv @@ -4,4 +4,4 @@ time_shift_for_large_gaps,string,string,"Periods which are used for copying time manual_adjustments,bool,"{true, false}","Whether to adjust the load data manually according to the function in :func:`manual_adjustment`." scaling_factor,--,float,"Global correction factor for the load time series." fixed_year,--,Year or False,"To specify a fixed year for the load time series that deviates from the snapshots' year" -supplement_missing_data_artificially,bool,"{true, false}","Whether to supplement missing data for selected time period should be supplemented by artificial data from https://zenodo.org/record/7070438/files/demand_hourly.csv." \ No newline at end of file +supplement_missing_data_artificially,bool,"{true, false}","Whether missing data for the selected time period should be supplemented by artificial data from https://zenodo.org/record/7070438/files/demand_hourly.csv." 
diff --git a/rules/build_electricity.smk b/rules/build_electricity.smk index edf5fca3..6db8bcd7 100644 --- a/rules/build_electricity.smk +++ b/rules/build_electricity.smk @@ -21,6 +21,7 @@ if config["enable"].get("prepare_links_p_nom", False): rule build_electricity_demand: params: snapshots=config_provider("snapshots"), + drop_leap_day=config_provider("enable", "drop_leap_day"), countries=config_provider("countries"), load=config_provider("load"), input: @@ -68,6 +69,7 @@ rule base_network: params: countries=config_provider("countries"), snapshots=config_provider("snapshots"), + drop_leap_day=config_provider("enable", "drop_leap_day"), lines=config_provider("lines"), links=config_provider("links"), transformers=config_provider("transformers"), diff --git a/scripts/_helpers.py b/scripts/_helpers.py index a1504c3c..09687a11 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -724,3 +724,15 @@ def validate_checksum(file_path, zenodo_url=None, checksum=None): assert ( calculated_checksum == checksum ), "Checksum is invalid. This may be due to an incomplete download. Delete the file and re-execute the rule." + + +def get_snapshots(snapshots, drop_leap_day=False, freq="h", **kwargs): + """ + Returns a pandas DatetimeIndex, potentially with leap days (Feb 29) removed. 
+ """ + + time = pd.date_range(freq=freq, **snapshots, **kwargs) + if drop_leap_day and time.is_leap_year.any(): + time = time[~((time.month == 2) & (time.day == 29))] + + return time diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index a0d41e1d..ac73cb1f 100755 --- a/scripts/add_electricity.py +++ b/scripts/add_electricity.py @@ -93,7 +93,12 @@ import powerplantmatching as pm import pypsa import scipy.sparse as sparse import xarray as xr -from _helpers import configure_logging, set_scenario_config, update_p_nom_max +from _helpers import ( + configure_logging, + get_snapshots, + set_scenario_config, + update_p_nom_max, +) from powerplantmatching.export import map_country_bus from shapely.prepared import prep @@ -760,15 +765,6 @@ def estimate_renewable_capacities( ) -def drop_leap_day(n): - if not n.snapshots.is_leap_year.any(): - return - leap_days = (n.snapshots.day == 29) & (n.snapshots.month == 2) - n.set_snapshots(n.snapshots[~leap_days]) - n.snapshot_weightings[:] = 8760 / len(n.snapshots) - logger.info("Dropped February 29 from leap year.") - - def attach_line_rating( n, rating, s_max_pu, correction_factor, max_voltage_difference, max_line_rating ): @@ -805,7 +801,8 @@ if __name__ == "__main__": n = pypsa.Network(snakemake.input.base_network) - n.set_snapshots(pd.date_range(freq="h", **snakemake.params.snapshots)) + time = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day) + n.set_snapshots(time) Nyears = n.snapshot_weightings.objective.sum() / 8760.0 @@ -916,8 +913,5 @@ if __name__ == "__main__": sanitize_carriers(n, snakemake.config) - if snakemake.params.drop_leap_day: - drop_leap_day(n) - n.meta = snakemake.config n.export_to_netcdf(snakemake.output[0]) diff --git a/scripts/base_network.py b/scripts/base_network.py index 66438994..346f99a5 100644 --- a/scripts/base_network.py +++ b/scripts/base_network.py @@ -75,7 +75,7 @@ import shapely import shapely.prepared import shapely.wkt import yaml -from _helpers 
import configure_logging, set_scenario_config +from _helpers import configure_logging, get_snapshots, set_scenario_config from packaging.version import Version, parse from scipy import spatial from scipy.sparse import csgraph @@ -730,12 +730,12 @@ def base_network( transformers = _set_electrical_parameters_transformers(transformers, config) links = _set_electrical_parameters_links(links, config, links_p_nom) converters = _set_electrical_parameters_converters(converters, config) - snapshots = snakemake.params.snapshots n = pypsa.Network() n.name = "PyPSA-Eur" - n.set_snapshots(pd.date_range(freq="h", **snapshots)) + time = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day) + n.set_snapshots(time) n.madd("Carrier", ["AC", "DC"]) n.import_components_from_dataframe(buses, "Bus") diff --git a/scripts/build_daily_heat_demand.py b/scripts/build_daily_heat_demand.py index c08cfe33..54c5c386 100644 --- a/scripts/build_daily_heat_demand.py +++ b/scripts/build_daily_heat_demand.py @@ -11,7 +11,7 @@ import geopandas as gpd import numpy as np import pandas as pd import xarray as xr -from _helpers import set_scenario_config +from _helpers import get_snapshots, set_scenario_config from dask.distributed import Client, LocalCluster if __name__ == "__main__": @@ -32,11 +32,12 @@ if __name__ == "__main__": cutout_name = snakemake.input.cutout - time = pd.date_range(freq="h", **snakemake.params.snapshots) - daily = pd.date_range(freq="D", **snakemake.params.snapshots) - if snakemake.params.drop_leap_day: - time = time[~((time.month == 2) & (time.day == 29))] - daily = daily[~((daily.month == 2) & (daily.day == 29))] + time = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day) + daily = get_snapshots( + snakemake.params.snapshots, + snakemake.params.drop_leap_day, + freq="D", + ) cutout = atlite.Cutout(cutout_name).sel(time=time) diff --git a/scripts/build_electricity_demand.py b/scripts/build_electricity_demand.py index 68df4bac..2d3da224 
100755 --- a/scripts/build_electricity_demand.py +++ b/scripts/build_electricity_demand.py @@ -39,7 +39,7 @@ import logging import numpy as np import pandas as pd -from _helpers import configure_logging, set_scenario_config +from _helpers import configure_logging, get_snapshots, set_scenario_config from pandas import Timedelta as Delta logger = logging.getLogger(__name__) @@ -263,7 +263,9 @@ if __name__ == "__main__": configure_logging(snakemake) set_scenario_config(snakemake) - snapshots = pd.date_range(freq="h", **snakemake.params.snapshots) + snapshots = get_snapshots( + snakemake.params.snapshots, snakemake.params.drop_leap_day + ) fixed_year = snakemake.config["load"].get("fixed_year", False) years = ( diff --git a/scripts/build_hourly_heat_demand.py b/scripts/build_hourly_heat_demand.py index c3916b54..1fb4f5a4 100644 --- a/scripts/build_hourly_heat_demand.py +++ b/scripts/build_hourly_heat_demand.py @@ -10,7 +10,7 @@ from itertools import product import pandas as pd import xarray as xr -from _helpers import generate_periodic_profiles, set_scenario_config +from _helpers import generate_periodic_profiles, get_snapshots, set_scenario_config if __name__ == "__main__": if "snakemake" not in globals(): @@ -24,9 +24,9 @@ if __name__ == "__main__": ) set_scenario_config(snakemake) - snapshots = pd.date_range(freq="h", **snakemake.params.snapshots) - if snakemake.params.drop_leap_day: - snapshots = snapshots[~((snapshots.month == 2) & (snapshots.day == 29))] + snapshots = get_snapshots( + snakemake.params.snapshots, snakemake.params.drop_leap_day + ) daily_space_heat_demand = ( xr.open_dataarray(snakemake.input.heat_demand) diff --git a/scripts/build_hydro_profile.py b/scripts/build_hydro_profile.py index 510bc6fa..b7f270b3 100644 --- a/scripts/build_hydro_profile.py +++ b/scripts/build_hydro_profile.py @@ -65,7 +65,7 @@ import atlite import country_converter as coco import geopandas as gpd import pandas as pd -from _helpers import configure_logging, 
set_scenario_config +from _helpers import configure_logging, get_snapshots, set_scenario_config from numpy.polynomial import Polynomial cc = coco.CountryConverter() @@ -73,9 +73,7 @@ cc = coco.CountryConverter() def get_eia_annual_hydro_generation(fn, countries, capacities=False): # in billion kWh/a = TWh/a - df = pd.read_csv( - fn, skiprows=2, index_col=1, na_values=[" ", "--"] - ).iloc[1:, 1:] + df = pd.read_csv(fn, skiprows=2, index_col=1, na_values=[" ", "--"]).iloc[1:, 1:] df.index = df.index.str.strip() df.columns = df.columns.astype(int) @@ -175,9 +173,7 @@ if __name__ == "__main__": params_hydro = snakemake.params.hydro - time = pd.date_range(freq="h", **snakemake.params.snapshots) - if snakemake.params.drop_leap_day: - time = time[~((time.month == 2) & (time.day == 29))] + time = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day) cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time) diff --git a/scripts/build_line_rating.py b/scripts/build_line_rating.py index 794638d2..f9c71ea3 100755 --- a/scripts/build_line_rating.py +++ b/scripts/build_line_rating.py @@ -58,7 +58,7 @@ import numpy as np import pandas as pd import pypsa import xarray as xr -from _helpers import configure_logging, set_scenario_config +from _helpers import configure_logging, get_snapshots, set_scenario_config from shapely.geometry import LineString as Line from shapely.geometry import Point @@ -147,9 +147,7 @@ if __name__ == "__main__": set_scenario_config(snakemake) n = pypsa.Network(snakemake.input.base_network) - time = pd.date_range(freq="h", **snakemake.params.snapshots) - if snakemake.params.drop_leap_day: - time = time[~((time.month == 2) & (time.day == 29))] + time = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day) cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time) diff --git a/scripts/build_population_weighted_energy_totals.py b/scripts/build_population_weighted_energy_totals.py index c2be19a9..70105a06 100644 --- 
a/scripts/build_population_weighted_energy_totals.py +++ b/scripts/build_population_weighted_energy_totals.py @@ -15,7 +15,7 @@ if __name__ == "__main__": snakemake = mock_snakemake( "build_population_weighted_energy_totals", - kind='energy', + kind="energy", simpl="", clusters=60, ) diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index 88057fb6..f1eb5e15 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -186,9 +186,8 @@ import time import atlite import geopandas as gpd import numpy as np -import pandas as pd import xarray as xr -from _helpers import configure_logging, set_scenario_config +from _helpers import configure_logging, get_snapshots, set_scenario_config from dask.distributed import Client from pypsa.geo import haversine from shapely.geometry import LineString @@ -227,11 +226,9 @@ if __name__ == "__main__": else: client = None - time = pd.date_range(freq="h", **snakemake.params.snapshots) - if snakemake.params.drop_leap_day: - time = time[~((time.month == 2) & (time.day == 29))] + sns = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day) - cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time) + cutout = atlite.Cutout(snakemake.input.cutout).sel(time=sns) regions = gpd.read_file(snakemake.input.regions) assert not regions.empty, ( f"List of regions in {snakemake.input.regions} is empty, please " diff --git a/scripts/build_solar_thermal_profiles.py b/scripts/build_solar_thermal_profiles.py index c9eaffcc..bb5180b9 100644 --- a/scripts/build_solar_thermal_profiles.py +++ b/scripts/build_solar_thermal_profiles.py @@ -9,9 +9,8 @@ Build solar thermal collector time series. 
import atlite import geopandas as gpd import numpy as np -import pandas as pd import xarray as xr -from _helpers import set_scenario_config +from _helpers import get_snapshots, set_scenario_config from dask.distributed import Client, LocalCluster if __name__ == "__main__": @@ -32,9 +31,7 @@ if __name__ == "__main__": config = snakemake.params.solar_thermal config.pop("cutout", None) - time = pd.date_range(freq="h", **snakemake.params.snapshots) - if snakemake.params.drop_leap_day: - time = time[~((time.month == 2) & (time.day == 29))] + time = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day) cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time) diff --git a/scripts/build_temperature_profiles.py b/scripts/build_temperature_profiles.py index e5fa0b38..00c88b5b 100644 --- a/scripts/build_temperature_profiles.py +++ b/scripts/build_temperature_profiles.py @@ -9,9 +9,8 @@ Build time series for air and soil temperatures per clustered model region. import atlite import geopandas as gpd import numpy as np -import pandas as pd import xarray as xr -from _helpers import set_scenario_config +from _helpers import get_snapshots, set_scenario_config from dask.distributed import Client, LocalCluster if __name__ == "__main__": @@ -29,9 +28,7 @@ if __name__ == "__main__": cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1) client = Client(cluster, asynchronous=True) - time = pd.date_range(freq="h", **snakemake.params.snapshots) - if snakemake.params.drop_leap_day: - time = time[~((time.month == 2) & (time.day == 29))] + time = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day) cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time) diff --git a/scripts/build_transport_demand.py b/scripts/build_transport_demand.py index d3c740be..e39774fa 100644 --- a/scripts/build_transport_demand.py +++ b/scripts/build_transport_demand.py @@ -13,7 +13,12 @@ import logging import numpy as np import pandas as pd import 
xarray as xr -from _helpers import configure_logging, generate_periodic_profiles, set_scenario_config +from _helpers import ( + configure_logging, + generate_periodic_profiles, + get_snapshots, + set_scenario_config, +) logger = logging.getLogger(__name__) @@ -183,10 +188,9 @@ if __name__ == "__main__": options = snakemake.params.sector - snapshots = pd.date_range(freq="h", **snakemake.params.snapshots, tz="UTC") - if snakemake.params.drop_leap_day: - leap_day = (snapshots.month == 2) & (snapshots.day == 29) - snapshots = snapshots[~leap_day] + snapshots = get_snapshots( + snakemake.params.snapshots, snakemake.params.drop_leap_day, tz="UTC" + ) nyears = len(snapshots) / 8760 diff --git a/scripts/make_summary.py b/scripts/make_summary.py index b9e1a8af..8c2a1aea 100644 --- a/scripts/make_summary.py +++ b/scripts/make_summary.py @@ -13,7 +13,7 @@ import sys import numpy as np import pandas as pd import pypsa -from _helpers import configure_logging, set_scenario_config +from _helpers import configure_logging, get_snapshots, set_scenario_config from prepare_sector_network import prepare_costs idx = pd.IndexSlice @@ -690,7 +690,8 @@ if __name__ == "__main__": for planning_horizon in snakemake.params.scenario["planning_horizons"] } - Nyears = len(pd.date_range(freq="h", **snakemake.params.snapshots)) / 8760 + time = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day) + Nyears = len(time) / 8760 costs_db = prepare_costs( snakemake.input.costs, diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 9c4404f2..5757a88b 100755 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3507,10 +3507,6 @@ def set_temporal_aggregation(n, resolution, solver_name, drop_leap_day=False): logger.info("Aggregate to frequency %s", resolution) n = average_every_nhours(n, resolution) - if drop_leap_day: - sns = n.snapshots[~((n.snapshots.month == 2) & (n.snapshots.day == 29))] - n.set_snapshots(sns) - return 
n