integrate artificial load data supplement into build_electricity_demand
This commit is contained in:
parent
96f71d7e9c
commit
8904943a87
@ -73,8 +73,6 @@ enable:
|
||||
build_cutout: false
|
||||
retrieve_irena: false
|
||||
retrieve_cutout: true
|
||||
retrieve_opsd_load_data: true
|
||||
retrieve_artificial_load_data: false
|
||||
build_natura_raster: false
|
||||
retrieve_natura_raster: true
|
||||
custom_busmap: false
|
||||
@ -295,6 +293,7 @@ load:
|
||||
manual_adjustments: true # false
|
||||
scaling_factor: 1.0
|
||||
fixed_year: false # false or year (e.g. 2013)
|
||||
supplement_missing_data_artificially: true
|
||||
|
||||
# docs
|
||||
# TODO: PyPSA-Eur merge issue in prepare_sector_network.py
|
||||
|
@ -18,45 +18,26 @@ if config["enable"].get("prepare_links_p_nom", False):
|
||||
"../scripts/prepare_links_p_nom.py"
|
||||
|
||||
|
||||
if config["enable"].get("retrieve_opsd_load_data", True):

    # Build the national electricity demand time series from the raw
    # reported (OPSD) load data download.
    rule build_electricity_demand:
        params:
            snapshots=config_provider("snapshots"),
            countries=config_provider("countries"),
            load=config_provider("load"),
        input:
            # ancient(): never re-trigger this rule just because the raw
            # download got a newer timestamp.
            ancient("data/electricity_demand_raw.csv"),
        output:
            resources("electricity_demand.csv"),
        log:
            logs("build_electricity_demand.log"),
        resources:
            mem_mb=5000,
        conda:
            "../envs/environment.yaml"
        script:
            "../scripts/build_electricity_demand.py"
|
||||
|
||||
|
||||
if config["enable"].get("retrieve_artificial_load_data", False):

    # Prepare the artificial (synthetic) load data as a drop-in
    # replacement for the OPSD-based demand time series.
    rule build_artificial_load_data:
        input:
            ancient("data/load_artificial_raw.csv"),
        output:
            # Deliberately writes the same target as
            # build_electricity_demand; the ruleorder below resolves the
            # ambiguity in favour of this rule.
            resources("electricity_demand.csv"),
        log:
            logs("build_artificial_load_data.log"),
        resources:
            mem_mb=5000,
        conda:
            "../envs/environment.yaml"
        script:
            # NOTE(review): this commit also deletes
            # scripts/build_artificial_load_data.py (hunk "-1,42 +0,0");
            # confirm this rule is on the removed side of the diff, since
            # otherwise it references a script that no longer exists.
            "../scripts/build_artificial_load_data.py"

    # When both rules could produce electricity_demand.csv, prefer the
    # artificial-data rule.
    ruleorder: build_artificial_load_data > build_electricity_demand
|
||||
rule build_electricity_demand:
    params:
        snapshots=config_provider("snapshots"),
        countries=config_provider("countries"),
        load=config_provider("load"),
    input:
        # Raw reported demand; ancient() suppresses rebuilds on
        # timestamp-only changes of the download.
        reported=ancient("data/electricity_demand_raw.csv"),
        # Input function: request the artificial load data only when the
        # config enables supplementation; an empty list means "no extra
        # input" to Snakemake, so the download is not required otherwise.
        artificial=lambda w: ancient("data/load_artificial_raw.csv")
        if config_provider("load", "supplement_missing_data_artificially")(w)
        else [],
    output:
        resources("electricity_demand.csv"),
    log:
        logs("build_electricity_demand.log"),
    resources:
        mem_mb=5000,
    conda:
        "../envs/environment.yaml"
    script:
        "../scripts/build_electricity_demand.py"
|
||||
|
||||
|
||||
rule build_powerplants:
|
||||
|
@ -180,9 +180,7 @@ if config["enable"]["retrieve"]:
|
||||
"../scripts/retrieve_gas_infrastructure_data.py"
|
||||
|
||||
|
||||
if config["enable"]["retrieve"] and config["enable"].get(
|
||||
"retrieve_opsd_load_data", True
|
||||
):
|
||||
if config["enable"]["retrieve"]:
|
||||
|
||||
rule retrieve_electricity_demand:
|
||||
params:
|
||||
@ -200,9 +198,7 @@ if config["enable"]["retrieve"] and config["enable"].get(
|
||||
"../scripts/retrieve_electricity_demand.py"
|
||||
|
||||
|
||||
if config["enable"]["retrieve"] and config["enable"].get(
|
||||
"retrieve_artificial_load_data", False
|
||||
):
|
||||
if config["enable"]["retrieve"]:
|
||||
|
||||
rule retrieve_artificial_load_data:
|
||||
input:
|
||||
@ -220,6 +216,7 @@ if config["enable"]["retrieve"] and config["enable"].get(
|
||||
retries: 2
|
||||
run:
|
||||
move(input[0], output[0])
|
||||
validate_checksum(output[0], input[0])
|
||||
|
||||
|
||||
if config["enable"]["retrieve"]:
|
||||
|
@ -1,42 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
# SPDX-FileCopyrightText: 2022 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: MIT
"""
This rule downloads the load data.

Reads the raw artificial load time series, restricts it to the configured
time window and countries, and writes the result to the rule's output.
"""

import logging

import pandas as pd

from _helpers import configure_logging

logger = logging.getLogger(__name__)

if __name__ == "__main__":
    if "snakemake" not in globals():
        from _helpers import mock_snakemake

        snakemake = mock_snakemake("build_artificial_load_data")

    configure_logging(snakemake)

    snapshots = pd.date_range(freq="h", **snakemake.params.snapshots)

    # `fixed_year` pins the demand data to one historical year regardless
    # of the modelled snapshot year; falsy means "use the snapshot range".
    fixed_year = snakemake.config["load"].get("fixed_year", False)
    years = (
        slice(str(fixed_year), str(fixed_year))
        if fixed_year
        else slice(snapshots[0], snapshots[-1])
    )
    countries = snakemake.config["countries"]

    # FIX: select by `years` (was `.loc[snapshots, ...]`, which left `years`
    # unused and made the fixed_year option a no-op — the year-remap below
    # then replaced the year with itself). Selecting by the label slice
    # matches the companion build_electricity_demand script. For falsy
    # fixed_year the slice covers exactly the snapshot range, so default
    # behaviour is unchanged.
    load = pd.read_csv(snakemake.input[0], index_col=0, parse_dates=True).loc[
        years, countries
    ]

    assert not load.isna().any().any(), "Load data contains nans."

    if fixed_year:
        # Relabel the fixed historical year to the snapshot year so the
        # index lines up with the model's snapshots downstream.
        load.index = load.index.map(lambda t: t.replace(year=snapshots.year[0]))

    load.to_csv(snakemake.output[0])
|
@ -277,16 +277,15 @@ if __name__ == "__main__":
|
||||
|
||||
time_shift = snakemake.params.load["time_shift_for_large_gaps"]
|
||||
|
||||
load = load_timeseries(snakemake.input[0], years, countries)
|
||||
load = load_timeseries(snakemake.input.reported, years, countries)
|
||||
|
||||
if "UA" in countries:
|
||||
# attach load of UA (best data only for entsoe transparency)
|
||||
load_ua = load_timeseries(snakemake.input[0], "2018", ["UA"], False)
|
||||
load_ua = load_timeseries(snakemake.input.reported, "2018", ["UA"])
|
||||
snapshot_year = str(snapshots.year.unique().item())
|
||||
time_diff = pd.Timestamp("2018") - pd.Timestamp(snapshot_year)
|
||||
load_ua.index -= (
|
||||
time_diff # hack indices (currently, UA is manually set to 2018)
|
||||
)
|
||||
# hack indices (currently, UA is manually set to 2018)
|
||||
load_ua.index -= time_diff
|
||||
load["UA"] = load_ua
|
||||
# attach load of MD (no time-series available, use 2020-totals and distribute according to UA):
|
||||
# https://www.iea.org/data-and-statistics/data-browser/?country=MOLDOVA&fuel=Energy%20consumption&indicator=TotElecCons
|
||||
@ -307,6 +306,13 @@ if __name__ == "__main__":
|
||||
)
|
||||
load = load.apply(fill_large_gaps, shift=time_shift)
|
||||
|
||||
if snakemake.params.load["supplement_missing_data_artificially"]:
|
||||
logger.info("Supplement missing data with artificial data.")
|
||||
fn = snakemake.input.artificial
|
||||
artificial_load = pd.read_csv(fn, index_col=0, parse_dates=True)
|
||||
artificial_load = artificial_load.loc[snapshots, countries]
|
||||
load = load.combine_first(artificial_load)
|
||||
|
||||
assert not load.isna().any().any(), (
|
||||
"Load data contains nans. Adjust the parameters "
|
||||
"`time_shift_for_large_gaps` or modify the `manual_adjustment` function "
|
||||
|
Loading…
Reference in New Issue
Block a user