move leap year handling into helper function
parent 2023924572
commit 0d6ba9fbd1
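For orientation, a minimal sketch of the pattern this commit consolidates, assuming an illustrative snapshots dict with start/end keys (in the workflow these values come from config_provider("snapshots") and reach the scripts as snakemake.params.snapshots):

import pandas as pd

# Illustrative snapshot bounds only; not taken from the repository config.
snapshots = {"start": "2020-01-01", "end": "2020-12-31 23:00"}

# Before: each script built the hourly index and masked February 29 itself.
time = pd.date_range(freq="h", **snapshots)
time = time[~((time.month == 2) & (time.day == 29))]

# After: the same two steps live in the get_snapshots helper added to _helpers
# below, so callers reduce to a single line:
#   time = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day)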
@@ -4,4 +4,4 @@ time_shift_for_large_gaps,string,string,"Periods which are used for copying time
 manual_adjustments,bool,"{true, false}","Whether to adjust the load data manually according to the function in :func:`manual_adjustment`."
 scaling_factor,--,float,"Global correction factor for the load time series."
 fixed_year,--,Year or False,"To specify a fixed year for the load time series that deviates from the snapshots' year."
 supplement_missing_data_artificially,bool,"{true, false}","Whether missing data for the selected time period should be supplemented with artificial data from https://zenodo.org/record/7070438/files/demand_hourly.csv."
@@ -21,6 +21,7 @@ if config["enable"].get("prepare_links_p_nom", False):
 rule build_electricity_demand:
     params:
         snapshots=config_provider("snapshots"),
+        drop_leap_day=config_provider("enable", "drop_leap_day"),
         countries=config_provider("countries"),
         load=config_provider("load"),
     input:

@@ -68,6 +69,7 @@ rule base_network:
     params:
         countries=config_provider("countries"),
         snapshots=config_provider("snapshots"),
+        drop_leap_day=config_provider("enable", "drop_leap_day"),
         lines=config_provider("lines"),
         links=config_provider("links"),
         transformers=config_provider("transformers"),
@@ -724,3 +724,15 @@ def validate_checksum(file_path, zenodo_url=None, checksum=None):
     assert (
         calculated_checksum == checksum
     ), "Checksum is invalid. This may be due to an incomplete download. Delete the file and re-execute the rule."
+
+
+def get_snapshots(snapshots, drop_leap_day=False, freq="h", **kwargs):
+    """
+    Returns a pandas DatetimeIndex, potentially without leap days.
+    """
+
+    time = pd.date_range(freq=freq, **snapshots, **kwargs)
+    if drop_leap_day and time.is_leap_year.any():
+        time = time[~((time.month == 2) & (time.day == 29))]
+
+    return time
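A hedged usage sketch for the new helper: the function body mirrors the hunk above so the snippet runs standalone; the leap-year range (February 2020) and the inclusive="left" bound are illustrative choices rather than workflow values (inclusive requires pandas >= 1.4).

import pandas as pd


def get_snapshots(snapshots, drop_leap_day=False, freq="h", **kwargs):
    """
    Returns a pandas DatetimeIndex, potentially without leap days.
    """
    time = pd.date_range(freq=freq, **snapshots, **kwargs)
    if drop_leap_day and time.is_leap_year.any():
        time = time[~((time.month == 2) & (time.day == 29))]
    return time


# Two days straddling February 29, 2020 (illustrative values only).
snapshots = {"start": "2020-02-28", "end": "2020-03-01", "inclusive": "left"}

kept = get_snapshots(snapshots)                         # 48 hourly steps
dropped = get_snapshots(snapshots, drop_leap_day=True)  # 24 hourly steps
assert not ((dropped.month == 2) & (dropped.day == 29)).any()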
@@ -93,7 +93,12 @@ import powerplantmatching as pm
 import pypsa
 import scipy.sparse as sparse
 import xarray as xr
-from _helpers import configure_logging, set_scenario_config, update_p_nom_max
+from _helpers import (
+    configure_logging,
+    get_snapshots,
+    set_scenario_config,
+    update_p_nom_max,
+)
 from powerplantmatching.export import map_country_bus
 from shapely.prepared import prep
 

@@ -760,15 +765,6 @@ def estimate_renewable_capacities(
     )
 
 
-def drop_leap_day(n):
-    if not n.snapshots.is_leap_year.any():
-        return
-    leap_days = (n.snapshots.day == 29) & (n.snapshots.month == 2)
-    n.set_snapshots(n.snapshots[~leap_days])
-    n.snapshot_weightings[:] = 8760 / len(n.snapshots)
-    logger.info("Dropped February 29 from leap year.")
-
-
 def attach_line_rating(
     n, rating, s_max_pu, correction_factor, max_voltage_difference, max_line_rating
 ):
@@ -805,7 +801,8 @@ if __name__ == "__main__":
 
     n = pypsa.Network(snakemake.input.base_network)
 
-    n.set_snapshots(pd.date_range(freq="h", **snakemake.params.snapshots))
+    time = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day)
+    n.set_snapshots(time)
 
     Nyears = n.snapshot_weightings.objective.sum() / 8760.0
 

@@ -916,8 +913,5 @@ if __name__ == "__main__":
 
     sanitize_carriers(n, snakemake.config)
 
-    if snakemake.params.drop_leap_day:
-        drop_leap_day(n)
-
     n.meta = snakemake.config
     n.export_to_netcdf(snakemake.output[0])
@@ -75,7 +75,7 @@ import shapely
 import shapely.prepared
 import shapely.wkt
 import yaml
-from _helpers import configure_logging, set_scenario_config
+from _helpers import configure_logging, get_snapshots, set_scenario_config
 from packaging.version import Version, parse
 from scipy import spatial
 from scipy.sparse import csgraph

@@ -730,12 +730,12 @@ def base_network(
     transformers = _set_electrical_parameters_transformers(transformers, config)
     links = _set_electrical_parameters_links(links, config, links_p_nom)
     converters = _set_electrical_parameters_converters(converters, config)
-    snapshots = snakemake.params.snapshots
 
     n = pypsa.Network()
     n.name = "PyPSA-Eur"
 
-    n.set_snapshots(pd.date_range(freq="h", **snapshots))
+    time = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day)
+    n.set_snapshots(time)
     n.madd("Carrier", ["AC", "DC"])
 
     n.import_components_from_dataframe(buses, "Bus")
@@ -11,7 +11,7 @@ import geopandas as gpd
 import numpy as np
 import pandas as pd
 import xarray as xr
-from _helpers import set_scenario_config
+from _helpers import get_snapshots, set_scenario_config
 from dask.distributed import Client, LocalCluster
 
 if __name__ == "__main__":

@@ -32,11 +32,12 @@ if __name__ == "__main__":
 
     cutout_name = snakemake.input.cutout
 
-    time = pd.date_range(freq="h", **snakemake.params.snapshots)
-    daily = pd.date_range(freq="D", **snakemake.params.snapshots)
-    if snakemake.params.drop_leap_day:
-        time = time[~((time.month == 2) & (time.day == 29))]
-        daily = daily[~((daily.month == 2) & (daily.day == 29))]
+    time = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day)
+    daily = get_snapshots(
+        snakemake.params.snapshots,
+        snakemake.params.drop_leap_day,
+        freq="D",
+    )
 
     cutout = atlite.Cutout(cutout_name).sel(time=time)
 
@@ -39,7 +39,7 @@ import logging
 
 import numpy as np
 import pandas as pd
-from _helpers import configure_logging, set_scenario_config
+from _helpers import configure_logging, get_snapshots, set_scenario_config
 from pandas import Timedelta as Delta
 
 logger = logging.getLogger(__name__)

@@ -263,7 +263,9 @@ if __name__ == "__main__":
     configure_logging(snakemake)
     set_scenario_config(snakemake)
 
-    snapshots = pd.date_range(freq="h", **snakemake.params.snapshots)
+    snapshots = get_snapshots(
+        snakemake.params.snapshots, snakemake.params.drop_leap_day
+    )
 
     fixed_year = snakemake.config["load"].get("fixed_year", False)
     years = (
@@ -10,7 +10,7 @@ from itertools import product
 
 import pandas as pd
 import xarray as xr
-from _helpers import generate_periodic_profiles, set_scenario_config
+from _helpers import generate_periodic_profiles, get_snapshots, set_scenario_config
 
 if __name__ == "__main__":
     if "snakemake" not in globals():

@@ -24,9 +24,9 @@ if __name__ == "__main__":
         )
     set_scenario_config(snakemake)
 
-    snapshots = pd.date_range(freq="h", **snakemake.params.snapshots)
-    if snakemake.params.drop_leap_day:
-        snapshots = snapshots[~((snapshots.month == 2) & (snapshots.day == 29))]
+    snapshots = get_snapshots(
+        snakemake.params.snapshots, snakemake.params.drop_leap_day
+    )
 
     daily_space_heat_demand = (
         xr.open_dataarray(snakemake.input.heat_demand)
@@ -65,7 +65,7 @@ import atlite
 import country_converter as coco
 import geopandas as gpd
 import pandas as pd
-from _helpers import configure_logging, set_scenario_config
+from _helpers import configure_logging, get_snapshots, set_scenario_config
 from numpy.polynomial import Polynomial
 
 cc = coco.CountryConverter()

@@ -73,9 +73,7 @@ cc = coco.CountryConverter()
 
 
 def get_eia_annual_hydro_generation(fn, countries, capacities=False):
     # in billion kWh/a = TWh/a
-    df = pd.read_csv(
-        fn, skiprows=2, index_col=1, na_values=[" ", "--"]
-    ).iloc[1:, 1:]
+    df = pd.read_csv(fn, skiprows=2, index_col=1, na_values=[" ", "--"]).iloc[1:, 1:]
     df.index = df.index.str.strip()
     df.columns = df.columns.astype(int)
 

@@ -175,9 +173,7 @@ if __name__ == "__main__":
 
     params_hydro = snakemake.params.hydro
 
-    time = pd.date_range(freq="h", **snakemake.params.snapshots)
-    if snakemake.params.drop_leap_day:
-        time = time[~((time.month == 2) & (time.day == 29))]
+    time = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day)
 
     cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time)
 
@@ -58,7 +58,7 @@ import numpy as np
 import pandas as pd
 import pypsa
 import xarray as xr
-from _helpers import configure_logging, set_scenario_config
+from _helpers import configure_logging, get_snapshots, set_scenario_config
 from shapely.geometry import LineString as Line
 from shapely.geometry import Point
 

@@ -147,9 +147,7 @@ if __name__ == "__main__":
     set_scenario_config(snakemake)
 
     n = pypsa.Network(snakemake.input.base_network)
-    time = pd.date_range(freq="h", **snakemake.params.snapshots)
-    if snakemake.params.drop_leap_day:
-        time = time[~((time.month == 2) & (time.day == 29))]
+    time = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day)
 
     cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time)
 
@@ -15,7 +15,7 @@ if __name__ == "__main__":
 
         snakemake = mock_snakemake(
             "build_population_weighted_energy_totals",
-            kind='energy',
+            kind="energy",
             simpl="",
             clusters=60,
         )
@@ -186,9 +186,8 @@ import time
 import atlite
 import geopandas as gpd
 import numpy as np
-import pandas as pd
 import xarray as xr
-from _helpers import configure_logging, set_scenario_config
+from _helpers import configure_logging, get_snapshots, set_scenario_config
 from dask.distributed import Client
 from pypsa.geo import haversine
 from shapely.geometry import LineString

@@ -227,11 +226,9 @@ if __name__ == "__main__":
     else:
         client = None
 
-    time = pd.date_range(freq="h", **snakemake.params.snapshots)
-    if snakemake.params.drop_leap_day:
-        time = time[~((time.month == 2) & (time.day == 29))]
+    sns = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day)
 
-    cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time)
+    cutout = atlite.Cutout(snakemake.input.cutout).sel(time=sns)
     regions = gpd.read_file(snakemake.input.regions)
     assert not regions.empty, (
         f"List of regions in {snakemake.input.regions} is empty, please "
@@ -9,9 +9,8 @@ Build solar thermal collector time series.
 import atlite
 import geopandas as gpd
 import numpy as np
-import pandas as pd
 import xarray as xr
-from _helpers import set_scenario_config
+from _helpers import get_snapshots, set_scenario_config
 from dask.distributed import Client, LocalCluster
 
 if __name__ == "__main__":

@@ -32,9 +31,7 @@ if __name__ == "__main__":
     config = snakemake.params.solar_thermal
     config.pop("cutout", None)
 
-    time = pd.date_range(freq="h", **snakemake.params.snapshots)
-    if snakemake.params.drop_leap_day:
-        time = time[~((time.month == 2) & (time.day == 29))]
+    time = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day)
 
     cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time)
 
@@ -9,9 +9,8 @@ Build time series for air and soil temperatures per clustered model region.
 import atlite
 import geopandas as gpd
 import numpy as np
-import pandas as pd
 import xarray as xr
-from _helpers import set_scenario_config
+from _helpers import get_snapshots, set_scenario_config
 from dask.distributed import Client, LocalCluster
 
 if __name__ == "__main__":

@@ -29,9 +28,7 @@ if __name__ == "__main__":
     cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
     client = Client(cluster, asynchronous=True)
 
-    time = pd.date_range(freq="h", **snakemake.params.snapshots)
-    if snakemake.params.drop_leap_day:
-        time = time[~((time.month == 2) & (time.day == 29))]
+    time = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day)
 
     cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time)
 
@@ -13,7 +13,12 @@ import logging
 import numpy as np
 import pandas as pd
 import xarray as xr
-from _helpers import configure_logging, generate_periodic_profiles, set_scenario_config
+from _helpers import (
+    configure_logging,
+    generate_periodic_profiles,
+    get_snapshots,
+    set_scenario_config,
+)
 
 logger = logging.getLogger(__name__)
 

@@ -183,10 +188,9 @@ if __name__ == "__main__":
 
     options = snakemake.params.sector
 
-    snapshots = pd.date_range(freq="h", **snakemake.params.snapshots, tz="UTC")
-    if snakemake.params.drop_leap_day:
-        leap_day = (snapshots.month == 2) & (snapshots.day == 29)
-        snapshots = snapshots[~leap_day]
+    snapshots = get_snapshots(
+        snakemake.params.snapshots, snakemake.params.drop_leap_day, tz="UTC"
+    )
 
     nyears = len(snapshots) / 8760
 
@@ -13,7 +13,7 @@ import sys
 import numpy as np
 import pandas as pd
 import pypsa
-from _helpers import configure_logging, set_scenario_config
+from _helpers import configure_logging, get_snapshots, set_scenario_config
 from prepare_sector_network import prepare_costs
 
 idx = pd.IndexSlice

@@ -690,7 +690,8 @@ if __name__ == "__main__":
         for planning_horizon in snakemake.params.scenario["planning_horizons"]
     }
 
-    Nyears = len(pd.date_range(freq="h", **snakemake.params.snapshots)) / 8760
+    time = get_snapshots(snakemake.params.snapshots, snakemake.params.drop_leap_day)
+    Nyears = len(time) / 8760
 
     costs_db = prepare_costs(
         snakemake.input.costs,
@@ -3507,10 +3507,6 @@ def set_temporal_aggregation(n, resolution, solver_name, drop_leap_day=False):
         logger.info("Aggregate to frequency %s", resolution)
         n = average_every_nhours(n, resolution)
 
-    if drop_leap_day:
-        sns = n.snapshots[~((n.snapshots.month == 2) & (n.snapshots.day == 29))]
-        n.set_snapshots(sns)
-
     return n
 
 