[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
parent 6acd5da4d4
commit add135fe05
@@ -47,4 +47,4 @@ Report generated on: 07-21-2022 16:15:18
 "INTL.33-7-SWE-MK.A"," Sweden","14.859","14.919","15.215","15.29","15.445","15.69","15.813","15.996","16.112","15.759","15.904","15.891","16.021","15.867","16.072","15.725","15.776","16.371","16.169","16.432","16.506","16.523","16.187","16.098","16.302","16.302","16.234","16.592","16.352","16.544","16.624","16.478","16.315","16.395","15.897","16.23","16.367","16.403","16.332","16.332","16.379"
 "INTL.33-7-CHE-MK.A"," Switzerland","11.45","11.46","11.47","11.47","11.48","11.48","11.51","11.51","11.52","11.58","3.474","3.484","3.504","3.509","3.526","3.541","3.55","3.553","3.584","3.614","3.636","3.642","3.653","3.669","3.65","3.682","3.694","3.7","3.709","3.749","3.81","3.852","3.882","3.896","3.948","3.996","4.06","4.112","4.193","4.193","4.193"
 "INTL.33-7-TUR-MK.A"," Turkey","2.131","2.356","3.082","3.239","3.875","3.875","3.878","5.003","6.219","6.598","6.764","7.114","8.379","9.682","9.865","9.863","9.935","10.102","10.307","10.537","11.175","11.673","12.241","12.579","12.645","12.906","13.063","13.395","13.829","14.553","15.831","17.137","19.609","22.289","23.643","25.868","26.681","27.273","28.291","28.503","30.984"
 "INTL.33-7-GBR-MK.A"," United Kingdom","2.451","2.451","2.451","2.721","4.188","4.19","4.192","4.197","4.196","1.424","1.11","1.415","1.423","1.425","1.425","1.432","1.455","1.488","1.475","1.477","1.485","1.629","1.59","1.486","1.499","1.501","1.515","1.522","1.626","1.638","1.637","1.673","1.693","1.709","1.73","1.777","1.836","1.873","1.878","1.878","1.879"
@@ -6,4 +6,4 @@ hydro_max_hours,h,"Any of {float, 'energy_capacity_totals_by_country', 'estimate
 clip_min_inflow,MW,float,"To avoid too small values in the inflow time series, values below this threshold are set to zero."
 eia_norm_year,--,"Year in EIA hydro generation dataset; or False to disable","Specific year by which hydro inflow is normalised, if it should deviate from the snapshots' year."
 eia_correct_by_capacity,--,boolean,"Correct EIA annual hydro generation data by installed capacity."
 eia_approximate_missing,--,boolean,"Approximate hydro generation data for years not included in the EIA dataset through a regression based on annual runoff."
@@ -262,7 +262,7 @@ rule build_heat_totals:
     resources: mem_mb=2000
     log:
         LOGS + "build_heat_totals.log",
     benchmark:
         BENCHMARKS + "build_heat_totals",
     conda:
         "../envs/environment.yaml"
@@ -1,45 +1,47 @@
 # -*- coding: utf-8 -*-
 # SPDX-FileCopyrightText: 2022 The PyPSA-Eur Authors
 #
 # SPDX-License-Identifier: MIT

-"""This rule downloads the load data"""
+"""
+This rule downloads the load data.
+"""

 import logging

 logger = logging.getLogger(__name__)
-import pandas as pd
 from _helpers import configure_logging
+import pandas as pd

 if __name__ == "__main__":
-    if 'snakemake' not in globals():
+    if "snakemake" not in globals():
         from _helpers import mock_snakemake
-        snakemake = mock_snakemake('build_artificial_load_data', weather_year='')
+
+        snakemake = mock_snakemake("build_artificial_load_data", weather_year="")

     configure_logging(snakemake)

     weather_year = snakemake.wildcards.weather_year
     if weather_year:
         snapshots = dict(
-            start=weather_year,
-            end=str(int(weather_year)+1),
-            inclusive="left"
+            start=weather_year, end=str(int(weather_year) + 1), inclusive="left"
         )
     else:
-        snapshots = snakemake.config['snapshots']
-    snapshots = pd.date_range(freq='h', **snapshots)
+        snapshots = snakemake.config["snapshots"]
+    snapshots = pd.date_range(freq="h", **snapshots)

     fixed_year = snakemake.config["load"].get("fixed_year", False)
-    years = slice(str(fixed_year), str(fixed_year)) if fixed_year else slice(snapshots[0], snapshots[-1])
-    countries = snakemake.config['countries']
+    years = (
+        slice(str(fixed_year), str(fixed_year))
+        if fixed_year
+        else slice(snapshots[0], snapshots[-1])
+    )
+    countries = snakemake.config["countries"]

-    load = pd.read_csv(
-        snakemake.input[0],
-        index_col=0,
-        parse_dates=True
-    ).loc[snapshots, countries]
+    load = pd.read_csv(snakemake.input[0], index_col=0, parse_dates=True).loc[
+        snapshots, countries
+    ]

-    assert not load.isna().any().any(), 'Load data contains nans.'
+    assert not load.isna().any().any(), "Load data contains nans."

     if fixed_year:
         load.index = load.index.map(lambda t: t.replace(year=snapshots.year[0]))
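Note: the weather_year idiom above (and in several hunks below) builds a one-year hourly snapshot index from the wildcard. A minimal standalone sketch of that logic; the helper name and the empty default dict are illustrative assumptions, not project code:

import pandas as pd

def build_snapshots(weather_year, default):
    # One full calendar year; inclusive="left" drops the following Jan 1 00:00.
    if weather_year:
        snapshots = dict(
            start=weather_year, end=str(int(weather_year) + 1), inclusive="left"
        )
    else:
        snapshots = default
    return pd.date_range(freq="h", **snapshots)

print(len(build_snapshots("2013", {})))  # 8760 hourly snapshots in 2013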
@@ -208,7 +208,9 @@ if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake

-        snakemake = mock_snakemake("build_biomass_potentials", weather_year="", simpl="", clusters="5")
+        snakemake = mock_snakemake(
+            "build_biomass_potentials", weather_year="", simpl="", clusters="5"
+        )

     config = snakemake.config["biomass"]
     year = config["year"]
@@ -25,7 +25,8 @@ if __name__ == "__main__":

     cutout_name = snakemake.input.cutout
     year = snakemake.wildcards.weather_year
-    if year: cutout_name = cutout_name.format(weather_year=year)
+    if year:
+        cutout_name = cutout_name.format(weather_year=year)
     cutout = atlite.Cutout(cutout_name)

     clustered_regions = (
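Note: the recurring `if year: cutout_name = cutout_name.format(weather_year=year)` idiom only fills the {weather_year} placeholder when the wildcard is non-empty. A toy sketch; the path template is hypothetical:

cutout_name = "cutouts/europe-{weather_year}-era5.nc"
year = "2013"  # empty string when no specific weather year is requested
if year:
    cutout_name = cutout_name.format(weather_year=year)
print(cutout_name)  # cutouts/europe-2013-era5.nc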
@@ -73,7 +73,7 @@ to_ipcc = {


 def eurostat_per_country(country):

     country_fn = idees_rename.get(country, country)
     fn = snakemake.input.eurostat + f"/{country_fn}-Energy-balance-sheets-June-2021-edition.xlsb"

@@ -91,7 +91,9 @@ def eurostat_per_country(country):


 def build_eurostat(countries, year=None):
-    """Return multi-index for all countries' energy data in TWh/a."""
+    """
+    Return multi-index for all countries' energy data in TWh/a.
+    """

     nprocesses = snakemake.threads
     tqdm_kwargs = dict(ascii=False, unit=' country', total=len(countries),

@@ -128,7 +130,9 @@ def build_eurostat(countries, year=None):


 def build_swiss(year=None):
-    """Return a pd.DataFrame of Swiss energy data in TWh/a"""
+    """
+    Return a pd.DataFrame of Swiss energy data in TWh/a.
+    """

     fn = snakemake.input.swiss

@@ -350,7 +354,7 @@ def build_idees(countries, year=None):

     nprocesses = snakemake.threads
     disable_progress = snakemake.config["run"].get("disable_progressbar", False)

     func = partial(idees_per_country, year=year, base_dir=snakemake.input.idees)
     tqdm_kwargs = dict(
         ascii=False,

@@ -359,7 +363,7 @@ def build_idees(countries, year=None):
         desc="Build from IDEES database",
         disable=disable_progress
     )

     with mute_print():
         with mp.Pool(processes=nprocesses) as pool:
             dfs = list(tqdm(pool.imap(func, countries), **tqdm_kwargs))

@@ -438,7 +442,7 @@ def build_energy_totals(countries, eurostat, swiss, idees):
         # fuel use

         for fuel in ["electricity", "total"]:
             slicer = idx[c, y, :, :, eurostat_sectors[sector]]
             fill_values = eurostat.loc[slicer, eurostat_fuels[fuel]].groupby(level=[0,1]).sum()
             df.loc[to_fill, f"{fuel} {sector}"] = fill_values
@@ -28,19 +28,18 @@ if __name__ == "__main__":
     cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
     client = Client(cluster, asynchronous=True)

     cutout_name = snakemake.input.cutout
     year = snakemake.wildcards.weather_year

     if year:
-        snapshots = dict(start=year, end=str(int(year)+1), inclusive="left")
+        snapshots = dict(start=year, end=str(int(year) + 1), inclusive="left")
         cutout_name = cutout_name.format(weather_year=year)
     else:
-        snapshots = snakemake.config['snapshots']
-
+        snapshots = snakemake.config["snapshots"]

     drop_leap_day = snakemake.config["atlite"].get("drop_leap_day", False)
-    time = pd.date_range(freq='h', **snapshots)
-    daily = pd.date_range(freq='D', **snapshots)
+    time = pd.date_range(freq="h", **snapshots)
+    daily = pd.date_range(freq="D", **snapshots)
     if drop_leap_day:
         time = time[~((time.month == 2) & (time.day == 29))]
         daily = daily[~((daily.month == 2) & (daily.day == 29))]
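Note: the drop_leap_day filter above removes Feb 29 from both the hourly and the daily index so that every weather year has equal length. A self-contained sketch of the mask:

import pandas as pd

time = pd.date_range("2012-01-01", "2013-01-01", freq="h", inclusive="left")
daily = pd.date_range("2012-01-01", "2013-01-01", freq="D", inclusive="left")

# Boolean masks keep every timestamp that is not Feb 29.
time = time[~((time.month == 2) & (time.day == 29))]
daily = daily[~((daily.month == 2) & (daily.day == 29))]
print(len(time), len(daily))  # 8760, 365 although 2012 is a leap year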
@@ -1,15 +1,17 @@
-"""Approximate heat demand for all weather years."""
-
-import pandas as pd
+# -*- coding: utf-8 -*-
+"""
+Approximate heat demand for all weather years.
+"""

 from itertools import product

+import pandas as pd
 from numpy.polynomial import Polynomial

 idx = pd.IndexSlice


 def approximate_heat_demand(energy_totals, hdd):
     if isinstance(hdd, str):
         hdd = pd.read_csv(hdd, index_col=0).T
         hdd.index = hdd.index.astype(int)

@@ -17,7 +19,6 @@ def approximate_heat_demand(energy_totals, hdd):
     demands = {}

     for kind, sector in product(["total", "electricity"], ["services", "residential"]):
-
         row = idx[:, 2007:2015]
         col = f"{kind} {sector} space"
         demand = energy_totals.loc[row, col].unstack(0)

@@ -25,7 +26,6 @@ def approximate_heat_demand(energy_totals, hdd):
         demand_approx = {}

         for c in countries:
-
             Y = demand[c].dropna()
             X = hdd.loc[Y.index, c]

@@ -39,22 +39,25 @@ def approximate_heat_demand(energy_totals, hdd):

         demand_approx = pd.DataFrame(demand_approx)
         demand_approx = pd.concat([demand, demand_approx]).sort_index()
-        demands[f"{kind} {sector} space"] = demand_approx.groupby(demand_approx.index).sum()
+        demands[f"{kind} {sector} space"] = demand_approx.groupby(
+            demand_approx.index
+        ).sum()

     demands = pd.concat(demands).unstack().T.clip(lower=0)
     demands.index.names = ["country", "year"]

     return demands


 if __name__ == "__main__":
-    if 'snakemake' not in globals():
+    if "snakemake" not in globals():
         from helper import mock_snakemake
-        snakemake = mock_snakemake('build_energy_totals')
+
+        snakemake = mock_snakemake("build_energy_totals")

     hdd = pd.read_csv(snakemake.input.hdd, index_col=0).T

-    energy_totals = pd.read_csv(snakemake.input.energy_totals, index_col=[0,1])
+    energy_totals = pd.read_csv(snakemake.input.energy_totals, index_col=[0, 1])

     countries = hdd.columns
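Note: approximate_heat_demand fits reported demand against heating degree days (HDD) per country and evaluates the fit for years missing from the energy totals. A toy sketch of that regression step; the numbers are invented and a linear fit is assumed here for illustration:

import pandas as pd
from numpy.polynomial import Polynomial

demand = pd.Series({2007: 310.0, 2008: 295.0, 2010: 330.0})  # TWh, observed years
hdd = pd.Series({2007: 3100.0, 2008: 2900.0, 2009: 3000.0, 2010: 3350.0})

# Fit demand ~ HDD on the overlapping years ...
fit = Polynomial.fit(hdd.loc[demand.index], demand, deg=1)

# ... and evaluate it for the years without demand data.
missing = hdd.index.difference(demand.index)
print(pd.Series(fit(hdd.loc[missing].values), index=missing).round(1))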
@@ -20,8 +20,9 @@ import xarray as xr

 if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake

         snakemake = mock_snakemake(
-            'build_population_layouts',
+            "build_population_layouts",
             weather_year="",
         )

@@ -29,7 +30,8 @@ if __name__ == "__main__":

     cutout_name = snakemake.input.cutout
     year = snakemake.wildcards.weather_year
-    if year: cutout_name = cutout_name.format(weather_year=year)
+    if year:
+        cutout_name = cutout_name.format(weather_year=year)
     cutout = atlite.Cutout(cutout_name)

     grid_cells = cutout.grid.geometry
@@ -21,15 +21,15 @@ if __name__ == "__main__":

     config = snakemake.config["energy"]
     data_year = int(config["energy_totals_year"])
-    if snakemake.wildcards.weather_year and snakemake.wildcards.kind == 'heat':
+    if snakemake.wildcards.weather_year and snakemake.wildcards.kind == "heat":
         data_year = int(snakemake.wildcards.weather_year)

     pop_layout = pd.read_csv(snakemake.input.clustered_pop_layout, index_col=0)

-    totals = pd.read_csv(snakemake.input.totals, index_col=[0,1])
-    totals = totals.xs(data_year, level='year')
+    totals = pd.read_csv(snakemake.input.totals, index_col=[0, 1])
+    totals = totals.xs(data_year, level="year")

-    nodal_totals = totals.loc[pop_layout.ct].fillna(0.)
+    nodal_totals = totals.loc[pop_layout.ct].fillna(0.0)
     nodal_totals.index = pop_layout.index
     nodal_totals = nodal_totals.multiply(pop_layout.fraction, axis=0)
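Note: the totals file is indexed by (country, year); .xs() slices out one year while keeping countries as the index. A toy sketch with invented values:

import pandas as pd

index = pd.MultiIndex.from_product(
    [["DE", "FR"], [2012, 2013]], names=["country", "year"]
)
totals = pd.DataFrame({"total residential space": [1.0, 1.2, 2.0, 2.1]}, index=index)
print(totals.xs(2013, level="year"))  # one row per country, year level dropped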
@@ -28,18 +28,18 @@ if __name__ == "__main__":
     cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
     client = Client(cluster, asynchronous=True)

-    config = snakemake.config['solar_thermal']
+    config = snakemake.config["solar_thermal"]

     cutout_name = snakemake.input.cutout
     year = snakemake.wildcards.weather_year

     if year:
-        snapshots = dict(start=year, end=str(int(year)+1), inclusive="left")
+        snapshots = dict(start=year, end=str(int(year) + 1), inclusive="left")
         cutout_name = cutout_name.format(weather_year=year)
     else:
-        snapshots = snakemake.config['snapshots']
+        snapshots = snakemake.config["snapshots"]

-    time = pd.date_range(freq='h', **snapshots)
+    time = pd.date_range(freq="h", **snapshots)
     if snakemake.config["atlite"].get("drop_leap_day", False):
         time = time[~((time.month == 2) & (time.day == 29))]
@@ -31,12 +31,12 @@ if __name__ == "__main__":
     year = snakemake.wildcards.weather_year

     if year:
-        snapshots = dict(start=year, end=str(int(year)+1), inclusive="left")
+        snapshots = dict(start=year, end=str(int(year) + 1), inclusive="left")
         cutout_name = cutout_name.format(weather_year=year)
     else:
-        snapshots = snakemake.config['snapshots']
+        snapshots = snakemake.config["snapshots"]

-    time = pd.date_range(freq='h', **snapshots)
+    time = pd.date_range(freq="h", **snapshots)
     if snakemake.config["atlite"].get("drop_leap_day", False):
         time = time[~((time.month == 2) & (time.day == 29))]
@@ -163,7 +163,7 @@ if __name__ == "__main__":

         snakemake = mock_snakemake(
             "build_transport_demand",
-            weather_year='',
+            weather_year="",
             simpl="",
             clusters=48,
         )

@@ -179,8 +179,12 @@ if __name__ == "__main__":
     options = snakemake.config["sector"]

     year = snakemake.wildcards.weather_year
-    snapshots = dict(start=year, end=str(int(year)+1), inclusive="left") if year else snakemake.config['snapshots']
-    snapshots = pd.date_range(freq='h', **snapshots, tz="UTC")
+    snapshots = (
+        dict(start=year, end=str(int(year) + 1), inclusive="left")
+        if year
+        else snakemake.config["snapshots"]
+    )
+    snapshots = pd.date_range(freq="h", **snapshots, tz="UTC")
     if snakemake.config["atlite"].get("drop_leap_day", False):
         leap_day = (snapshots.month == 2) & (snapshots.day == 29)
         snapshots = snapshots[~leap_day]
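Note: unlike the other scripts, build_transport_demand creates timezone-aware snapshots (tz="UTC"), presumably so that country-specific time shifts can be applied to the driving profiles later. A sketch:

import pandas as pd

snapshots = pd.date_range(
    start="2013", end="2014", freq="h", inclusive="left", tz="UTC"
)
print(snapshots[0])  # 2013-01-01 00:00:00+00:00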
@@ -108,7 +108,9 @@ if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake

-        snakemake = mock_snakemake("cluster_gas_network", weather_year="", simpl="", clusters="37")
+        snakemake = mock_snakemake(
+            "cluster_gas_network", weather_year="", simpl="", clusters="37"
+        )

     logging.basicConfig(level=snakemake.config["logging"]["level"])
@@ -648,7 +648,8 @@ def make_summaries(networks_dict):
     ]

     columns = pd.MultiIndex.from_tuples(
-        networks_dict.keys(), names=["weather_year", "cluster", "ll", "opt", "planning_horizon"]
+        networks_dict.keys(),
+        names=["weather_year", "cluster", "ll", "opt", "planning_horizon"],
     )

     df = {}

@@ -688,7 +689,7 @@ if __name__ == "__main__":
         (weather_year, cluster, ll, opt + sector_opt, planning_horizon): "results/"
         + snakemake.params.RDIR
         + f"/postnetworks/elec_s{simpl}_{cluster}_l{ll}_{opt}_{sector_opt}_{planning_horizon}.nc"
-        for weather_year in snakemake.config['scenario']['weather_year']
+        for weather_year in snakemake.config["scenario"]["weather_year"]
         for simpl in snakemake.config["scenario"]["simpl"]
         for cluster in snakemake.config["scenario"]["clusters"]
         for opt in snakemake.config["scenario"]["opts"]
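Note: make_summaries labels each summary column with the full scenario tuple, which now includes weather_year. A toy sketch of the column construction with hypothetical values:

import pandas as pd

keys = [
    ("2013", "37", "v1.0", "Co2L0", "2030"),
    ("2014", "37", "v1.0", "Co2L0", "2030"),
]
columns = pd.MultiIndex.from_tuples(
    keys, names=["weather_year", "cluster", "ll", "opt", "planning_horizon"]
)
df = pd.DataFrame(0.0, index=["costs"], columns=columns)
print(df.columns.names)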
@@ -3272,8 +3272,12 @@ if __name__ == "__main__":
         nyears,
     )

-    pop_weighted_energy_totals = pd.read_csv(snakemake.input.pop_weighted_energy_totals, index_col=0) * nyears
-    pop_weighted_heat_totals = pd.read_csv(snakemake.input.pop_weighted_heat_totals, index_col=0) * nyears
+    pop_weighted_energy_totals = (
+        pd.read_csv(snakemake.input.pop_weighted_energy_totals, index_col=0) * nyears
+    )
+    pop_weighted_heat_totals = (
+        pd.read_csv(snakemake.input.pop_weighted_heat_totals, index_col=0) * nyears
+    )
     pop_weighted_energy_totals.update(pop_weighted_heat_totals)

     patch_electricity_network(n)
@@ -1,50 +1,62 @@
-"""Solve operations network."""
+# -*- coding: utf-8 -*-
+"""
+Solve operations network.
+"""

-import pypsa
-import numpy as np
-
-from solve_network import solve_network, prepare_network
-from helper import override_component_attrs
-
 import logging

+import numpy as np
+import pypsa
+from helper import override_component_attrs
+from solve_network import prepare_network, solve_network
+
 logger = logging.getLogger(__name__)
 pypsa.pf.logger.setLevel(logging.WARNING)


 def set_parameters_from_optimized(n, n_optim):
-    lines_typed_i = n.lines.index[n.lines.type != '']
-    n.lines.loc[lines_typed_i, 'num_parallel'] = \
-        n_optim.lines['num_parallel'].reindex(lines_typed_i, fill_value=0.)
-    n.lines.loc[lines_typed_i, 's_nom'] = (
-        np.sqrt(3) * n.lines['type'].map(n.line_types.i_nom) *
-        n.lines.bus0.map(n.buses.v_nom) * n.lines.num_parallel)
+    lines_typed_i = n.lines.index[n.lines.type != ""]
+    n.lines.loc[lines_typed_i, "num_parallel"] = n_optim.lines["num_parallel"].reindex(
+        lines_typed_i, fill_value=0.0
+    )
+    n.lines.loc[lines_typed_i, "s_nom"] = (
+        np.sqrt(3)
+        * n.lines["type"].map(n.line_types.i_nom)
+        * n.lines.bus0.map(n.buses.v_nom)
+        * n.lines.num_parallel
+    )

-    lines_untyped_i = n.lines.index[n.lines.type == '']
-    for attr in ('s_nom', 'r', 'x'):
-        n.lines.loc[lines_untyped_i, attr] = \
-            n_optim.lines[attr].reindex(lines_untyped_i, fill_value=0.)
-    n.lines['s_nom_extendable'] = False
+    lines_untyped_i = n.lines.index[n.lines.type == ""]
+    for attr in ("s_nom", "r", "x"):
+        n.lines.loc[lines_untyped_i, attr] = n_optim.lines[attr].reindex(
+            lines_untyped_i, fill_value=0.0
+        )
+    n.lines["s_nom_extendable"] = False

     links_dc_i = n.links.index[n.links.p_nom_extendable]
-    n.links.loc[links_dc_i, 'p_nom'] = \
-        n_optim.links['p_nom_opt'].reindex(links_dc_i, fill_value=0.)
-    n.links.loc[links_dc_i, 'p_nom_extendable'] = False
+    n.links.loc[links_dc_i, "p_nom"] = n_optim.links["p_nom_opt"].reindex(
+        links_dc_i, fill_value=0.0
+    )
+    n.links.loc[links_dc_i, "p_nom_extendable"] = False

     gen_extend_i = n.generators.index[n.generators.p_nom_extendable]
-    n.generators.loc[gen_extend_i, 'p_nom'] = \
-        n_optim.generators['p_nom_opt'].reindex(gen_extend_i, fill_value=0.)
-    n.generators.loc[gen_extend_i, 'p_nom_extendable'] = False
+    n.generators.loc[gen_extend_i, "p_nom"] = n_optim.generators["p_nom_opt"].reindex(
+        gen_extend_i, fill_value=0.0
+    )
+    n.generators.loc[gen_extend_i, "p_nom_extendable"] = False

     stor_units_extend_i = n.storage_units.index[n.storage_units.p_nom_extendable]
-    n.storage_units.loc[stor_units_extend_i, 'p_nom'] = \
-        n_optim.storage_units['p_nom_opt'].reindex(stor_units_extend_i, fill_value=0.)
-    n.storage_units.loc[stor_units_extend_i, 'p_nom_extendable'] = False
+    n.storage_units.loc[stor_units_extend_i, "p_nom"] = n_optim.storage_units[
+        "p_nom_opt"
+    ].reindex(stor_units_extend_i, fill_value=0.0)
+    n.storage_units.loc[stor_units_extend_i, "p_nom_extendable"] = False

     stor_extend_i = n.stores.index[n.stores.e_nom_extendable]
-    n.stores.loc[stor_extend_i, 'e_nom'] = \
-        n_optim.stores['e_nom_opt'].reindex(stor_extend_i, fill_value=0.)
-    n.stores.loc[stor_extend_i, 'e_nom_extendable'] = False
+    n.stores.loc[stor_extend_i, "e_nom"] = n_optim.stores["e_nom_opt"].reindex(
+        stor_extend_i, fill_value=0.0
+    )
+    n.stores.loc[stor_extend_i, "e_nom_extendable"] = False

     return n

@@ -68,39 +80,44 @@ def add_load_shedding(n, voll=1e4):
     to_remove = n.generators.query("carrier == 'load'").index
     logger.info(f"Removing pre-existing load shedding:\n{to_remove}")
     n.mremove("Generator", to_remove)

-    n.madd("Generator", n.buses.index,
+    n.madd(
+        "Generator",
+        n.buses.index,
         suffix=" load",
         bus=n.buses.index,
-        carrier='load',
+        carrier="load",
         marginal_cost=voll,
-        p_nom=1e6
+        p_nom=1e6,
     )

     return n


 if __name__ == "__main__":
-    if 'snakemake' not in globals():
+    if "snakemake" not in globals():
         from helper import mock_snakemake

         snakemake = mock_snakemake(
-            'solve_operations_network',
+            "solve_operations_network",
             capacity_year=1952,
-            simpl='',
-            opts='',
+            simpl="",
+            opts="",
             clusters=37,
             lv=2.0,
-            sector_opts='Co2L0-25H-T-H-B-I-A',
+            sector_opts="Co2L0-25H-T-H-B-I-A",
             planning_horizons=2030,
-            weather_year=2013
+            weather_year=2013,
         )

-    logging.basicConfig(filename=snakemake.log.python,
-                        level=snakemake.config['logging_level'])
+    logging.basicConfig(
+        filename=snakemake.log.python, level=snakemake.config["logging_level"]
+    )

-    tmpdir = snakemake.config['solving'].get('tmpdir')
+    tmpdir = snakemake.config["solving"].get("tmpdir")
     if tmpdir is not None:
         from pathlib import Path

         Path(tmpdir).mkdir(parents=True, exist_ok=True)

     overrides = override_component_attrs(snakemake.input.overrides)

@@ -113,14 +130,18 @@ if __name__ == "__main__":
     n = remove_unused_components(n)
     n = add_load_shedding(n)

-    opts = snakemake.wildcards.sector_opts.split('-')
-    solve_opts = snakemake.config['solving']['options']
-    solve_opts['skip_iterations'] = True
+    opts = snakemake.wildcards.sector_opts.split("-")
+    solve_opts = snakemake.config["solving"]["options"]
+    solve_opts["skip_iterations"] = True

     n = prepare_network(n, solve_opts)

-    n = solve_network(n, config=snakemake.config, opts=opts,
-                      solver_dir=tmpdir,
-                      solver_logfile=snakemake.log.solver)
-
-    n.export_to_netcdf(snakemake.output[0])
+    n = solve_network(
+        n,
+        config=snakemake.config,
+        opts=opts,
+        solver_dir=tmpdir,
+        solver_logfile=snakemake.log.solver,
+    )
+
+    n.export_to_netcdf(snakemake.output[0])
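Note: set_parameters_from_optimized repeats one idiom for every component type: copy the optimised capacity where it exists and fall back to zero for components the optimised network does not contain. A minimal sketch with invented names:

import pandas as pd

optimised = pd.Series({"gen1": 120.0, "gen3": 45.0}, name="p_nom_opt")
targets = pd.Index(["gen1", "gen2", "gen3"])

# reindex aligns to the target index; fill_value=0.0 covers missing components
p_nom = optimised.reindex(targets, fill_value=0.0)
print(p_nom.to_dict())  # {'gen1': 120.0, 'gen2': 0.0, 'gen3': 45.0}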
@@ -1,20 +1,26 @@
-"""Solve myopic operations network."""
+# -*- coding: utf-8 -*-
+"""
+Solve myopic operations network.
+"""

-import pypsa
-import pandas as pd
-
-from solve_network import solve_network, prepare_network
-from solve_operations_network import set_parameters_from_optimized, remove_unused_components, add_load_shedding
-from helper import override_component_attrs
-
 import logging

+import pandas as pd
+import pypsa
+from helper import override_component_attrs
+from solve_network import prepare_network, solve_network
+from solve_operations_network import (
+    add_load_shedding,
+    remove_unused_components,
+    set_parameters_from_optimized,
+)
+
 logger = logging.getLogger(__name__)
 pypsa.pf.logger.setLevel(logging.WARNING)


 def prepare_myopic(n, config, store_soc, storage_unit_soc):
     n.stores.e_cyclic = False
     n.storage_units.cyclic_state_of_charge = False

@@ -30,17 +36,19 @@ def prepare_myopic(n, config, store_soc, storage_unit_soc):
     n.stores.at["co2 atmosphere", "marginal_cost"] = -config["co2_price"]

     # handle co2 sequestration
-    assert sum(n.stores.carrier == "co2 stored") == 1, "Myopic operation not implemented for spatially resolved CO2 sequestration."
-    n.stores.at["co2 stored", 'e_nom'] = config['co2_sequestration_limit'] * 1e6  # t/a
+    assert (
+        sum(n.stores.carrier == "co2 stored") == 1
+    ), "Myopic operation not implemented for spatially resolved CO2 sequestration."
+    n.stores.at["co2 stored", "e_nom"] = config["co2_sequestration_limit"] * 1e6  # t/a

     # reset co2 emissions
-    n.stores.loc[n.stores.carrier == 'co2 stored', "e_initial"] = 0.
-    n.stores.at["co2 atmosphere", "e_initial"] = 0.
+    n.stores.loc[n.stores.carrier == "co2 stored", "e_initial"] = 0.0
+    n.stores.at["co2 atmosphere", "e_initial"] = 0.0

     # replenish fossil gas and oil with 1000 TWh each
     fossil_stores = n.stores.carrier.isin(["gas", "oil"])
-    n.stores.loc[fossil_stores, 'e_initial'] = 1e9
-    n.stores.loc[fossil_stores, 'e_nom'] = 10e9
+    n.stores.loc[fossil_stores, "e_initial"] = 1e9
+    n.stores.loc[fossil_stores, "e_nom"] = 10e9

     # replenish annual solid biomass and biogas potentials
     n.stores.loc[biomass_stores, "e_initial"] = biomass_potential

@@ -51,15 +59,18 @@ def prepare_myopic(n, config, store_soc, storage_unit_soc):
         c.df.marginal_cost.update(c.df.carrier.map(bidding_prices).dropna())

     # deduct industry solid biomass
-    assert sum(n.stores.carrier == "solid biomass") == 1, "Myopic operation not implemented for spatially resolved solid biomass."
-    n.stores.at["EU solid biomass", "e_initial"] -= n.loads.at["solid biomass for industry", "p_set"] * 8760
+    assert (
+        sum(n.stores.carrier == "solid biomass") == 1
+    ), "Myopic operation not implemented for spatially resolved solid biomass."
+    n.stores.at["EU solid biomass", "e_initial"] -= (
+        n.loads.at["solid biomass for industry", "p_set"] * 8760
+    )
     n.remove("Load", "solid biomass for industry")

     return n


-def solve_network_myopic(n, config, opts='', **kwargs):
+def solve_network_myopic(n, config, opts="", **kwargs):
     rolling_horizon = config["operations"]["rolling_horizon"]

     freq = int(pd.infer_freq(n.snapshots)[:-1])

@@ -68,11 +79,12 @@ def solve_network_myopic(n, config, opts="", **kwargs):
     kept = window - overlap
     length = len(n.snapshots)

-    assert kept > 0, f"Overlap ({overlap} days) must be smaller than windows ({window} days)."
+    assert (
+        kept > 0
+    ), f"Overlap ({overlap} days) must be smaller than windows ({window} days)."

-    for i in range(length // kept):
-
-        snapshots = n.snapshots[i * kept:(i + 1) * kept + overlap]
+    for i in range(length // kept):
+        snapshots = n.snapshots[i * kept : (i + 1) * kept + overlap]
         logger.info(f"Optimising operations from {snapshots[0]} to {snapshots[-1]}")

         n = solve_network(n, config, opts=opts, snapshots=snapshots, **kwargs)

@@ -81,36 +93,41 @@ def solve_network_myopic(n, config, opts="", **kwargs):
         logger.info(f"Setting initial SOCs from {last_kept} for next iteration.\n")

         n.stores.e_initial = n.stores_t.e.loc[last_kept]
-        n.storage_units.state_of_charge_initial = n.storage_units_t.state_of_charge.loc[last_kept]
+        n.storage_units.state_of_charge_initial = n.storage_units_t.state_of_charge.loc[
+            last_kept
+        ]

     # final segment until end of year
-    snapshots = n.snapshots[(i + 1) * kept:]
+    snapshots = n.snapshots[(i + 1) * kept :]
     n = solve_network(n, config, opts=opts, snapshots=snapshots, **kwargs)

     return n


 if __name__ == "__main__":
-    if 'snakemake' not in globals():
+    if "snakemake" not in globals():
         from helper import mock_snakemake

         snakemake = mock_snakemake(
-            'solve_operations_network_myopic',
+            "solve_operations_network_myopic",
             capacity_year=1952,
-            simpl='',
-            opts='',
+            simpl="",
+            opts="",
             clusters=37,
             lv=2.0,
-            sector_opts='Co2L0-25H-T-H-B-I-A',
+            sector_opts="Co2L0-25H-T-H-B-I-A",
             planning_horizons=2030,
-            weather_year=2013
+            weather_year=2013,
        )

-    logging.basicConfig(filename=snakemake.log.python,
-                        level=snakemake.config['logging_level'])
+    logging.basicConfig(
+        filename=snakemake.log.python, level=snakemake.config["logging_level"]
+    )

-    tmpdir = snakemake.config['solving'].get('tmpdir')
+    tmpdir = snakemake.config["solving"].get("tmpdir")
     if tmpdir is not None:
         from pathlib import Path

         Path(tmpdir).mkdir(parents=True, exist_ok=True)

     config = snakemake.config["operations"]

@@ -121,8 +138,10 @@ if __name__ == "__main__":
     n_post = pypsa.Network(snakemake.input.post, override_component_attrs=overrides)
     n = set_parameters_from_optimized(n, n_post)
     del n_post

-    n_previous = pypsa.Network(snakemake.input.previous, override_component_attrs=overrides)
+    n_previous = pypsa.Network(
+        snakemake.input.previous, override_component_attrs=overrides
+    )
     store_soc = n_previous.stores_t.e.iloc[-1]
     storage_unit_soc = n_previous.storage_units_t.state_of_charge.iloc[-1]
     del n_previous

@@ -131,18 +150,18 @@ if __name__ == "__main__":
     n = add_load_shedding(n)
     n = prepare_myopic(n, config, store_soc, storage_unit_soc)

-    opts = snakemake.wildcards.sector_opts.split('-')
-    solve_opts = snakemake.config['solving']['options']
-    solve_opts['skip_iterations'] = True
+    opts = snakemake.wildcards.sector_opts.split("-")
+    solve_opts = snakemake.config["solving"]["options"]
+    solve_opts["skip_iterations"] = True

     n = prepare_network(n, solve_opts)

     n = solve_network_myopic(
         n,
         config=snakemake.config,
         opts=opts,
         solver_dir=tmpdir,
-        solver_logfile=snakemake.log.solver
+        solver_logfile=snakemake.log.solver,
     )

     n.export_to_netcdf(snakemake.output[0])
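Note: solve_network_myopic walks through the year in overlapping windows: each iteration solves kept + overlap snapshots but only the first kept are retained, and the final partial segment is solved separately. A toy sketch of the bookkeeping; sizes are invented and counted in snapshot steps for illustration:

length, window, overlap = 21, 6, 2
kept = window - overlap
assert kept > 0, "Overlap must be smaller than the window."

for i in range(length // kept):
    lo, hi = i * kept, min((i + 1) * kept + overlap, length)
    print(f"solve [{lo}, {hi}), keep [{lo}, {lo + kept})")

# final segment until end of year
print(f"solve remaining [{(length // kept) * kept}, {length})")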