[pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
pre-commit-ci[bot] 2023-04-30 08:52:58 +00:00
parent 6acd5da4d4
commit add135fe05
19 changed files with 230 additions and 166 deletions


@@ -1,45 +1,47 @@
 # -*- coding: utf-8 -*-
 # SPDX-FileCopyrightText: 2022 The PyPSA-Eur Authors
 #
 # SPDX-License-Identifier: MIT
-"""This rule downloads the load data"""
+"""
+This rule downloads the load data.
+"""
 import logging

 logger = logging.getLogger(__name__)

-import pandas as pd
 from _helpers import configure_logging
+import pandas as pd

 if __name__ == "__main__":
-    if 'snakemake' not in globals():
+    if "snakemake" not in globals():
         from _helpers import mock_snakemake

-        snakemake = mock_snakemake('build_artificial_load_data', weather_year='')
+        snakemake = mock_snakemake("build_artificial_load_data", weather_year="")

     configure_logging(snakemake)

     weather_year = snakemake.wildcards.weather_year
     if weather_year:
         snapshots = dict(
-            start=weather_year,
-            end=str(int(weather_year)+1),
-            inclusive="left"
+            start=weather_year, end=str(int(weather_year) + 1), inclusive="left"
         )
     else:
-        snapshots = snakemake.config['snapshots']
-    snapshots = pd.date_range(freq='h', **snapshots)
+        snapshots = snakemake.config["snapshots"]
+    snapshots = pd.date_range(freq="h", **snapshots)

     fixed_year = snakemake.config["load"].get("fixed_year", False)
-    years = slice(str(fixed_year), str(fixed_year)) if fixed_year else slice(snapshots[0], snapshots[-1])
-    countries = snakemake.config['countries']
+    years = (
+        slice(str(fixed_year), str(fixed_year))
+        if fixed_year
+        else slice(snapshots[0], snapshots[-1])
+    )
+    countries = snakemake.config["countries"]

-    load = pd.read_csv(
-        snakemake.input[0],
-        index_col=0,
-        parse_dates=True
-    ).loc[snapshots, countries]
+    load = pd.read_csv(snakemake.input[0], index_col=0, parse_dates=True).loc[
+        snapshots, countries
+    ]

-    assert not load.isna().any().any(), 'Load data contains nans.'
+    assert not load.isna().any().any(), "Load data contains nans."

     if fixed_year:
         load.index = load.index.map(lambda t: t.replace(year=snapshots.year[0]))
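In plain terms, this hunk only reflows the snapshot logic; behaviour is unchanged. A minimal runnable sketch, with illustrative values standing in for the snakemake wildcards and config:

```python
# Minimal sketch of the snapshot logic above; values are illustrative,
# not taken from the actual PyPSA-Eur config.
import pandas as pd

weather_year = "2013"  # stands in for snakemake.wildcards.weather_year

if weather_year:
    # left-inclusive, so the range ends at 2013-12-31 23:00
    snapshots = dict(
        start=weather_year, end=str(int(weather_year) + 1), inclusive="left"
    )
else:
    snapshots = dict(start="2013-01-01", end="2014-01-01", inclusive="left")

snapshots = pd.date_range(freq="h", **snapshots)
assert len(snapshots) == 8760  # 2013 is not a leap year

# With load: fixed_year set, the load index is re-labelled to the snapshot
# year, so one fixed demand year can be reused across weather years.
relabelled = snapshots.map(lambda t: t.replace(year=snapshots.year[0]))
```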


@@ -208,7 +208,9 @@ if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake

-        snakemake = mock_snakemake("build_biomass_potentials", weather_year="", simpl="", clusters="5")
+        snakemake = mock_snakemake(
+            "build_biomass_potentials", weather_year="", simpl="", clusters="5"
+        )

     config = snakemake.config["biomass"]
     year = config["year"]


@@ -25,7 +25,8 @@ if __name__ == "__main__":
     cutout_name = snakemake.input.cutout
     year = snakemake.wildcards.weather_year
-    if year: cutout_name = cutout_name.format(weather_year=year)
+    if year:
+        cutout_name = cutout_name.format(weather_year=year)
     cutout = atlite.Cutout(cutout_name)

     clustered_regions = (


@@ -91,7 +91,9 @@ def eurostat_per_country(country):
 def build_eurostat(countries, year=None):
-    """Return multi-index for all countries' energy data in TWh/a."""
+    """
+    Return multi-index for all countries' energy data in TWh/a.
+    """
     nprocesses = snakemake.threads
     tqdm_kwargs = dict(ascii=False, unit=' country', total=len(countries),
@@ -128,7 +130,9 @@ def build_eurostat(countries, year=None):
 def build_swiss(year=None):
-    """Return a pd.DataFrame of Swiss energy data in TWh/a"""
+    """
+    Return a pd.DataFrame of Swiss energy data in TWh/a.
+    """
     fn = snakemake.input.swiss


@@ -28,7 +28,6 @@ if __name__ == "__main__":
         cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
         client = Client(cluster, asynchronous=True)

     cutout_name = snakemake.input.cutout
     year = snakemake.wildcards.weather_year
@@ -36,11 +35,11 @@ if __name__ == "__main__":
         snapshots = dict(start=year, end=str(int(year) + 1), inclusive="left")
         cutout_name = cutout_name.format(weather_year=year)
     else:
-        snapshots = snakemake.config['snapshots']
+        snapshots = snakemake.config["snapshots"]

     drop_leap_day = snakemake.config["atlite"].get("drop_leap_day", False)
-    time = pd.date_range(freq='h', **snapshots)
-    daily = pd.date_range(freq='D', **snapshots)
+    time = pd.date_range(freq="h", **snapshots)
+    daily = pd.date_range(freq="D", **snapshots)
     if drop_leap_day:
         time = time[~((time.month == 2) & (time.day == 29))]
         daily = daily[~((daily.month == 2) & (daily.day == 29))]
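The leap-day handling in this hunk is plain boolean masking over a DatetimeIndex; a self-contained sketch:

```python
# Sketch of the drop_leap_day masking above, applied to a leap year.
import pandas as pd

time = pd.date_range("2012-01-01", "2013-01-01", freq="h", inclusive="left")
assert len(time) == 8784  # 2012 is a leap year

leap_day = (time.month == 2) & (time.day == 29)  # every hour of 29 February
time = time[~leap_day]
assert len(time) == 8760
```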


@@ -1,15 +1,17 @@
-"""Approximate heat demand for all weather years."""
-import pandas as pd
+# -*- coding: utf-8 -*-
+"""
+Approximate heat demand for all weather years.
+"""
 from itertools import product

+import pandas as pd
 from numpy.polynomial import Polynomial

 idx = pd.IndexSlice

 def approximate_heat_demand(energy_totals, hdd):
     if isinstance(hdd, str):
         hdd = pd.read_csv(hdd, index_col=0).T
     hdd.index = hdd.index.astype(int)
@@ -17,7 +19,6 @@ def approximate_heat_demand(energy_totals, hdd):
     demands = {}

     for kind, sector in product(["total", "electricity"], ["services", "residential"]):
         row = idx[:, 2007:2015]
         col = f"{kind} {sector} space"
         demand = energy_totals.loc[row, col].unstack(0)
@@ -25,7 +26,6 @@ def approximate_heat_demand(energy_totals, hdd):
         demand_approx = {}

         for c in countries:
             Y = demand[c].dropna()
             X = hdd.loc[Y.index, c]
@@ -39,7 +39,9 @@ def approximate_heat_demand(energy_totals, hdd):
         demand_approx = pd.DataFrame(demand_approx)
         demand_approx = pd.concat([demand, demand_approx]).sort_index()
-        demands[f"{kind} {sector} space"] = demand_approx.groupby(demand_approx.index).sum()
+        demands[f"{kind} {sector} space"] = demand_approx.groupby(
+            demand_approx.index
+        ).sum()

     demands = pd.concat(demands).unstack().T.clip(lower=0)
     demands.index.names = ["country", "year"]
@@ -48,9 +50,10 @@ def approximate_heat_demand(energy_totals, hdd):
 if __name__ == "__main__":
-    if 'snakemake' not in globals():
+    if "snakemake" not in globals():
         from helper import mock_snakemake

-        snakemake = mock_snakemake('build_energy_totals')
+        snakemake = mock_snakemake("build_energy_totals")

     hdd = pd.read_csv(snakemake.input.hdd, index_col=0).T
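The surrounding function regresses per-country space-heat demand on heating degree days (HDD) with numpy's Polynomial; the fit call itself falls outside these hunks, so the following is a hedged sketch of such a degree-1 fit, with invented numbers:

```python
# Hedged sketch of a per-country linear fit of space-heat demand on HDD,
# in the spirit of approximate_heat_demand; all values here are invented.
import pandas as pd
from numpy.polynomial import Polynomial

years = [2007, 2008, 2009, 2010]
X = pd.Series([2900.0, 3100.0, 2700.0, 3300.0], index=years)  # HDD per year
Y = pd.Series([55.0, 59.0, 51.0, 63.0], index=years)  # demand in TWh

fit = Polynomial.fit(X, Y, deg=1)  # demand ~ intercept + slope * HDD

# extrapolate demand to weather years outside the observed window
hdd_new = pd.Series([3500.0, 2500.0], index=[1960, 1990])
demand_approx = pd.Series(fit(hdd_new.values), index=hdd_new.index)
```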


@@ -20,8 +20,9 @@ import xarray as xr
 if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake

         snakemake = mock_snakemake(
-            'build_population_layouts',
+            "build_population_layouts",
             weather_year="",
         )
@@ -29,7 +30,8 @@ if __name__ == "__main__":
     cutout_name = snakemake.input.cutout
     year = snakemake.wildcards.weather_year
-    if year: cutout_name = cutout_name.format(weather_year=year)
+    if year:
+        cutout_name = cutout_name.format(weather_year=year)
     cutout = atlite.Cutout(cutout_name)

     grid_cells = cutout.grid.geometry


@@ -21,15 +21,15 @@ if __name__ == "__main__":
     config = snakemake.config["energy"]

     data_year = int(config["energy_totals_year"])
-    if snakemake.wildcards.weather_year and snakemake.wildcards.kind == 'heat':
+    if snakemake.wildcards.weather_year and snakemake.wildcards.kind == "heat":
         data_year = int(snakemake.wildcards.weather_year)

     pop_layout = pd.read_csv(snakemake.input.clustered_pop_layout, index_col=0)

     totals = pd.read_csv(snakemake.input.totals, index_col=[0, 1])
-    totals = totals.xs(data_year, level='year')
+    totals = totals.xs(data_year, level="year")

-    nodal_totals = totals.loc[pop_layout.ct].fillna(0.)
+    nodal_totals = totals.loc[pop_layout.ct].fillna(0.0)
     nodal_totals.index = pop_layout.index
     nodal_totals = nodal_totals.multiply(pop_layout.fraction, axis=0)


@@ -28,7 +28,7 @@ if __name__ == "__main__":
         cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
         client = Client(cluster, asynchronous=True)

-    config = snakemake.config['solar_thermal']
+    config = snakemake.config["solar_thermal"]

     cutout_name = snakemake.input.cutout
     year = snakemake.wildcards.weather_year
@@ -37,9 +37,9 @@ if __name__ == "__main__":
         snapshots = dict(start=year, end=str(int(year) + 1), inclusive="left")
         cutout_name = cutout_name.format(weather_year=year)
     else:
-        snapshots = snakemake.config['snapshots']
+        snapshots = snakemake.config["snapshots"]

-    time = pd.date_range(freq='h', **snapshots)
+    time = pd.date_range(freq="h", **snapshots)
     if snakemake.config["atlite"].get("drop_leap_day", False):
         time = time[~((time.month == 2) & (time.day == 29))]


@@ -34,9 +34,9 @@ if __name__ == "__main__":
         snapshots = dict(start=year, end=str(int(year) + 1), inclusive="left")
         cutout_name = cutout_name.format(weather_year=year)
     else:
-        snapshots = snakemake.config['snapshots']
+        snapshots = snakemake.config["snapshots"]

-    time = pd.date_range(freq='h', **snapshots)
+    time = pd.date_range(freq="h", **snapshots)
     if snakemake.config["atlite"].get("drop_leap_day", False):
         time = time[~((time.month == 2) & (time.day == 29))]


@@ -163,7 +163,7 @@ if __name__ == "__main__":
         snakemake = mock_snakemake(
             "build_transport_demand",
-            weather_year='',
+            weather_year="",
             simpl="",
             clusters=48,
         )
@@ -179,8 +179,12 @@ if __name__ == "__main__":
     options = snakemake.config["sector"]

     year = snakemake.wildcards.weather_year
-    snapshots = dict(start=year, end=str(int(year)+1), inclusive="left") if year else snakemake.config['snapshots']
-    snapshots = pd.date_range(freq='h', **snapshots, tz="UTC")
+    snapshots = (
+        dict(start=year, end=str(int(year) + 1), inclusive="left")
+        if year
+        else snakemake.config["snapshots"]
+    )
+    snapshots = pd.date_range(freq="h", **snapshots, tz="UTC")
     if snakemake.config["atlite"].get("drop_leap_day", False):
         leap_day = (snapshots.month == 2) & (snapshots.day == 29)
         snapshots = snapshots[~leap_day]
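A short sketch of the tz-aware snapshots built in this hunk (illustrative year; the UTC localisation presumably exists so timestamps can later be shifted to country-local time when shaping transport demand profiles):

```python
# Sketch of tz-aware snapshot construction; the year is illustrative.
import pandas as pd

snapshots = pd.date_range(
    freq="h", start="2013", end="2014", inclusive="left", tz="UTC"
)
# UTC-localised timestamps can be converted to local time per country
local = snapshots.tz_convert("Europe/Berlin")
```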


@@ -108,7 +108,9 @@ if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake

-        snakemake = mock_snakemake("cluster_gas_network", weather_year="", simpl="", clusters="37")
+        snakemake = mock_snakemake(
+            "cluster_gas_network", weather_year="", simpl="", clusters="37"
+        )

     logging.basicConfig(level=snakemake.config["logging"]["level"])


@@ -648,7 +648,8 @@ def make_summaries(networks_dict):
     ]

     columns = pd.MultiIndex.from_tuples(
-        networks_dict.keys(), names=["weather_year", "cluster", "ll", "opt", "planning_horizon"]
+        networks_dict.keys(),
+        names=["weather_year", "cluster", "ll", "opt", "planning_horizon"],
     )

     df = {}
@@ -688,7 +689,7 @@ if __name__ == "__main__":
         (weather_year, cluster, ll, opt + sector_opt, planning_horizon): "results/"
         + snakemake.params.RDIR
         + f"/postnetworks/elec_s{simpl}_{cluster}_l{ll}_{opt}_{sector_opt}_{planning_horizon}.nc"
-        for weather_year in snakemake.config['scenario']['weather_year']
+        for weather_year in snakemake.config["scenario"]["weather_year"]
         for simpl in snakemake.config["scenario"]["simpl"]
         for cluster in snakemake.config["scenario"]["clusters"]
         for opt in snakemake.config["scenario"]["opts"]
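The reflowed call above builds one summary column per scenario tuple; a minimal sketch with invented scenario values and paths:

```python
# Sketch: summary columns keyed by scenario tuples; values are invented.
import pandas as pd

networks_dict = {
    (1987, 37, "v1.0", "Co2L0", 2030): "results/postnetworks/a.nc",
    (2013, 37, "v1.0", "Co2L0", 2030): "results/postnetworks/b.nc",
}

columns = pd.MultiIndex.from_tuples(
    networks_dict.keys(),
    names=["weather_year", "cluster", "ll", "opt", "planning_horizon"],
)
df = pd.DataFrame(columns=columns)  # one column per solved network, filled later
```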


@@ -3272,8 +3272,12 @@ if __name__ == "__main__":
         nyears,
     )

-    pop_weighted_energy_totals = pd.read_csv(snakemake.input.pop_weighted_energy_totals, index_col=0) * nyears
-    pop_weighted_heat_totals = pd.read_csv(snakemake.input.pop_weighted_heat_totals, index_col=0) * nyears
+    pop_weighted_energy_totals = (
+        pd.read_csv(snakemake.input.pop_weighted_energy_totals, index_col=0) * nyears
+    )
+    pop_weighted_heat_totals = (
+        pd.read_csv(snakemake.input.pop_weighted_heat_totals, index_col=0) * nyears
+    )
     pop_weighted_energy_totals.update(pop_weighted_heat_totals)

     patch_electricity_network(n)
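Note the `.update()` semantics the last line relies on: matching labels are overwritten in place, everything else is untouched. A small sketch with invented values:

```python
# Sketch of DataFrame.update() as used above; values are invented.
import pandas as pd

energy = pd.DataFrame(
    {"electricity": [350.0], "total residential space": [500.0]}, index=["DE"]
)
heat = pd.DataFrame({"total residential space": [480.0]}, index=["DE"])

energy.update(heat)  # in place; only matching labels are overwritten
assert energy.loc["DE", "total residential space"] == 480.0
assert energy.loc["DE", "electricity"] == 350.0
```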


@@ -1,50 +1,62 @@
-"""Solve operations network."""
+# -*- coding: utf-8 -*-
+"""
+Solve operations network.
+"""
-import pypsa
-import numpy as np
-from solve_network import solve_network, prepare_network
-from helper import override_component_attrs
 import logging
+
+import numpy as np
+import pypsa
+from helper import override_component_attrs
+from solve_network import prepare_network, solve_network

 logger = logging.getLogger(__name__)

 pypsa.pf.logger.setLevel(logging.WARNING)

 def set_parameters_from_optimized(n, n_optim):
-    lines_typed_i = n.lines.index[n.lines.type != '']
-    n.lines.loc[lines_typed_i, 'num_parallel'] = \
-        n_optim.lines['num_parallel'].reindex(lines_typed_i, fill_value=0.)
-    n.lines.loc[lines_typed_i, 's_nom'] = (
-        np.sqrt(3) * n.lines['type'].map(n.line_types.i_nom) *
-        n.lines.bus0.map(n.buses.v_nom) * n.lines.num_parallel)
+    lines_typed_i = n.lines.index[n.lines.type != ""]
+    n.lines.loc[lines_typed_i, "num_parallel"] = n_optim.lines["num_parallel"].reindex(
+        lines_typed_i, fill_value=0.0
+    )
+    n.lines.loc[lines_typed_i, "s_nom"] = (
+        np.sqrt(3)
+        * n.lines["type"].map(n.line_types.i_nom)
+        * n.lines.bus0.map(n.buses.v_nom)
+        * n.lines.num_parallel
+    )

-    lines_untyped_i = n.lines.index[n.lines.type == '']
-    for attr in ('s_nom', 'r', 'x'):
-        n.lines.loc[lines_untyped_i, attr] = \
-            n_optim.lines[attr].reindex(lines_untyped_i, fill_value=0.)
-    n.lines['s_nom_extendable'] = False
+    lines_untyped_i = n.lines.index[n.lines.type == ""]
+    for attr in ("s_nom", "r", "x"):
+        n.lines.loc[lines_untyped_i, attr] = n_optim.lines[attr].reindex(
+            lines_untyped_i, fill_value=0.0
+        )
+    n.lines["s_nom_extendable"] = False

     links_dc_i = n.links.index[n.links.p_nom_extendable]
-    n.links.loc[links_dc_i, 'p_nom'] = \
-        n_optim.links['p_nom_opt'].reindex(links_dc_i, fill_value=0.)
-    n.links.loc[links_dc_i, 'p_nom_extendable'] = False
+    n.links.loc[links_dc_i, "p_nom"] = n_optim.links["p_nom_opt"].reindex(
+        links_dc_i, fill_value=0.0
+    )
+    n.links.loc[links_dc_i, "p_nom_extendable"] = False

     gen_extend_i = n.generators.index[n.generators.p_nom_extendable]
-    n.generators.loc[gen_extend_i, 'p_nom'] = \
-        n_optim.generators['p_nom_opt'].reindex(gen_extend_i, fill_value=0.)
-    n.generators.loc[gen_extend_i, 'p_nom_extendable'] = False
+    n.generators.loc[gen_extend_i, "p_nom"] = n_optim.generators["p_nom_opt"].reindex(
+        gen_extend_i, fill_value=0.0
+    )
+    n.generators.loc[gen_extend_i, "p_nom_extendable"] = False

     stor_units_extend_i = n.storage_units.index[n.storage_units.p_nom_extendable]
-    n.storage_units.loc[stor_units_extend_i, 'p_nom'] = \
-        n_optim.storage_units['p_nom_opt'].reindex(stor_units_extend_i, fill_value=0.)
-    n.storage_units.loc[stor_units_extend_i, 'p_nom_extendable'] = False
+    n.storage_units.loc[stor_units_extend_i, "p_nom"] = n_optim.storage_units[
+        "p_nom_opt"
+    ].reindex(stor_units_extend_i, fill_value=0.0)
+    n.storage_units.loc[stor_units_extend_i, "p_nom_extendable"] = False

     stor_extend_i = n.stores.index[n.stores.e_nom_extendable]
-    n.stores.loc[stor_extend_i, 'e_nom'] = \
-        n_optim.stores['e_nom_opt'].reindex(stor_extend_i, fill_value=0.)
-    n.stores.loc[stor_extend_i, 'e_nom_extendable'] = False
+    n.stores.loc[stor_extend_i, "e_nom"] = n_optim.stores["e_nom_opt"].reindex(
+        stor_extend_i, fill_value=0.0
+    )
+    n.stores.loc[stor_extend_i, "e_nom_extendable"] = False

     return n
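The `s_nom` expression in this hunk is the standard thermal-rating formula for typed lines, S = √3 · i_nom · v_nom · num_parallel. A quick numeric check (the line-type values are illustrative, quoted from memory for a typical 380 kV type):

```python
# Numeric check of the s_nom formula: S = sqrt(3) * i_nom * v_nom * n_parallel.
# With i_nom in kA and v_nom in kV, S comes out in MVA; values illustrative.
import numpy as np

i_nom = 2.58  # kA, e.g. a 4-bundle 380 kV standard line type
v_nom = 380.0  # kV
num_parallel = 2.0

s_nom = np.sqrt(3) * i_nom * v_nom * num_parallel
print(f"{s_nom:.0f} MVA")  # about 3396 MVA
```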
@@ -69,38 +81,43 @@ def add_load_shedding(n, voll=1e4):
         logger.info(f"Removing pre-existing load shedding:\n{to_remove}")
         n.mremove("Generator", to_remove)

-    n.madd("Generator", n.buses.index,
+    n.madd(
+        "Generator",
+        n.buses.index,
         suffix=" load",
         bus=n.buses.index,
-        carrier='load',
+        carrier="load",
         marginal_cost=voll,
-        p_nom=1e6
+        p_nom=1e6,
     )

     return n

 if __name__ == "__main__":
-    if 'snakemake' not in globals():
+    if "snakemake" not in globals():
         from helper import mock_snakemake

         snakemake = mock_snakemake(
-            'solve_operations_network',
+            "solve_operations_network",
             capacity_year=1952,
-            simpl='',
-            opts='',
+            simpl="",
+            opts="",
             clusters=37,
             lv=2.0,
-            sector_opts='Co2L0-25H-T-H-B-I-A',
+            sector_opts="Co2L0-25H-T-H-B-I-A",
             planning_horizons=2030,
-            weather_year=2013
+            weather_year=2013,
         )

-    logging.basicConfig(filename=snakemake.log.python,
-                        level=snakemake.config['logging_level'])
+    logging.basicConfig(
+        filename=snakemake.log.python, level=snakemake.config["logging_level"]
+    )

-    tmpdir = snakemake.config['solving'].get('tmpdir')
+    tmpdir = snakemake.config["solving"].get("tmpdir")
     if tmpdir is not None:
         from pathlib import Path

         Path(tmpdir).mkdir(parents=True, exist_ok=True)

     overrides = override_component_attrs(snakemake.input.overrides)
@@ -113,14 +130,18 @@ if __name__ == "__main__":
     n = remove_unused_components(n)
     n = add_load_shedding(n)

-    opts = snakemake.wildcards.sector_opts.split('-')
-    solve_opts = snakemake.config['solving']['options']
-    solve_opts['skip_iterations'] = True
+    opts = snakemake.wildcards.sector_opts.split("-")
+    solve_opts = snakemake.config["solving"]["options"]
+    solve_opts["skip_iterations"] = True

     n = prepare_network(n, solve_opts)
-    n = solve_network(n, config=snakemake.config, opts=opts,
+    n = solve_network(
+        n,
+        config=snakemake.config,
+        opts=opts,
         solver_dir=tmpdir,
-        solver_logfile=snakemake.log.solver)
+        solver_logfile=snakemake.log.solver,
+    )

     n.export_to_netcdf(snakemake.output[0])


@@ -1,20 +1,26 @@
-"""Solve myopic operations network."""
+# -*- coding: utf-8 -*-
+"""
+Solve myopic operations network.
+"""
-import pypsa
-import pandas as pd
-from solve_network import solve_network, prepare_network
-from solve_operations_network import set_parameters_from_optimized, remove_unused_components, add_load_shedding
-from helper import override_component_attrs
 import logging
+
+import pandas as pd
+import pypsa
+from helper import override_component_attrs
+from solve_network import prepare_network, solve_network
+from solve_operations_network import (
+    add_load_shedding,
+    remove_unused_components,
+    set_parameters_from_optimized,
+)

 logger = logging.getLogger(__name__)

 pypsa.pf.logger.setLevel(logging.WARNING)

 def prepare_myopic(n, config, store_soc, storage_unit_soc):
     n.stores.e_cyclic = False
     n.storage_units.cyclic_state_of_charge = False
@@ -30,17 +36,19 @@ def prepare_myopic(n, config, store_soc, storage_unit_soc):
     n.stores.at["co2 atmosphere", "marginal_cost"] = -config["co2_price"]

     # handle co2 sequestration
-    assert sum(n.stores.carriers == "co2 stored") == 1, "Myopic operation not implemented for spatially resolved CO2 sequestration."
-    n.stores.at["co2 stored", 'e_nom'] = config['co2_sequestration_limit'] * 1e6  # t/a
+    assert (
+        sum(n.stores.carriers == "co2 stored") == 1
+    ), "Myopic operation not implemented for spatially resolved CO2 sequestration."
+    n.stores.at["co2 stored", "e_nom"] = config["co2_sequestration_limit"] * 1e6  # t/a

     # reset co2 emissions
-    n.stores.loc[n.stores.carrier == 'co2 stored', "e_initial"] = 0.
-    n.stores.at["co2 atmosphere", "e_initial"] = 0.
+    n.stores.loc[n.stores.carrier == "co2 stored", "e_initial"] = 0.0
+    n.stores.at["co2 atmosphere", "e_initial"] = 0.0

     # replenish fossil gas and oil with 1000 TWh each
     fossil_stores = n.stores.carrier.str.isin(["gas", "oil"])
-    n.stores.loc[fossil_stores, 'e_initial'] = 1e9
-    n.stores.loc[fossil_stores, 'e_nom'] = 10e9
+    n.stores.loc[fossil_stores, "e_initial"] = 1e9
+    n.stores.loc[fossil_stores, "e_nom"] = 10e9

     # replenish annual solid biomass and biogas potentials
     n.stores.loc[biomass_stores, "e_initial"] = biomass_potential
@@ -51,15 +59,18 @@ def prepare_myopic(n, config, store_soc, storage_unit_soc):
         c.df.marginal_cost.update(c.df.carrier.map(bidding_prices).dropna())

     # deduct industry solid biomass
-    assert sum(n.stores.carriers == "solid biomass") == 1, "Myopic operation not implemented for spatially resolved solid biomass."
-    n.stores.at["EU solid biomass", "e_initial"] -= n.loads.at["solid biomass for industry", "p_set"] * 8760
+    assert (
+        sum(n.stores.carriers == "solid biomass") == 1
+    ), "Myopic operation not implemented for spatially resolved solid biomass."
+    n.stores.at["EU solid biomass", "e_initial"] -= (
+        n.loads.at["solid biomass for industry", "p_set"] * 8760
+    )
     n.remove("Load", "solid biomass for industry")

     return n

-def solve_network_myopic(n, config, opts='', **kwargs):
+def solve_network_myopic(n, config, opts="", **kwargs):
     rolling_horizon = config["operations"]["rolling_horizon"]

     freq = int(pd.infer_freq(n.snapshots)[:-1])
@@ -68,10 +79,11 @@ def solve_network_myopic(n, config, opts='', **kwargs):
     kept = window - overlap
     length = len(n.snapshots)

-    assert kept > 0, f"Overlap ({overlap} days) must be smaller than windows ({window} days)."
+    assert (
+        kept > 0
+    ), f"Overlap ({overlap} days) must be smaller than windows ({window} days)."

     for i in range(length // kept):
         snapshots = n.snapshots[i * kept : (i + 1) * kept + overlap]
         logger.info(f"Optimising operations from {snapshots[0]} to {snapshots[-1]}")
@@ -81,7 +93,9 @@ def solve_network_myopic(n, config, opts='', **kwargs):
         logger.info(f"Setting initial SOCs from {last_kept} for next iteration.\n")

         n.stores.e_initial = n.stores_t.e.loc[last_kept]
-        n.storage_units.state_of_charge_initial = n.storage_units_t.state_of_charge.loc[last_kept]
+        n.storage_units.state_of_charge_initial = n.storage_units_t.state_of_charge.loc[
+            last_kept
+        ]

     # final segment until end of year
     snapshots = n.snapshots[(i + 1) * kept :]
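The loop reformatted above slices the year into overlapping solve windows and keeps only the first `kept` snapshots of each; a sketch of the arithmetic with illustrative window sizes (the script derives them from `config["operations"]["rolling_horizon"]`):

```python
# Sketch of the rolling-horizon slicing, in hourly snapshot counts.
import pandas as pd

snapshots = pd.date_range("2013-01-01", "2014-01-01", freq="h", inclusive="left")

window = 10 * 24  # solve 10 days at a time (illustrative)
overlap = 2 * 24  # the last 2 days are re-solved in the next window
kept = window - overlap
assert kept > 0

for i in range(len(snapshots) // kept):
    block = snapshots[i * kept : (i + 1) * kept + overlap]
    last_kept = block[kept - 1]  # results after this hour are discarded
# any remainder is handled by a final segment until the end of the year
```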
@@ -91,26 +105,29 @@ def solve_network_myopic(n, config, opts='', **kwargs):
 if __name__ == "__main__":
-    if 'snakemake' not in globals():
+    if "snakemake" not in globals():
         from helper import mock_snakemake

         snakemake = mock_snakemake(
-            'solve_operations_network_myopic',
+            "solve_operations_network_myopic",
             capacity_year=1952,
-            simpl='',
-            opts='',
+            simpl="",
+            opts="",
             clusters=37,
             lv=2.0,
-            sector_opts='Co2L0-25H-T-H-B-I-A',
+            sector_opts="Co2L0-25H-T-H-B-I-A",
             planning_horizons=2030,
-            weather_year=2013
+            weather_year=2013,
         )

-    logging.basicConfig(filename=snakemake.log.python,
-                        level=snakemake.config['logging_level'])
+    logging.basicConfig(
+        filename=snakemake.log.python, level=snakemake.config["logging_level"]
+    )

-    tmpdir = snakemake.config['solving'].get('tmpdir')
+    tmpdir = snakemake.config["solving"].get("tmpdir")
     if tmpdir is not None:
         from pathlib import Path

         Path(tmpdir).mkdir(parents=True, exist_ok=True)

     config = snakemake.config["operations"]
@@ -122,7 +139,9 @@ if __name__ == "__main__":
     n = set_parameters_from_optimized(n, n_post)
     del n_post

-    n_previous = pypsa.Network(snakemake.input.previous, override_component_attrs=overrides)
+    n_previous = pypsa.Network(
+        snakemake.input.previous, override_component_attrs=overrides
+    )
     store_soc = n_previous.stores_t.e.iloc[-1]
     storage_unit_soc = n_previous.storage_units_t.state_of_charge.iloc[-1]
     del n_previous
@@ -131,9 +150,9 @@ if __name__ == "__main__":
     n = add_load_shedding(n)
     n = prepare_myopic(n, config, store_soc, storage_unit_soc)

-    opts = snakemake.wildcards.sector_opts.split('-')
-    solve_opts = snakemake.config['solving']['options']
-    solve_opts['skip_iterations'] = True
+    opts = snakemake.wildcards.sector_opts.split("-")
+    solve_opts = snakemake.config["solving"]["options"]
+    solve_opts["skip_iterations"] = True

     n = prepare_network(n, solve_opts)
@@ -142,7 +161,7 @@ if __name__ == "__main__":
         config=snakemake.config,
         opts=opts,
         solver_dir=tmpdir,
-        solver_logfile=snakemake.log.solver
+        solver_logfile=snakemake.log.solver,
     )

     n.export_to_netcdf(snakemake.output[0])