pypsa-eur/scripts/solve_operations_network.py


"""
2019-08-11 20:34:18 +00:00
Solves linear optimal dispatch in hourly resolution
using the capacities of previous capacity expansion in rule ``solve_network``.
2019-08-11 09:40:47 +00:00
Relevant Settings
-----------------
2019-08-11 11:17:36 +00:00
.. code:: yaml
solving:
tmpdir:
options:
formulation:
clip_p_max_pu:
load_shedding:
noisy_costs:
nhours:
min_iterations:
max_iterations:
solver:
name:
{solveroptions}:
2019-08-11 09:40:47 +00:00
Inputs
------
2019-08-11 20:34:18 +00:00
- ``networks/{network}_s{simpl}_{clusters}.nc``: confer :ref:`cluster`
- ``results/networks/{network}_s{simpl}_{clusters}_l{ll}_{opts}.nc``: confer :ref:`solver`
2019-08-11 09:40:47 +00:00
Outputs
-------
2019-08-11 20:34:18 +00:00
- ``results/networks/{network}_s{simpl}_{clusters}_l{ll}_{opts}_op.nc``:
- ``logs/solve_operations_network/{network}_s{simpl}_{clusters}_l{ll}_{opts}_op_solver.log``:
- ``logs/solve_operations_network/{network}_s{simpl}_{clusters}_l{ll}_{opts}_op_python.log``:
- ``logs/solve_operations_network/{network}_s{simpl}_{clusters}_l{ll}_{opts}_op_memory.log``:
2019-08-11 09:40:47 +00:00
Description
-----------
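
Taking the optimised line, link, generator and storage capacities from the
solved network of rule ``solve_network``, this script fixes them (making them
non-extendable) in the clustered but not yet solved network and re-solves the
linear optimal dispatch problem at hourly resolution.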
"""
import pypsa
import numpy as np
import re
import logging
logger = logging.getLogger(__name__)

from vresutils.benchmark import memory_logger
from solve_network import patch_pyomo_tmpdir, solve_network, prepare_network


def set_parameters_from_optimized(n, n_optim):
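    """
    Transfer optimised capacities from ``n_optim`` to ``n`` and fix them.

    Line, DC link, generator and storage unit capacities are copied from the
    solved network and marked non-extendable, so that a subsequent solve only
    optimises the dispatch.
    """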
    # lines with a standard type: transfer the optimised number of parallel
    # circuits and recompute the nominal capacity from the line type
    lines_typed_i = n.lines.index[n.lines.type != '']
    n.lines.loc[lines_typed_i, 'num_parallel'] = \
        n_optim.lines['num_parallel'].reindex(lines_typed_i, fill_value=0.)
    n.lines.loc[lines_typed_i, 's_nom'] = (
        np.sqrt(3) * n.lines['type'].map(n.line_types.i_nom) *
        n.lines.bus0.map(n.buses.v_nom) * n.lines.num_parallel
    )

    # untyped lines: copy the optimised electrical parameters directly
    lines_untyped_i = n.lines.index[n.lines.type == '']
    for attr in ('s_nom', 'r', 'x'):
        n.lines.loc[lines_untyped_i, attr] = \
            n_optim.lines[attr].reindex(lines_untyped_i, fill_value=0.)
    n.lines['s_nom_extendable'] = False

    # fix DC link capacities at their optimised values
    links_dc_i = n.links.index[n.links.carrier == 'DC']
    n.links.loc[links_dc_i, 'p_nom'] = \
        n_optim.links['p_nom_opt'].reindex(links_dc_i, fill_value=0.)
    n.links.loc[links_dc_i, 'p_nom_extendable'] = False

    # fix extendable generator capacities at their optimised values
    gen_extend_i = n.generators.index[n.generators.p_nom_extendable]
    n.generators.loc[gen_extend_i, 'p_nom'] = \
        n_optim.generators['p_nom_opt'].reindex(gen_extend_i, fill_value=0.)
    n.generators.loc[gen_extend_i, 'p_nom_extendable'] = False

    # fix extendable storage unit capacities at their optimised values
    stor_extend_i = n.storage_units.index[n.storage_units.p_nom_extendable]
    n.storage_units.loc[stor_extend_i, 'p_nom'] = \
        n_optim.storage_units['p_nom_opt'].reindex(stor_extend_i, fill_value=0.)
    n.storage_units.loc[stor_extend_i, 'p_nom_extendable'] = False

    return n
if __name__ == "__main__":
# Detect running outside of snakemake and mock snakemake for testing
if 'snakemake' not in globals():
        from vresutils.snakemake import MockSnakemake
        snakemake = MockSnakemake(
            wildcards=dict(network='elec', simpl='', clusters='45', lv='1.5', opts='Co2L-3H'),
            input=dict(unprepared="networks/{network}_s{simpl}_{clusters}.nc",
                       optimized="results/networks/{network}_s{simpl}_{clusters}_lv{lv}_{opts}.nc"),
            output=["results/networks/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_op.nc"],
            log=dict(solver="logs/s{simpl}_{clusters}_lv{lv}_{opts}_op_solver.log",
                     python="logs/s{simpl}_{clusters}_lv{lv}_{opts}_op_python.log")
        )

    tmpdir = snakemake.config['solving'].get('tmpdir')
    if tmpdir is not None:
        patch_pyomo_tmpdir(tmpdir)

    logging.basicConfig(filename=snakemake.log.python,
                        level=snakemake.config['logging_level'])

    n = pypsa.Network(snakemake.input.unprepared)
    n_optim = pypsa.Network(snakemake.input.optimized)
    n = set_parameters_from_optimized(n, n_optim)
    del n_optim
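
    # strip temporal-resolution options (e.g. the '3H' in 'Co2L-3H') from the
    # wildcard; only the remaining options are passed on to solve_network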
    opts = [o
            for o in snakemake.wildcards.opts.split('-')
            if not re.match(r'^\d+h$', o, re.IGNORECASE)]

    with memory_logger(filename=getattr(snakemake.log, 'memory', None), interval=30.) as mem:
        n = prepare_network(n, solve_opts=snakemake.config['solving']['options'])
        n = solve_network(n, config=snakemake.config['solving'],
                          solver_log=snakemake.log.solver, opts=opts)
        n.export_to_netcdf(snakemake.output[0])

    logger.info("Maximum memory usage: {}".format(mem.mem_usage))