diff --git a/.gitignore b/.gitignore index 99429eae..dd1b53d6 100644 --- a/.gitignore +++ b/.gitignore @@ -18,3 +18,5 @@ gurobi.log doc/_build config.yaml + +dconf diff --git a/.travis.yml b/.travis.yml index 282a766b..162992df 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,4 +1,4 @@ -os: +os: - windows - linux - osx @@ -9,11 +9,11 @@ before_install: # install conda - wget https://raw.githubusercontent.com/trichter/conda4travis/latest/conda4travis.sh -O conda4travis.sh - source conda4travis.sh - + # install conda environment - conda env create -f ./environment.yaml - conda activate pypsa-eur - + # install open-source solver - conda install -c conda-forge ipopt glpk diff --git a/Snakefile b/Snakefile index a5ad51c6..700d02b7 100644 --- a/Snakefile +++ b/Snakefile @@ -297,15 +297,6 @@ rule solve_network: # group: "solve" # with group, threads is ignored https://bitbucket.org/snakemake/snakemake/issues/971/group-job-description-does-not-contain script: "scripts/solve_network.py" -rule trace_solve_network: - input: "networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc" - output: "results/networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_trace.nc" - shadow: "shallow" - log: python="logs/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_python_trace.log", - threads: 4 - resources: mem=memory - script: "scripts/trace_solve_network.py" - rule solve_operations_network: input: unprepared="networks/{network}_s{simpl}_{clusters}_ec.nc", diff --git a/config.default.yaml b/config.default.yaml index 7ca37868..a02895a0 100755 --- a/config.default.yaml +++ b/config.default.yaml @@ -184,6 +184,8 @@ solving: min_iterations: 3 max_iterations: 5 clip_p_max_pu: 0.01 + skip_iterations: false + track_iterations: false #nhours: 10 solver: name: gurobi diff --git a/config.tutorial.yaml b/config.tutorial.yaml index 53f2e6d7..b6676a18 100755 --- a/config.tutorial.yaml +++ b/config.tutorial.yaml @@ -161,6 +161,8 @@ solving: min_iterations: 1 max_iterations: 1 clip_p_max_pu: 0.01 + skip_iterations: false + track_iterations: false #nhours: 10 solver: name: cbc diff --git a/doc/configtables/solving-options.csv b/doc/configtables/solving-options.csv index 72873c83..0c640684 100644 --- a/doc/configtables/solving-options.csv +++ b/doc/configtables/solving-options.csv @@ -5,4 +5,6 @@ noisy_costs,bool,"{'true','false'}","Add random noise to marginal cost of genera min_iterations,--,int,"Minimum number of solving iterations in between which resistance and reactance (``x/r``) are updated for branches according to ``s_nom_opt`` of the previous run." max_iterations,--,int,"Maximum number of solving iterations in between which resistance and reactance (``x/r``) are updated for branches according to ``s_nom_opt`` of the previous run." nhours,--,int,"Specifies the :math:`n` first snapshots to take into account. Must be less than the total number of snapshots. Rather recommended only for debugging." -clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables' per-unit availability time series, values below this threshold are set to zero." \ No newline at end of file +clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables' per-unit availability time series, values below this threshold are set to zero." +skip_iterations,bool,"{'true','false'}","Skip iterating, do not update impedances of branches." 
+track_iterations,bool,"{'true','false'}","Flag whether to store the intermediate branch capacities and objective function values for each iteration in ``network.lines['s_nom_opt_X']`` (where ``X`` labels the iteration)." \ No newline at end of file diff --git a/doc/release_notes.rst b/doc/release_notes.rst index c9d0612e..3708065b 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -2,6 +2,12 @@ Release Notes ########################################## +PyPSA-Eur 0.2.0 (TBD) +================================== + +* The optimization is now performed using the ``pyomo=False`` setting in the :func:`pypsa.lopf.network_lopf`. This speeds up the solving process significantly and consumes much less memory. The inclusion of additional constraints was adjusted to the new implementation. They are all passed to the :func:`network_lopf` function via the ``extra_functionality`` argument. The rule ``trace_solve_network`` was integrated into the rule :mod:`solve_network` and can be activated via configuration with ``solving: options: track_iterations: true``. The charging and discharging capacities of batteries modelled as a store-link combination are now coupled (`#116 `_). + + PyPSA-Eur 0.1.0 (9th January 2020) ================================== diff --git a/doc/solving.rst b/doc/solving.rst index 717aa2df..69b620d1 100644 --- a/doc/solving.rst +++ b/doc/solving.rst @@ -2,11 +2,10 @@ Solving Networks ########################################## -After generating and simplifying the networks they can be solved through the rule :mod:`solve_network` by using the collection rule :mod:`solve_all_elec_networks`. Moreover, networks can be solved for another focus with the derivative rules :mod:`trace_solve_network` to log changes during iterations and :mod:`solve_operations_network` for dispatch-only analyses on an already solved network. +After generating and simplifying the networks they can be solved through the rule :mod:`solve_network` by using the collection rule :mod:`solve_all_elec_networks`. Moreover, networks can be solved for another focus with the derivative rule :mod:`solve_operations_network` for dispatch-only analyses on an already solved network. .. toctree:: :caption: Overview solving/solve_network - solving/trace_solve_network solving/solve_operations_network diff --git a/doc/solving/trace_solve_network.rst b/doc/solving/trace_solve_network.rst deleted file mode 100644 index 79eec832..00000000 --- a/doc/solving/trace_solve_network.rst +++ /dev/null @@ -1,34 +0,0 @@ -.. _trace_solve: - -Rule ``trace_solve_network`` -=============================== - -.. graphviz:: - :align: center - - digraph snakemake_dag { - graph [bgcolor=white, - margin=0, - size="8,5" - ]; - node [fontname=sans, - fontsize=10, - penwidth=2, - shape=box, - style=rounded - ]; - edge [color=grey, - penwidth=2 - ]; - 0 [color="0.17 0.6 0.85", - fillcolor=gray, - label=trace_solve_network, - style=filled]; - 1 [color="0.58 0.6 0.85", - label=prepare_network]; - 1 -> 0; - } - -| - -.. 
automodule:: trace_solve_network diff --git a/environment.docs.yaml b/environment.docs.yaml old mode 100644 new mode 100755 index 79031624..7beb2f45 --- a/environment.docs.yaml +++ b/environment.docs.yaml @@ -5,7 +5,7 @@ channels: dependencies: #- python - pip - - pypsa>=0.16 + # - pypsa>=0.16 # until PyPSA/a3c0991 released - atlite # Dependencies of the workflow itself @@ -47,6 +47,7 @@ dependencies: - git+https://github.com/FRESNA/vresutils.git#egg=vresutils - git+https://github.com/PyPSA/glaes.git#egg=glaes - git+https://github.com/PyPSA/geokit.git#egg=geokit + - git+https://github.com/PyPSA/pypsa.git#egg=pypsa # until PyPSA/a3c0991 released - cdsapi - sphinx - sphinx_rtd_theme diff --git a/environment.yaml b/environment.yaml index df14f482..9bb8386e 100644 --- a/environment.yaml +++ b/environment.yaml @@ -1,12 +1,13 @@ name: pypsa-eur channels: + - defaults - conda-forge - bioconda dependencies: - python - pip - - pypsa>=0.16 + # - pypsa>=0.16 # until PyPSA/a3c0991 released - atlite # Dependencies of the workflow itself @@ -33,18 +34,19 @@ dependencies: # environment by calling ipython - ipython - # GIS dependencies have to come all from conda-forge - - conda-forge::cartopy - - conda-forge::fiona - - conda-forge::proj - - conda-forge::pyshp - - conda-forge::geopandas - - conda-forge::rasterio - - conda-forge::shapely - - conda-forge::libgdal + # GIS dependencies: + - cartopy + - fiona + - proj + - pyshp + - geopandas + - rasterio + - shapely + - libgdal - pip: - git+https://github.com/FRESNA/vresutils.git#egg=vresutils - git+https://github.com/PyPSA/glaes.git#egg=glaes - git+https://github.com/PyPSA/geokit.git#egg=geokit + - git+https://github.com/PyPSA/pypsa.git#egg=pypsa # until PyPSA/a3c0991 released - cdsapi diff --git a/scripts/plot_network.py b/scripts/plot_network.py old mode 100644 new mode 100755 diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py old mode 100644 new mode 100755 index b86cbd07..0a6a13ba --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -122,8 +122,10 @@ def set_line_cost_limit(n, lc, Nyears=1.): n.links.loc[links_dc_b, 'p_nom_extendable'] = True if lc != 'opt': - n.line_cost_limit = float(lc) * total_line_cost - + line_cost = float(lc) * total_line_cost + n.add('GlobalConstraint', 'lc_limit', + type='transmission_expansion_cost_limit', + sense='<=', constant=line_cost, carrier_attribute='AC, DC') return n def set_line_volume_limit(n, lv, Nyears=1.): @@ -156,8 +158,10 @@ def set_line_volume_limit(n, lv, Nyears=1.): n.links.loc[links_dc_b, 'p_nom_extendable'] = True if lv != 'opt': - n.line_volume_limit = float(lv) * total_line_volume - + line_volume = float(lv) * total_line_volume + n.add('GlobalConstraint', 'lv_limit', + type='transmission_volume_expansion_limit', + sense='<=', constant=line_volume, carrier_attribute='AC, DC') return n def average_every_nhours(n, offset): diff --git a/scripts/solve_network.py b/scripts/solve_network.py old mode 100644 new mode 100755 index 6109c950..a7113176 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -20,6 +20,8 @@ Relevant Settings nhours: min_iterations: max_iterations: + skip_iterating: + track_iterations: solver: name: (solveroptions): @@ -51,7 +53,8 @@ Total annual system costs are minimised with PyPSA. The full formulation of the linear optimal power flow (plus investment planning is provided in the `documentation of PyPSA `_. -Additionaly some extra constraints from :mod:`prepare_network` are added. 
+The optimization is based on the ``pyomo=False`` setting in the :func:`network.lopf` and :func:`pypsa.linopf.ilopf` function. +Additionally, some extra constraints specified in :mod:`prepare_network` are added. Solving the network in multiple iterations is motivated through the dependence of transmission line capacities and impedances. As lines are expanded their electrical parameters change, which renders the optimisation bilinear even if the power flow @@ -83,27 +86,14 @@ from _helpers import configure_logging import numpy as np import pandas as pd -import gc import pypsa -from pypsa.descriptors import free_output_series_dataframes - -# Suppress logging of the slack bus choices -pypsa.pf.logger.setLevel(logging.WARNING) - +from pypsa.linopf import (get_var, define_constraints, linexpr, join_exprs, + network_lopf, ilopf) +from pathlib import Path from vresutils.benchmark import memory_logger -def patch_pyomo_tmpdir(tmpdir): - # PYOMO should write its lp files into tmp here - import os - if not os.path.isdir(tmpdir): - os.mkdir(tmpdir) - from pyutilib.services import TempfileManager - TempfileManager.tempdir = tmpdir - -def prepare_network(n, solve_opts=None): - if solve_opts is None: - solve_opts = snakemake.config['solving']['options'] +def prepare_network(n, solve_opts): if 'clip_p_max_pu' in solve_opts: for df in (n.generators_t.p_max_pu, n.storage_units_t.inflow): @@ -120,17 +110,19 @@ def prepare_network(n, solve_opts=None): # willingness to pay # http://journal.frontiersin.org/article/10.3389/fenrg.2015.00055/full p_nom=1e9 # kW - ) + ) if solve_opts.get('noisy_costs'): for t in n.iterate_components(n.one_port_components): #if 'capital_cost' in t.df: # t.df['capital_cost'] += 1e1 + 2.*(np.random.random(len(t.df)) - 0.5) if 'marginal_cost' in t.df: - t.df['marginal_cost'] += 1e-2 + 2e-3*(np.random.random(len(t.df)) - 0.5) + t.df['marginal_cost'] += (1e-2 + 2e-3 * + (np.random.random(len(t.df)) - 0.5)) for t in n.iterate_components(['Line', 'Link']): - t.df['capital_cost'] += (1e-1 + 2e-2*(np.random.random(len(t.df)) - 0.5)) * t.df['length'] + t.df['capital_cost'] += (1e-1 + + 2e-2*(np.random.random(len(t.df)) - 0.5)) * t.df['length'] if solve_opts.get('nhours'): nhours = solve_opts['nhours'] @@ -139,253 +131,125 @@ def prepare_network(n, solve_opts=None): return n -def add_opts_constraints(n, opts=None): - if opts is None: - opts = snakemake.wildcards.opts.split('-') - if 'BAU' in opts: - mincaps = snakemake.config['electricity']['BAU_mincapacities'] - def bau_mincapacities_rule(model, carrier): - gens = n.generators.index[n.generators.p_nom_extendable & (n.generators.carrier == carrier)] - return sum(model.generator_p_nom[gen] for gen in gens) >= mincaps[carrier] - n.model.bau_mincapacities = pypsa.opt.Constraint(list(mincaps), rule=bau_mincapacities_rule) +def add_CCL_constraints(n, config): + agg_p_nom_limits = config['electricity'].get('agg_p_nom_limits') - if 'SAFE' in opts: - peakdemand = (1. 
+ snakemake.config['electricity']['SAFE_reservemargin']) * n.loads_t.p_set.sum(axis=1).max() - conv_techs = snakemake.config['plotting']['conv_techs'] - exist_conv_caps = n.generators.loc[n.generators.carrier.isin(conv_techs) & ~n.generators.p_nom_extendable, 'p_nom'].sum() - ext_gens_i = n.generators.index[n.generators.carrier.isin(conv_techs) & n.generators.p_nom_extendable] - n.model.safe_peakdemand = pypsa.opt.Constraint(expr=sum(n.model.generator_p_nom[gen] for gen in ext_gens_i) >= peakdemand - exist_conv_caps) + try: + agg_p_nom_minmax = pd.read_csv(agg_p_nom_limits, + index_col=list(range(2))) + except IOError: + logger.exception("Need to specify the path to a .csv file containing " + "aggregate capacity limits per country in " + "config['electricity']['agg_p_nom_limit'].") + logger.info("Adding per carrier generation capacity constraints for " + "individual countries") - # Add constraints on the per-carrier capacity in each country - if 'CCL' in opts: - agg_p_nom_limits = snakemake.config['electricity'].get('agg_p_nom_limits') + gen_country = n.generators.bus.map(n.buses.country) + # cc means country and carrier + p_nom_per_cc = (pd.DataFrame( + {'p_nom': linexpr((1, get_var(n, 'Generator', 'p_nom'))), + 'country': gen_country, 'carrier': n.generators.carrier}) + .dropna(subset=['p_nom']) + .groupby(['country', 'carrier']).p_nom + .apply(join_exprs)) + minimum = agg_p_nom_minmax['min'].dropna() + if not minimum.empty: + minconstraint = define_constraints(n, p_nom_per_cc[minimum.index], + '>=', minimum, 'agg_p_nom', 'min') + maximum = agg_p_nom_minmax['max'].dropna() + if not maximum.empty: + maxconstraint = define_constraints(n, p_nom_per_cc[maximum.index], + '<=', maximum, 'agg_p_nom', 'max') - try: - agg_p_nom_minmax = pd.read_csv(agg_p_nom_limits, index_col=list(range(2))) - except IOError: - logger.exception("Need to specify the path to a .csv file containing aggregate capacity limits per country in config['electricity']['agg_p_nom_limit'].") - logger.info("Adding per carrier generation capacity constraints for individual countries") +def add_BAU_constraints(n, config): + mincaps = pd.Series(config['electricity']['BAU_mincapacities']) + lhs = (linexpr((1, get_var(n, 'Generator', 'p_nom'))) + .groupby(n.generators.carrier).apply(join_exprs)) + define_constraints(n, lhs, '>=', mincaps[lhs.index], 'Carrier', 'bau_mincaps') - gen_country = n.generators.bus.map(n.buses.country) - def agg_p_nom_min_rule(model, country, carrier): - min = agg_p_nom_minmax.at[(country, carrier), 'min'] - return ((sum(model.generator_p_nom[gen] - for gen in n.generators.index[(gen_country == country) & (n.generators.carrier == carrier)]) - >= min) - if np.isfinite(min) else pypsa.opt.Constraint.Skip) +def add_SAFE_constraints(n, config): + peakdemand = (1. 
+ config['electricity']['SAFE_reservemargin']) *\ + n.loads_t.p_set.sum(axis=1).max() + conv_techs = config['plotting']['conv_techs'] + exist_conv_caps = n.generators.query('~p_nom_extendable & carrier in @conv_techs')\ + .p_nom.sum() + ext_gens_i = n.generators.query('carrier in @conv_techs & p_nom_extendable').index + lhs = linexpr((1, get_var(n, 'Generator', 'p_nom')[ext_gens_i])).sum() + rhs = peakdemand - exist_conv_caps + define_constraints(n, lhs, '>=', rhs, 'Safe', 'mintotalcap') - def agg_p_nom_max_rule(model, country, carrier): - max = agg_p_nom_minmax.at[(country, carrier), 'max'] - return ((sum(model.generator_p_nom[gen] - for gen in n.generators.index[(gen_country == country) & (n.generators.carrier == carrier)]) - <= max) - if np.isfinite(max) else pypsa.opt.Constraint.Skip) - n.model.agg_p_nom_min = pypsa.opt.Constraint(list(agg_p_nom_minmax.index), rule=agg_p_nom_min_rule) - n.model.agg_p_nom_max = pypsa.opt.Constraint(list(agg_p_nom_minmax.index), rule=agg_p_nom_max_rule) +def add_battery_constraints(n): + nodes = n.buses.index[n.buses.carrier == "battery"] + if nodes.empty or ('Link', 'p_nom') not in n.variables.index: + return + link_p_nom = get_var(n, "Link", "p_nom") + lhs = linexpr((1,link_p_nom[nodes + " charger"]), + (-n.links.loc[nodes + " discharger", "efficiency"].values, + link_p_nom[nodes + " discharger"].values)) + define_constraints(n, lhs, "=", 0, 'Link', 'charger_ratio') -def add_lv_constraint(n): - line_volume = getattr(n, 'line_volume_limit', None) - if line_volume is not None and not np.isinf(line_volume): - links_dc_ext_i = n.links.index[(n.links.carrier == 'DC') & n.links.p_nom_extendable] if not n.links.empty else pd.Index([]) - n.model.line_volume_constraint = pypsa.opt.Constraint( - expr=((sum(n.model.passive_branch_s_nom["Line",line]*n.lines.at[line,"length"] - for line in n.lines.index[n.lines.s_nom_extendable]) + - sum(n.model.link_p_nom[link]*n.links.at[link,"length"] - for link in links_dc_ext_i)) - <= line_volume) - ) -def add_lc_constraint(n): - line_cost = getattr(n, 'line_cost_limit', None) - if line_cost is not None and not np.isinf(line_cost): - links_dc_ext_i = n.links.index[(n.links.carrier == 'DC') & n.links.p_nom_extendable] if not n.links.empty else pd.Index([]) - n.model.line_cost_constraint = pypsa.opt.Constraint( - expr=((sum(n.model.passive_branch_s_nom["Line",line]*n.lines.at[line,"capital_cost_lc"] - for line in n.lines.index[n.lines.s_nom_extendable]) + - sum(n.model.link_p_nom[link]*n.links.at[link,"capital_cost_lc"] - for link in links_dc_ext_i)) - <= line_cost) - ) +def extra_functionality(n, snapshots): + """ + Collects supplementary constraints which will be passed to ``pypsa.linopf.network_lopf``. + If you want to enforce additional custom constraints, this is a good location to add them. + The arguments ``opts`` and ``snakemake.config`` are expected to be attached to the network. 
+ """ + opts = n.opts + config = n.config + if 'BAU' in opts and n.generators.p_nom_extendable.any(): + add_BAU_constraints(n, config) + if 'SAFE' in opts and n.generators.p_nom_extendable.any(): + add_SAFE_constraints(n, config) + if 'CCL' in opts and n.generators.p_nom_extendable.any(): + add_CCL_constraints(n, config) + add_battery_constraints(n) -def add_eps_storage_constraint(n): - if not hasattr(n, 'epsilon'): - n.epsilon = 1e-5 - fix_sus_i = n.storage_units.index[~ n.storage_units.p_nom_extendable] - n.model.objective.expr += sum(n.epsilon * n.model.state_of_charge[su, n.snapshots[0]] for su in fix_sus_i) -def fix_branches(n, lines_s_nom=None, links_p_nom=None): - if lines_s_nom is not None and len(lines_s_nom) > 0: - for l, s_nom in lines_s_nom.iteritems(): - n.model.passive_branch_s_nom["Line", l].fix(s_nom) - if isinstance(n.opt, pypsa.opf.PersistentSolver): - n.opt.update_var(n.model.passive_branch_s_nom) - - if links_p_nom is not None and len(links_p_nom) > 0: - for l, p_nom in links_p_nom.iteritems(): - n.model.link_p_nom[l].fix(p_nom) - if isinstance(n.opt, pypsa.opf.PersistentSolver): - n.opt.update_var(n.model.link_p_nom) - -def solve_network(n, config=None, solver_log=None, opts=None, callback=None, - skip_iterating=False, - extra_functionality=None, extra_functionality_args=None, - extra_postprocessing=None): - if config is None: - config = snakemake.config['solving'] - solve_opts = config['options'] - - solver_options = config['solver'].copy() - if solver_log is None: - solver_log = snakemake.log.solver +def solve_network(n, config=None, solver_log=None, opts=None, **kwargs): + solve_opts = snakemake.config['solving']['options'] + solver_options = config['solving']['solver'].copy() solver_name = solver_options.pop('name') - - if extra_postprocessing is None: - - def get_line_limit_duals(n, snapshots, duals): - if hasattr(n, 'line_volume_limit') and hasattr(n.model, 'line_volume_constraint'): - cdata = pd.Series(list(n.model.line_volume_constraint.values()), - index=list(n.model.line_volume_constraint.keys())) - n.line_volume_limit_dual = -cdata.map(duals).sum() - - if hasattr(n, 'line_cost_limit') and hasattr(n.model, 'line_cost_constraint'): - cdata = pd.Series(list(n.model.line_cost_constraint.values()), - index=list(n.model.line_cost_constraint.keys())) - n.line_cost_limit_dual = -cdata.map(duals).sum() - - extra_postprocessing = get_line_limit_duals - - def run_lopf(n, allow_warning_status=False, fix_ext_lines=False): - free_output_series_dataframes(n) - - pypsa.opf.network_lopf_build_model(n, formulation=solve_opts['formulation']) - - add_opts_constraints(n, opts) - - if not fix_ext_lines: - add_lv_constraint(n) - add_lc_constraint(n) - - if extra_functionality is not None: - extra_functionality(n, n.snapshots, *extra_functionality_args) - - pypsa.opf.network_lopf_prepare_solver(n, solver_name=solver_name) - - if fix_ext_lines: - fix_branches(n, - lines_s_nom=n.lines.loc[n.lines.s_nom_extendable, 's_nom_opt'], - links_p_nom=n.links.loc[n.links.p_nom_extendable, 'p_nom_opt']) - - # Firing up solve will increase memory consumption tremendously, so - # make sure we freed everything we can - gc.collect() - status, termination_condition = \ - pypsa.opf.network_lopf_solve(n, - solver_logfile=solver_log, - solver_options=solver_options, - formulation=solve_opts['formulation'], - extra_postprocessing=extra_postprocessing - #free_memory={'pypsa'} - ) - - assert status == "ok" or allow_warning_status and status == 'warning', \ - ("network_lopf did abort with status={} " - 
"and termination_condition={}" - .format(status, termination_condition)) - - return status, termination_condition - - if not skip_iterating: - iteration = 0 - lines_ext_b = n.lines.s_nom_extendable - if lines_ext_b.any(): - # puh: ok, we need to iterate, since there is a relation - # between s/p_nom and r, x for branches. - msq_threshold = 0.01 - lines = pd.DataFrame(n.lines[['r', 'x', 'type', 'num_parallel']]) - - lines['s_nom'] = ( - np.sqrt(3) * n.lines['type'].map(n.line_types.i_nom) * - n.lines.bus0.map(n.buses.v_nom) - ).where(n.lines.type != '', n.lines['s_nom']) - - lines_ext_typed_b = (n.lines.type != '') & lines_ext_b - lines_ext_untyped_b = (n.lines.type == '') & lines_ext_b - - def update_line_parameters(n, zero_lines_below=10): - if zero_lines_below > 0: - n.lines.loc[n.lines.s_nom_opt < zero_lines_below, 's_nom_opt'] = 0. - n.links.loc[n.links.p_nom_opt < zero_lines_below, 'p_nom_opt'] = 0. - - if lines_ext_untyped_b.any(): - for attr in ('r', 'x'): - n.lines.loc[lines_ext_untyped_b, attr] = ( - lines[attr].multiply(lines['s_nom']/n.lines['s_nom_opt']) - ) - - if lines_ext_typed_b.any(): - n.lines.loc[lines_ext_typed_b, 'num_parallel'] = ( - n.lines['s_nom_opt']/lines['s_nom'] - ) - logger.debug("lines.num_parallel={}".format(n.lines.loc[lines_ext_typed_b, 'num_parallel'])) - - iteration += 1 - lines['s_nom_opt'] = lines['s_nom'] * n.lines['num_parallel'].where(n.lines.type != '', 1.) - status, termination_condition = run_lopf(n, allow_warning_status=True) - if callback is not None: callback(n, iteration, status) - - def msq_diff(n): - lines_err = np.sqrt(((n.lines['s_nom_opt'] - lines['s_nom_opt'])**2).mean())/lines['s_nom_opt'].mean() - logger.info("Mean square difference after iteration {} is {}".format(iteration, lines_err)) - return lines_err - - min_iterations = solve_opts.get('min_iterations', 2) - max_iterations = solve_opts.get('max_iterations', 999) - while msq_diff(n) > msq_threshold or iteration < min_iterations: - if iteration >= max_iterations: - logger.info("Iteration {} beyond max_iterations {}. 
Stopping ...".format(iteration, max_iterations)) - break - - update_line_parameters(n) - lines['s_nom_opt'] = n.lines['s_nom_opt'] - iteration += 1 - - status, termination_condition = run_lopf(n, allow_warning_status=True) - if callback is not None: callback(n, iteration, status) - - - update_line_parameters(n, zero_lines_below=100) - - logger.info("Starting last run with fixed extendable lines") - - iteration += 1 - status, termination_condition = run_lopf(n, fix_ext_lines=True) + skip_iterating = solve_opts.get('skip_iterating', False) + track_iterations = solve_opts.get('track_iterations', False) + + # add to network for extra_functionality + n.config = config + n.opts = opts + + if skip_iterating: + network_lopf(n, solver_name=solver_name, solver_options=solver_options, + extra_functionality=extra_functionality, **kwargs) else: - status, termination_condition = run_lopf(n, fix_ext_lines=False) - if callback is not None: callback(n, iteration, status) - + ilopf(n, solver_name=solver_name, solver_options=solver_options, + track_iterations=track_iterations, + extra_functionality=extra_functionality, **kwargs) return n + if __name__ == "__main__": if 'snakemake' not in globals(): from _helpers import mock_snakemake snakemake = mock_snakemake('solve_network', network='elec', simpl='', - clusters='5', ll='copt', opts='Co2L-24H') + clusters='5', ll='copt', opts='Co2L-BAU-CCL-24H') configure_logging(snakemake) tmpdir = snakemake.config['solving'].get('tmpdir') if tmpdir is not None: - patch_pyomo_tmpdir(tmpdir) + Path(tmpdir).mkdir(parents=True, exist_ok=True) + opts = snakemake.wildcards.opts.split('-') + solve_opts = snakemake.config['solving']['options'] - with memory_logger(filename=getattr(snakemake.log, 'memory', None), interval=30.) as mem: + with memory_logger(filename=getattr(snakemake.log, 'memory', None), + interval=30.) as mem: n = pypsa.Network(snakemake.input[0]) - - n = prepare_network(n) - n = solve_network(n) - + n = prepare_network(n, solve_opts) + n = solve_network(n, config=snakemake.config, solver_dir=tmpdir, + solver_log=snakemake.log.solver, opts=opts) n.export_to_netcdf(snakemake.output[0]) logger.info("Maximum memory usage: {}".format(mem.mem_usage)) diff --git a/scripts/solve_operations_network.py b/scripts/solve_operations_network.py index b295f8f2..4b4c3e43 100644 --- a/scripts/solve_operations_network.py +++ b/scripts/solve_operations_network.py @@ -49,31 +49,37 @@ import pypsa import numpy as np import re +from pathlib import Path from vresutils.benchmark import memory_logger -from solve_network import patch_pyomo_tmpdir, solve_network, prepare_network +from solve_network import solve_network, prepare_network def set_parameters_from_optimized(n, n_optim): lines_typed_i = n.lines.index[n.lines.type != ''] - n.lines.loc[lines_typed_i, 'num_parallel'] = n_optim.lines['num_parallel'].reindex(lines_typed_i, fill_value=0.) + n.lines.loc[lines_typed_i, 'num_parallel'] = \ + n_optim.lines['num_parallel'].reindex(lines_typed_i, fill_value=0.) n.lines.loc[lines_typed_i, 's_nom'] = ( np.sqrt(3) * n.lines['type'].map(n.line_types.i_nom) * - n.lines.bus0.map(n.buses.v_nom) * n.lines.num_parallel - ) + n.lines.bus0.map(n.buses.v_nom) * n.lines.num_parallel) + lines_untyped_i = n.lines.index[n.lines.type == ''] for attr in ('s_nom', 'r', 'x'): - n.lines.loc[lines_untyped_i, attr] = n_optim.lines[attr].reindex(lines_untyped_i, fill_value=0.) + n.lines.loc[lines_untyped_i, attr] = \ + n_optim.lines[attr].reindex(lines_untyped_i, fill_value=0.) 
n.lines['s_nom_extendable'] = False links_dc_i = n.links.index[n.links.carrier == 'DC'] - n.links.loc[links_dc_i, 'p_nom'] = n_optim.links['p_nom_opt'].reindex(links_dc_i, fill_value=0.) + n.links.loc[links_dc_i, 'p_nom'] = \ + n_optim.links['p_nom_opt'].reindex(links_dc_i, fill_value=0.) n.links.loc[links_dc_i, 'p_nom_extendable'] = False gen_extend_i = n.generators.index[n.generators.p_nom_extendable] - n.generators.loc[gen_extend_i, 'p_nom'] = n_optim.generators['p_nom_opt'].reindex(gen_extend_i, fill_value=0.) + n.generators.loc[gen_extend_i, 'p_nom'] = \ + n_optim.generators['p_nom_opt'].reindex(gen_extend_i, fill_value=0.) n.generators.loc[gen_extend_i, 'p_nom_extendable'] = False stor_extend_i = n.storage_units.index[n.storage_units.p_nom_extendable] - n.storage_units.loc[stor_extend_i, 'p_nom'] = n_optim.storage_units['p_nom_opt'].reindex(stor_extend_i, fill_value=0.) + n.storage_units.loc[stor_extend_i, 'p_nom'] = \ + n_optim.storage_units['p_nom_opt'].reindex(stor_extend_i, fill_value=0.) n.storage_units.loc[stor_extend_i, 'p_nom_extendable'] = False return n @@ -82,27 +88,25 @@ if __name__ == "__main__": if 'snakemake' not in globals(): from _helpers import mock_snakemake snakemake = mock_snakemake('solve_operations_network', network='elec', - simpl='', clusters='5', ll='copt', opts='Co2L-24H') + simpl='', clusters='5', ll='copt', opts='Co2L-BAU-24H') configure_logging(snakemake) tmpdir = snakemake.config['solving'].get('tmpdir') if tmpdir is not None: - patch_pyomo_tmpdir(tmpdir) - + Path(tmpdir).mkdir(parents=True, exist_ok=True) n = pypsa.Network(snakemake.input.unprepared) - n_optim = pypsa.Network(snakemake.input.optimized) n = set_parameters_from_optimized(n, n_optim) del n_optim - opts = [o - for o in snakemake.wildcards.opts.split('-') - if not re.match(r'^\d+h$', o, re.IGNORECASE)] + opts = snakemake.wildcards.opts.split('-') with memory_logger(filename=getattr(snakemake.log, 'memory', None), interval=30.) as mem: n = prepare_network(n, solve_opts=snakemake.config['solving']['options']) - n = solve_network(n, config=snakemake.config['solving'], solver_log=snakemake.log.solver, opts=opts) + n = solve_network(n, config=snakemake.config, solver_dir=tmpdir, + solver_log=snakemake.log.solver, opts=opts, + skip_iterating=True) n.export_to_netcdf(snakemake.output[0]) logger.info("Maximum memory usage: {}".format(mem.mem_usage)) diff --git a/scripts/trace_solve_network.py b/scripts/trace_solve_network.py deleted file mode 100644 index 9630a27d..00000000 --- a/scripts/trace_solve_network.py +++ /dev/null @@ -1,81 +0,0 @@ -""" -Iteratively solves expansion problem like the rule :mod:`solve_network`, but additionally -records intermediate branch capacity steps and values of the objective function. - -Relevant Settings ------------------ - -.. code:: yaml - - solving: - tmpdir: - options: - formulation: - clip_p_max_pu: - load_shedding: - noisy_costs: - nhours: - min_iterations: - max_iterations: - solver: - name: - (solveroptions): - -.. 
seealso:: - Documentation of the configuration file ``config.yaml`` at - :ref:`solving_cf` - -Inputs ------- - -- ``networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: confer :ref:`prepare` - -Outputs -------- - -- ``results/networks/{network}_s{simpl}_{clusters}_ec_l{ll}_{opts}_trace.nc``: Solved PyPSA network including optimisation results (with trace) - -Description ------------ - -""" - -import logging -logger = logging.getLogger(__name__) -from _helpers import configure_logging - -from solve_network import patch_pyomo_tmpdir, prepare_network, solve_network - -import pypsa - -if __name__ == "__main__": - if 'snakemake' not in globals(): - from _helpers import mock_snakemake - snakemake = mock_snakemake('trace_solve_network', network='elec', simpl='', - clusters='5', ll='copt', opts='Co2L-24H') - configure_logging(snakemake) - - tmpdir = snakemake.config['solving'].get('tmpdir') - if tmpdir is not None: - patch_pyomo_tmpdir(tmpdir) - - - n = pypsa.Network(snakemake.input[0]) - - solver_log = 'solver.log' - config = snakemake.config['solving'] - opts = snakemake.wildcards.opts.split('-') - - def save_optimal_capacities(net, iteration, status): - net.lines[f"s_nom_opt_{iteration}"] = net.lines["s_nom_opt"] - net.links[f"p_nom_opt_{iteration}"] = net.links["p_nom_opt"] - setattr(net, f"status_{iteration}", status) - setattr(net, f"objective_{iteration}", net.objective) - net.iteration = iteration - - net.export_to_netcdf(snakemake.output[0]) - - config['options']['max_iterations'] = 12 - n = prepare_network(n, config['options']) - n = solve_network(n, config, solver_log, opts, save_optimal_capacities) - n.export_to_netcdf(snakemake.output[0])
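
As a hedged illustration of the pyomo-free workflow that this patch moves ``scripts/solve_network.py`` onto, the sketch below shows how a custom capacity constraint could be formulated with the same ``get_var``/``linexpr``/``define_constraints`` helpers used by the new ``add_BAU_constraints`` and ``add_SAFE_constraints`` functions above. The carrier name ``onwind``, the 10 GW cap, the ``cbc`` solver and the network path are illustrative assumptions, not part of the patch.

```python
# Sketch only, not part of this patch: a user-defined capacity constraint
# written with the pyomo-free helpers from pypsa.linopf that the new
# solve_network.py relies on. Carrier, cap, solver and path are assumptions.
import pypsa
from pypsa.linopf import get_var, linexpr, define_constraints, network_lopf

def limit_onwind_capacity(n, snapshots):
    """Cap total extendable onshore wind capacity at 10 GW (illustrative)."""
    gens = n.generators.query("carrier == 'onwind' and p_nom_extendable").index
    if gens.empty:
        return
    # one linear expression summing all extendable onwind p_nom variables
    lhs = linexpr((1, get_var(n, 'Generator', 'p_nom')[gens])).sum()
    define_constraints(n, lhs, '<=', 10e3, 'Generator', 'onwind_cap_limit')

n = pypsa.Network("networks/elec_s_5_ec_lcopt_Co2L-24H.nc")  # hypothetical input
network_lopf(n, solver_name="cbc", extra_functionality=limit_onwind_capacity)
```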
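The removed ``trace_solve_network`` rule is superseded by the ``track_iterations`` switch of ``pypsa.linopf.ilopf``, as stated in the release notes above. A minimal sketch, continuing with the network object ``n`` from the previous example and again assuming the ``cbc`` solver, of how the recorded intermediate capacities can be inspected:

```python
# Sketch only: iterative solve with iteration tracking, the replacement for
# the deleted trace_solve_network rule. In the workflow this is enabled via
# `solving: options: track_iterations: true` in config.yaml.
from pypsa.linopf import ilopf

ilopf(n, solver_name="cbc", track_iterations=True)
# intermediate branch capacities are stored per iteration,
# e.g. n.lines['s_nom_opt_1'], n.lines['s_nom_opt_2'], ...
print(n.lines.filter(like="s_nom_opt_"))
```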
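Similarly, the ad-hoc ``n.line_volume_limit``/``n.line_cost_limit`` attributes and the ``get_line_limit_duals`` post-processing removed by this patch are replaced by proper ``GlobalConstraint`` components, as added in ``scripts/prepare_network.py``. A sketch under the assumption that the shadow price of the cap is wanted after solving; the ``5e7`` MWkm constant and the solver choice are arbitrary example values.

```python
# Sketch only: the transmission volume cap as a GlobalConstraint, mirroring
# the new code in prepare_network.py. After a pyomo-free solve its shadow
# price should be available in n.global_constraints.mu, which takes over the
# role of the removed n.line_volume_limit_dual attribute.
from pypsa.linopf import network_lopf

n.add('GlobalConstraint', 'lv_limit',
      type='transmission_volume_expansion_limit',
      sense='<=', constant=5e7, carrier_attribute='AC, DC')
network_lopf(n, solver_name="cbc")
print(n.global_constraints.loc['lv_limit', 'mu'])
```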