From 2372bb156ddff68f3dc40b96159591eb295d7767 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jonas=20H=C3=B6rsch?= Date: Fri, 14 Sep 2018 11:22:13 +0200 Subject: [PATCH] Prepare Snakefile and scripts for other solvers --- Snakefile | 4 ++-- scripts/cluster_network.py | 5 ++++- scripts/solve_network.py | 10 +++++----- scripts/solve_operations_network.py | 4 ++-- 4 files changed, 13 insertions(+), 10 deletions(-) diff --git a/Snakefile b/Snakefile index 890ae55d..fa6d1676 100644 --- a/Snakefile +++ b/Snakefile @@ -214,7 +214,7 @@ rule solve_network: shadow: "shallow" params: partition=partition log: - gurobi="logs/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_gurobi.log", + solver="logs/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_solver.log", python="logs/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_python.log", memory="logs/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_memory.log" benchmark: "benchmarks/solve_network/{network}_s{simpl}_{clusters}_lv{lv}_{opts}" @@ -240,7 +240,7 @@ rule solve_operations_network: shadow: "shallow" params: partition=partition_op log: - gurobi="logs/solve_operations_network/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_op_gurobi.log", + solver="logs/solve_operations_network/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_op_solver.log", python="logs/solve_operations_network/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_op_python.log", memory="logs/solve_operations_network/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_op_memory.log" benchmark: "benchmarks/solve_operations_network/{network}_s{simpl}_{clusters}_lv{lv}_{opts}" diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 6a5f4281..f60d9881 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -87,7 +87,10 @@ def distribute_clusters_exactly(n, n_clusters): else: return distribute_clusters(n, n_clusters) -def distribute_clusters_optim(n, n_clusters, solver_name='gurobi'): +def distribute_clusters_optim(n, n_clusters, solver_name=None): + if solver_name is None: 
+ solver_name = snakemake.config['solving']['solver']['name'] + L = (n.loads_t.p_set.mean() .groupby(n.loads.bus).sum() .groupby([n.buses.country, n.buses.sub_network]).sum() diff --git a/scripts/solve_network.py b/scripts/solve_network.py index c0e94b9d..a8570970 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -106,15 +106,14 @@ def fix_branches(n, lines_s_nom=None, links_p_nom=None): if isinstance(n.opt, pypsa.opf.PersistentSolver): n.opt.update_var(n.model.link_p_nom) -def solve_network(n, config=None, gurobi_log=None, opts=None): +def solve_network(n, config=None, solver_log=None, opts=None): if config is None: config = snakemake.config['solving'] solve_opts = config['options'] solver_options = config['solver'].copy() - if gurobi_log is None: - gurobi_log = snakemake.log.gurobi - solver_options['logfile'] = gurobi_log + if solver_log is None: + solver_log = snakemake.log.solver solver_name = solver_options.pop('name') def run_lopf(n, allow_warning_status=False, fix_zero_lines=False, fix_ext_lines=False): @@ -145,6 +144,7 @@ def solve_network(n, config=None, gurobi_log=None, opts=None): gc.collect() status, termination_condition = \ pypsa.opf.network_lopf_solve(n, + solver_logfile=solver_log, solver_options=solver_options, formulation=solve_opts['formulation'], #free_memory={'pypsa'} @@ -256,7 +256,7 @@ if __name__ == "__main__": wildcards=dict(network='elec', simpl='', clusters='45', lv='1.0', opts='Co2L-3H'), input=["networks/{network}_s{simpl}_{clusters}_lv{lv}_{opts}.nc"], output=["results/networks/s{simpl}_{clusters}_lv{lv}_{opts}.nc"], - log=dict(gurobi="logs/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_gurobi.log", + log=dict(solver="logs/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_solver.log", python="logs/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_python.log") ) diff --git a/scripts/solve_operations_network.py b/scripts/solve_operations_network.py index 983e197a..9b454f61 100644 --- a/scripts/solve_operations_network.py +++ 
b/scripts/solve_operations_network.py @@ -42,7 +42,7 @@ if __name__ == "__main__": input=dict(unprepared="networks/{network}_s{simpl}_{clusters}.nc", optimized="results/networks/{network}_s{simpl}_{clusters}_lv{lv}_{opts}.nc"), output=["results/networks/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_op.nc"], - log=dict(gurobi="logs/s{simpl}_{clusters}_lv{lv}_{opts}_op_gurobi.log", + log=dict(solver="logs/s{simpl}_{clusters}_lv{lv}_{opts}_op_solver.log", python="logs/s{simpl}_{clusters}_lv{lv}_{opts}_op_python.log") ) @@ -65,7 +65,7 @@ if __name__ == "__main__": with memory_logger(filename=getattr(snakemake.log, 'memory', None), interval=30.) as mem: n = prepare_network(n, solve_opts=snakemake.config['solving']['options']) - n = solve_network(n, config=snakemake.config['solving'], gurobi_log=snakemake.log.gurobi, opts=opts) + n = solve_network(n, config=snakemake.config['solving'], solver_log=snakemake.log.solver, opts=opts) n.export_to_netcdf(snakemake.output[0]) logger.info("Maximum memory usage: {}".format(mem.mem_usage))