Prepare Snakefile and scripts for other solvers

This commit is contained in:
Jonas Hörsch 2018-09-14 11:22:13 +02:00
parent fac30eb5a7
commit 2372bb156d
4 changed files with 13 additions and 10 deletions

View File

@@ -214,7 +214,7 @@ rule solve_network:
shadow: "shallow"
params: partition=partition
log:
gurobi="logs/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_gurobi.log",
solver="logs/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_solver.log",
python="logs/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_python.log",
memory="logs/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_memory.log"
benchmark: "benchmarks/solve_network/{network}_s{simpl}_{clusters}_lv{lv}_{opts}"
@@ -240,7 +240,7 @@ rule solve_operations_network:
shadow: "shallow"
params: partition=partition_op
log:
gurobi="logs/solve_operations_network/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_op_gurobi.log",
solver="logs/solve_operations_network/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_op_solver.log",
python="logs/solve_operations_network/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_op_python.log",
memory="logs/solve_operations_network/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_op_memory.log"
benchmark: "benchmarks/solve_operations_network/{network}_s{simpl}_{clusters}_lv{lv}_{opts}"

View File

@@ -87,7 +87,10 @@ def distribute_clusters_exactly(n, n_clusters):
else:
return distribute_clusters(n, n_clusters)
def distribute_clusters_optim(n, n_clusters, solver_name='gurobi'):
def distribute_clusters_optim(n, n_clusters, solver_name=None):
if solver_name is None:
solver_name = snakemake.config['solving']['solver']['name']
L = (n.loads_t.p_set.mean()
.groupby(n.loads.bus).sum()
.groupby([n.buses.country, n.buses.sub_network]).sum()

View File

@@ -106,15 +106,14 @@ def fix_branches(n, lines_s_nom=None, links_p_nom=None):
if isinstance(n.opt, pypsa.opf.PersistentSolver):
n.opt.update_var(n.model.link_p_nom)
def solve_network(n, config=None, gurobi_log=None, opts=None):
def solve_network(n, config=None, solver_log=None, opts=None):
if config is None:
config = snakemake.config['solving']
solve_opts = config['options']
solver_options = config['solver'].copy()
if gurobi_log is None:
gurobi_log = snakemake.log.gurobi
solver_options['logfile'] = gurobi_log
if solver_log is None:
solver_log = snakemake.log.solver
solver_name = solver_options.pop('name')
def run_lopf(n, allow_warning_status=False, fix_zero_lines=False, fix_ext_lines=False):
@@ -145,6 +144,7 @@ def solve_network(n, config=None, gurobi_log=None, opts=None):
gc.collect()
status, termination_condition = \
pypsa.opf.network_lopf_solve(n,
solver_logfile=solver_log,
solver_options=solver_options,
formulation=solve_opts['formulation'],
#free_memory={'pypsa'}
@@ -256,7 +256,7 @@ if __name__ == "__main__":
wildcards=dict(network='elec', simpl='', clusters='45', lv='1.0', opts='Co2L-3H'),
input=["networks/{network}_s{simpl}_{clusters}_lv{lv}_{opts}.nc"],
output=["results/networks/s{simpl}_{clusters}_lv{lv}_{opts}.nc"],
log=dict(gurobi="logs/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_gurobi.log",
log=dict(solver="logs/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_solver.log",
python="logs/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_python.log")
)

View File

@@ -42,7 +42,7 @@ if __name__ == "__main__":
input=dict(unprepared="networks/{network}_s{simpl}_{clusters}.nc",
optimized="results/networks/{network}_s{simpl}_{clusters}_lv{lv}_{opts}.nc"),
output=["results/networks/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_op.nc"],
log=dict(gurobi="logs/s{simpl}_{clusters}_lv{lv}_{opts}_op_gurobi.log",
log=dict(solver="logs/s{simpl}_{clusters}_lv{lv}_{opts}_op_solver.log",
python="logs/s{simpl}_{clusters}_lv{lv}_{opts}_op_python.log")
)
@@ -65,7 +65,7 @@ if __name__ == "__main__":
with memory_logger(filename=getattr(snakemake.log, 'memory', None), interval=30.) as mem:
n = prepare_network(n, solve_opts=snakemake.config['solving']['options'])
n = solve_network(n, config=snakemake.config['solving'], gurobi_log=snakemake.log.gurobi, opts=opts)
n = solve_network(n, config=snakemake.config['solving'], solver_log=snakemake.log.solver, opts=opts)
n.export_to_netcdf(snakemake.output[0])
logger.info("Maximum memory usage: {}".format(mem.mem_usage))