"""
    optimize_and_capture_output!(model; tee=false)

Optimize a JuMP model while capturing the solver log, then return that log
as a `String`. If `tee=true`, also echo the solver log to the standard
output as the optimization takes place.
"""
function optimize_and_capture_output!(model; tee::Bool=false)
    original_stdout = stdout
    rd, wr = redirect_stdout()
    # Drain the pipe on a background task so the solver never blocks on a
    # full pipe buffer. The task terminates when the write end is closed
    # (EOF), which guarantees no trailing output is lost — unlike polling
    # `isopen` after each read, which can drop the final chunk.
    reader = @async begin
        captured = IOBuffer()
        while !eof(rd)
            chunk = String(readavailable(rd))
            write(captured, chunk)
            if tee
                print(original_stdout, chunk)
                flush(original_stdout)
            end
        end
        String(take!(captured))
    end
    try
        JuMP.unset_silent(model)
        JuMP.optimize!(model)
    finally
        # Restore stdout even if the solver throws, then close the WRITE end
        # so the reader task sees EOF and returns the accumulated log. This
        # replaces the previous `sleep(1)` + `close(rd)` race-prone shutdown.
        redirect_stdout(original_stdout)
        close(wr)
    end
    return fetch(reader)
end


"""
    solve(data::JuMPSolverData; tee=false)

Solve the MIP stored in `data`, re-optimizing until no violated lazy
constraints remain. Violated constraints reported by the instance are added
to the model between iterations. Returns a `Dict` with lower/upper bounds,
objective sense, total wallclock time, node count and the captured solver
log. If `tee=true`, the solver log is echoed to standard output.
"""
function solve(data::JuMPSolverData; tee::Bool=false)
    instance, model = data.instance, data.model
    wallclock_time = 0.0  # JuMP.solve_time returns Float64; keep type stable
    found_lazy = []
    log = ""
    while true
        log *= optimize_and_capture_output!(model, tee=tee)
        wallclock_time += JuMP.solve_time(model)
        violations = instance.find_violated_lazy_constraints(model)
        # Stop once the current solution violates no lazy constraints.
        isempty(violations) && break
        append!(found_lazy, violations)
        for v in violations
            instance.build_lazy_constraint(data.model, v)
        end
    end
    update_solution!(data)
    instance.found_violated_lazy_constraints = found_lazy
    instance.found_violated_user_cuts = []
    primal_bound = JuMP.objective_value(model)
    dual_bound = JuMP.objective_bound(model)
    if JuMP.objective_sense(model) == MOI.MIN_SENSE
        sense = "min"
        lower_bound = dual_bound
        upper_bound = primal_bound
    else
        sense = "max"
        lower_bound = primal_bound
        upper_bound = dual_bound
    end
    return Dict("Lower bound" => lower_bound,
                "Upper bound" => upper_bound,
                "Sense" => sense,
                "Wallclock time" => wallclock_time,
                "Nodes" => 1,
                "Log" => log,
                "Warm start value" => nothing)
end
"""
    solve_lp(data::JuMPSolverData; tee=false)

Solve the linear relaxation of the model stored in `data`: temporarily relax
every binary variable to the interval [0, 1], optimize, then restore the
binary constraints. Returns a `Dict` with the optimal value and the captured
solver log. If `tee=true`, the solver log is echoed to standard output.
"""
function solve_lp(data::JuMPSolverData; tee::Bool=false)
    model, bin_vars = data.model, data.bin_vars
    for var in bin_vars
        JuMP.unset_binary(var)
        JuMP.set_upper_bound(var, 1.0)
        JuMP.set_lower_bound(var, 0.0)
    end
    local log, obj_value
    try
        log = optimize_and_capture_output!(model, tee=tee)
        update_solution!(data)
        obj_value = JuMP.objective_value(model)
    finally
        # Restore integrality even if the LP solve throws, so the model is
        # left in a consistent state for subsequent MIP solves.
        for var in bin_vars
            JuMP.set_binary(var)
        end
    end
    return Dict("Optimal value" => obj_value,
                "Log" => log)
end


"""
    update_solution!(data::JuMPSolverData)

Read the current primal solution from the JuMP model and store it in
`data.solution` as a nested dict `basename => (index => value)`. Variables
without an entry in `data.var_to_basename_idx` are skipped.
"""
function update_solution!(data::JuMPSolverData)
    var_to_basename_idx, model = data.var_to_basename_idx, data.model
    solution = Dict{String,Dict{String,Float64}}()
    for var in JuMP.all_variables(model)
        haskey(var_to_basename_idx, var) || continue
        basename, idx = var_to_basename_idx[var]
        # get! creates the inner dict on first sight of this basename.
        subsolution = get!(solution, basename, Dict{String,Float64}())
        subsolution[idx] = JuMP.value(var)
    end
    data.solution = solution
end
"""
    set_instance!(data::JuMPSolverData, instance, model)

Attach a problem instance and its JuMP model to `data`, building the
variable-name lookup tables (in both directions) and the list of binary
variables. If an optimizer factory was provided at construction time,
attach it to the model.
"""
function set_instance!(data::JuMPSolverData, instance, model)
    data.instance = instance
    data.model = model
    all_vars = JuMP.all_variables(model)  # hoisted: was queried three times
    data.var_to_basename_idx = Dict(var => varname_split(JuMP.name(var))
                                    for var in all_vars)
    data.basename_idx_to_var = Dict(varname_split(JuMP.name(var)) => var
                                    for var in all_vars)
    data.bin_vars = [var for var in all_vars if JuMP.is_binary(var)]
    # Use !== for nothing checks (identity, not ==, which can be overloaded).
    if data.optimizer !== nothing
        JuMP.set_optimizer(model, data.optimizer)
    end
end


"""
    fix!(data::JuMPSolverData, solution)

Fix variables to the values given in `solution`, a nested dict
`basename => (index => value)`. Entries whose value is `nothing` are
skipped. Logs the number of variables fixed.
"""
function fix!(data::JuMPSolverData, solution)
    count = 0
    for (basename, subsolution) in solution
        for (idx, value) in subsolution
            value !== nothing || continue
            var = data.basename_idx_to_var[basename, idx]
            JuMP.fix(var, value, force=true)
            count += 1
        end
    end
    @info "Fixing $count variables"
end


"""
    set_warm_start!(data::JuMPSolverData, solution)

Set MIP start values for the variables given in `solution`, a nested dict
`basename => (index => value)`. Entries whose value is `nothing` are
skipped. Logs the number of start values set.
"""
function set_warm_start!(data::JuMPSolverData, solution)
    count = 0
    for (basename, subsolution) in solution
        for (idx, value) in subsolution
            value !== nothing || continue
            var = data.basename_idx_to_var[basename, idx]
            JuMP.set_start_value(var, value)
            count += 1
        end
    end
    @info "Setting warm start values for $count variables"
end


# Python-visible wrapper (via PyCall's @pydef) around the JuMPSolverData
# helpers above. Implements miplearn's InternalSolver interface; each method
# simply delegates to the corresponding Julia function.
@pydef mutable struct JuMPSolver <: InternalSolver
    function __init__(self; optimizer)
        self.data = JuMPSolverData(nothing,   # basename_idx_to_var
                                   nothing,   # var_to_basename_idx
                                   optimizer,
                                   nothing,   # instance
                                   nothing,   # model
                                   nothing,   # bin_vars
                                   nothing,   # solution
                                   )
    end

    set_warm_start(self, solution) =
        set_warm_start!(self.data, solution)

    fix(self, solution) =
        fix!(self.data, solution)

    set_instance(self, instance, model) =
        set_instance!(self.data, instance, model)

    solve(self; tee=false) =
        solve(self.data, tee=tee)

    solve_lp(self; tee=false) =
        solve_lp(self.data, tee=tee)

    get_solution(self) =
        self.data.solution

    set_time_limit(self, time_limit) =
        JuMP.set_time_limit_sec(self.data.model, time_limit)

    set_gap_tolerance(self, gap_tolerance) =
        @warn "JuMPSolver: set_gap_tolerance not implemented"

    set_node_limit(self) =
        @warn "JuMPSolver: set_node_limit not implemented"

    set_threads(self, threads) =
        @warn "JuMPSolver: set_threads not implemented"

    set_branching_priorities(self, priorities) =
        @warn "JuMPSolver: set_branching_priorities not implemented"

    add_constraint(self, constraint) =
        error("JuMPSolver.add_constraint should never be called")

    clear_warm_start(self) =
        error("JuMPSolver.clear_warm_start should never be called")

end
round(stats["Optimal value"], digits=3) == 1287.923 + @test length(stats["Log"]) > 100 solution = solver.get_solution() @test round(solution["x"]["1"], digits=3) == 1.000