|
|
|
@ -9,23 +9,13 @@ using TimerOutputs
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""
Mutable container for all state shared by the JuMPSolver bridge functions:
the user instance, the JuMP model, variable/constraint lookup tables, the
last primal solution and the solver time limit.

NOTE(review): this struct looks like a merge of two revisions — both the
(basename, index) tables and the flat `varname_to_var` table are present.
The `cname_to_constr` field was missing although `set_instance!` and
`get_constraint_ids` use it; it has been restored here.
"""
mutable struct JuMPSolverData
    # Maps (basename, index) => JuMP variable reference.
    basename_idx_to_var
    # Reverse map: JuMP variable reference => (basename, index).
    var_to_basename_idx
    # Maps the full variable name string => JuMP variable reference.
    varname_to_var
    # Optimizer factory passed to `JuMP.set_optimizer`; may be `nothing`.
    optimizer
    # User instance object (provides lazy-constraint callbacks during solve).
    instance
    # The underlying JuMP model.
    model
    # Variables that are binary in the original model (relaxed by solve_lp).
    bin_vars
    # Last primal solution, grouped by basename then index; `nothing` until solved.
    solution::Union{Nothing,Dict{String,Dict{String,Float64}}}
    # Time limit in seconds; `nothing` means no limit.
    time_limit::Union{Nothing, Float64}
    # Maps constraint name => JuMP constraint reference (built by set_instance!).
    cname_to_constr
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""
    varname_split(varname::String)

Split a JuMP variable name of the form `"base[idx]"` into the tuple
`(base, idx)`.  Names without an indexing bracket are returned unchanged
as `(varname, "")`.
"""
function varname_split(varname::String)
    m = match(r"([^[]*)\[(.*)\]", varname)
    # `match` returns `nothing` when the name has no `[...]` part; compare
    # with `===` (identity), the idiomatic test against `nothing`.
    if m === nothing
        return varname, ""
    end
    return m.captures[1], m.captures[2]
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@ -59,29 +49,24 @@ function optimize_and_capture_output!(model; tee::Bool=false)
|
|
|
|
|
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function solve(data::JuMPSolverData; tee::Bool=false)
|
|
|
|
|
function solve(
|
|
|
|
|
data::JuMPSolverData;
|
|
|
|
|
tee::Bool=false,
|
|
|
|
|
iteration_cb,
|
|
|
|
|
)::Dict
|
|
|
|
|
instance, model = data.instance, data.model
|
|
|
|
|
if data.time_limit != nothing
|
|
|
|
|
JuMP.set_time_limit_sec(model, data.time_limit)
|
|
|
|
|
end
|
|
|
|
|
wallclock_time = 0
|
|
|
|
|
found_lazy = []
|
|
|
|
|
log = ""
|
|
|
|
|
while true
|
|
|
|
|
log *= optimize_and_capture_output!(model, tee=tee)
|
|
|
|
|
wallclock_time += JuMP.solve_time(model)
|
|
|
|
|
violations = instance.find_violated_lazy_constraints(model)
|
|
|
|
|
if length(violations) == 0
|
|
|
|
|
if iteration_cb !== nothing
|
|
|
|
|
iteration_cb() || break
|
|
|
|
|
else
|
|
|
|
|
break
|
|
|
|
|
end
|
|
|
|
|
append!(found_lazy, violations)
|
|
|
|
|
for v in violations
|
|
|
|
|
instance.build_lazy_constraint(data.model, v)
|
|
|
|
|
end
|
|
|
|
|
end
|
|
|
|
|
update_solution!(data)
|
|
|
|
|
instance.found_violated_lazy_constraints = found_lazy
|
|
|
|
|
instance.found_violated_user_cuts = []
|
|
|
|
|
primal_bound = JuMP.objective_value(model)
|
|
|
|
|
dual_bound = JuMP.objective_bound(model)
|
|
|
|
|
if JuMP.objective_sense(model) == MOI.MIN_SENSE
|
|
|
|
@ -93,13 +78,15 @@ function solve(data::JuMPSolverData; tee::Bool=false)
|
|
|
|
|
lower_bound = primal_bound
|
|
|
|
|
upper_bound = dual_bound
|
|
|
|
|
end
|
|
|
|
|
return Dict("Lower bound" => lower_bound,
|
|
|
|
|
"Upper bound" => upper_bound,
|
|
|
|
|
"Sense" => sense,
|
|
|
|
|
"Wallclock time" => wallclock_time,
|
|
|
|
|
"Nodes" => 1,
|
|
|
|
|
"Log" => log,
|
|
|
|
|
"Warm start value" => nothing)
|
|
|
|
|
return Dict(
|
|
|
|
|
"Lower bound" => lower_bound,
|
|
|
|
|
"Upper bound" => upper_bound,
|
|
|
|
|
"Sense" => sense,
|
|
|
|
|
"Wallclock time" => wallclock_time,
|
|
|
|
|
"Nodes" => 1,
|
|
|
|
|
"MIP log" => log,
|
|
|
|
|
"Warm start value" => nothing,
|
|
|
|
|
)
|
|
|
|
|
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@ -116,95 +103,270 @@ function solve_lp(data::JuMPSolverData; tee::Bool=false)
|
|
|
|
|
for var in bin_vars
|
|
|
|
|
JuMP.set_binary(var)
|
|
|
|
|
end
|
|
|
|
|
return Dict("Optimal value" => obj_value,
|
|
|
|
|
"Log" => log)
|
|
|
|
|
return Dict(
|
|
|
|
|
"LP value" => obj_value,
|
|
|
|
|
"LP log" => log,
|
|
|
|
|
)
|
|
|
|
|
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""
Read the current primal solution from `data.model` and store it in
`data.solution`, keyed first by variable basename, then by index string.
Variables not present in `data.var_to_basename_idx` are skipped.
"""
function update_solution!(data::JuMPSolverData)
    lookup = data.var_to_basename_idx
    result = Dict{String,Dict{String,Float64}}()
    for v in JuMP.all_variables(data.model)
        haskey(lookup, v) || continue
        basename, idx = lookup[v]
        # Lazily create the per-basename sub-dictionary.
        inner = get!(result, basename, Dict{String,Float64}())
        inner[idx] = JuMP.value(v)
    end
    data.solution = result
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""
Return a `Dict` mapping each variable basename to the list of its index
strings, covering every model variable present in `data.var_to_basename_idx`.
"""
function get_variables(data::JuMPSolverData)
    var_to_basename_idx, model = data.var_to_basename_idx, data.model
    variables = Dict()
    for var in JuMP.all_variables(model)
        # Skip variables that were not indexed (e.g. unnamed auxiliaries).
        var in keys(var_to_basename_idx) || continue
        basename, idx = var_to_basename_idx[var]
        if !haskey(variables, basename)
            variables[basename] = []
        end
        push!(variables[basename], idx)
    end
    return variables
    # Removed: unreachable merge-residue statements that reassigned
    # `data.solution` after this `return`.
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""
Attach a problem `instance` and its JuMP `model` to `data`, rebuilding all
lookup tables (both variable-name maps, the binary-variable list and the
named-constraint map) and attaching the configured optimizer, if any.
"""
function set_instance!(data::JuMPSolverData, instance, model)
    data.instance = instance
    data.model = model
    all_vars = JuMP.all_variables(model)
    # Variable lookup tables, both directions, keyed by split name parts.
    data.var_to_basename_idx = Dict(var => varname_split(JuMP.name(var))
                                    for var in all_vars)
    data.basename_idx_to_var = Dict(varname_split(JuMP.name(var)) => var
                                    for var in all_vars)
    # Flat full-name lookup table.
    data.varname_to_var = Dict(JuMP.name(var) => var for var in all_vars)
    # Remember which variables are binary so solve_lp can relax and restore them.
    data.bin_vars = [var for var in all_vars if JuMP.is_binary(var)]
    # NOTE(review): merge residue previously rebuilt the tables above only
    # when an optimizer was configured; they are needed unconditionally.
    # Only the optimizer attachment itself is conditional.
    if data.optimizer !== nothing
        JuMP.set_optimizer(model, data.optimizer)
    end
    # Constraint lookup table: only named constraints are indexed.
    data.cname_to_constr = Dict()
    for (ftype, stype) in JuMP.list_of_constraint_types(model)
        for constr in JuMP.all_constraints(model, ftype, stype)
            name = JuMP.name(constr)
            length(name) > 0 || continue
            data.cname_to_constr[name] = constr
        end
    end
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""
Fix model variables to the values given in `solution`, a dict mapping
variable name => value.  Entries whose value is `nothing` are skipped.
Uses `force=true` so existing bounds are overridden.
"""
function fix!(data::JuMPSolverData, solution)
    # NOTE(review): the original region interleaved two incompatible loop
    # versions from a bad merge and was missing an `end`; rebuilt as the
    # single name-keyed loop, with `count` incremented so the log is correct.
    count = 0
    for (varname, value) in solution
        value !== nothing || continue
        var = data.varname_to_var[varname]
        JuMP.fix(var, value, force=true)
        count += 1
    end
    @info "Fixing $count variables"
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""
Set MIP warm-start (initial) values for the variables named in `solution`,
a dict mapping variable name => value.  Entries whose value is `nothing`
are skipped.
"""
function set_warm_start!(data::JuMPSolverData, solution)
    # NOTE(review): the original region interleaved two incompatible loop
    # versions from a bad merge; rebuilt as the single name-keyed loop,
    # with `count` incremented so the log message is correct.
    count = 0
    for (varname, value) in solution
        value !== nothing || continue
        var = data.varname_to_var[varname]
        JuMP.set_start_value(var, value)
        count += 1
    end
    @info "Setting warm start values for $count variables"
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Full names of every variable in the model, in JuMP's variable order.
function get_variable_names(data::JuMPSolverData)
    return map(JuMP.name, JuMP.all_variables(data.model))
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# True when the last solve terminated with status MOI.INFEASIBLE.
function is_infeasible(data::JuMPSolverData)
    status = JuMP.termination_status(data.model)
    return status == MOI.INFEASIBLE
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Identifiers (names) of every registered named constraint.
function get_constraint_ids(data::JuMPSolverData)
    return collect(keys(data.cname_to_constr))
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Look up the constraint by name and dispatch to the type-specific
# right-hand-side accessor.
function get_constraint_rhs(data::JuMPSolverData, cname)
    return get_constraint_rhs(data.cname_to_constr[cname])
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Look up the constraint by name and dispatch to the type-specific
# left-hand-side accessor.
function get_constraint_lhs(data::JuMPSolverData, cname)
    return get_constraint_lhs(data.cname_to_constr[cname])
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Look up the constraint by name and dispatch to the type-specific
# sense accessor.
function get_constraint_sense(data::JuMPSolverData, cname)
    return get_constraint_sense(data.cname_to_constr[cname])
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Constraints: ScalarAffineFunction, LessThan
|
|
|
|
|
# -------------------------------------------------------------------------
|
|
|
|
|
# Right-hand side of a `<=` scalar affine constraint: the `upper` field of
# its MOI `LessThan` set.
function get_constraint_rhs(
    constr::ConstraintRef{
        Model,
        MathOptInterface.ConstraintIndex{
            MathOptInterface.ScalarAffineFunction{T},
            MathOptInterface.LessThan{T},
        },
        ScalarShape,
    },
)::T where T
    backend = constr.model.moi_backend
    moi_set = MOI.get(backend, MOI.ConstraintSet(), constr.index)
    return moi_set.upper
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Map each variable name appearing in `constr`'s scalar affine function to
# its coefficient.
function _terms_dict(constr)
    backend = constr.model.moi_backend
    terms = MOI.get(backend, MOI.ConstraintFunction(), constr.index).terms
    # Resolve a term's variable index to its registered name.
    varname(t) = MOI.get(backend, MOI.VariableName(), t.variable_index)
    return Dict(varname(t) => t.coefficient for t in terms)
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Left-hand side of a `<=` scalar affine constraint, as a Dict mapping
# variable name => coefficient (delegates to `_terms_dict`).
function get_constraint_lhs(
    constr::ConstraintRef{
        Model,
        MathOptInterface.ConstraintIndex{
            MathOptInterface.ScalarAffineFunction{T},
            MathOptInterface.LessThan{T},
        },
        ScalarShape,
    },
)::Dict{String, T} where T
    return _terms_dict(constr)
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Sense code for a `<=` scalar affine constraint.
function get_constraint_sense(
    constr::ConstraintRef{
        Model,
        MathOptInterface.ConstraintIndex{
            MathOptInterface.ScalarAffineFunction{T},
            MathOptInterface.LessThan{T},
        },
        ScalarShape,
    },
)::String where T
    # "<" is this module's string encoding for less-than-or-equal.
    return "<"
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Constraints: ScalarAffineFunction, GreaterThan
|
|
|
|
|
# -------------------------------------------------------------------------
|
|
|
|
|
# Right-hand side of a `>=` scalar affine constraint: the `lower` field of
# its MOI `GreaterThan` set.
function get_constraint_rhs(
    constr::ConstraintRef{
        Model,
        MathOptInterface.ConstraintIndex{
            MathOptInterface.ScalarAffineFunction{T},
            MathOptInterface.GreaterThan{T},
        },
        ScalarShape,
    },
)::T where T
    backend = constr.model.moi_backend
    moi_set = MOI.get(backend, MOI.ConstraintSet(), constr.index)
    return moi_set.lower
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Left-hand side of a `>=` scalar affine constraint, as a Dict mapping
# variable name => coefficient (delegates to `_terms_dict`).
function get_constraint_lhs(
    constr::ConstraintRef{
        Model,
        MathOptInterface.ConstraintIndex{
            MathOptInterface.ScalarAffineFunction{T},
            MathOptInterface.GreaterThan{T},
        },
        ScalarShape,
    },
)::Dict{String, T} where T
    return _terms_dict(constr)
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Sense code for a `>=` scalar affine constraint.
function get_constraint_sense(
    constr::ConstraintRef{
        Model,
        MathOptInterface.ConstraintIndex{
            MathOptInterface.ScalarAffineFunction{T},
            MathOptInterface.GreaterThan{T},
        },
        ScalarShape,
    },
)::String where T
    # ">" is this module's string encoding for greater-than-or-equal.
    return ">"
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Constraints: ScalarAffineFunction, EqualTo
|
|
|
|
|
# -------------------------------------------------------------------------
|
|
|
|
|
# Right-hand side of an `==` scalar affine constraint: the `value` field of
# its MOI `EqualTo` set.
function get_constraint_rhs(
    constr::ConstraintRef{
        Model,
        MathOptInterface.ConstraintIndex{
            MathOptInterface.ScalarAffineFunction{T},
            MathOptInterface.EqualTo{T},
        },
        ScalarShape,
    },
)::T where T
    backend = constr.model.moi_backend
    moi_set = MOI.get(backend, MOI.ConstraintSet(), constr.index)
    return moi_set.value
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Left-hand side of an `==` scalar affine constraint, as a Dict mapping
# variable name => coefficient (delegates to `_terms_dict`).
function get_constraint_lhs(
    constr::ConstraintRef{
        Model,
        MathOptInterface.ConstraintIndex{
            MathOptInterface.ScalarAffineFunction{T},
            MathOptInterface.EqualTo{T},
        },
        ScalarShape,
    },
)::Dict{String, T} where T
    return _terms_dict(constr)
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Sense code for an `==` scalar affine constraint.
function get_constraint_sense(
    constr::ConstraintRef{
        Model,
        MathOptInterface.ConstraintIndex{
            MathOptInterface.ScalarAffineFunction{T},
            MathOptInterface.EqualTo{T},
        },
        ScalarShape,
    },
)::String where T
    # "=" is this module's string encoding for equality.
    return "="
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@pydef mutable struct JuMPSolver <: miplearn.solvers.internal.InternalSolver
|
|
|
|
|
function __init__(self; optimizer)
|
|
|
|
|
self.data = JuMPSolverData(nothing, # basename_idx_to_var
|
|
|
|
|
nothing, # var_to_basename_idx
|
|
|
|
|
optimizer,
|
|
|
|
|
nothing, # instance
|
|
|
|
|
nothing, # model
|
|
|
|
|
nothing, # bin_vars
|
|
|
|
|
nothing, # solution
|
|
|
|
|
nothing, # time limit
|
|
|
|
|
)
|
|
|
|
|
self.data = JuMPSolverData(
|
|
|
|
|
nothing, # varname_to_var
|
|
|
|
|
optimizer,
|
|
|
|
|
nothing, # instance
|
|
|
|
|
nothing, # model
|
|
|
|
|
nothing, # bin_vars
|
|
|
|
|
nothing, # solution
|
|
|
|
|
nothing, # cname_to_constr
|
|
|
|
|
)
|
|
|
|
|
end
|
|
|
|
|
|
|
|
|
|
set_warm_start(self, solution) =
|
|
|
|
@ -216,8 +378,17 @@ end
|
|
|
|
|
set_instance(self, instance, model) =
|
|
|
|
|
set_instance!(self.data, instance, model)
|
|
|
|
|
|
|
|
|
|
solve(self; tee=false) =
|
|
|
|
|
solve(self.data, tee=tee)
|
|
|
|
|
solve(
|
|
|
|
|
self;
|
|
|
|
|
tee=false,
|
|
|
|
|
iteration_cb,
|
|
|
|
|
lazy_cb,
|
|
|
|
|
user_cut_cb,
|
|
|
|
|
) = solve(
|
|
|
|
|
self.data,
|
|
|
|
|
tee=tee,
|
|
|
|
|
iteration_cb=iteration_cb,
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
solve_lp(self; tee=false) =
|
|
|
|
|
solve_lp(self.data, tee=tee)
|
|
|
|
@ -228,26 +399,40 @@ end
|
|
|
|
|
get_variables(self) =
|
|
|
|
|
get_variables(self.data)
|
|
|
|
|
|
|
|
|
|
set_time_limit(self, time_limit) =
|
|
|
|
|
self.data.time_limit = time_limit
|
|
|
|
|
|
|
|
|
|
set_gap_tolerance(self, gap_tolerance) =
|
|
|
|
|
@warn "JuMPSolver: set_gap_tolerance not implemented"
|
|
|
|
|
|
|
|
|
|
set_node_limit(self) =
|
|
|
|
|
@warn "JuMPSolver: set_node_limit not implemented"
|
|
|
|
|
|
|
|
|
|
set_threads(self, threads) =
|
|
|
|
|
@warn "JuMPSolver: set_threads not implemented"
|
|
|
|
|
|
|
|
|
|
set_branching_priorities(self, priorities) =
|
|
|
|
|
@warn "JuMPSolver: set_branching_priorities not implemented"
|
|
|
|
|
|
|
|
|
|
add_constraint(self, constraint) = nothing
|
|
|
|
|
add_constraint(self, constraint) =
|
|
|
|
|
nothing
|
|
|
|
|
|
|
|
|
|
get_variable_names(self) =
|
|
|
|
|
get_variable_names(self.data)
|
|
|
|
|
|
|
|
|
|
is_infeasible(self) =
|
|
|
|
|
is_infeasible(self.data)
|
|
|
|
|
|
|
|
|
|
get_constraint_ids(self) =
|
|
|
|
|
get_constraint_ids(self.data)
|
|
|
|
|
|
|
|
|
|
get_constraint_rhs(self, cname) =
|
|
|
|
|
get_constraint_rhs(self.data, cname)
|
|
|
|
|
|
|
|
|
|
get_constraint_lhs(self, cname) =
|
|
|
|
|
get_constraint_lhs(self.data, cname)
|
|
|
|
|
|
|
|
|
|
get_constraint_sense(self, cname) =
|
|
|
|
|
get_constraint_sense(self.data, cname)
|
|
|
|
|
|
|
|
|
|
clear_warm_start(self) =
|
|
|
|
|
error("JuMPSolver.clear_warm_start should never be called")
|
|
|
|
|
clone(self) = self
|
|
|
|
|
|
|
|
|
|
add_cut(self) = error("not implemented")
|
|
|
|
|
extract_constraint(self) = error("not implemented")
|
|
|
|
|
is_constraint_satisfied(self) = error("not implemented")
|
|
|
|
|
set_constraint_sense(self) = error("not implemented")
|
|
|
|
|
relax(self) = error("not implemented")
|
|
|
|
|
get_inequality_slacks(self) = error("not implemented")
|
|
|
|
|
get_dual(self) = error("not implemented")
|
|
|
|
|
get_sense(self) = error("not implemented")
|
|
|
|
|
end
|
|
|
|
|
|
|
|
|
|
# Public API of this module (duplicate export statement from the merge
# residue removed).
export JuMPSolver, solve!, fit!, add!
|
|
|
|
|