Mirror of https://github.com/ANL-CEEESA/MIPLearn.jl.git (synced 2025-12-07 00:48:50 -06:00)
Use UpperBoundRef and LowerBoundRef; relax int vars
@@ -10,7 +10,7 @@ struct Variable
 end

 mutable struct MIP
-    constructor
+    constructor::Any
     optimizers::Vector
     binary_variables::Vector{Variable}
     sense::Float64
@@ -43,10 +43,7 @@ function find_branching_var(
     else
         if var in keys(pool.var_history)
             varhist = pool.var_history[var]
-            hlength = min(
-                length(varhist.obj_ratio_up),
-                length(varhist.obj_ratio_down),
-            )
+            hlength = min(length(varhist.obj_ratio_up), length(varhist.obj_ratio_down))
             if hlength >= rule.min_samples
                 use_strong_branch = false
             end
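The hunk above only collapses a multi-line min(...) call, but the surrounding logic is worth spelling out: strong branching is skipped once a variable has accumulated enough pseudocost history. Below is a minimal standalone sketch of that decision rule, not taken from the repository; VarHist and min_samples mirror names in the diff, everything else is illustrative.

# Reliability-style rule: strong-branch a variable only until enough
# up/down pseudocost samples have been recorded for it.
struct VarHist
    obj_ratio_up::Vector{Float64}
    obj_ratio_down::Vector{Float64}
end

function use_strong_branching(hist::Union{VarHist,Nothing}, min_samples::Int)
    hist === nothing && return true   # no history yet: strong-branch
    hlength = min(length(hist.obj_ratio_up), length(hist.obj_ratio_down))
    return hlength < min_samples      # enough samples: fall back to pseudocosts
end

# With only two up/down samples and min_samples = 3, strong branching is still used.
@assert use_strong_branching(VarHist([0.4, 0.6], [0.3, 0.5]), 3)
@assert !use_strong_branching(VarHist([0.4, 0.6, 0.2], [0.3, 0.5, 0.1]), 3)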
@@ -16,6 +16,7 @@ Base.@kwdef mutable struct JuMPSolverData
     optimizer_factory::Any
     basis_status::Dict{ConstraintRef,MOI.BasisStatusCode} = Dict()
     bin_vars::Vector{JuMP.VariableRef} = []
+    int_vars::Vector{JuMP.VariableRef} = []
     cb_data::Any = nothing
     cname_to_constr::Dict{String,JuMP.ConstraintRef} = Dict()
     dual_values::Dict{JuMP.ConstraintRef,Float64} = Dict()
@@ -24,8 +25,6 @@ Base.@kwdef mutable struct JuMPSolverData
     reduced_costs::Vector{Float64} = []
     sensitivity_report::Any = nothing
     solution::Dict{JuMP.VariableRef,Float64} = Dict()
-    var_lb_constr::Dict{MOI.VariableIndex,ConstraintRef} = Dict()
-    var_ub_constr::Dict{MOI.VariableIndex,ConstraintRef} = Dict()
     varname_to_var::Dict{String,VariableRef} = Dict()
     x::Vector{Float64} = Float64[]
 end
@@ -104,18 +103,6 @@ function _update_solution!(data::JuMPSolverData)
                 data.basis_status = Dict()
             end
         end
-
-        # Build map between variables and bound constraints
-        if ftype == VariableRef
-            var = MOI.get(data.model, MOI.ConstraintFunction(), constr).variable
-            if stype == MOI.GreaterThan{Float64}
-                data.var_lb_constr[var] = constr
-            elseif stype == MOI.LessThan{Float64}
-                data.var_ub_constr[var] = constr
-            else
-                error("Unsupported constraint: $(ftype)-in-$(stype)")
-            end
-        end
     end
 end

@@ -124,8 +111,6 @@ function _update_solution!(data::JuMPSolverData)
         data.dual_values = Dict()
         data.sensitivity_report = nothing
         data.basis_status = Dict()
-        data.var_lb_constr = Dict()
-        data.var_ub_constr = Dict()
     end
 end

@@ -282,37 +267,44 @@ end


 function solve_lp(data::JuMPSolverData; tee::Bool = false)
-    model, bin_vars = data.model, data.bin_vars
-    for var in bin_vars
+    for var in data.bin_vars
         ~is_fixed(var) || continue
         unset_binary(var)
         set_upper_bound(var, 1.0)
         set_lower_bound(var, 0.0)
     end
+    for var in data.int_vars
+        ~is_fixed(var) || continue
+        unset_integer(var)
+    end
     # If the optimizer is Cbc, we need to replace it by Clp,
     # otherwise dual values are not available.
     # https://github.com/jump-dev/Cbc.jl/issues/50
     is_cbc = (data.optimizer_factory == Cbc.Optimizer)
     if is_cbc
-        set_optimizer(model, Clp.Optimizer)
+        set_optimizer(data.model, Clp.Optimizer)
     end
     wallclock_time = @elapsed begin
-        log = _optimize_and_capture_output!(model, tee = tee)
+        log = _optimize_and_capture_output!(data.model, tee = tee)
     end
     if is_infeasible(data)
         data.solution = Dict()
         obj_value = nothing
     else
         _update_solution!(data)
-        obj_value = objective_value(model)
+        obj_value = objective_value(data.model)
     end
     if is_cbc
-        set_optimizer(model, data.optimizer_factory)
+        set_optimizer(data.model, data.optimizer_factory)
     end
-    for var in bin_vars
+    for var in data.bin_vars
         ~is_fixed(var) || continue
         set_binary(var)
     end
+    for var in data.int_vars
+        ~is_fixed(var) || continue
+        set_integer(var)
+    end
     return miplearn.solvers.internal.LPSolveStats(
         lp_value = obj_value,
         lp_log = log,
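The rewritten solve_lp now relaxes general integer variables alongside binaries and restores them after the LP solve. A minimal sketch of the same relax-then-restore pattern in plain JuMP, outside the solver wrapper; the model and variable names here are made up for illustration.

using JuMP

model = Model()
@variable(model, x, Bin)
@variable(model, 0 <= y <= 10, Int)

bin_vars = [v for v in all_variables(model) if is_binary(v)]
int_vars = [v for v in all_variables(model) if is_integer(v)]

# Relax: binaries become continuous in [0, 1], integers keep only their bounds.
for v in bin_vars
    is_fixed(v) && continue
    unset_binary(v)
    set_lower_bound(v, 0.0)
    set_upper_bound(v, 1.0)
end
for v in int_vars
    is_fixed(v) && continue
    unset_integer(v)
end

# ... attach an LP solver and optimize the relaxation here ...

# Restore the original integrality restrictions.
for v in bin_vars
    is_fixed(v) && continue
    set_binary(v)
end
for v in int_vars
    is_fixed(v) && continue
    set_integer(v)
end

JuMP's relax_integrality(model) offers a one-call alternative that returns an undo function; the explicit loops keep control over fixed variables, matching what the diff does.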
@@ -332,6 +324,7 @@ function set_instance!(
     end
     data.model = model
     data.bin_vars = [var for var in JuMP.all_variables(model) if JuMP.is_binary(var)]
+    data.int_vars = [var for var in JuMP.all_variables(model) if JuMP.is_integer(var)]
     data.varname_to_var = Dict(JuMP.name(var) => var for var in JuMP.all_variables(model))
     JuMP.set_optimizer(model, data.optimizer_factory)
     data.cname_to_constr = Dict()
@@ -419,8 +412,8 @@ function get_variables(data::JuMPSolverData; with_static::Bool, with_sa::Bool)
             push!(sa_obj_up, delta_up + obj_coeffs[i])

             # Lower bound
-            if v.index in keys(data.var_lb_constr)
-                constr = data.var_lb_constr[v.index]
+            if has_lower_bound(v)
+                constr = LowerBoundRef(v)
                 (delta_down, delta_up) = data.sensitivity_report[constr]
                 push!(sa_lb_down, lower_bound(v) + delta_down)
                 push!(sa_lb_up, lower_bound(v) + delta_up)
@@ -430,8 +423,8 @@ function get_variables(data::JuMPSolverData; with_static::Bool, with_sa::Bool)
             end

             # Upper bound
-            if v.index in keys(data.var_ub_constr)
-                constr = data.var_ub_constr[v.index]
+            if has_upper_bound(v)
+                constr = JuMP.UpperBoundRef(v)
                 (delta_down, delta_up) = data.sensitivity_report[constr]
                 push!(sa_ub_down, upper_bound(v) + delta_down)
                 push!(sa_ub_up, upper_bound(v) + delta_up)
@@ -447,14 +440,14 @@ function get_variables(data::JuMPSolverData; with_static::Bool, with_sa::Bool)
     basis_status = []
     for v in vars
         basis_status_v = "B"
-        if v.index in keys(data.var_lb_constr)
-            constr = data.var_lb_constr[v.index]
+        if has_lower_bound(v)
+            constr = LowerBoundRef(v)
             if data.basis_status[constr] == MOI.NONBASIC
                 basis_status_v = "L"
             end
         end
-        if v.index in keys(data.var_ub_constr)
-            constr = data.var_ub_constr[v.index]
+        if has_upper_bound(v)
+            constr = UpperBoundRef(v)
             if data.basis_status[constr] == MOI.NONBASIC
                 basis_status_v = "U"
             end
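These hunks swap the hand-maintained var_lb_constr / var_ub_constr dictionaries for JuMP's own bound-constraint accessors. A small sketch of the accessor API being relied on; the variable name and the commented post-solve calls are illustrative, not part of the commit.

using JuMP

model = Model()
@variable(model, 0.0 <= x <= 1.0)

# Each finite variable bound is itself a constraint, and JuMP returns its reference:
lb_ref = has_lower_bound(x) ? LowerBoundRef(x) : nothing   # reference to x >= 0.0
ub_ref = has_upper_bound(x) ? UpperBoundRef(x) : nothing   # reference to x <= 1.0

# After an LP solve with a solver that supports sensitivity analysis and basis
# statuses, the same references can index both queries, e.g.:
#   report = lp_sensitivity_report(model)
#   (delta_down, delta_up) = report[lb_ref]
#   MOI.get(model, MOI.ConstraintBasisStatus(), ub_ref) == MOI.NONBASIC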
@@ -51,12 +51,7 @@ function runtests(optimizer_name, optimizer; large = true)
     @test round(obj, digits = 6) == 62.714777

     # Probe (up is infeasible, down is feasible)
-    BB.set_bounds!(
-        mip,
-        mip.binary_variables[1:3],
-        [1.0, 1.0, 0.0],
-        [1.0, 1.0, 1.0],
-    )
+    BB.set_bounds!(mip, mip.binary_variables[1:3], [1.0, 1.0, 0.0], [1.0, 1.0, 1.0])
     status, obj = BB.solve_relaxation!(mip)
     @test status == :Optimal
     probe_up, probe_down = BB.probe(mip, mip.binary_variables[3])
@@ -7,14 +7,25 @@ using MIPLearn
 using Cbc

 @testset "FileInstance" begin
-    @testset "Solve" begin
+    @testset "Solve (knapsack)" begin
         data = KnapsackData()
         filename = tempname()
         MIPLearn.save_data(filename, data)
         instance = FileInstance(filename, build_knapsack_model)
         solver = LearningSolver(Cbc.Optimizer)
         solve!(solver, instance)
+        h5 = Hdf5Sample("$filename.h5")
+        @test h5.get_scalar("mip_wallclock_time") > 0
+    end

+    @testset "Solve (vpm2)" begin
+        data = Dict("filename" => joinpath(@__DIR__, "../fixtures/danoint.mps.gz"))
+        build_model(data) = read_from_file(data["filename"])
+        filename = tempname()
+        MIPLearn.save_data(filename, data)
+        instance = FileInstance(filename, build_model)
+        solver = LearningSolver(optimizer_with_attributes(Cbc.Optimizer, "seconds" => 1.0))
+        solve!(solver, instance)
         h5 = Hdf5Sample("$filename.h5")
         @test h5.get_scalar("mip_wallclock_time") > 0
     end