Mirror of https://github.com/ANL-CEEESA/MIPLearn.jl.git (synced 2025-12-08 09:18:52 -06:00)

Compare commits: dev...feature/re

2 commits: cbedb02a9f, 20d6570ea6
@@ -1,7 +1,7 @@
 name = "MIPLearn"
 uuid = "2b1277c3-b477-4c49-a15e-7ba350325c68"
 authors = ["Alinson S Xavier <git@axavier.org>"]
-version = "0.4.2"
+version = "0.4.0"

 [deps]
 Conda = "8f4d0f93-b110-5947-807f-2305c1781a2d"
@@ -41,5 +41,3 @@ Requires = "1"
 Statistics = "1"
 TimerOutputs = "0.5"
 julia = "1"
-PrecompileTools = "1"
-SCIP = "0.12"
deps/build.jl (vendored, 2 changes)
@@ -5,7 +5,7 @@ function install_miplearn()
     Conda.update()
     pip = joinpath(dirname(pyimport("sys").executable), "pip")
     isfile(pip) || error("$pip: invalid path")
-    run(`$pip install miplearn==0.4.4`)
+    run(`$pip install miplearn==0.4.0`)
 end

 install_miplearn()
@@ -6,7 +6,7 @@ using Printf

 function print_progress_header()
     @printf(
-        "%8s %9s %9s %13s %13s %9s %9s %13s %9s %-24s %9s %9s %6s %6s",
+        "%8s %9s %9s %13s %13s %9s %6s %13s %6s %-24s %9s %9s %6s %6s",
         "time",
         "processed",
         "pending",
@@ -31,9 +31,9 @@ function print_progress(
     node::Node;
     time_elapsed::Float64,
     print_interval::Int,
-    primal_update::Bool,
+    primal_update::Bool
 )::Nothing
-    if (pool.processed % print_interval == 0) || isempty(pool.pending) || primal_update
+    if (pool.processed % print_interval == 0) || isempty(pool.pending)
         if isempty(node.branch_vars)
             branch_var_name = "---"
             branch_lb = "---"
@@ -46,7 +46,7 @@ function print_progress(
            branch_ub = @sprintf("%9.2f", last(node.branch_ub))
        end
        @printf(
-            "%8.2f %9d %9d %13.6e %13.6e %9.2e %9d %13.6e %9s %-24s %9s %9s %6d %6d",
+            "%8.2f %9d %9d %13.6e %13.6e %9.2e %6d %13.6e %6s %-24s %9s %9s %6d %6d",
            time_elapsed,
            pool.processed,
            length(pool.processing) + length(pool.pending),
@@ -134,11 +134,7 @@ function _get_int_variables(
                 var_ub = constr.upper
                 MOI.delete(optimizer, _upper_bound_index(var))
             end
-            MOI.add_constraint(
-                optimizer,
-                MOI.VariableIndex(var.index),
-                MOI.Interval(var_lb, var_ub),
-            )
+            MOI.add_constraint(optimizer, var, MOI.Interval(var_lb, var_ub))
         end
         push!(vars, var)
         push!(lb, var_lb)
@@ -8,10 +8,10 @@ import Base.Threads: threadid

 function take(
     pool::NodePool;
-    suggestions::Array{Node} = [],
+    suggestions::Array{Node}=[],
     time_remaining::Float64,
     gap_limit::Float64,
-    node_limit::Int,
+    node_limit::Int
 )::Union{Symbol,Node}
     t = threadid()
     lock(pool.lock) do
@@ -53,8 +53,8 @@ function offer(
     pool::NodePool;
     parent_node::Union{Nothing,Node},
     child_nodes::Vector{Node},
-    time_elapsed::Float64 = 0.0,
-    print_interval::Int = 100,
+    time_elapsed::Float64=0.0,
+    print_interval::Int=100
 )::Nothing
     lock(pool.lock) do
         primal_update = false
@@ -101,15 +101,16 @@ function offer(
                 # Update branching variable history
                 branch_var = child_nodes[1].branch_vars[end]
                 offset = findfirst(isequal(branch_var), parent_node.fractional_variables)
+                if offset !== nothing
                 x = parent_node.fractional_values[offset]
                 obj_change_up = child_nodes[1].obj - parent_node.obj
                 obj_change_down = child_nodes[2].obj - parent_node.obj
                 _update_var_history(
-                    pool = pool,
-                    var = branch_var,
-                    x = x,
-                    obj_change_down = obj_change_down,
-                    obj_change_up = obj_change_up,
+                    pool=pool,
+                    var=branch_var,
+                    x=x,
+                    obj_change_down=obj_change_down,
+                    obj_change_up=obj_change_up,
                 )
                 # Update global history
                 pool.history.avg_pseudocost_up =
@@ -117,14 +118,15 @@ function offer(
             pool.history.avg_pseudocost_down =
                 mean(vh.pseudocost_down for vh in values(pool.var_history))
         end
+        end
     end

     for node in child_nodes
         print_progress(
             pool,
             node,
-            time_elapsed = time_elapsed,
-            print_interval = print_interval,
-            primal_update = isfinite(node.obj) && isempty(node.fractional_variables),
+            time_elapsed=time_elapsed,
+            print_interval=print_interval,
+            primal_update=isfinite(node.obj) && isempty(node.fractional_variables),
         )
     end
 end
@@ -136,7 +138,7 @@ function _update_var_history(;
     var::Variable,
     x::Float64,
     obj_change_down::Float64,
-    obj_change_up::Float64,
+    obj_change_up::Float64
 )::Nothing
     # Create new history entry
     if var ∉ keys(pool.var_history)
@@ -10,16 +10,22 @@ import ..H5File

 function solve!(
     mip::MIP;
-    time_limit::Float64 = Inf,
-    node_limit::Int = typemax(Int),
-    gap_limit::Float64 = 1e-4,
-    print_interval::Int = 5,
-    initial_primal_bound::Float64 = Inf,
-    branch_rule::VariableBranchingRule = ReliabilityBranching(),
-    enable_plunging = true,
-)::NodePool
+    time_limit::Float64=Inf,
+    node_limit::Int=typemax(Int),
+    gap_limit::Float64=1e-4,
+    print_interval::Int=5,
+    initial_primal_bound::Float64=Inf,
+    branch_rule::VariableBranchingRule=ReliabilityBranching(),
+    enable_plunging=true,
+    replay=nothing,
+)::Tuple{NodePool,ReplayInfo}
+
+    if replay === nothing
+        replay = ReplayInfo()
+    end
+
     time_initial = time()
-    pool = NodePool(mip = mip)
+    pool = NodePool(mip=mip, next_index=replay.next_index)
     pool.primal_bound = initial_primal_bound

     root_node = _create_node(mip)
@@ -34,9 +40,9 @@ function solve!(

     offer(
         pool,
-        parent_node = nothing,
-        child_nodes = [root_node],
-        print_interval = print_interval,
+        parent_node=nothing,
+        child_nodes=[root_node],
+        print_interval=print_interval,
     )
     @threads for t = 1:nthreads()
         child_one, child_zero, suggestions = nothing, nothing, Node[]
@@ -47,10 +53,10 @@ function solve!(
         end
         node = take(
             pool,
-            suggestions = suggestions,
-            time_remaining = time_limit - time_elapsed,
-            node_limit = node_limit,
-            gap_limit = gap_limit,
+            suggestions=suggestions,
+            time_remaining=time_limit - time_elapsed,
+            node_limit=node_limit,
+            gap_limit=gap_limit,
         )
         if node == :END
             break
@@ -64,10 +70,25 @@ function solve!(
         @assert status == :Optimal
         _unset_node_bounds(node)

+        if node.index in keys(replay.node_decisions)
+            decision = replay.node_decisions[node.index]
+            ids = decision.ids
+            branch_var = decision.branch_var
+            var_value = decision.var_value
+        else
         # Find branching variable
         ids = generate_indices(pool, 2)
         branch_var = find_branching_var(branch_rule, node, pool)
+
+            # Query current fractional value
+            offset = findfirst(isequal(branch_var), node.fractional_variables)
+            var_value = node.fractional_values[offset]
+
+            # Update replay
+            decision = ReplayNodeDecision(; branch_var, var_value, ids)
+            replay.node_decisions[node.index] = decision
+        end
+
         # Find current variable lower and upper bounds
         offset = findfirst(isequal(branch_var), mip.int_vars)
         var_lb = mip.int_vars_lb[offset]
@@ -79,46 +100,43 @@ function solve!(
                 end
             end

-            # Query current fractional value
-            offset = findfirst(isequal(branch_var), node.fractional_variables)
-            var_value = node.fractional_values[offset]
-
             child_zero = _create_node(
                 mip,
-                index = ids[2],
-                parent = node,
-                branch_var = branch_var,
-                branch_var_lb = var_lb,
-                branch_var_ub = floor(var_value),
+                index=ids[2],
+                parent=node,
+                branch_var=branch_var,
+                branch_var_lb=var_lb,
+                branch_var_ub=floor(var_value),
             )
             child_one = _create_node(
                 mip,
-                index = ids[1],
-                parent = node,
-                branch_var = branch_var,
-                branch_var_lb = ceil(var_value),
-                branch_var_ub = var_ub,
+                index=ids[1],
+                parent=node,
+                branch_var=branch_var,
+                branch_var_lb=ceil(var_value),
+                branch_var_ub=var_ub,
             )
             offer(
                 pool,
-                parent_node = node,
-                child_nodes = [child_one, child_zero],
-                time_elapsed = time_elapsed,
-                print_interval = print_interval,
+                parent_node=node,
+                child_nodes=[child_one, child_zero],
+                time_elapsed=time_elapsed,
+                print_interval=print_interval,
             )
             end
         end
     end
-    return pool
+    replay.next_index = pool.next_index
+    return pool, replay
 end

 function _create_node(
     mip;
-    index::Int = 0,
-    parent::Union{Nothing,Node} = nothing,
-    branch_var::Union{Nothing,Variable} = nothing,
-    branch_var_lb::Union{Nothing,Float64} = nothing,
-    branch_var_ub::Union{Nothing,Float64} = nothing,
+    index::Int=0,
+    parent::Union{Nothing,Node}=nothing,
+    branch_var::Union{Nothing,Variable}=nothing,
+    branch_var_lb::Union{Nothing,Float64}=nothing,
+    branch_var_ub::Union{Nothing,Float64}=nothing
 )::Node
     if parent === nothing
         branch_vars = Variable[]
@@ -72,3 +72,14 @@ Base.@kwdef mutable struct NodePool
     history::History = History()
     var_history::Dict{Variable,VariableHistory} = Dict()
 end
+
+Base.@kwdef struct ReplayNodeDecision
+    branch_var
+    var_value
+    ids
+end
+
+Base.@kwdef mutable struct ReplayInfo
+    node_decisions::Dict{Int,ReplayNodeDecision} = Dict()
+    next_index::Int = 1
+end
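The two structs above back the new `replay` keyword of `BB.solve!`: each processed node records which variable was branched on, the fractional value used, and the generated child indices, keyed by node index, so a later run can reproduce the same tree. A minimal sketch of the intended round trip, assuming the `BB.init`/`BB.read!` helpers used elsewhere in this diff and hypothetical instance file names:

    using MIPLearn.BB, HiGHS

    # First solve: record the branching decisions of a reference instance.
    mip = BB.init(optimizer_with_attributes(HiGHS.Optimizer))
    BB.read!(mip, "reference.mps.gz")                # hypothetical file name
    pool, replay = BB.solve!(mip, node_limit=1_000)  # solve! now returns (pool, replay)

    # Second solve: replay those decisions on a perturbed instance.
    mip2 = BB.init(optimizer_with_attributes(HiGHS.Optimizer))
    BB.read!(mip2, "perturbed.mps.gz")               # hypothetical file name
    pool2, _ = BB.solve!(mip2, replay=deepcopy(replay))

The `deepcopy` mirrors the usage in `test_bb_replay` further down this diff, so the recorded decisions are not mutated by the second solve.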
@@ -8,8 +8,6 @@ using HiGHS
 using Random
 using DataStructures

-import ..H5FieldsExtractor
-
 global ExpertDualGmiComponent = PyNULL()
 global KnnDualGmiComponent = PyNULL()

@@ -26,10 +24,8 @@ function collect_gmi_dual(
     optimizer,
     max_rounds = 10,
     max_cuts_per_round = 500,
-    time_limit = 3_600,
 )
     reset_timer!()
-    initial_time = time()

     @timeit "Read H5" begin
         h5_filename = replace(mps_filename, ".mps.gz" => ".h5")
@@ -209,12 +205,6 @@ function collect_gmi_dual(
                 sum(sp[i] * gmi_exps[i] for (i, c) in enumerate(constrs) if useful[i]),
             )
         end
-
-        elapsed_time = time() - initial_time
-        if elapsed_time > time_limit
-            @info "Time limit exceeded. Stopping."
-            break
-        end
     end

     @timeit "Store cuts in H5 file" begin
@@ -263,6 +253,138 @@ function collect_gmi_dual(
     )
 end

+function ExpertDualGmiComponent_before_mip(test_h5, model, _)
+    # Read cuts and optimal solution
+    h5 = H5File(test_h5, "r")
+    sol_opt_dict = Dict(
+        zip(
+            h5.get_array("static_var_names"),
+            convert(Array{Float64}, h5.get_array("mip_var_values")),
+        ),
+    )
+    cut_basis_vars = h5.get_array("cuts_basis_vars")
+    cut_basis_sizes = h5.get_array("cuts_basis_sizes")
+    cut_rows = h5.get_array("cuts_rows")
+    obj_mip = h5.get_scalar("mip_lower_bound")
+    if obj_mip === nothing
+        obj_mip = h5.get_scalar("mip_obj_value")
+    end
+    h5.close()
+
+    # Initialize stats
+    stats_time_convert = 0
+    stats_time_tableau = 0
+    stats_time_gmi = 0
+    all_cuts = nothing
+
+    stats_time_convert = @elapsed begin
+        # Extract problem data
+        data = ProblemData(model)
+
+        # Construct optimal solution vector (with correct variable sequence)
+        sol_opt = [sol_opt_dict[n] for n in data.var_names]
+
+        # Assert optimal solution is feasible for the original problem
+        assert_leq(data.constr_lb, data.constr_lhs * sol_opt)
+        assert_leq(data.constr_lhs * sol_opt, data.constr_ub)
+
+        # Convert to standard form
+        data_s, transforms = convert_to_standard_form(data)
+        model_s = to_model(data_s)
+        set_optimizer(model_s, HiGHS.Optimizer)
+        relax_integrality(model_s)
+
+        # Convert optimal solution to standard form
+        sol_opt_s = forward(transforms, sol_opt)
+
+        # Assert converted solution is feasible for standard form problem
+        assert_eq(data_s.constr_lhs * sol_opt_s, data_s.constr_lb)
+    end
+
+    current_basis = nothing
+    for (r, row) in enumerate(cut_rows)
+        stats_time_tableau += @elapsed begin
+            if r == 1 || cut_basis_vars[r, :] != cut_basis_vars[r-1, :]
+                vbb, vnn, cbb, cnn = cut_basis_sizes[r, :]
+                current_basis = Basis(;
+                    var_basic = cut_basis_vars[r, 1:vbb],
+                    var_nonbasic = cut_basis_vars[r, vbb+1:vbb+vnn],
+                    constr_basic = cut_basis_vars[r, vbb+vnn+1:vbb+vnn+cbb],
+                    constr_nonbasic = cut_basis_vars[r, vbb+vnn+cbb+1:vbb+vnn+cbb+cnn],
+                )
+            end
+            tableau = compute_tableau(data_s, current_basis, rows = [row])
+            assert_eq(tableau.lhs * sol_opt_s, tableau.rhs)
+        end
+        stats_time_gmi += @elapsed begin
+            cuts_s = compute_gmi(data_s, tableau)
+            assert_does_not_cut_off(cuts_s, sol_opt_s)
+        end
+        cuts = backwards(transforms, cuts_s)
+        assert_does_not_cut_off(cuts, sol_opt)
+
+        if all_cuts === nothing
+            all_cuts = cuts
+        else
+            all_cuts.lhs = [all_cuts.lhs; cuts.lhs]
+            all_cuts.lb = [all_cuts.lb; cuts.lb]
+            all_cuts.ub = [all_cuts.ub; cuts.ub]
+        end
+    end
+
+    # Strategy 1: Add all cuts during the first call
+    function cut_callback_1(cb_data)
+        if all_cuts !== nothing
+            constrs = build_constraints(model, all_cuts)
+            @info "Enforcing $(length(constrs)) cuts..."
+            for c in constrs
+                MOI.submit(model, MOI.UserCut(cb_data), c)
+            end
+            all_cuts = nothing
+        end
+    end
+
+    # Strategy 2: Add violated cuts repeatedly until unable to separate
+    callback_disabled = false
+    function cut_callback_2(cb_data)
+        if callback_disabled
+            return
+        end
+        x = all_variables(model)
+        x_val = callback_value.(cb_data, x)
+        lhs_val = all_cuts.lhs * x_val
+        is_violated = lhs_val .> all_cuts.ub
+        selected_idx = findall(is_violated .== true)
+        selected_cuts = ConstraintSet(
+            lhs=all_cuts.lhs[selected_idx, :],
+            ub=all_cuts.ub[selected_idx],
+            lb=all_cuts.lb[selected_idx],
+        )
+        constrs = build_constraints(model, selected_cuts)
+        if length(constrs) > 0
+            @info "Enforcing $(length(constrs)) cuts..."
+            for c in constrs
+                MOI.submit(model, MOI.UserCut(cb_data), c)
+            end
+        else
+            @info "No violated cuts found. Disabling callback."
+            callback_disabled = true
+        end
+    end
+
+    # Set up cut callback
+    set_attribute(model, MOI.UserCutCallback(), cut_callback_1)
+    # set_attribute(model, MOI.UserCutCallback(), cut_callback_2)
+
+    stats = Dict()
+    stats["ExpertDualGmi: cuts"] = length(all_cuts.lb)
+    stats["ExpertDualGmi: time convert"] = stats_time_convert
+    stats["ExpertDualGmi: time tableau"] = stats_time_tableau
+    stats["ExpertDualGmi: time gmi"] = stats_time_gmi
+    return stats
+end
+
 function add_constraint_set_dual_v2(model::JuMP.Model, cs::ConstraintSet)
     vars = all_variables(model)
     nrows, ncols = size(cs.lhs)
@@ -319,58 +441,6 @@ function _dualgmi_features(h5_filename, extractor)
     end
 end

-function _dualgmi_compress_h5(h5_filename)
-    vars_to_basis_offset = Dict()
-    basis_vars = []
-    basis_sizes = []
-    cut_basis::Array{Int} = []
-    cut_row::Array{Int} = []
-
-    h5 = H5File(h5_filename, "r")
-    orig_cut_basis_vars = h5.get_array("cuts_basis_vars")
-    orig_cut_basis_sizes = h5.get_array("cuts_basis_sizes")
-    orig_cut_rows = h5.get_array("cuts_rows")
-    h5.close()
-    if orig_cut_basis_vars === nothing
-        @warn "orig_cut_basis_vars is null; skipping file"
-        return
-    end
-    ncuts, _ = size(orig_cut_basis_vars)
-    if ncuts == 0
-        return
-    end
-
-    for i in 1:ncuts
-        vars = orig_cut_basis_vars[i, :]
-        sizes = orig_cut_basis_sizes[i, :]
-        row = orig_cut_rows[i]
-        if vars ∉ keys(vars_to_basis_offset)
-            offset = size(basis_vars)[1] + 1
-            vars_to_basis_offset[vars] = offset
-            push!(basis_vars, vars)
-            push!(basis_sizes, sizes)
-        end
-        offset = vars_to_basis_offset[vars]
-        push!(cut_basis, offset)
-        push!(cut_row, row)
-    end
-
-    basis_vars = hcat(basis_vars...)'
-    basis_sizes = hcat(basis_sizes...)'
-    _, n_vars = size(basis_vars)
-    if n_vars == 0
-        @warn "n_vars is zero; skipping file"
-        return
-    end
-
-    h5 = H5File(h5_filename, "r+")
-    h5.put_array("gmi_basis_vars", basis_vars)
-    h5.put_array("gmi_basis_sizes", basis_sizes)
-    h5.put_array("gmi_cut_basis", cut_basis)
-    h5.put_array("gmi_cut_row", cut_row)
-    h5.file.close()
-end
-
 function _dualgmi_generate(train_h5, model)
     @timeit "Read problem data" begin
         data = ProblemData(model)
@@ -378,71 +448,54 @@ function _dualgmi_generate(train_h5, model)
     @timeit "Convert to standard form" begin
         data_s, transforms = convert_to_standard_form(data)
     end

     @timeit "Collect cuts from H5 files" begin
-        basis_vars_to_basis_offset = Dict()
-        combined_basis_sizes = nothing
-        combined_basis_sizes_list = Any[]
-        combined_basis_vars = nothing
-        combined_basis_vars_list = Any[]
-        combined_cut_rows = Any[]
+        vars_to_unique_basis_offset = Dict()
+        unique_basis_vars = nothing
+        unique_basis_sizes = nothing
+        unique_basis_rows = nothing
         for h5_filename in train_h5
-            @timeit "get_array (new)" begin
             h5 = H5File(h5_filename, "r")
-            gmi_basis_vars = h5.get_array("gmi_basis_vars")
-            if gmi_basis_vars === nothing
-                @warn "$(h5_filename) does not contain gmi_basis_vars; skipping"
-                continue
+            cut_basis_vars = h5.get_array("cuts_basis_vars")
+            cut_basis_sizes = h5.get_array("cuts_basis_sizes")
+            cut_rows = h5.get_array("cuts_rows")
+            ncuts, nvars = size(cut_basis_vars)
+            if unique_basis_vars === nothing
+                unique_basis_vars = Matrix{Int}(undef, 0, nvars)
+                unique_basis_sizes = Matrix{Int}(undef, 0, 4)
+                unique_basis_rows = Dict{Int,Set{Int}}()
+            end
+            for i in 1:ncuts
+                vars = cut_basis_vars[i, :]
+                sizes = cut_basis_sizes[i, :]
+                row = cut_rows[i]
+                if vars ∉ keys(vars_to_unique_basis_offset)
+                    offset = size(unique_basis_vars)[1] + 1
+                    vars_to_unique_basis_offset[vars] = offset
+                    unique_basis_vars = [unique_basis_vars; vars']
+                    unique_basis_sizes = [unique_basis_sizes; sizes']
+                    unique_basis_rows[offset] = Set()
+                end
+                offset = vars_to_unique_basis_offset[vars]
+                push!(unique_basis_rows[offset], row)
             end
-            gmi_basis_sizes = h5.get_array("gmi_basis_sizes")
-            gmi_cut_basis = h5.get_array("gmi_cut_basis")
-            gmi_cut_row = h5.get_array("gmi_cut_row")
             h5.close()
         end
-        @timeit "combine basis" begin
-            nbasis, _ = size(gmi_basis_vars)
-            local_to_combined_offset = Dict()
-            for local_offset in 1:nbasis
-                vars = gmi_basis_vars[local_offset, :]
-                sizes = gmi_basis_sizes[local_offset, :]
-                if vars ∉ keys(basis_vars_to_basis_offset)
-                    combined_offset = length(combined_basis_vars_list) + 1
-                    basis_vars_to_basis_offset[vars] = combined_offset
-                    push!(combined_basis_vars_list, vars)
-                    push!(combined_basis_sizes_list, sizes)
-                    push!(combined_cut_rows, Set{Int}())
-                end
-                combined_offset = basis_vars_to_basis_offset[vars]
-                local_to_combined_offset[local_offset] = combined_offset
-            end
-        end
-        @timeit "combine rows" begin
-            ncuts = length(gmi_cut_row)
-            for i in 1:ncuts
-                local_offset = gmi_cut_basis[i]
-                combined_offset = local_to_combined_offset[local_offset]
-                row = gmi_cut_row[i]
-                push!(combined_cut_rows[combined_offset], row)
-            end
-        end
-        @timeit "convert lists to matrices" begin
-            combined_basis_vars = hcat(combined_basis_vars_list...)'
-            combined_basis_sizes = hcat(combined_basis_sizes_list...)'
-        end
     end

     @timeit "Compute tableaus and cuts" begin
         all_cuts = nothing
-        nbasis = length(combined_cut_rows)
-        for offset in 1:nbasis
-            rows = combined_cut_rows[offset]
+        for (offset, rows) in unique_basis_rows
             try
-                vbb, vnn, cbb, cnn = combined_basis_sizes[offset, :]
+                vbb, vnn, cbb, cnn = unique_basis_sizes[offset, :]
                 current_basis = Basis(;
-                    var_basic = combined_basis_vars[offset, 1:vbb],
-                    var_nonbasic = combined_basis_vars[offset, vbb+1:vbb+vnn],
-                    constr_basic = combined_basis_vars[offset, vbb+vnn+1:vbb+vnn+cbb],
-                    constr_nonbasic = combined_basis_vars[offset, vbb+vnn+cbb+1:vbb+vnn+cbb+cnn],
+                    var_basic = unique_basis_vars[offset, 1:vbb],
+                    var_nonbasic = unique_basis_vars[offset, vbb+1:vbb+vnn],
+                    constr_basic = unique_basis_vars[offset, vbb+vnn+1:vbb+vnn+cbb],
+                    constr_nonbasic = unique_basis_vars[offset, vbb+vnn+cbb+1:vbb+vnn+cbb+cnn],
                 )

                 tableau = compute_tableau(data_s, current_basis; rows=collect(rows))
                 cuts_s = compute_gmi(data_s, tableau)
                 cuts = backwards(transforms, cuts_s)
@@ -546,7 +599,15 @@ function KnnDualGmiComponent_before_mip(data::_KnnDualGmiData, test_h5, model, _
 end

 function __init_gmi_dual__()
-    @pydef mutable struct KnnDualGmiComponentPy
+    @pydef mutable struct Class1
+        function fit(_, _) end
+        function before_mip(self, test_h5, model, stats)
+            ExpertDualGmiComponent_before_mip(test_h5, model.inner, stats)
+        end
+    end
+    copy!(ExpertDualGmiComponent, Class1)
+
+    @pydef mutable struct Class2
         function __init__(self; extractor, k = 3, strategy = "near")
             self.data = _KnnDualGmiData(; extractor, k, strategy)
         end
@@ -557,23 +618,7 @@ function __init_gmi_dual__()
             return @time KnnDualGmiComponent_before_mip(self.data, test_h5, model.inner, stats)
         end
     end
-    copy!(KnnDualGmiComponent, KnnDualGmiComponentPy)
-
-    @pydef mutable struct ExpertDualGmiComponentPy
-        function __init__(self)
-            self.inner = KnnDualGmiComponentPy(
-                extractor=H5FieldsExtractor(instance_fields=["static_var_obj_coeffs"]),
-                k=1,
-            )
-        end
-        function fit(self, train_h5)
-        end
-        function before_mip(self, test_h5, model, stats)
-            self.inner.fit([test_h5])
-            return self.inner.before_mip(test_h5, model, stats)
-        end
-    end
-    copy!(ExpertDualGmiComponent, ExpertDualGmiComponentPy)
+    copy!(KnnDualGmiComponent, Class2)
 end

 export collect_gmi_dual, expert_gmi_dual, ExpertDualGmiComponent, KnnDualGmiComponent
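For orientation, the `Class1`/`Class2` wrappers registered above become the exported `ExpertDualGmiComponent` and `KnnDualGmiComponent`. A minimal sketch of how they are meant to be driven, mirroring the precompile workload further down in this diff (file names are placeholders):

    using MIPLearn, MIPLearn.Cuts, HiGHS

    # Collect dual GMI cut data for a training instance (results go into the matching .h5 file).
    collect_gmi_dual("train.mps.gz"; optimizer=HiGHS.Optimizer, max_rounds=10, max_cuts_per_round=500)

    # Fit a k-NN component on the collected data and plug it into a learning solver.
    knn = KnnDualGmiComponent(
        extractor = H5FieldsExtractor(instance_fields = ["static_var_obj_coeffs"]),
        k = 2,
    )
    knn.fit(["train.h5"])
    solver = LearningSolver(components = [knn], skip_lp = true)
    # solver.optimize("train.mps.gz", build_model)  # build_model as in the precompile workload below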
@@ -13,7 +13,6 @@ include("collectors.jl")
 include("components.jl")
 include("extractors.jl")
 include("io.jl")
-include("problems/maxcut.jl")
 include("problems/setcover.jl")
 include("problems/stab.jl")
 include("problems/tsp.jl")
@@ -25,7 +24,6 @@ function __init__()
     __init_components__()
     __init_extractors__()
     __init_io__()
-    __init_problems_maxcut__()
     __init_problems_setcover__()
     __init_problems_stab__()
     __init_problems_tsp__()
@@ -39,48 +37,48 @@ include("Cuts/Cuts.jl")
 # Precompilation
 # =============================================================================

-# function __precompile_cuts__()
-#     function build_model(mps_filename)
-#         model = read_from_file(mps_filename)
-#         set_optimizer(model, SCIP.Optimizer)
-#         return JumpModel(model)
-#     end
-#     BASEDIR = dirname(@__FILE__)
-#     mps_filename = "$BASEDIR/../test/fixtures/bell5.mps.gz"
-#     h5_filename = "$BASEDIR/../test/fixtures/bell5.h5"
-#     collect_gmi_dual(
-#         mps_filename;
-#         optimizer=HiGHS.Optimizer,
-#         max_rounds = 10,
-#         max_cuts_per_round = 500,
-#     )
-#     knn = KnnDualGmiComponent(
-#         extractor = H5FieldsExtractor(instance_fields = ["static_var_obj_coeffs"]),
-#         k = 2,
-#     )
-#     knn.fit([h5_filename, h5_filename])
-#     solver = LearningSolver(
-#         components = [
-#             ExpertPrimalComponent(action = SetWarmStart()),
-#             knn,
-#         ],
-#         skip_lp = true,
-#     )
-#     solver.optimize(mps_filename, build_model)
-# end
+function __precompile_cuts__()
+    function build_model(mps_filename)
+        model = read_from_file(mps_filename)
+        set_optimizer(model, SCIP.Optimizer)
+        return JumpModel(model)
+    end
+    BASEDIR = dirname(@__FILE__)
+    mps_filename = "$BASEDIR/../test/fixtures/bell5.mps.gz"
+    h5_filename = "$BASEDIR/../test/fixtures/bell5.h5"
+    collect_gmi_dual(
+        mps_filename;
+        optimizer=HiGHS.Optimizer,
+        max_rounds = 10,
+        max_cuts_per_round = 500,
+    )
+    knn = KnnDualGmiComponent(
+        extractor = H5FieldsExtractor(instance_fields = ["static_var_obj_coeffs"]),
+        k = 2,
+    )
+    knn.fit([h5_filename, h5_filename])
+    solver = LearningSolver(
+        components = [
+            ExpertPrimalComponent(action = SetWarmStart()),
+            knn,
+        ],
+        skip_lp = true,
+    )
+    solver.optimize(mps_filename, build_model)
+end

-# @setup_workload begin
-#     using SCIP
-#     using HiGHS
-#     using MIPLearn.Cuts
-#     using PrecompileTools: @setup_workload, @compile_workload
+@setup_workload begin
+    using SCIP
+    using HiGHS
+    using MIPLearn.Cuts
+    using PrecompileTools: @setup_workload, @compile_workload

-#     __init__()
-#     Cuts.__init__()
+    __init__()
+    Cuts.__init__()

-#     @compile_workload begin
-#         __precompile_cuts__()
-#     end
-# end
+    @compile_workload begin
+        __precompile_cuts__()
+    end
+end

 end # module
@@ -1,31 +0,0 @@
-# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
-# Copyright (C) 2020-2025, UChicago Argonne, LLC. All rights reserved.
-# Released under the modified BSD license. See COPYING.md for more details.
-
-using JuMP
-
-global MaxCutData = PyNULL()
-global MaxCutGenerator = PyNULL()
-
-function __init_problems_maxcut__()
-    copy!(MaxCutData, pyimport("miplearn.problems.maxcut").MaxCutData)
-    copy!(MaxCutGenerator, pyimport("miplearn.problems.maxcut").MaxCutGenerator)
-end
-
-function build_maxcut_model_jump(data::Any; optimizer)
-    if data isa String
-        data = read_pkl_gz(data)
-    end
-    nodes = collect(data.graph.nodes())
-    edges = collect(data.graph.edges())
-    model = Model(optimizer)
-    @variable(model, x[nodes], Bin)
-    @objective(
-        model,
-        Min,
-        sum(-data.weights[i] * x[e[1]] * (1 - x[e[2]]) for (i, e) in enumerate(edges))
-    )
-    return JumpModel(model)
-end
-
-export MaxCutData, MaxCutGenerator, build_maxcut_model_jump
@@ -69,13 +69,14 @@ function submit(model::JuMP.Model, constr)
 end

 function _extract_after_load(model::JuMP.Model, h5)
+    @info "_extract_after_load"
     if JuMP.objective_sense(model) == MOI.MIN_SENSE
         h5.put_scalar("static_sense", "min")
     else
         h5.put_scalar("static_sense", "max")
     end
-    _extract_after_load_vars(model, h5)
-    _extract_after_load_constrs(model, h5)
+    @time _extract_after_load_vars(model, h5)
+    @time _extract_after_load_constrs(model, h5)
 end

 function _extract_after_load_vars(model::JuMP.Model, h5)
@@ -89,27 +90,14 @@ function _extract_after_load_vars(model::JuMP.Model, h5)
         for v in vars
     ]
     types = [JuMP.is_binary(v) ? "B" : JuMP.is_integer(v) ? "I" : "C" for v in vars]
-    # Linear obj terms
-    obj = objective_function(model, QuadExpr)
-    obj_coeffs_linear = [v ∈ keys(obj.aff.terms) ? obj.aff.terms[v] : 0.0 for v in vars]
-
-    # Quadratic obj terms
-    if length(obj.terms) > 0
-        nvars = length(vars)
-        obj_coeffs_quad = zeros(nvars, nvars)
-        for (pair, coeff) in obj.terms
-            obj_coeffs_quad[pair.a.index.value, pair.b.index.value] = coeff
-        end
-        h5.put_array("static_var_obj_coeffs_quad", obj_coeffs_quad)
-    end
-
+    obj = objective_function(model, AffExpr)
+    obj_coeffs = [v ∈ keys(obj.terms) ? obj.terms[v] : 0.0 for v in vars]
     h5.put_array("static_var_names", to_str_array(JuMP.name.(vars)))
     h5.put_array("static_var_types", to_str_array(types))
     h5.put_array("static_var_lower_bounds", lb)
     h5.put_array("static_var_upper_bounds", ub)
-    h5.put_array("static_var_obj_coeffs", obj_coeffs_linear)
-    h5.put_scalar("static_obj_offset", obj.aff.constant)
+    h5.put_array("static_var_obj_coeffs", obj_coeffs)
+    h5.put_scalar("static_obj_offset", obj.constant)
 end

 function _extract_after_load_constrs(model::JuMP.Model, h5)
@@ -156,7 +144,7 @@ function _extract_after_load_constrs(model::JuMP.Model, h5)
         end
     end
     if isempty(names)
-        return
+        error("no model constraints found; note that MIPLearn ignores unnamed constraints")
     end
     lhs = sparse(lhs_rows, lhs_cols, lhs_values, length(rhs), JuMP.num_variables(model))
     h5.put_sparse("static_constr_lhs", lhs)
@@ -166,10 +154,11 @@ function _extract_after_load_constrs(model::JuMP.Model, h5)
 end

 function _extract_after_lp(model::JuMP.Model, h5)
+    @info "_extract_after_lp"
     h5.put_scalar("lp_wallclock_time", solve_time(model))
     h5.put_scalar("lp_obj_value", objective_value(model))
-    _extract_after_lp_vars(model, h5)
-    _extract_after_lp_constrs(model, h5)
+    @time _extract_after_lp_vars(model, h5)
+    @time _extract_after_lp_constrs(model, h5)
 end

 function _extract_after_lp_vars(model::JuMP.Model, h5)
@@ -195,46 +184,46 @@ function _extract_after_lp_vars(model::JuMP.Model, h5)
     end
     h5.put_array("lp_var_basis_status", to_str_array(basis_status))

-    # Sensitivity analysis
-    obj_coeffs = h5.get_array("static_var_obj_coeffs")
-    sensitivity_report = lp_sensitivity_report(model)
-    sa_obj_down, sa_obj_up = Float64[], Float64[]
-    sa_lb_down, sa_lb_up = Float64[], Float64[]
-    sa_ub_down, sa_ub_up = Float64[], Float64[]
-    for (i, v) in enumerate(vars)
-        # Objective function
-        (delta_down, delta_up) = sensitivity_report[v]
-        push!(sa_obj_down, delta_down + obj_coeffs[i])
-        push!(sa_obj_up, delta_up + obj_coeffs[i])
+    # # Sensitivity analysis
+    # obj_coeffs = h5.get_array("static_var_obj_coeffs")
+    # sensitivity_report = lp_sensitivity_report(model)
+    # sa_obj_down, sa_obj_up = Float64[], Float64[]
+    # sa_lb_down, sa_lb_up = Float64[], Float64[]
+    # sa_ub_down, sa_ub_up = Float64[], Float64[]
+    # for (i, v) in enumerate(vars)
+    #     # Objective function
+    #     (delta_down, delta_up) = sensitivity_report[v]
+    #     push!(sa_obj_down, delta_down + obj_coeffs[i])
+    #     push!(sa_obj_up, delta_up + obj_coeffs[i])

-        # Lower bound
-        if has_lower_bound(v)
-            constr = LowerBoundRef(v)
-            (delta_down, delta_up) = sensitivity_report[constr]
-            push!(sa_lb_down, lower_bound(v) + delta_down)
-            push!(sa_lb_up, lower_bound(v) + delta_up)
-        else
-            push!(sa_lb_down, -Inf)
-            push!(sa_lb_up, -Inf)
-        end
+    #     # Lower bound
+    #     if has_lower_bound(v)
+    #         constr = LowerBoundRef(v)
+    #         (delta_down, delta_up) = sensitivity_report[constr]
+    #         push!(sa_lb_down, lower_bound(v) + delta_down)
+    #         push!(sa_lb_up, lower_bound(v) + delta_up)
+    #     else
+    #         push!(sa_lb_down, -Inf)
+    #         push!(sa_lb_up, -Inf)
+    #     end

-        # Upper bound
-        if has_upper_bound(v)
-            constr = JuMP.UpperBoundRef(v)
-            (delta_down, delta_up) = sensitivity_report[constr]
-            push!(sa_ub_down, upper_bound(v) + delta_down)
-            push!(sa_ub_up, upper_bound(v) + delta_up)
-        else
-            push!(sa_ub_down, Inf)
-            push!(sa_ub_up, Inf)
-        end
-    end
-    h5.put_array("lp_var_sa_obj_up", sa_obj_up)
-    h5.put_array("lp_var_sa_obj_down", sa_obj_down)
-    h5.put_array("lp_var_sa_ub_up", sa_ub_up)
-    h5.put_array("lp_var_sa_ub_down", sa_ub_down)
-    h5.put_array("lp_var_sa_lb_up", sa_lb_up)
-    h5.put_array("lp_var_sa_lb_down", sa_lb_down)
+    #     # Upper bound
+    #     if has_upper_bound(v)
+    #         constr = JuMP.UpperBoundRef(v)
+    #         (delta_down, delta_up) = sensitivity_report[constr]
+    #         push!(sa_ub_down, upper_bound(v) + delta_down)
+    #         push!(sa_ub_up, upper_bound(v) + delta_up)
+    #     else
+    #         push!(sa_ub_down, Inf)
+    #         push!(sa_ub_up, Inf)
+    #     end
+    # end
+    # h5.put_array("lp_var_sa_obj_up", sa_obj_up)
+    # h5.put_array("lp_var_sa_obj_down", sa_obj_down)
+    # h5.put_array("lp_var_sa_ub_up", sa_ub_up)
+    # h5.put_array("lp_var_sa_ub_down", sa_ub_down)
+    # h5.put_array("lp_var_sa_lb_up", sa_lb_up)
+    # h5.put_array("lp_var_sa_lb_down", sa_lb_down)
 end

@@ -250,7 +239,7 @@ function _extract_after_lp_constrs(model::JuMP.Model, h5)
     duals = Float64[]
     basis_status = []
     constr_idx = 1
-    sensitivity_report = lp_sensitivity_report(model)
+    # sensitivity_report = lp_sensitivity_report(model)
     for (ftype, stype) in JuMP.list_of_constraint_types(model)
         for constr in JuMP.all_constraints(model, ftype, stype)
             length(JuMP.name(constr)) > 0 || continue
@@ -268,21 +257,22 @@ function _extract_after_lp_constrs(model::JuMP.Model, h5)
                 error("Unknown basis status: $b")
             end

-            # Sensitivity analysis
-            (delta_down, delta_up) = sensitivity_report[constr]
-            push!(sa_rhs_down, rhs[constr_idx] + delta_down)
-            push!(sa_rhs_up, rhs[constr_idx] + delta_up)
+            # # Sensitivity analysis
+            # (delta_down, delta_up) = sensitivity_report[constr]
+            # push!(sa_rhs_down, rhs[constr_idx] + delta_down)
+            # push!(sa_rhs_up, rhs[constr_idx] + delta_up)

             constr_idx += 1
         end
     end
     h5.put_array("lp_constr_dual_values", duals)
     h5.put_array("lp_constr_basis_status", to_str_array(basis_status))
-    h5.put_array("lp_constr_sa_rhs_up", sa_rhs_up)
-    h5.put_array("lp_constr_sa_rhs_down", sa_rhs_down)
+    # h5.put_array("lp_constr_sa_rhs_up", sa_rhs_up)
+    # h5.put_array("lp_constr_sa_rhs_down", sa_rhs_down)
 end

 function _extract_after_mip(model::JuMP.Model, h5)
+    @info "_extract_after_mip"
     h5.put_scalar("mip_obj_value", objective_value(model))
     h5.put_scalar("mip_obj_bound", objective_bound(model))
     h5.put_scalar("mip_wallclock_time", solve_time(model))
@@ -295,11 +285,9 @@ function _extract_after_mip(model::JuMP.Model, h5)

     # Slacks
     lhs = h5.get_sparse("static_constr_lhs")
-    if lhs !== nothing
     rhs = h5.get_array("static_constr_rhs")
     slacks = abs.(lhs * x - rhs)
     h5.put_array("mip_constr_slacks", slacks)
-    end

     # Cuts and lazy constraints
     ext = model.ext[:miplearn]
@@ -310,7 +298,9 @@ end
 function _fix_variables(model::JuMP.Model, var_names, var_values, stats)
     vars = [variable_by_name(model, v) for v in var_names]
     for (i, var) in enumerate(vars)
-        fix(var, var_values[i], force = true)
+        if isfinite(var_values[i])
+            fix(var, var_values[i], force=true)
+        end
     end
 end

@@ -427,7 +417,7 @@ function __init_solvers_jump__()
             constrs_lhs,
             constrs_sense,
             constrs_rhs,
-            stats = nothing,
+            stats=nothing,
         ) = _add_constrs(
             self.inner,
             from_str_array(var_names),
@@ -443,14 +433,14 @@ function __init_solvers_jump__()

         extract_after_mip(self, h5) = _extract_after_mip(self.inner, h5)

-        fix_variables(self, var_names, var_values, stats = nothing) =
+        fix_variables(self, var_names, var_values, stats=nothing) =
             _fix_variables(self.inner, from_str_array(var_names), var_values, stats)

         optimize(self) = _optimize(self.inner)

         relax(self) = Class(_relax(self.inner))

-        set_warm_starts(self, var_names, var_values, stats = nothing) =
+        set_warm_starts(self, var_names, var_values, stats=nothing) =
             _set_warm_starts(self.inner, from_str_array(var_names), var_values, stats)

         write(self, filename) = _write(self.inner, filename)
@@ -4,7 +4,9 @@ authors = ["Alinson S. Xavier <git@axavier.org>"]
 version = "0.1.0"

 [deps]
+CSV = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b"
 Clp = "e2554f3b-3117-50c0-817c-e040a3ddf72d"
+DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
 GLPK = "60bf3e95-4087-53dc-ae20-288a0d20c6a6"
 Glob = "c27321d9-0574-5035-807b-f59d2c89b15c"
 HDF5 = "f67ccb44-e63f-5c2f-98bd-6dc0ccc4ba2f"
New binary files (vendored; binary content not shown):
  test/fixtures/stab/stab-n190-00000.h5
  test/fixtures/stab/stab-n190-00000.mps.gz
  test/fixtures/stab/stab-n190-00002.h5
  test/fixtures/stab/stab-n190-00002.mps.gz
  test/fixtures/stab/stab-n190-00003.h5
  test/fixtures/stab/stab-n190-00003.mps.gz
  test/fixtures/stab/stab-n190-00004.h5
  test/fixtures/stab/stab-n190-00004.mps.gz
  test/fixtures/stab/stab-n190-00005.h5
  test/fixtures/stab/stab-n190-00005.mps.gz
  test/fixtures/stab/stab-n190-00006.h5
  test/fixtures/stab/stab-n190-00006.mps.gz

New file: test/src/BB/tables.ipynb (284 lines; diff suppressed because one or more lines are too long)
@@ -10,9 +10,12 @@ using Test
 using MIPLearn.BB
 using MIPLearn

+using CSV
+using DataFrames
+
 basepath = @__DIR__

-function bb_run(optimizer_name, optimizer; large = true)
+function bb_run(optimizer_name, optimizer; large=true)
     @testset "Solve ($optimizer_name)" begin
         @testset "interface" begin
             filename = "$FIXTURES/danoint.mps.gz"
@@ -25,7 +28,7 @@ function bb_run(optimizer_name, optimizer; large = true)

             status, obj = BB.solve_relaxation!(mip)
             @test status == :Optimal
-            @test round(obj, digits = 6) == 62.637280
+            @test round(obj, digits=6) == 62.637280

             @test BB.name(mip, mip.int_vars[1]) == "xab"
             @test BB.name(mip, mip.int_vars[2]) == "xac"
@@ -35,26 +38,26 @@ function bb_run(optimizer_name, optimizer; large = true)
             @test mip.int_vars_ub[1] == 1.0

             vals = BB.values(mip, mip.int_vars)
-            @test round(vals[1], digits = 6) == 0.046933
-            @test round(vals[2], digits = 6) == 0.000841
-            @test round(vals[3], digits = 6) == 0.248696
+            @test round(vals[1], digits=6) == 0.046933
+            @test round(vals[2], digits=6) == 0.000841
+            @test round(vals[3], digits=6) == 0.248696

             # Probe (up and down are feasible)
             probe_up, probe_down = BB.probe(mip, mip.int_vars[1], 0.5, 0.0, 1.0, 1_000_000)
-            @test round(probe_down, digits = 6) == 62.690000
-            @test round(probe_up, digits = 6) == 62.714100
+            @test round(probe_down, digits=6) == 62.690000
+            @test round(probe_up, digits=6) == 62.714100

             # Fix one variable to zero
             BB.set_bounds!(mip, mip.int_vars[1:1], [0.0], [0.0])
             status, obj = BB.solve_relaxation!(mip)
             @test status == :Optimal
-            @test round(obj, digits = 6) == 62.690000
+            @test round(obj, digits=6) == 62.690000

             # Fix one variable to one and another variable variable to zero
             BB.set_bounds!(mip, mip.int_vars[1:2], [1.0, 0.0], [1.0, 0.0])
             status, obj = BB.solve_relaxation!(mip)
             @test status == :Optimal
-            @test round(obj, digits = 6) == 62.714777
+            @test round(obj, digits=6) == 62.714777

             # Fix all binary variables to one, making problem infeasible
             N = length(mip.int_vars)
@@ -68,7 +71,7 @@ function bb_run(optimizer_name, optimizer; large = true)
             BB.set_bounds!(mip, mip.int_vars, zeros(N), ones(N))
             status, obj = BB.solve_relaxation!(mip)
             @test status == :Optimal
-            @test round(obj, digits = 6) == 62.637280
+            @test round(obj, digits=6) == 62.637280
         end

         @testset "varbranch" begin
@@ -82,8 +85,8 @@ function bb_run(optimizer_name, optimizer; large = true)
                 BB.StrongBranching(),
                 BB.ReliabilityBranching(),
                 BB.HybridBranching(),
-                BB.StrongBranching(aggregation = :min),
-                BB.ReliabilityBranching(aggregation = :min, collect = true),
+                BB.StrongBranching(aggregation=:min),
+                BB.ReliabilityBranching(aggregation=:min, collect=true),
             ]
             h5 = H5File("$FIXTURES/$instance.h5")
             mip_obj_bound = h5.get_scalar("mip_obj_bound")
@@ -104,13 +107,13 @@ function bb_run(optimizer_name, optimizer; large = true)
         end

         @testset "collect" begin
-            rule = BB.ReliabilityBranching(collect = true)
+            rule = BB.ReliabilityBranching(collect=true)
             BB.collect!(
                 optimizer,
                 "$FIXTURES/bell5.mps.gz",
-                node_limit = 100,
-                print_interval = 10,
-                branch_rule = rule,
+                node_limit=100,
+                print_interval=10,
+                branch_rule=rule,
             )
             n_sb = rule.stats.num_strong_branch_calls
             h5 = H5File("$FIXTURES/bell5.h5")
@@ -128,3 +131,67 @@ function test_bb()
     @time bb_run("HiGHS", optimizer_with_attributes(HiGHS.Optimizer))
     # @time bb_run("CPLEX", optimizer_with_attributes(CPLEX.Optimizer, "CPXPARAM_Threads" => 1))
 end
+
+function test_bb_replay()
+    rule_sb = BB.StrongBranching()
+    rule_rb = BB.ReliabilityBranching()
+    optimizer = optimizer_with_attributes(HiGHS.Optimizer)
+    filenames = [replace(f, ".h5" => "") for f in glob("test/fixtures/stab/*.h5")]
+    results_filename = "tmp.csv"
+
+    lk = ReentrantLock()
+    results = []
+
+    function push_result(r)
+        lock(lk) do
+            push!(results, r)
+            df = DataFrame()
+            for row in results
+                push!(df, row, cols=:union)
+            end
+            CSV.write(results_filename, df)
+        end
+    end
+
+    function solve(filename; replay=nothing, skip=false, rule)
+        has_replay = (replay !== nothing)
+        h5 = H5File("$filename.h5", "r")
+        mip_obj_bound = h5.get_scalar("mip_obj_bound")
+        @show filename
+        @show has_replay
+        h5.file.close()
+        mip = BB.init(optimizer)
+        BB.read!(mip, "$filename.mps.gz")
+        time_solve = @elapsed begin
+            pool, replay = BB.solve!(
+                mip,
+                initial_primal_bound=mip_obj_bound,
+                print_interval=100,
+                node_limit=1_000,
+                branch_rule=rule,
+                replay=replay,
+            )
+        end
+        if !skip
+            push_result(
+                Dict(
+                    "Filename" => filename,
+                    "Replay?" => has_replay,
+                    "Solve time (s)" => time_solve,
+                    "Relative MIP gap (%)" => round(pool.gap * 100, digits=3)
+                )
+            )
+        end
+        return replay
+    end
+
+    # Solve reference instance
+    replay = solve(filenames[1], skip=true, rule=rule_sb)
+
+    # Solve perturbations
+    for i in 2:6
+        solve(filenames[i], rule=rule_rb, replay=nothing)
+        solve(filenames[i], rule=rule_rb, replay=deepcopy(replay))
+    end
+    return
+end
@@ -24,7 +24,6 @@ include("Cuts/tableau/test_gmi_dual.jl")
 include("problems/test_setcover.jl")
 include("problems/test_stab.jl")
 include("problems/test_tsp.jl")
-include("problems/test_maxcut.jl")
 include("solvers/test_jump.jl")
 include("test_io.jl")
 include("test_usage.jl")
@@ -38,7 +37,6 @@ function runtests()
     test_problems_setcover()
     test_problems_stab()
     test_problems_tsp()
-    test_problems_maxcut()
     test_solvers_jump()
     test_usage()
     test_cuts()
@@ -1,54 +0,0 @@
-# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
-# Copyright (C) 2020-2025, UChicago Argonne, LLC. All rights reserved.
-# Released under the modified BSD license. See COPYING.md for more details.
-
-using PyCall
-
-function test_problems_maxcut()
-    np = pyimport("numpy")
-    random = pyimport("random")
-    scipy_stats = pyimport("scipy.stats")
-    randint = scipy_stats.randint
-    uniform = scipy_stats.uniform
-
-    # Set random seed
-    random.seed(42)
-    np.random.seed(42)
-
-    # Build random instance
-    data = MaxCutGenerator(
-        n = randint(low = 10, high = 11),
-        p = uniform(loc = 0.5, scale = 0.0),
-        fix_graph = false,
-    ).generate(
-        1,
-    )[1]
-
-    # Build model
-    model = build_maxcut_model_jump(data, optimizer = SCIP.Optimizer)
-
-    # Check static features
-    h5 = H5File(tempname(), "w")
-    model.extract_after_load(h5)
-    obj_linear = h5.get_array("static_var_obj_coeffs")
-    obj_quad = h5.get_array("static_var_obj_coeffs_quad")
-    @test obj_linear == [3.0, 1.0, 3.0, 1.0, -1.0, 0.0, -1.0, 0.0, -1.0, 0.0]
-    @test obj_quad == [
-        0.0 0.0 -1.0 1.0 -1.0 0.0 0.0 0.0 -1.0 -1.0
-        0.0 0.0 1.0 -1.0 0.0 -1.0 -1.0 0.0 0.0 1.0
-        0.0 0.0 0.0 0.0 0.0 -1.0 0.0 0.0 -1.0 -1.0
-        0.0 0.0 0.0 0.0 0.0 -1.0 1.0 -1.0 0.0 0.0
-        0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0
-        0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
-        0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0
-        0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 -1.0
-        0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0
-        0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
-    ]
-
-    # Check optimal solution
-    model.optimize()
-    model.extract_after_mip(h5)
-    @test h5.get_scalar("mip_obj_value") == -4
-    h5.close()
-end