Mirror of https://github.com/ANL-CEEESA/MIPLearn.jl.git (synced 2025-12-07 08:48:52 -06:00)

Compare commits: feature/re...dev (15 commits)
| SHA1 |
|---|
| d351d84d58 |
| 1aaf4ebdc4 |
| 5662e5c2e6 |
| 63bbd750fb |
| 6c903d0b19 |
| c3a8fa6a08 |
| 5c522dbc5f |
| a9f1b2c394 |
| 2ea0043c03 |
| 9ac2f74856 |
| 672bb220c1 |
| 20a7cfb42d |
| b6ba75c3dc |
| a5a3690bb6 |
| e5a2550c21 |
Project.toml

@@ -1,7 +1,7 @@
 name = "MIPLearn"
 uuid = "2b1277c3-b477-4c49-a15e-7ba350325c68"
 authors = ["Alinson S Xavier <git@axavier.org>"]
-version = "0.4.0"
+version = "0.4.2"

 [deps]
 Conda = "8f4d0f93-b110-5947-807f-2305c1781a2d"
@@ -41,3 +41,5 @@ Requires = "1"
 Statistics = "1"
 TimerOutputs = "0.5"
 julia = "1"
+PrecompileTools = "1"
+SCIP = "0.12"
deps/build.jl (vendored, 2 changed lines)

@@ -5,7 +5,7 @@ function install_miplearn()
     Conda.update()
     pip = joinpath(dirname(pyimport("sys").executable), "pip")
     isfile(pip) || error("$pip: invalid path")
-    run(`$pip install miplearn==0.4.0`)
+    run(`$pip install miplearn==0.4.4`)
 end

 install_miplearn()
@@ -6,7 +6,7 @@ using Printf

 function print_progress_header()
     @printf(
-        "%8s %9s %9s %13s %13s %9s %6s %13s %6s %-24s %9s %9s %6s %6s",
+        "%8s %9s %9s %13s %13s %9s %9s %13s %9s %-24s %9s %9s %6s %6s",
         "time",
         "processed",
         "pending",
@@ -46,7 +46,7 @@ function print_progress(
         branch_ub = @sprintf("%9.2f", last(node.branch_ub))
     end
     @printf(
-        "%8.2f %9d %9d %13.6e %13.6e %9.2e %6d %13.6e %6s %-24s %9s %9s %6d %6d",
+        "%8.2f %9d %9d %13.6e %13.6e %9.2e %9d %13.6e %9s %-24s %9s %9s %6d %6d",
         time_elapsed,
         pool.processed,
         length(pool.processing) + length(pool.pending),
@@ -134,7 +134,11 @@ function _get_int_variables(
             var_ub = constr.upper
             MOI.delete(optimizer, _upper_bound_index(var))
         end
-        MOI.add_constraint(optimizer, var, MOI.Interval(var_lb, var_ub))
+        MOI.add_constraint(
+            optimizer,
+            MOI.VariableIndex(var.index),
+            MOI.Interval(var_lb, var_ub),
+        )
     end
     push!(vars, var)
     push!(lb, var_lb)
@@ -8,6 +8,8 @@ using HiGHS
 using Random
 using DataStructures

+import ..H5FieldsExtractor
+
 global ExpertDualGmiComponent = PyNULL()
 global KnnDualGmiComponent = PyNULL()

@@ -24,8 +26,10 @@ function collect_gmi_dual(
     optimizer,
     max_rounds = 10,
     max_cuts_per_round = 500,
+    time_limit = 3_600,
 )
     reset_timer!()
+    initial_time = time()

     @timeit "Read H5" begin
         h5_filename = replace(mps_filename, ".mps.gz" => ".h5")
@@ -205,6 +209,12 @@ function collect_gmi_dual(
                 sum(sp[i] * gmi_exps[i] for (i, c) in enumerate(constrs) if useful[i]),
             )
         end
+
+        elapsed_time = time() - initial_time
+        if elapsed_time > time_limit
+            @info "Time limit exceeded. Stopping."
+            break
+        end
     end

     @timeit "Store cuts in H5 file" begin
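For orientation, a minimal sketch of invoking the collector with the new `time_limit` keyword, based on the signature above and the precompile workload further down this page; the instance path is a placeholder, and a companion `.h5` file from a prior solve is assumed:

```julia
# Hedged sketch: "instance.mps.gz" is a placeholder; collect_gmi_dual reads
# the companion "instance.h5" produced by an earlier data-collection solve.
# The new time_limit keyword (seconds) stops the round loop once exceeded.
using MIPLearn.Cuts
using HiGHS

collect_gmi_dual(
    "instance.mps.gz";
    optimizer = HiGHS.Optimizer,
    max_rounds = 10,
    max_cuts_per_round = 500,
    time_limit = 600,
)
```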
@@ -253,138 +263,6 @@ function collect_gmi_dual(
     )
 end

-function ExpertDualGmiComponent_before_mip(test_h5, model, _)
-    # Read cuts and optimal solution
-    h5 = H5File(test_h5, "r")
-    sol_opt_dict = Dict(
-        zip(
-            h5.get_array("static_var_names"),
-            convert(Array{Float64}, h5.get_array("mip_var_values")),
-        ),
-    )
-    cut_basis_vars = h5.get_array("cuts_basis_vars")
-    cut_basis_sizes = h5.get_array("cuts_basis_sizes")
-    cut_rows = h5.get_array("cuts_rows")
-    obj_mip = h5.get_scalar("mip_lower_bound")
-    if obj_mip === nothing
-        obj_mip = h5.get_scalar("mip_obj_value")
-    end
-    h5.close()
-
-    # Initialize stats
-    stats_time_convert = 0
-    stats_time_tableau = 0
-    stats_time_gmi = 0
-    all_cuts = nothing
-
-    stats_time_convert = @elapsed begin
-        # Extract problem data
-        data = ProblemData(model)
-
-        # Construct optimal solution vector (with correct variable sequence)
-        sol_opt = [sol_opt_dict[n] for n in data.var_names]
-
-        # Assert optimal solution is feasible for the original problem
-        assert_leq(data.constr_lb, data.constr_lhs * sol_opt)
-        assert_leq(data.constr_lhs * sol_opt, data.constr_ub)
-
-        # Convert to standard form
-        data_s, transforms = convert_to_standard_form(data)
-        model_s = to_model(data_s)
-        set_optimizer(model_s, HiGHS.Optimizer)
-        relax_integrality(model_s)
-
-        # Convert optimal solution to standard form
-        sol_opt_s = forward(transforms, sol_opt)
-
-        # Assert converted solution is feasible for standard form problem
-        assert_eq(data_s.constr_lhs * sol_opt_s, data_s.constr_lb)
-    end
-
-    current_basis = nothing
-    for (r, row) in enumerate(cut_rows)
-        stats_time_tableau += @elapsed begin
-            if r == 1 || cut_basis_vars[r, :] != cut_basis_vars[r-1, :]
-                vbb, vnn, cbb, cnn = cut_basis_sizes[r, :]
-                current_basis = Basis(;
-                    var_basic = cut_basis_vars[r, 1:vbb],
-                    var_nonbasic = cut_basis_vars[r, vbb+1:vbb+vnn],
-                    constr_basic = cut_basis_vars[r, vbb+vnn+1:vbb+vnn+cbb],
-                    constr_nonbasic = cut_basis_vars[r, vbb+vnn+cbb+1:vbb+vnn+cbb+cnn],
-                )
-            end
-            tableau = compute_tableau(data_s, current_basis, rows = [row])
-            assert_eq(tableau.lhs * sol_opt_s, tableau.rhs)
-        end
-        stats_time_gmi += @elapsed begin
-            cuts_s = compute_gmi(data_s, tableau)
-            assert_does_not_cut_off(cuts_s, sol_opt_s)
-        end
-        cuts = backwards(transforms, cuts_s)
-        assert_does_not_cut_off(cuts, sol_opt)
-
-        if all_cuts === nothing
-            all_cuts = cuts
-        else
-            all_cuts.lhs = [all_cuts.lhs; cuts.lhs]
-            all_cuts.lb = [all_cuts.lb; cuts.lb]
-            all_cuts.ub = [all_cuts.ub; cuts.ub]
-        end
-    end
-
-    # Strategy 1: Add all cuts during the first call
-    function cut_callback_1(cb_data)
-        if all_cuts !== nothing
-            constrs = build_constraints(model, all_cuts)
-            @info "Enforcing $(length(constrs)) cuts..."
-            for c in constrs
-                MOI.submit(model, MOI.UserCut(cb_data), c)
-            end
-            all_cuts = nothing
-        end
-    end
-
-    # Strategy 2: Add violated cuts repeatedly until unable to separate
-    callback_disabled = false
-    function cut_callback_2(cb_data)
-        if callback_disabled
-            return
-        end
-        x = all_variables(model)
-        x_val = callback_value.(cb_data, x)
-        lhs_val = all_cuts.lhs * x_val
-        is_violated = lhs_val .> all_cuts.ub
-        selected_idx = findall(is_violated .== true)
-        selected_cuts = ConstraintSet(
-            lhs=all_cuts.lhs[selected_idx, :],
-            ub=all_cuts.ub[selected_idx],
-            lb=all_cuts.lb[selected_idx],
-        )
-        constrs = build_constraints(model, selected_cuts)
-        if length(constrs) > 0
-            @info "Enforcing $(length(constrs)) cuts..."
-            for c in constrs
-                MOI.submit(model, MOI.UserCut(cb_data), c)
-            end
-        else
-            @info "No violated cuts found. Disabling callback."
-            callback_disabled = true
-        end
-    end
-
-    # Set up cut callback
-    set_attribute(model, MOI.UserCutCallback(), cut_callback_1)
-    # set_attribute(model, MOI.UserCutCallback(), cut_callback_2)
-
-    stats = Dict()
-    stats["ExpertDualGmi: cuts"] = length(all_cuts.lb)
-    stats["ExpertDualGmi: time convert"] = stats_time_convert
-    stats["ExpertDualGmi: time tableau"] = stats_time_tableau
-    stats["ExpertDualGmi: time gmi"] = stats_time_gmi
-    return stats
-end
-
 function add_constraint_set_dual_v2(model::JuMP.Model, cs::ConstraintSet)
     vars = all_variables(model)
     nrows, ncols = size(cs.lhs)
@@ -441,6 +319,58 @@ function _dualgmi_features(h5_filename, extractor)
     end
 end

+function _dualgmi_compress_h5(h5_filename)
+    vars_to_basis_offset = Dict()
+    basis_vars = []
+    basis_sizes = []
+    cut_basis::Array{Int} = []
+    cut_row::Array{Int} = []
+
+    h5 = H5File(h5_filename, "r")
+    orig_cut_basis_vars = h5.get_array("cuts_basis_vars")
+    orig_cut_basis_sizes = h5.get_array("cuts_basis_sizes")
+    orig_cut_rows = h5.get_array("cuts_rows")
+    h5.close()
+    if orig_cut_basis_vars === nothing
+        @warn "orig_cut_basis_vars is null; skipping file"
+        return
+    end
+    ncuts, _ = size(orig_cut_basis_vars)
+    if ncuts == 0
+        return
+    end
+
+    for i in 1:ncuts
+        vars = orig_cut_basis_vars[i, :]
+        sizes = orig_cut_basis_sizes[i, :]
+        row = orig_cut_rows[i]
+        if vars ∉ keys(vars_to_basis_offset)
+            offset = size(basis_vars)[1] + 1
+            vars_to_basis_offset[vars] = offset
+            push!(basis_vars, vars)
+            push!(basis_sizes, sizes)
+        end
+        offset = vars_to_basis_offset[vars]
+        push!(cut_basis, offset)
+        push!(cut_row, row)
+    end
+
+    basis_vars = hcat(basis_vars...)'
+    basis_sizes = hcat(basis_sizes...)'
+    _, n_vars = size(basis_vars)
+    if n_vars == 0
+        @warn "n_vars is zero; skipping file"
+        return
+    end
+
+    h5 = H5File(h5_filename, "r+")
+    h5.put_array("gmi_basis_vars", basis_vars)
+    h5.put_array("gmi_basis_sizes", basis_sizes)
+    h5.put_array("gmi_cut_basis", cut_basis)
+    h5.put_array("gmi_cut_row", cut_row)
+    h5.file.close()
+end
+
 function _dualgmi_generate(train_h5, model)
     @timeit "Read problem data" begin
         data = ProblemData(model)
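As a usage note, a hedged sketch of running this compression pass over existing training files; the `train/` directory is a placeholder, and reaching the helper as `MIPLearn.Cuts._dualgmi_compress_h5` is an assumption, since the function is not exported:

```julia
# Hedged sketch: rewrite each training H5 file in-place, deduplicating the
# per-cut basis arrays into the compact gmi_* datasets that the updated
# _dualgmi_generate expects; files without cuts_basis_vars are skipped.
using MIPLearn.Cuts

for f in filter(endswith(".h5"), readdir("train"; join = true))
    MIPLearn.Cuts._dualgmi_compress_h5(f)  # assumed access path; not exported
end
```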
@@ -448,54 +378,71 @@ function _dualgmi_generate(train_h5, model)
     @timeit "Convert to standard form" begin
         data_s, transforms = convert_to_standard_form(data)
     end

     @timeit "Collect cuts from H5 files" begin
-        vars_to_unique_basis_offset = Dict()
-        unique_basis_vars = nothing
-        unique_basis_sizes = nothing
-        unique_basis_rows = nothing
+        basis_vars_to_basis_offset = Dict()
+        combined_basis_sizes = nothing
+        combined_basis_sizes_list = Any[]
+        combined_basis_vars = nothing
+        combined_basis_vars_list = Any[]
+        combined_cut_rows = Any[]
         for h5_filename in train_h5
-            h5 = H5File(h5_filename, "r")
-            cut_basis_vars = h5.get_array("cuts_basis_vars")
-            cut_basis_sizes = h5.get_array("cuts_basis_sizes")
-            cut_rows = h5.get_array("cuts_rows")
-            ncuts, nvars = size(cut_basis_vars)
-            if unique_basis_vars === nothing
-                unique_basis_vars = Matrix{Int}(undef, 0, nvars)
-                unique_basis_sizes = Matrix{Int}(undef, 0, 4)
-                unique_basis_rows = Dict{Int,Set{Int}}()
-            end
-            for i in 1:ncuts
-                vars = cut_basis_vars[i, :]
-                sizes = cut_basis_sizes[i, :]
-                row = cut_rows[i]
-                if vars ∉ keys(vars_to_unique_basis_offset)
-                    offset = size(unique_basis_vars)[1] + 1
-                    vars_to_unique_basis_offset[vars] = offset
-                    unique_basis_vars = [unique_basis_vars; vars']
-                    unique_basis_sizes = [unique_basis_sizes; sizes']
-                    unique_basis_rows[offset] = Set()
-                end
-                offset = vars_to_unique_basis_offset[vars]
-                push!(unique_basis_rows[offset], row)
-            end
-            h5.close()
+            @timeit "get_array (new)" begin
+                h5 = H5File(h5_filename, "r")
+                gmi_basis_vars = h5.get_array("gmi_basis_vars")
+                if gmi_basis_vars === nothing
+                    @warn "$(h5_filename) does not contain gmi_basis_vars; skipping"
+                    continue
+                end
+                gmi_basis_sizes = h5.get_array("gmi_basis_sizes")
+                gmi_cut_basis = h5.get_array("gmi_cut_basis")
+                gmi_cut_row = h5.get_array("gmi_cut_row")
+                h5.close()
+            end
+            @timeit "combine basis" begin
+                nbasis, _ = size(gmi_basis_vars)
+                local_to_combined_offset = Dict()
+                for local_offset in 1:nbasis
+                    vars = gmi_basis_vars[local_offset, :]
+                    sizes = gmi_basis_sizes[local_offset, :]
+                    if vars ∉ keys(basis_vars_to_basis_offset)
+                        combined_offset = length(combined_basis_vars_list) + 1
+                        basis_vars_to_basis_offset[vars] = combined_offset
+                        push!(combined_basis_vars_list, vars)
+                        push!(combined_basis_sizes_list, sizes)
+                        push!(combined_cut_rows, Set{Int}())
+                    end
+                    combined_offset = basis_vars_to_basis_offset[vars]
+                    local_to_combined_offset[local_offset] = combined_offset
+                end
+            end
+            @timeit "combine rows" begin
+                ncuts = length(gmi_cut_row)
+                for i in 1:ncuts
+                    local_offset = gmi_cut_basis[i]
+                    combined_offset = local_to_combined_offset[local_offset]
+                    row = gmi_cut_row[i]
+                    push!(combined_cut_rows[combined_offset], row)
+                end
+            end
+        end
+        @timeit "convert lists to matrices" begin
+            combined_basis_vars = hcat(combined_basis_vars_list...)'
+            combined_basis_sizes = hcat(combined_basis_sizes_list...)'
         end
     end

     @timeit "Compute tableaus and cuts" begin
         all_cuts = nothing
-        for (offset, rows) in unique_basis_rows
+        nbasis = length(combined_cut_rows)
+        for offset in 1:nbasis
+            rows = combined_cut_rows[offset]
             try
-                vbb, vnn, cbb, cnn = unique_basis_sizes[offset, :]
+                vbb, vnn, cbb, cnn = combined_basis_sizes[offset, :]
                 current_basis = Basis(;
-                    var_basic = unique_basis_vars[offset, 1:vbb],
-                    var_nonbasic = unique_basis_vars[offset, vbb+1:vbb+vnn],
-                    constr_basic = unique_basis_vars[offset, vbb+vnn+1:vbb+vnn+cbb],
-                    constr_nonbasic = unique_basis_vars[offset, vbb+vnn+cbb+1:vbb+vnn+cbb+cnn],
+                    var_basic = combined_basis_vars[offset, 1:vbb],
+                    var_nonbasic = combined_basis_vars[offset, vbb+1:vbb+vnn],
+                    constr_basic = combined_basis_vars[offset, vbb+vnn+1:vbb+vnn+cbb],
+                    constr_nonbasic = combined_basis_vars[offset, vbb+vnn+cbb+1:vbb+vnn+cbb+cnn],
                 )

                 tableau = compute_tableau(data_s, current_basis; rows=collect(rows))
                 cuts_s = compute_gmi(data_s, tableau)
                 cuts = backwards(transforms, cuts_s)
@@ -599,15 +546,7 @@ function KnnDualGmiComponent_before_mip(data::_KnnDualGmiData, test_h5, model, _
 end

 function __init_gmi_dual__()
-    @pydef mutable struct Class1
-        function fit(_, _) end
-        function before_mip(self, test_h5, model, stats)
-            ExpertDualGmiComponent_before_mip(test_h5, model.inner, stats)
-        end
-    end
-    copy!(ExpertDualGmiComponent, Class1)
-
-    @pydef mutable struct Class2
+    @pydef mutable struct KnnDualGmiComponentPy
         function __init__(self; extractor, k = 3, strategy = "near")
             self.data = _KnnDualGmiData(; extractor, k, strategy)
         end
@@ -618,7 +557,23 @@ function __init_gmi_dual__()
             return @time KnnDualGmiComponent_before_mip(self.data, test_h5, model.inner, stats)
         end
     end
-    copy!(KnnDualGmiComponent, Class2)
+    copy!(KnnDualGmiComponent, KnnDualGmiComponentPy)
+
+    @pydef mutable struct ExpertDualGmiComponentPy
+        function __init__(self)
+            self.inner = KnnDualGmiComponentPy(
+                extractor=H5FieldsExtractor(instance_fields=["static_var_obj_coeffs"]),
+                k=1,
+            )
+        end
+        function fit(self, train_h5)
+        end
+        function before_mip(self, test_h5, model, stats)
+            self.inner.fit([test_h5])
+            return self.inner.before_mip(test_h5, model, stats)
+        end
+    end
+    copy!(ExpertDualGmiComponent, ExpertDualGmiComponentPy)
 end

 export collect_gmi_dual, expert_gmi_dual, ExpertDualGmiComponent, KnnDualGmiComponent
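For orientation, a hedged sketch of wiring the exported kNN component into a solver, assembled from the export list above and the precompile workload shown later on this page; file paths are placeholders:

```julia
# Hedged sketch: train KnnDualGmiComponent on previously collected H5 files,
# then solve a new instance with it. Paths are placeholders.
using MIPLearn, MIPLearn.Cuts
using JuMP, SCIP

function build_model(mps_filename)
    model = read_from_file(mps_filename)
    set_optimizer(model, SCIP.Optimizer)
    return JumpModel(model)
end

knn = KnnDualGmiComponent(
    extractor = H5FieldsExtractor(instance_fields = ["static_var_obj_coeffs"]),
    k = 2,
)
knn.fit(["train1.h5", "train2.h5"])

solver = LearningSolver(components = [knn], skip_lp = true)
solver.optimize("test.mps.gz", build_model)
```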
@@ -13,6 +13,7 @@ include("collectors.jl")
 include("components.jl")
 include("extractors.jl")
 include("io.jl")
+include("problems/maxcut.jl")
 include("problems/setcover.jl")
 include("problems/stab.jl")
 include("problems/tsp.jl")
@@ -24,6 +25,7 @@ function __init__()
     __init_components__()
     __init_extractors__()
     __init_io__()
+    __init_problems_maxcut__()
     __init_problems_setcover__()
     __init_problems_stab__()
     __init_problems_tsp__()
@@ -37,48 +39,48 @@ include("Cuts/Cuts.jl")
 # Precompilation
 # =============================================================================

-function __precompile_cuts__()
-    function build_model(mps_filename)
-        model = read_from_file(mps_filename)
-        set_optimizer(model, SCIP.Optimizer)
-        return JumpModel(model)
-    end
-    BASEDIR = dirname(@__FILE__)
-    mps_filename = "$BASEDIR/../test/fixtures/bell5.mps.gz"
-    h5_filename = "$BASEDIR/../test/fixtures/bell5.h5"
-    collect_gmi_dual(
-        mps_filename;
-        optimizer=HiGHS.Optimizer,
-        max_rounds = 10,
-        max_cuts_per_round = 500,
-    )
-    knn = KnnDualGmiComponent(
-        extractor = H5FieldsExtractor(instance_fields = ["static_var_obj_coeffs"]),
-        k = 2,
-    )
-    knn.fit([h5_filename, h5_filename])
-    solver = LearningSolver(
-        components = [
-            ExpertPrimalComponent(action = SetWarmStart()),
-            knn,
-        ],
-        skip_lp = true,
-    )
-    solver.optimize(mps_filename, build_model)
-end
+# function __precompile_cuts__()
+#     function build_model(mps_filename)
+#         model = read_from_file(mps_filename)
+#         set_optimizer(model, SCIP.Optimizer)
+#         return JumpModel(model)
+#     end
+#     BASEDIR = dirname(@__FILE__)
+#     mps_filename = "$BASEDIR/../test/fixtures/bell5.mps.gz"
+#     h5_filename = "$BASEDIR/../test/fixtures/bell5.h5"
+#     collect_gmi_dual(
+#         mps_filename;
+#         optimizer=HiGHS.Optimizer,
+#         max_rounds = 10,
+#         max_cuts_per_round = 500,
+#     )
+#     knn = KnnDualGmiComponent(
+#         extractor = H5FieldsExtractor(instance_fields = ["static_var_obj_coeffs"]),
+#         k = 2,
+#     )
+#     knn.fit([h5_filename, h5_filename])
+#     solver = LearningSolver(
+#         components = [
+#             ExpertPrimalComponent(action = SetWarmStart()),
+#             knn,
+#         ],
+#         skip_lp = true,
+#     )
+#     solver.optimize(mps_filename, build_model)
+# end

-@setup_workload begin
-    using SCIP
-    using HiGHS
-    using MIPLearn.Cuts
-    using PrecompileTools: @setup_workload, @compile_workload
+# @setup_workload begin
+#     using SCIP
+#     using HiGHS
+#     using MIPLearn.Cuts
+#     using PrecompileTools: @setup_workload, @compile_workload

-    __init__()
-    Cuts.__init__()
+#     __init__()
+#     Cuts.__init__()

-    @compile_workload begin
-        __precompile_cuts__()
-    end
-end
+#     @compile_workload begin
+#         __precompile_cuts__()
+#     end
+# end

 end # module
src/problems/maxcut.jl (new file, 31 lines)

@@ -0,0 +1,31 @@
+# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
+# Copyright (C) 2020-2025, UChicago Argonne, LLC. All rights reserved.
+# Released under the modified BSD license. See COPYING.md for more details.
+
+using JuMP
+
+global MaxCutData = PyNULL()
+global MaxCutGenerator = PyNULL()
+
+function __init_problems_maxcut__()
+    copy!(MaxCutData, pyimport("miplearn.problems.maxcut").MaxCutData)
+    copy!(MaxCutGenerator, pyimport("miplearn.problems.maxcut").MaxCutGenerator)
+end
+
+function build_maxcut_model_jump(data::Any; optimizer)
+    if data isa String
+        data = read_pkl_gz(data)
+    end
+    nodes = collect(data.graph.nodes())
+    edges = collect(data.graph.edges())
+    model = Model(optimizer)
+    @variable(model, x[nodes], Bin)
+    @objective(
+        model,
+        Min,
+        sum(-data.weights[i] * x[e[1]] * (1 - x[e[2]]) for (i, e) in enumerate(edges))
+    )
+    return JumpModel(model)
+end
+
+export MaxCutData, MaxCutGenerator, build_maxcut_model_jump
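For context, a hedged sketch of exercising the new problem type end-to-end, with generator parameters mirroring the test file further down this page:

```julia
# Hedged sketch: sample one random max-cut instance (10 nodes, edge
# probability 0.5) via the Python generator, then build and solve the
# JuMP model. Assumes MIPLearn, SCIP, and the miplearn Python package.
using MIPLearn, SCIP
using PyCall

scipy_stats = pyimport("scipy.stats")

data = MaxCutGenerator(
    n = scipy_stats.randint(low = 10, high = 11),
    p = scipy_stats.uniform(loc = 0.5, scale = 0.0),
    fix_graph = false,
).generate(1)[1]

model = build_maxcut_model_jump(data, optimizer = SCIP.Optimizer)
model.optimize()
```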
@@ -89,14 +89,27 @@ function _extract_after_load_vars(model::JuMP.Model, h5)
         for v in vars
     ]
     types = [JuMP.is_binary(v) ? "B" : JuMP.is_integer(v) ? "I" : "C" for v in vars]
-    obj = objective_function(model, AffExpr)
-    obj_coeffs = [v ∈ keys(obj.terms) ? obj.terms[v] : 0.0 for v in vars]
+
+    # Linear obj terms
+    obj = objective_function(model, QuadExpr)
+    obj_coeffs_linear = [v ∈ keys(obj.aff.terms) ? obj.aff.terms[v] : 0.0 for v in vars]
+
+    # Quadratic obj terms
+    if length(obj.terms) > 0
+        nvars = length(vars)
+        obj_coeffs_quad = zeros(nvars, nvars)
+        for (pair, coeff) in obj.terms
+            obj_coeffs_quad[pair.a.index.value, pair.b.index.value] = coeff
+        end
+        h5.put_array("static_var_obj_coeffs_quad", obj_coeffs_quad)
+    end
+
     h5.put_array("static_var_names", to_str_array(JuMP.name.(vars)))
     h5.put_array("static_var_types", to_str_array(types))
     h5.put_array("static_var_lower_bounds", lb)
     h5.put_array("static_var_upper_bounds", ub)
-    h5.put_array("static_var_obj_coeffs", obj_coeffs)
-    h5.put_scalar("static_obj_offset", obj.constant)
+    h5.put_array("static_var_obj_coeffs", obj_coeffs_linear)
+    h5.put_scalar("static_obj_offset", obj.aff.constant)
 end

 function _extract_after_load_constrs(model::JuMP.Model, h5)
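To make the new quadratic path concrete, a small self-contained JuMP sketch of the mapping it implements; the variable names are illustrative, and no MIPLearn code is required:

```julia
# Hedged sketch: for an objective 2*x1*x2 + 3*x1 + 5, the extractor stores
# 3.0 in static_var_obj_coeffs at x1's index, 5.0 as static_obj_offset, and
# 2.0 at position (1, 2) of the dense static_var_obj_coeffs_quad matrix.
using JuMP

model = Model()
@variable(model, x1)
@variable(model, x2)
@objective(model, Min, 2 * x1 * x2 + 3 * x1 + 5)

obj = objective_function(model, QuadExpr)
@assert obj.aff.terms[x1] == 3.0   # linear coefficient
@assert obj.aff.constant == 5.0    # objective offset
for (pair, coeff) in obj.terms     # quadratic terms, keyed by variable pair
    println((pair.a.index.value, pair.b.index.value, coeff))  # (1, 2, 2.0)
end
```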
@@ -143,7 +156,7 @@ function _extract_after_load_constrs(model::JuMP.Model, h5)
         end
     end
     if isempty(names)
-        error("no model constraints found; note that MIPLearn ignores unnamed constraints")
+        return
     end
     lhs = sparse(lhs_rows, lhs_cols, lhs_values, length(rhs), JuMP.num_variables(model))
     h5.put_sparse("static_constr_lhs", lhs)
@@ -282,9 +295,11 @@ function _extract_after_mip(model::JuMP.Model, h5)

     # Slacks
     lhs = h5.get_sparse("static_constr_lhs")
-    rhs = h5.get_array("static_constr_rhs")
-    slacks = abs.(lhs * x - rhs)
-    h5.put_array("mip_constr_slacks", slacks)
+    if lhs !== nothing
+        rhs = h5.get_array("static_constr_rhs")
+        slacks = abs.(lhs * x - rhs)
+        h5.put_array("mip_constr_slacks", slacks)
+    end

     # Cuts and lazy constraints
     ext = model.ext[:miplearn]
@@ -24,6 +24,7 @@ include("Cuts/tableau/test_gmi_dual.jl")
 include("problems/test_setcover.jl")
 include("problems/test_stab.jl")
 include("problems/test_tsp.jl")
+include("problems/test_maxcut.jl")
 include("solvers/test_jump.jl")
 include("test_io.jl")
 include("test_usage.jl")
@@ -37,6 +38,7 @@ function runtests()
     test_problems_setcover()
     test_problems_stab()
     test_problems_tsp()
+    test_problems_maxcut()
     test_solvers_jump()
     test_usage()
     test_cuts()
test/src/problems/test_maxcut.jl (new file, 54 lines)

@@ -0,0 +1,54 @@
+# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
+# Copyright (C) 2020-2025, UChicago Argonne, LLC. All rights reserved.
+# Released under the modified BSD license. See COPYING.md for more details.
+
+using PyCall
+
+function test_problems_maxcut()
+    np = pyimport("numpy")
+    random = pyimport("random")
+    scipy_stats = pyimport("scipy.stats")
+    randint = scipy_stats.randint
+    uniform = scipy_stats.uniform
+
+    # Set random seed
+    random.seed(42)
+    np.random.seed(42)
+
+    # Build random instance
+    data = MaxCutGenerator(
+        n = randint(low = 10, high = 11),
+        p = uniform(loc = 0.5, scale = 0.0),
+        fix_graph = false,
+    ).generate(
+        1,
+    )[1]
+
+    # Build model
+    model = build_maxcut_model_jump(data, optimizer = SCIP.Optimizer)
+
+    # Check static features
+    h5 = H5File(tempname(), "w")
+    model.extract_after_load(h5)
+    obj_linear = h5.get_array("static_var_obj_coeffs")
+    obj_quad = h5.get_array("static_var_obj_coeffs_quad")
+    @test obj_linear == [3.0, 1.0, 3.0, 1.0, -1.0, 0.0, -1.0, 0.0, -1.0, 0.0]
+    @test obj_quad == [
+        0.0 0.0 -1.0 1.0 -1.0 0.0 0.0 0.0 -1.0 -1.0
+        0.0 0.0 1.0 -1.0 0.0 -1.0 -1.0 0.0 0.0 1.0
+        0.0 0.0 0.0 0.0 0.0 -1.0 0.0 0.0 -1.0 -1.0
+        0.0 0.0 0.0 0.0 0.0 -1.0 1.0 -1.0 0.0 0.0
+        0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0
+        0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
+        0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0
+        0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 -1.0
+        0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0
+        0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
+    ]
+
+    # Check optimal solution
+    model.optimize()
+    model.extract_after_mip(h5)
+    @test h5.get_scalar("mip_obj_value") == -4
+    h5.close()
+end