Start rewrite; add CPX collector

This commit is contained in:
2023-01-23 13:07:45 -06:00
parent 52ddd076b6
commit 1eee63702d
49 changed files with 131 additions and 4964 deletions

View File

@@ -0,0 +1,20 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2023, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using HDF5
# Verify that the CPLEX black-box cut collector writes the expected datasets
# (cut LHS matrix and RHS vector) into the instance's HDF5 sample file.
function test_cuts_blackbox_cplex()
    # Prepare filenames: the sample file lives next to the MPS fixture.
    mps_filename = joinpath(@__DIR__, "../../fixtures/bell5.mps.gz")
    h5_filename = replace(mps_filename, ".mps.gz" => ".h5")
    # Run collector; this is expected to create/update `h5_filename`.
    MIPLearn.collect(mps_filename, CplexBlackBoxCuts())
    # Open read-only ("r"): this block only inspects dataset shapes, so there
    # is no reason to request write access ("r+"), which would also fail on a
    # read-only filesystem.
    h5open(h5_filename, "r") do h5
        # 12 cuts over 104 columns for bell5 — TODO confirm these counts are
        # stable across CPLEX versions.
        @test size(h5["cuts_cpx_lhs"]) == (12, 104)
        @test size(h5["cuts_cpx_rhs"]) == (12,)
    end
end

5
test/Project.toml Normal file
View File

@@ -0,0 +1,5 @@
[deps]
HDF5 = "f67ccb44-e63f-5c2f-98bd-6dc0ccc4ba2f"
MIPLearn = "2b1277c3-b477-4c49-a15e-7ba350325c68"
Revise = "295af30f-e4ad-537b-8983-00126c2a3abe"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

View File

@@ -1,146 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using Clp
using JuMP
using Test
using MIPLearn.BB
using MIPLearn
basepath = @__DIR__
# Run the MIPLearn.BB (branch-and-bound) test suite against one LP optimizer.
# `optimizer_name` is only used in test-set labels; `optimizer` is a JuMP
# optimizer factory (e.g. from `optimizer_with_attributes`). The `large`
# keyword is never read in this body — presumably a leftover switch for slow
# tests; TODO confirm before removing.
function runtests(optimizer_name, optimizer; large = true)
    @testset "Solve ($optimizer_name)" begin
        # Exercise the low-level solver interface: load an MPS file, solve the
        # LP relaxation, probe a fractional variable, tighten and restore
        # variable bounds. Statement order matters: later assertions depend on
        # the bound changes made earlier.
        @testset "interface" begin
            filename = "$basepath/../fixtures/danoint.mps.gz"
            mip = BB.init(optimizer)
            BB.read!(mip, filename)
            # sense == 1.0 presumably encodes minimization — TODO confirm
            # against the BB.read! implementation.
            @test mip.sense == 1.0
            @test length(mip.int_vars) == 56
            status, obj = BB.solve_relaxation!(mip)
            @test status == :Optimal
            # Expected values below are the known LP-relaxation results for
            # danoint, rounded to 6 digits.
            @test round(obj, digits = 6) == 62.637280
            @test BB.name(mip, mip.int_vars[1]) == "xab"
            @test BB.name(mip, mip.int_vars[2]) == "xac"
            @test BB.name(mip, mip.int_vars[3]) == "xad"
            @test mip.int_vars_lb[1] == 0.0
            @test mip.int_vars_ub[1] == 1.0
            vals = BB.values(mip, mip.int_vars)
            @test round(vals[1], digits = 6) == 0.046933
            @test round(vals[2], digits = 6) == 0.000841
            @test round(vals[3], digits = 6) == 0.248696
            # Probe (up and down are feasible)
            probe_up, probe_down = BB.probe(mip, mip.int_vars[1], 0.5, 0.0, 1.0, 1_000_000)
            @test round(probe_down, digits = 6) == 62.690000
            @test round(probe_up, digits = 6) == 62.714100
            # Fix one variable to zero
            BB.set_bounds!(mip, mip.int_vars[1:1], [0.0], [0.0])
            status, obj = BB.solve_relaxation!(mip)
            @test status == :Optimal
            @test round(obj, digits = 6) == 62.690000
            # Fix one variable to one and another variable to zero
            BB.set_bounds!(mip, mip.int_vars[1:2], [1.0, 0.0], [1.0, 0.0])
            status, obj = BB.solve_relaxation!(mip)
            @test status == :Optimal
            @test round(obj, digits = 6) == 62.714777
            # Fix all binary variables to one, making problem infeasible
            N = length(mip.int_vars)
            BB.set_bounds!(mip, mip.int_vars, ones(N), ones(N))
            status, obj = BB.solve_relaxation!(mip)
            @test status == :Infeasible
            @test obj == Inf
            # Restore original problem
            N = length(mip.int_vars)
            BB.set_bounds!(mip, mip.int_vars, zeros(N), ones(N))
            status, obj = BB.solve_relaxation!(mip)
            @test status == :Optimal
            @test round(obj, digits = 6) == 62.637280
        end
        # Smoke-test every branching rule on two small fixture instances; no
        # result assertions — success is completing within the node limit.
        @testset "varbranch" begin
            for instance in ["bell5", "vpm2"]
                for branch_rule in [
                    BB.RandomBranching(),
                    BB.FirstInfeasibleBranching(),
                    BB.LeastInfeasibleBranching(),
                    BB.MostInfeasibleBranching(),
                    BB.PseudocostBranching(),
                    BB.StrongBranching(),
                    BB.ReliabilityBranching(),
                    BB.HybridBranching(),
                    BB.StrongBranching(aggregation = :min),
                    BB.ReliabilityBranching(aggregation = :min, collect = true),
                ]
                    # Hdf5Sample appears to be a Python-backed handle
                    # (attribute-call style `h5.get_scalar`, `h5.file.close()`)
                    # — TODO confirm.
                    h5 = Hdf5Sample("$basepath/../fixtures/$instance.h5")
                    mip_lower_bound = h5.get_scalar("mip_lower_bound")
                    mip_upper_bound = h5.get_scalar("mip_upper_bound")
                    mip_sense = h5.get_scalar("mip_sense")
                    # Primal bound is the bound on the feasible side: upper
                    # bound for minimization, lower bound for maximization.
                    mip_primal_bound =
                        mip_sense == "min" ? mip_upper_bound : mip_lower_bound
                    h5.file.close()
                    mip = BB.init(optimizer)
                    BB.read!(mip, "$basepath/../fixtures/$instance.mps.gz")
                    @info optimizer_name, branch_rule, instance
                    @time BB.solve!(
                        mip,
                        initial_primal_bound = mip_primal_bound,
                        print_interval = 10,
                        node_limit = 100,
                        branch_rule = branch_rule,
                    )
                end
            end
        end
        # Verify that ReliabilityBranching(collect = true) stores
        # strong-branching data of the expected shapes back into the instance
        # HDF5 file.
        @testset "collect" begin
            rule = BB.ReliabilityBranching(collect = true)
            BB.collect!(
                optimizer,
                "$basepath/../fixtures/bell5.mps.gz",
                node_limit = 100,
                print_interval = 10,
                branch_rule = rule,
            )
            n_sb = rule.stats.num_strong_branch_calls
            h5 = Hdf5Sample("$basepath/../fixtures/bell5.h5")
            # One pseudocost entry per variable (104 for bell5); one score row
            # per strong-branch call, with 6 features each.
            @test size(h5.get_array("bb_var_pseudocost_up")) == (104,)
            @test size(h5.get_array("bb_score_var_names")) == (n_sb,)
            @test size(h5.get_array("bb_score_features")) == (n_sb, 6)
            @test size(h5.get_array("bb_score_targets")) == (n_sb,)
            h5.file.close()
        end
    end
end
# Top-level BB test entry: always exercise Clp; additionally run Gurobi and
# CPLEX when the availability flags (set elsewhere via Requires) are true.
@testset "BB" begin
    @time runtests("Clp", optimizer_with_attributes(Clp.Optimizer))
    if is_gurobi_available
        # NOTE(review): `using` inside a @testset block is suspect — @testset
        # introduces a scope and `using` is only legal at top level; confirm
        # this ever ran as written.
        using Gurobi
        @time runtests(
            "Gurobi",
            # Single-threaded so node counts / bounds are deterministic —
            # TODO confirm that is the intent.
            optimizer_with_attributes(Gurobi.Optimizer, "Threads" => 1),
        )
    end
    if is_cplex_available
        using CPLEX
        @time runtests(
            "CPLEX",
            optimizer_with_attributes(CPLEX.Optimizer, "CPXPARAM_Threads" => 1),
        )
    end
end

BIN
test/fixtures/bell5.h5 vendored

Binary file not shown.

View File

@@ -1,46 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using JuMP
using MIPLearn
using Cbc
# Tests for FileInstance: problem data is serialized to a JLD2 file, solved
# through a LearningSolver, and results are written to a sibling HDF5 sample.
@testset "FileInstance" begin
    @testset "Solve (knapsack)" begin
        data = KnapsackData()
        basename = tempname()
        MIPLearn.save_data("$basename.jld2", data)
        instance = FileInstance("$basename.jld2", build_knapsack_model)
        solver = LearningSolver(Cbc.Optimizer)
        solve!(solver, instance)
        # Solving should have produced "$basename.h5" next to the JLD2 file.
        h5 = Hdf5Sample("$basename.h5")
        @test h5.get_scalar("mip_wallclock_time") > 0
    end
    @testset "Solve (danoint)" begin
        data = Dict("filename" => joinpath(@__DIR__, "../fixtures/danoint.mps.gz"))
        build_model(data) = read_from_file(data["filename"])
        basename = tempname()
        MIPLearn.save_data("$basename.jld2", data)
        instance = FileInstance("$basename.jld2", build_model)
        # 1-second time limit: this only checks that a sample is written, not
        # that the instance is solved to optimality.
        solver = LearningSolver(optimizer_with_attributes(Cbc.Optimizer, "seconds" => 1.0))
        solve!(solver, instance)
        h5 = Hdf5Sample("$basename.h5")
        @test h5.get_scalar("mip_wallclock_time") > 0
    end
    # Round-trip: save_data followed by load_data restores every field.
    @testset "Save and load data" begin
        filename = tempname()
        data = KnapsackData(
            weights = [5.0, 5.0, 5.0],
            prices = [1.0, 1.0, 1.0],
            capacity = 3.0,
        )
        MIPLearn.save_data(filename, data)
        loaded = MIPLearn.load_data(filename)
        @test loaded.weights == [5.0, 5.0, 5.0]
        @test loaded.prices == [1.0, 1.0, 1.0]
        @test loaded.capacity == 3.0
    end
end

View File

@@ -1,62 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using Cbc
using JuMP
using MathOptInterface
using MIPLearn
const MOI = MathOptInterface
# Lazy-constraint separation callback: report the names of constraints
# violated at the current relaxation point. Returns ["con"] when x + y
# exceeds 1 beyond a 1e-6 tolerance, otherwise an empty vector.
function find_lazy(model::Model, cb_data)::Vector{String}
    point_sum =
        value(cb_data, variable_by_name(model, "x")) +
        value(cb_data, variable_by_name(model, "y"))
    return point_sum > 1 + 1e-6 ? ["con"] : String[]
end
# Lazy-constraint enforcement callback: when the "con" violation is reported,
# build and submit the cut x + y <= 1. Any other identifier is ignored.
function enforce_lazy(model::Model, cb_data, violation::String)::Nothing
    violation == "con" || return nothing
    x = variable_by_name(model, "x")
    y = variable_by_name(model, "y")
    submit(cb_data, @build_constraint(x + y <= 1))
    return nothing
end
# Build a tiny model with two binary variables and a lazy-constraint
# callback. The `data` argument is ignored — presumably present so the
# function can serve as a FileInstance model builder; TODO confirm.
function build_model(data)
    model = Model()
    @variable(model, x, Bin)
    @variable(model, y, Bin)
    @objective(model, Max, 2 * x + y)
    # c1 is deliberately loose (x + y <= 2); the binding cut x + y <= 1 is
    # only added lazily via the callbacks registered below.
    @constraint(model, c1, x + y <= 2)
    @lazycb(model, find_lazy, enforce_lazy)
    return model
end
# End-to-end tests of the lazy-constraint callbacks defined in this file,
# through both the in-memory (JuMPInstance) and on-disk (FileInstance) paths.
@testset "Lazy callback" begin
    @testset "JuMPInstance" begin
        model = build_model(nothing)
        instance = JuMPInstance(model)
        solver = LearningSolver(Cbc.Optimizer)
        solve!(solver, instance)
        # With the lazy cut x + y <= 1 enforced, the optimum of
        # Max 2x + y is (x, y) = (1, 0).
        @test value(model[:x]) == 1.0
        @test value(model[:y]) == 0.0
    end
    @testset "FileInstance" begin
        data = nothing
        basename = tempname()
        MIPLearn.save_data("$basename.jld2", data)
        instance = FileInstance("$basename.jld2", build_model)
        solver = LearningSolver(Cbc.Optimizer)
        solve!(solver, instance)
        h5 = MIPLearn.Hdf5Sample("$basename.h5")
        # Expected [1.0, 0.0] matches (x, y) = (1, 0) — presumably the sample
        # stores variables in creation order; TODO confirm.
        @test h5.get_array("mip_var_values") == [1.0, 0.0]
    end
end

View File

@@ -1,28 +1,13 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Copyright (C) 2020-2023, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using Revise
using Test
using Requires
using MIPLearn
MIPLearn.setup_logger()
is_cplex_available = false
@require CPLEX = "a076750e-1247-5638-91d2-ce28b192dca0" begin
is_cplex_available = true
end
includet("Cuts/BlackBox/test_cplex.jl")
is_gurobi_available = false
@require Gurobi = "2e9cd046-0924-5485-92f1-d5272153d98b" begin
is_gurobi_available = true
end
@testset "MIPLearn" begin
include("fixtures/knapsack.jl")
include("instance/file_instance_test.jl")
include("instance/jump_instance_test.jl")
include("solvers/jump_solver_test.jl")
include("solvers/learning_solver_test.jl")
include("utils/parse_test.jl")
include("bb/lp_test.jl")
# Entry point for the rewritten test suite: currently runs only the CPLEX
# black-box cut collector test (included from Cuts/BlackBox/test_cplex.jl).
function runtests()
    test_cuts_blackbox_cplex()
end

View File

@@ -1,34 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using Cbc
using JuMP
using MIPLearn
using PyCall
using Test
if is_gurobi_available
using Gurobi
end
miplearn_tests = pyimport("miplearn.solvers.tests")
traceback = pyimport("traceback")
# Run the Python-side internal solver test suite (miplearn.solvers.tests)
# against a JuMPSolver wrapping the given optimizer factory.
function _test_solver(optimizer_factory)
    # @python_call presumably translates Python exceptions into Julia errors
    # or test failures — TODO confirm its semantics in MIPLearn.
    MIPLearn.@python_call miplearn_tests.run_internal_solver_tests(
        JuMPSolver(optimizer_factory),
    )
end
# Run the shared internal-solver test suite for each available optimizer.
# `is_gurobi_available` is defined in the test runner via Requires.
@testset "JuMPSolver" begin
    @testset "Cbc" begin
        _test_solver(Cbc.Optimizer)
    end
    if is_gurobi_available
        # NOTE(review): `using` inside a @testset block is suspect, since
        # @testset introduces a local scope — confirm this ran as written.
        using Gurobi
        @testset "Gurobi" begin
            _test_solver(Gurobi.Optimizer)
        end
    end
end

View File

@@ -1,46 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using Cbc
using JuMP
using MIPLearn
# Tests for the high-level LearningSolver API: solving JuMP instances with
# and without annotations, and (de)serializing the solver itself.
@testset "LearningSolver" begin
    @testset "Model with annotations" begin
        model = build_knapsack_model()
        solver = LearningSolver(Cbc.Optimizer)
        instance = JuMPInstance(model)
        stats = solve!(solver, instance)
        @test stats["mip_lower_bound"] == 11.0
        # One sample is collected per solve.
        @test length(instance.samples) == 1
        # After fitting on the collected sample, the instance must still solve.
        fit!(solver, [instance])
        solve!(solver, instance)
    end
    @testset "Model without annotations" begin
        model = build_knapsack_model()
        solver = LearningSolver(Cbc.Optimizer)
        instance = JuMPInstance(model)
        stats = solve!(solver, instance)
        @test stats["mip_lower_bound"] == 11.0
    end
    @testset "Save and load" begin
        solver = LearningSolver(Cbc.Optimizer)
        # `solver.py` appears to be a wrapped Python object; a sentinel value
        # is stored to verify it survives the save/load round trip — TODO
        # confirm.
        solver.py.components = "Placeholder"
        filename = tempname()
        save(filename, solver)
        @test isfile(filename)
        loaded = load_solver(filename)
        @test loaded.py.components == "Placeholder"
    end
    # NOTE(review): disabled test kept for reference; re-enable or delete.
    # @testset "Discard output" begin
    #     instance = build_knapsack_file_instance()
    #     solver = LearningSolver(Cbc.Optimizer)
    #     solve!(solver, instance, discard_output = true)
    #     loaded = load_instance(instance.filename)
    #     @test length(loaded.samples) == 0
    # end
end

View File

@@ -1,36 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using Cbc
using CSV
using DataFrames
# End-to-end benchmark: solve training instances, fit three solver variants,
# re-solve with each, and export the aggregated statistics to CSV.
@testset "BenchmarkRunner" begin
    @info "Building training data..."
    instances = [build_knapsack_file_instance(), build_knapsack_file_instance()]
    stats = parallel_solve!(LearningSolver(Cbc.Optimizer), instances)
    # One stats entry per instance; `nothing` would indicate a failed solve.
    @test length(stats) == 2
    @test stats[1] !== nothing
    @test stats[2] !== nothing
    benchmark = BenchmarkRunner(
        solvers = Dict(
            "baseline" => LearningSolver(Cbc.Optimizer, components = []),
            "ml-exact" => LearningSolver(Cbc.Optimizer),
            "ml-heur" => LearningSolver(Cbc.Optimizer, mode = "heuristic"),
        ),
    )
    @info "Fitting..."
    fit!(benchmark, instances)
    @info "Benchmarking..."
    parallel_solve!(benchmark, instances, n_trials = 2)
    csv_filename = tempname()
    write_csv!(benchmark, csv_filename)
    @test isfile(csv_filename)
    csv = DataFrame(CSV.File(csv_filename))
    # 3 solvers x 2 instances x 2 trials = 12 result rows.
    @test size(csv)[1] == 12
end

View File

@@ -1,12 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using MIPLearn
# Unit tests for MIPLearn.parse_name: a bare name parses to a singleton
# vector, and "name[a,b,...]" parses to the name followed by its indices.
@testset "Parse" begin
    cases = [
        "x" => ["x"],
        "x[3]" => ["x", "3"],
        "test_eq[x]" => ["test_eq", "x"],
        "test_eq[x,y,z]" => ["test_eq", "x", "y", "z"],
    ]
    for (raw, expected) in cases
        @test MIPLearn.parse_name(raw) == expected
    end
end