diff --git a/Project.toml b/Project.toml index 9eae059..b8414a8 100644 --- a/Project.toml +++ b/Project.toml @@ -1,7 +1,7 @@ name = "MIPLearn" uuid = "2b1277c3-b477-4c49-a15e-7ba350325c68" authors = ["Alinson S Xavier "] -version = "0.3.0" +version = "0.4.0" [deps] Conda = "8f4d0f93-b110-5947-807f-2305c1781a2d" @@ -9,31 +9,35 @@ DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8" HDF5 = "f67ccb44-e63f-5c2f-98bd-6dc0ccc4ba2f" HiGHS = "87dc4568-4c63-4d18-b0c0-bb2238e4078b" JLD2 = "033835bb-8acc-5ee8-8aae-3f567f8a3819" +JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" JuMP = "4076af6c-e467-56ae-b986-b466b2749572" KLU = "ef3ab10e-7fda-4108-b977-705223b18434" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee" OrderedCollections = "bac558e1-5e72-5ebc-8fee-abe8a469f55d" +PrecompileTools = "aea7be01-6a6a-4083-8856-8a6e6704d82a" Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7" PyCall = "438e738f-606a-5dbb-bf0a-cddfbfd45ab0" Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" Requires = "ae029012-a4dd-5104-9daa-d747884805df" +SCIP = "82193955-e24f-5292-bf16-6f2c5261a85f" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" TimerOutputs = "a759f4b9-e2f1-59dc-863e-4aeb61b1ea8f" [compat] +Conda = "1" +DataStructures = "0.18" +HDF5 = "0.16" +HiGHS = "1" +JLD2 = "0.4" +JSON = "0.21" +JuMP = "1" +KLU = "0.4" +MathOptInterface = "1" +OrderedCollections = "1" +PyCall = "1" +Requires = "1" +Statistics = "1" +TimerOutputs = "0.5" julia = "1" -Conda="1" -DataStructures="0.18" -HDF5="0.16" -HiGHS="1" -JLD2="0.4" -JuMP="1" -KLU="0.4" -MathOptInterface="1" -OrderedCollections="1" -PyCall="1" -Requires="1" -Statistics="1" -TimerOutputs="0.5" \ No newline at end of file diff --git a/deps/build.jl b/deps/build.jl index 3be992f..8c6cd1e 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -5,7 +5,7 @@ function install_miplearn() Conda.update() pip = joinpath(dirname(pyimport("sys").executable), "pip") isfile(pip) || error("$pip: invalid path") - run(`$pip install miplearn==0.3.0`) + run(`$pip install miplearn==0.4.0`) end install_miplearn() diff --git a/src/Cuts/Cuts.jl b/src/Cuts/Cuts.jl index 77738d6..1e35c2e 100644 --- a/src/Cuts/Cuts.jl +++ b/src/Cuts/Cuts.jl @@ -4,15 +4,22 @@ module Cuts +using PyCall + import ..to_str_array include("tableau/structs.jl") # include("blackbox/cplex.jl") -include("tableau/collect.jl") +include("tableau/numerics.jl") include("tableau/gmi.jl") +include("tableau/gmi_dual.jl") include("tableau/moi.jl") include("tableau/tableau.jl") include("tableau/transform.jl") +function __init__() + __init_gmi_dual__() +end + end # module diff --git a/src/Cuts/tableau/collect.jl b/src/Cuts/tableau/collect.jl deleted file mode 100644 index 01e84ff..0000000 --- a/src/Cuts/tableau/collect.jl +++ /dev/null @@ -1,184 +0,0 @@ -# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization -# Copyright (C) 2020-2023, UChicago Argonne, LLC. All rights reserved. -# Released under the modified BSD license. See COPYING.md for more details. 
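A note on the `__init__` hook added to `src/Cuts/Cuts.jl` above: with PyCall, Python objects cannot be baked into the precompiled module image, so module-level handles (such as the `ExpertDualGmiComponent`/`KnnDualGmiComponent` globals introduced in `gmi_dual.jl` below) are declared as `PyNULL()` placeholders and filled in at load time. A minimal sketch of the pattern, with an illustrative import:

```julia
using PyCall

# Empty placeholder, created at precompile time.
const SomePyObject = PyNULL()

function __init__()
    # Runs on module load, once the Python interpreter is available;
    # copy! fills the placeholder in place, so existing references
    # see the live Python object.
    copy!(SomePyObject, pyimport("math"))
end
```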
- -import ..H5File - -using OrderedCollections - -function collect_gmi(mps_filename; optimizer, max_rounds = 10, max_cuts_per_round = 100) - @info mps_filename - reset_timer!() - - # Open HDF5 file - h5_filename = replace(mps_filename, ".mps.gz" => ".h5") - h5 = H5File(h5_filename) - - # Read optimal solution - sol_opt_dict = Dict( - zip( - h5.get_array("static_var_names"), - convert(Array{Float64}, h5.get_array("mip_var_values")), - ), - ) - - # Read optimal value - obj_mip = h5.get_scalar("mip_lower_bound") - if obj_mip === nothing - obj_mip = h5.get_scalar("mip_obj_value") - end - obj_lp = nothing - h5.file.close() - - # Define relative MIP gap - gap(v) = 100 * abs(obj_mip - v) / abs(v) - - # Initialize stats - stats_obj = [] - stats_gap = [] - stats_ncuts = [] - stats_time_convert = 0 - stats_time_solve = 0 - stats_time_select = 0 - stats_time_tableau = 0 - stats_time_gmi = 0 - all_cuts = nothing - - # Read problem - model = read_from_file(mps_filename) - - for round = 1:max_rounds - @info "Round $(round)..." - - stats_time_convert = @elapsed begin - # Extract problem data - data = ProblemData(model) - - # Construct optimal solution vector (with correct variable sequence) - sol_opt = [sol_opt_dict[n] for n in data.var_names] - - # Assert optimal solution is feasible for the original problem - @assert all(data.constr_lb .- 1e-3 .<= data.constr_lhs * sol_opt) - @assert all(data.constr_lhs * sol_opt .<= data.constr_ub .+ 1e-3) - - # Convert to standard form - data_s, transforms = convert_to_standard_form(data) - model_s = to_model(data_s) - set_optimizer(model_s, optimizer) - relax_integrality(model_s) - - # Convert optimal solution to standard form - sol_opt_s = forward(transforms, sol_opt) - - # Assert converted solution is feasible for standard form problem - @assert data_s.constr_lhs * sol_opt_s ≈ data_s.constr_lb - end - - # Optimize standard form - optimize!(model_s) - stats_time_solve += solve_time(model_s) - obj = objective_value(model_s) + data_s.obj_offset - if obj_lp === nothing - obj_lp = obj - push!(stats_obj, obj) - push!(stats_gap, gap(obj)) - push!(stats_ncuts, 0) - end - if termination_status(model_s) != MOI.OPTIMAL - return - end - - # Select tableau rows - basis = get_basis(model_s) - sol_frac = get_x(model_s) - stats_time_select += @elapsed begin - selected_rows = - select_gmi_rows(data_s, basis, sol_frac, max_rows = max_cuts_per_round) - end - - # Compute selected tableau rows - stats_time_tableau += @elapsed begin - tableau = compute_tableau(data_s, basis, sol_frac, rows = selected_rows) - - # Assert tableau rows have been computed correctly - @assert tableau.lhs * sol_frac ≈ tableau.rhs - @assert tableau.lhs * sol_opt_s ≈ tableau.rhs - end - - # Compute GMI cuts - stats_time_gmi += @elapsed begin - cuts_s = compute_gmi(data_s, tableau) - - # Assert cuts have been generated correctly - try - assert_cuts_off(cuts_s, sol_frac) - assert_does_not_cut_off(cuts_s, sol_opt_s) - catch - @warn "Invalid cuts detected. Discarding round $round cuts and aborting." - break - end - - # Abort if no cuts are left - if length(cuts_s.lb) == 0 - @info "No cuts generated. Aborting." 
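Both the deleted `collect.jl` above and its replacement in `gmi.jl` below revolve around the same standard-form round-trip. A condensed sketch of that call sequence, using only functions that appear in this PR (wrapped in a function so the placeholder inputs are explicit; `cuts_s` would come from `compute_gmi` on the standard-form tableau):

```julia
using JuMP
using MIPLearn.Cuts

function standard_form_roundtrip(model::Model, x::Vector{Float64}, cuts_s)
    data = ProblemData(model)                  # objective, bounds, LHS matrix
    data_s, transforms = convert_to_standard_form(data)
    model_s = to_model(data_s)                 # JuMP model of the standard form
    x_s = forward(transforms, x)               # map a point into standard form
    cuts = backwards(transforms, cuts_s)       # map cuts back to original space
    return model_s, x_s, cuts
end
```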
- break - end - end - - # Add GMI cuts to original problem - cuts = backwards(transforms, cuts_s) - assert_does_not_cut_off(cuts, sol_opt) - constrs = add_constraint_set(model, cuts) - - # Optimize original form - set_optimizer(model, optimizer) - undo_relax = relax_integrality(model) - optimize!(model) - obj = objective_value(model) - push!(stats_obj, obj) - push!(stats_gap, gap(obj)) - - # Store useful cuts; drop useless ones from the problem - useful = [abs(shadow_price(c)) > 1e-3 for c in constrs] - drop = findall(useful .== false) - keep = findall(useful .== true) - delete.(model, constrs[drop]) - if all_cuts === nothing - all_cuts = cuts - else - all_cuts.lhs = [all_cuts.lhs; cuts.lhs[keep, :]] - all_cuts.lb = [all_cuts.lb; cuts.lb[keep]] - all_cuts.lb = [all_cuts.lb; cuts.lb[keep]] - end - push!(stats_ncuts, length(all_cuts.lb)) - - undo_relax() - end - - # Store cuts - if all_cuts !== nothing - @info "Storing $(length(all_cuts.ub)) GMI cuts..." - h5 = H5File(h5_filename) - h5.put_sparse("cuts_lhs", all_cuts.lhs) - h5.put_array("cuts_lb", all_cuts.lb) - h5.put_array("cuts_ub", all_cuts.ub) - h5.file.close() - end - - return OrderedDict( - "instance" => mps_filename, - "max_rounds" => max_rounds, - "rounds" => length(stats_obj) - 1, - "time_convert" => stats_time_convert, - "time_solve" => stats_time_solve, - "time_tableau" => stats_time_tableau, - "time_gmi" => stats_time_gmi, - "obj_mip" => obj_mip, - "obj_lp" => obj_lp, - "stats_obj" => stats_obj, - "stats_gap" => stats_gap, - "stats_ncuts" => stats_ncuts, - ) -end - -export collect_gmi diff --git a/src/Cuts/tableau/gmi.jl b/src/Cuts/tableau/gmi.jl index 29e65e1..506ad2e 100644 --- a/src/Cuts/tableau/gmi.jl +++ b/src/Cuts/tableau/gmi.jl @@ -2,16 +2,196 @@ # Copyright (C) 2020-2023, UChicago Argonne, LLC. All rights reserved. # Released under the modified BSD license. See COPYING.md for more details. +import ..H5File + +using OrderedCollections using SparseArrays +using Statistics using TimerOutputs -@inline frac(x::Float64) = x - floor(x) +function collect_gmi( + mps_filename; + optimizer, + max_rounds = 10, + max_cuts_per_round = 100, + atol = 1e-4, +) + reset_timer!() + + # Open HDF5 file + h5_filename = replace(mps_filename, ".mps.gz" => ".h5") + h5 = H5File(h5_filename) + + # Read optimal solution + sol_opt_dict = Dict( + zip( + h5.get_array("static_var_names"), + convert(Array{Float64}, h5.get_array("mip_var_values")), + ), + ) + + # Read optimal value + obj_mip = h5.get_scalar("mip_lower_bound") + if obj_mip === nothing + obj_mip = h5.get_scalar("mip_obj_value") + end + obj_lp = h5.get_scalar("lp_obj_value") + h5.file.close() + + # Define relative MIP gap + gap(v) = 100 * abs(obj_mip - v) / abs(v) + + # Initialize stats + stats_obj = [] + stats_gap = [] + stats_ncuts = [] + stats_time_convert = 0 + stats_time_solve = 0 + stats_time_select = 0 + stats_time_tableau = 0 + stats_time_gmi = 0 + all_cuts = nothing + + # Read problem + model = read_from_file(mps_filename) + + for round = 1:max_rounds + @info "Round $(round)..." 
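For reviewers who want to run the rewritten `collect_gmi` directly: it is exercised by `test/src/Cuts/tableau/test_gmi.jl` later in this diff. A minimal driver, assuming `bell5.mps.gz` sits next to a `bell5.h5` produced by a prior collection run (the keyword values shown are the defaults from the signature above):

```julia
using HiGHS
using MIPLearn.Cuts

# Reads "bell5.mps.gz", expects "bell5.h5" alongside it with the optimal MIP
# solution, runs up to max_rounds of GMI separation, and writes the surviving
# cuts back into the .h5 file as "cuts_lhs", "cuts_lb", "cuts_ub".
stats = collect_gmi(
    "bell5.mps.gz";
    optimizer = HiGHS.Optimizer,
    max_rounds = 10,
    max_cuts_per_round = 100,
)
```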
+ + stats_time_convert = @elapsed begin + # Extract problem data + data = ProblemData(model) + + # Construct optimal solution vector (with correct variable sequence) + sol_opt = [sol_opt_dict[n] for n in data.var_names] + + # Assert optimal solution is feasible for the original problem + assert_leq(data.constr_lb, data.constr_lhs * sol_opt) + assert_leq(data.constr_lhs * sol_opt, data.constr_ub) + + # Convert to standard form + data_s, transforms = convert_to_standard_form(data) + model_s = to_model(data_s) + set_optimizer(model_s, optimizer) + relax_integrality(model_s) + + # Convert optimal solution to standard form + sol_opt_s = forward(transforms, sol_opt) + + # Assert converted solution is feasible for standard form problem + assert_eq(data_s.constr_lhs * sol_opt_s, data_s.constr_lb) + end + + # Optimize standard form + optimize!(model_s) + stats_time_solve += solve_time(model_s) + obj = objective_value(model_s) + + if round == 1 + # Assert standard form problem has same value as original + assert_eq(obj, obj_lp) + push!(stats_obj, obj) + push!(stats_gap, gap(obj)) + push!(stats_ncuts, 0) + end + if termination_status(model_s) != MOI.OPTIMAL + return + end + + # Select tableau rows + basis = get_basis(model_s) + sol_frac = get_x(model_s) + stats_time_select += @elapsed begin + selected_rows = + select_gmi_rows(data_s, basis, sol_frac, max_rows = max_cuts_per_round) + end + + # Compute selected tableau rows + stats_time_tableau += @elapsed begin + tableau = compute_tableau(data_s, basis, sol_frac, rows = selected_rows) + + # Assert tableau rows have been computed correctly + assert_eq(tableau.lhs * sol_frac, tableau.rhs) + assert_eq(tableau.lhs * sol_opt_s, tableau.rhs) + end -function select_gmi_rows(data, basis, x; max_rows = 10, atol = 0.001) + # Compute GMI cuts + stats_time_gmi += @elapsed begin + cuts_s = compute_gmi(data_s, tableau) + + # Assert cuts have been generated correctly + assert_cuts_off(cuts_s, sol_frac) + assert_does_not_cut_off(cuts_s, sol_opt_s) + + # Abort if no cuts are left + if length(cuts_s.lb) == 0 + @info "No cuts generated. Stopping." + break + end + end + + # Add GMI cuts to original problem + cuts = backwards(transforms, cuts_s) + assert_does_not_cut_off(cuts, sol_opt) + constrs = add_constraint_set(model, cuts) + + # Optimize original form + set_optimizer(model, optimizer) + undo_relax = relax_integrality(model) + optimize!(model) + obj = objective_value(model) + push!(stats_obj, obj) + push!(stats_gap, gap(obj)) + + # Store useful cuts; drop useless ones from the problem + useful = [abs(shadow_price(c)) > atol for c in constrs] + drop = findall(useful .== false) + keep = findall(useful .== true) + delete.(model, constrs[drop]) + if all_cuts === nothing + all_cuts = cuts + else + all_cuts.lhs = [all_cuts.lhs; cuts.lhs[keep, :]] + all_cuts.lb = [all_cuts.lb; cuts.lb[keep]] + all_cuts.ub = [all_cuts.ub; cuts.ub[keep]] + end + push!(stats_ncuts, length(all_cuts.lb)) + + undo_relax() + end + + # Store cuts + if all_cuts !== nothing + @info "Storing $(length(all_cuts.ub)) GMI cuts..." 
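Background for the coefficient fix in `compute_gmi` below: the corrected branches implement the textbook Gomory mixed-integer cut. For a tableau row `sum_j alpha_j x_j = beta` whose basic integer variable is fractional, with `f_j = frac(alpha_j)` and `f_0 = frac(beta)`, the cut over integer columns `I` and continuous columns `C` reads:

```latex
\sum_{j \in I,\, f_j \le f_0} \frac{f_j}{f_0}\, x_j
\;+\; \sum_{j \in I,\, f_j > f_0} \frac{1 - f_j}{1 - f_0}\, x_j
\;+\; \sum_{j \in C,\, \alpha_j \ge 0} \frac{\alpha_j}{f_0}\, x_j
\;+\; \sum_{j \in C,\, \alpha_j < 0} \frac{-\alpha_j}{1 - f_0}\, x_j
\;\ge\; 1
```

Three things change accordingly in the hunk below: the integer/continuous test now keys on the column type `data.var_types[j]` (the old code indexed by the row `i`, and compared a `Char` against the `String` `"C"`, which is always false in Julia), the negative-continuous branch gains the missing minus sign, and the right-hand side becomes 0.9999 (previously 0.999), i.e., the exact value of 1 slightly relaxed to absorb floating-point error.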
+ h5 = H5File(h5_filename) + h5.put_sparse("cuts_lhs", all_cuts.lhs) + h5.put_array("cuts_lb", all_cuts.lb) + h5.put_array("cuts_ub", all_cuts.ub) + h5.file.close() + end + + return OrderedDict( + "instance" => mps_filename, + "max_rounds" => max_rounds, + "rounds" => length(stats_obj) - 1, + "time_convert" => stats_time_convert, + "time_solve" => stats_time_solve, + "time_tableau" => stats_time_tableau, + "time_gmi" => stats_time_gmi, + "obj_mip" => obj_mip, + "stats_obj" => stats_obj, + "stats_gap" => stats_gap, + "stats_ncuts" => stats_ncuts, + ) +end + +function select_gmi_rows(data, basis, x; max_rows = 10, atol = 1e-4) candidate_rows = [ - r for - r = 1:length(basis.var_basic) if (data.var_types[basis.var_basic[r]] != 'C') && - (frac(x[basis.var_basic[r]]) > atol) + r for r = 1:length(basis.var_basic) if ( + (data.var_types[basis.var_basic[r]] != 'C') && + (frac(x[basis.var_basic[r]]) > atol) && + (frac2(x[basis.var_basic[r]]) > atol) + ) ] candidate_vals = frac.(x[basis.var_basic[candidate_rows]]) score = abs.(candidate_vals .- 0.5) @@ -19,10 +199,10 @@ function select_gmi_rows(data, basis, x; max_rows = 10, atol = 0.001) return [candidate_rows[perm[i]] for i = 1:min(length(perm), max_rows)] end -function compute_gmi(data::ProblemData, tableau::Tableau, tol = 1e-8)::ConstraintSet +function compute_gmi(data::ProblemData, tableau::Tableau)::ConstraintSet nrows, ncols = size(tableau.lhs) ub = Float64[Inf for _ = 1:nrows] - lb = Float64[0.999 for _ = 1:nrows] + lb = Float64[0.9999 for _ = 1:nrows] tableau_I, tableau_J, tableau_V = findnz(tableau.lhs) lhs_I = Int[] lhs_J = Int[] @@ -30,23 +210,25 @@ function compute_gmi(data::ProblemData, tableau::Tableau, tol = 1e-8)::Constrain @timeit "Compute coefficients" begin for k = 1:nnz(tableau.lhs) i::Int = tableau_I[k] + j::Int = tableau_J[k] v::Float64 = 0.0 - alpha_j = frac(tableau_V[k]) + frac_alpha_j = frac(tableau_V[k]) + alpha_j = tableau_V[k] beta = frac(tableau.rhs[i]) - if data.var_types[i] == "C" + if data.var_types[j] == 'C' if alpha_j >= 0 v = alpha_j / beta else - v = alpha_j / (1 - beta) + v = -alpha_j / (1 - beta) end else - if alpha_j <= beta - v = alpha_j / beta + if frac_alpha_j < beta + v = frac_alpha_j / beta else - v = (1 - alpha_j) / (1 - beta) + v = (1 - frac_alpha_j) / (1 - beta) end end - if abs(v) > tol + if abs(v) > 1e-8 push!(lhs_I, i) push!(lhs_J, tableau_J[k]) push!(lhs_V, v) @@ -57,28 +239,5 @@ function compute_gmi(data::ProblemData, tableau::Tableau, tol = 1e-8)::Constrain return ConstraintSet(; lhs, ub, lb) end -function assert_cuts_off(cuts::ConstraintSet, x::Vector{Float64}, tol = 1e-6) - for i = 1:length(cuts.lb) - val = cuts.lhs[i, :]' * x - if (val <= cuts.ub[i] - tol) && (val >= cuts.lb[i] + tol) - throw(ErrorException("inequality fails to cut off fractional solution")) - end - end -end - -function assert_does_not_cut_off(cuts::ConstraintSet, x::Vector{Float64}; tol = 1e-6) - for i = 1:length(cuts.lb) - val = cuts.lhs[i, :]' * x - ub = cuts.ub[i] - lb = cuts.lb[i] - if (val >= ub) || (val <= lb) - throw( - ErrorException( - "inequality $i cuts off integer solution ($lb <= $val <= $ub)", - ), - ) - end - end -end - -export compute_gmi, frac, select_gmi_rows, assert_cuts_off, assert_does_not_cut_off +export compute_gmi, + frac, select_gmi_rows, assert_cuts_off, assert_does_not_cut_off, collect_gmi diff --git a/src/Cuts/tableau/gmi_dual.jl b/src/Cuts/tableau/gmi_dual.jl new file mode 100644 index 0000000..6248f71 --- /dev/null +++ b/src/Cuts/tableau/gmi_dual.jl @@ -0,0 +1,625 @@ +# MIPLearn: Extensible 
Framework for Learning-Enhanced Mixed-Integer Optimization +# Copyright (C) 2020-2023, UChicago Argonne, LLC. All rights reserved. +# Released under the modified BSD license. See COPYING.md for more details. + +using Printf +using JuMP +using HiGHS +using Random +using DataStructures + +global ExpertDualGmiComponent = PyNULL() +global KnnDualGmiComponent = PyNULL() + +Base.@kwdef mutable struct _KnnDualGmiData + k = nothing + extractor = nothing + train_h5 = nothing + model = nothing + strategy = nothing +end + +function collect_gmi_dual( + mps_filename; + optimizer, + max_rounds = 10, + max_cuts_per_round = 500, +) + reset_timer!() + + @timeit "Read H5" begin + h5_filename = replace(mps_filename, ".mps.gz" => ".h5") + h5 = H5File(h5_filename, "r") + sol_opt_dict = Dict( + zip( + h5.get_array("static_var_names"), + convert(Array{Float64}, h5.get_array("mip_var_values")), + ), + ) + obj_mip = h5.get_scalar("mip_obj_value") + h5.file.close() + end + + # Define relative MIP gap + gap(v) = 100 * abs(obj_mip - v) / abs(obj_mip) + + @timeit "Initialize" begin + stats_obj = [] + stats_gap = [] + stats_ncuts = [] + original_basis = nothing + all_cuts = nothing + all_cuts_bases = nothing + all_cuts_rows = nothing + last_round_obj = nothing + end + + @timeit "Read problem" begin + model = read_from_file(mps_filename) + set_optimizer(model, optimizer) + obj_original = objective_function(model) + end + + for round = 1:max_rounds + @info "Round $(round)..." + + @timeit "Convert model to standard form" begin + # Extract problem data + data = ProblemData(model) + + # Construct optimal solution vector (with correct variable sequence) + sol_opt = [sol_opt_dict[n] for n in data.var_names] + + # Assert optimal solution is feasible for the original problem + assert_leq(data.constr_lb, data.constr_lhs * sol_opt) + assert_leq(data.constr_lhs * sol_opt, data.constr_ub) + + # Convert to standard form + data_s, transforms = convert_to_standard_form(data) + model_s = to_model(data_s) + set_optimizer(model_s, optimizer) + relax_integrality(model_s) + + # Convert optimal solution to standard form + sol_opt_s = forward(transforms, sol_opt) + + # Assert converted solution is feasible for standard form problem + assert_eq(data_s.constr_lhs * sol_opt_s, data_s.constr_lb) + end + + @timeit "Optimize standard model" begin + @info "Optimizing standard model..." + optimize!(model_s) + obj = objective_value(model_s) + if round == 1 + push!(stats_obj, obj) + push!(stats_gap, gap(obj)) + push!(stats_ncuts, 0) + else + if obj ≈ last_round_obj + @info ("No improvement in obj value. 
Aborting.") + break + end + end + if termination_status(model_s) != MOI.OPTIMAL + error("Non-optimal termination status") + end + last_round_obj = obj + end + + @timeit "Select tableau rows" begin + basis = get_basis(model_s) + if round == 1 + original_basis = basis + end + sol_frac = get_x(model_s) + selected_rows = + select_gmi_rows(data_s, basis, sol_frac, max_rows = max_cuts_per_round) + end + + @timeit "Compute tableau rows" begin + tableau = compute_tableau(data_s, basis, x = sol_frac, rows = selected_rows) + + # Assert tableau rows have been computed correctly + assert_eq(tableau.lhs * sol_frac, tableau.rhs, atol=1e-3) + assert_eq(tableau.lhs * sol_opt_s, tableau.rhs, atol=1e-3) + end + + @timeit "Compute GMI cuts" begin + cuts_s = compute_gmi(data_s, tableau) + + # Assert cuts have been generated correctly + assert_cuts_off(cuts_s, sol_frac) + assert_does_not_cut_off(cuts_s, sol_opt_s) + + # Abort if no cuts are left + if length(cuts_s.lb) == 0 + @info "No cuts generated. Aborting." + break + else + @info "Generated $(length(cuts_s.lb)) cuts" + end + end + + @timeit "Add GMI cuts to original model" begin + @timeit "Convert to original form" begin + cuts = backwards(transforms, cuts_s) + end + + @timeit "Prepare bv" begin + bv = repeat([basis], length(selected_rows)) + end + + @timeit "Append matrices" begin + if round == 1 + all_cuts = cuts + all_cuts_bases = bv + all_cuts_rows = selected_rows + else + all_cuts.lhs = [all_cuts.lhs; cuts.lhs] + all_cuts.lb = [all_cuts.lb; cuts.lb] + all_cuts.ub = [all_cuts.ub; cuts.ub] + all_cuts_bases = [all_cuts_bases; bv] + all_cuts_rows = [all_cuts_rows; selected_rows] + end + end + + @timeit "Add to model" begin + @info "Adding $(length(all_cuts.lb)) constraints to original model" + constrs, gmi_exps = add_constraint_set_dual_v2(model, all_cuts) + end + end + + @timeit "Optimize original model" begin + set_objective_function(model, obj_original) + undo_relax = relax_integrality(model) + @info "Optimizing original model (constr)..." 
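The dual variant is driven the same way as `collect_gmi`; `test_gmi_dual.jl` later in this diff calls it directly. A minimal driver using the test fixture names:

```julia
using HiGHS
using MIPLearn.Cuts

# Besides the cuts themselves, the dual collector also records the basis that
# produced each cut ("cuts_basis_vars", "cuts_basis_sizes", "cuts_rows") in
# the .h5 file; the kNN component defined further below replays those bases
# on unseen instances.
stats = collect_gmi_dual(
    "bell5.mps.gz";
    optimizer = HiGHS.Optimizer,
    max_rounds = 10,
    max_cuts_per_round = 500,
)
```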
+ optimize!(model) + obj = objective_value(model) + push!(stats_obj, obj) + push!(stats_gap, gap(obj)) + sp = [shadow_price(c) for c in constrs] + undo_relax() + useful = [abs(sp[i]) > 1e-6 for (i, _) in enumerate(constrs)] + keep = findall(useful .== true) + end + + @timeit "Filter out useless cuts" begin + @info "Keeping $(length(keep)) useful cuts" + all_cuts.lhs = all_cuts.lhs[keep, :] + all_cuts.lb = all_cuts.lb[keep] + all_cuts.ub = all_cuts.ub[keep] + all_cuts_bases = all_cuts_bases[keep, :] + all_cuts_rows = all_cuts_rows[keep, :] + push!(stats_ncuts, length(all_cuts_rows)) + if isempty(keep) + break + end + end + + @timeit "Update obj function of original model" begin + delete.(model, constrs) + set_objective_function( + model, + obj_original - + sum(sp[i] * gmi_exps[i] for (i, c) in enumerate(constrs) if useful[i]), + ) + end + end + + @timeit "Store cuts in H5 file" begin + if all_cuts !== nothing + ncuts = length(all_cuts_rows) + total = + length(original_basis.var_basic) + + length(original_basis.var_nonbasic) + + length(original_basis.constr_basic) + + length(original_basis.constr_nonbasic) + all_cuts_basis_sizes = Array{Int64,2}(undef, ncuts, 4) + all_cuts_basis_vars = Array{Int64,2}(undef, ncuts, total) + for i = 1:ncuts + vb = all_cuts_bases[i].var_basic + vn = all_cuts_bases[i].var_nonbasic + cb = all_cuts_bases[i].constr_basic + cn = all_cuts_bases[i].constr_nonbasic + all_cuts_basis_sizes[i, :] = [length(vb) length(vn) length(cb) length(cn)] + all_cuts_basis_vars[i, :] = [vb' vn' cb' cn'] + end + @info "Storing $(length(all_cuts.ub)) GMI cuts..." + h5 = H5File(h5_filename) + h5.put_sparse("cuts_lhs", all_cuts.lhs) + h5.put_array("cuts_lb", all_cuts.lb) + h5.put_array("cuts_ub", all_cuts.ub) + h5.put_array("cuts_basis_vars", all_cuts_basis_vars) + h5.put_array("cuts_basis_sizes", all_cuts_basis_sizes) + h5.put_array("cuts_rows", all_cuts_rows) + h5.file.close() + end + end + + to = TimerOutputs.get_defaulttimer() + stats_time = TimerOutputs.tottime(to) / 1e9 + print_timer() + + return OrderedDict( + "instance" => mps_filename, + "max_rounds" => max_rounds, + "rounds" => length(stats_obj) - 1, + "obj_mip" => obj_mip, + "stats_obj" => stats_obj, + "stats_gap" => stats_gap, + "stats_ncuts" => stats_ncuts, + "stats_time" => stats_time, + ) +end + +function ExpertDualGmiComponent_before_mip(test_h5, model, _) + # Read cuts and optimal solution + h5 = H5File(test_h5, "r") + sol_opt_dict = Dict( + zip( + h5.get_array("static_var_names"), + convert(Array{Float64}, h5.get_array("mip_var_values")), + ), + ) + cut_basis_vars = h5.get_array("cuts_basis_vars") + cut_basis_sizes = h5.get_array("cuts_basis_sizes") + cut_rows = h5.get_array("cuts_rows") + obj_mip = h5.get_scalar("mip_lower_bound") + if obj_mip === nothing + obj_mip = h5.get_scalar("mip_obj_value") + end + h5.close() + + # Initialize stats + stats_time_convert = 0 + stats_time_tableau = 0 + stats_time_gmi = 0 + all_cuts = nothing + + stats_time_convert = @elapsed begin + # Extract problem data + data = ProblemData(model) + + # Construct optimal solution vector (with correct variable sequence) + sol_opt = [sol_opt_dict[n] for n in data.var_names] + + # Assert optimal solution is feasible for the original problem + assert_leq(data.constr_lb, data.constr_lhs * sol_opt) + assert_leq(data.constr_lhs * sol_opt, data.constr_ub) + + # Convert to standard form + data_s, transforms = convert_to_standard_form(data) + model_s = to_model(data_s) + set_optimizer(model_s, HiGHS.Optimizer) + relax_integrality(model_s) + + # Convert 
optimal solution to standard form + sol_opt_s = forward(transforms, sol_opt) + + # Assert converted solution is feasible for standard form problem + assert_eq(data_s.constr_lhs * sol_opt_s, data_s.constr_lb) + + end + + current_basis = nothing + for (r, row) in enumerate(cut_rows) + stats_time_tableau += @elapsed begin + if r == 1 || cut_basis_vars[r, :] != cut_basis_vars[r-1, :] + vbb, vnn, cbb, cnn = cut_basis_sizes[r, :] + current_basis = Basis(; + var_basic = cut_basis_vars[r, 1:vbb], + var_nonbasic = cut_basis_vars[r, vbb+1:vbb+vnn], + constr_basic = cut_basis_vars[r, vbb+vnn+1:vbb+vnn+cbb], + constr_nonbasic = cut_basis_vars[r, vbb+vnn+cbb+1:vbb+vnn+cbb+cnn], + ) + end + tableau = compute_tableau(data_s, current_basis, rows = [row]) + assert_eq(tableau.lhs * sol_opt_s, tableau.rhs) + end + stats_time_gmi += @elapsed begin + cuts_s = compute_gmi(data_s, tableau) + assert_does_not_cut_off(cuts_s, sol_opt_s) + end + cuts = backwards(transforms, cuts_s) + assert_does_not_cut_off(cuts, sol_opt) + + if all_cuts === nothing + all_cuts = cuts + else + all_cuts.lhs = [all_cuts.lhs; cuts.lhs] + all_cuts.lb = [all_cuts.lb; cuts.lb] + all_cuts.ub = [all_cuts.ub; cuts.ub] + end + end + + # Strategy 1: Add all cuts during the first call + function cut_callback_1(cb_data) + if all_cuts !== nothing + constrs = build_constraints(model, all_cuts) + @info "Enforcing $(length(constrs)) cuts..." + for c in constrs + MOI.submit(model, MOI.UserCut(cb_data), c) + end + all_cuts = nothing + end + end + + # Strategy 2: Add violated cuts repeatedly until unable to separate + callback_disabled = false + function cut_callback_2(cb_data) + if callback_disabled + return + end + x = all_variables(model) + x_val = callback_value.(cb_data, x) + lhs_val = all_cuts.lhs * x_val + is_violated = lhs_val .> all_cuts.ub + selected_idx = findall(is_violated .== true) + selected_cuts = ConstraintSet( + lhs=all_cuts.lhs[selected_idx, :], + ub=all_cuts.ub[selected_idx], + lb=all_cuts.lb[selected_idx], + ) + constrs = build_constraints(model, selected_cuts) + if length(constrs) > 0 + @info "Enforcing $(length(constrs)) cuts..." + for c in constrs + MOI.submit(model, MOI.UserCut(cb_data), c) + end + else + @info "No violated cuts found. Disabling callback." 
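Both strategies above rely on the standard solver-independent callback mechanics from JuMP/MOI. A self-contained toy example of the same wiring (the model and the cut are illustrative only; GLPK is used because it supports user-cut callbacks and already appears in the test dependencies):

```julia
using JuMP
using GLPK

model = Model(GLPK.Optimizer)
@variable(model, 0 <= x[1:2] <= 5, Int)
@constraint(model, 2x[1] + 2x[2] <= 11)
@objective(model, Max, x[1] + x[2])

function toy_cut_callback(cb_data)
    x_val = callback_value.(Ref(cb_data), x)
    # x1 + x2 <= 5 is valid for the integer hull (it rounds down 5.5);
    # submit it only when the current relaxation point violates it.
    if sum(x_val) > 5 + 1e-6
        con = @build_constraint(x[1] + x[2] <= 5)
        MOI.submit(model, MOI.UserCut(cb_data), con)
    end
end
set_attribute(model, MOI.UserCutCallback(), toy_cut_callback)
optimize!(model)
```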
+ callback_disabled = true + end + end + + # Set up cut callback + set_attribute(model, MOI.UserCutCallback(), cut_callback_1) + # set_attribute(model, MOI.UserCutCallback(), cut_callback_2) + + stats = Dict() + stats["ExpertDualGmi: cuts"] = length(all_cuts.lb) + stats["ExpertDualGmi: time convert"] = stats_time_convert + stats["ExpertDualGmi: time tableau"] = stats_time_tableau + stats["ExpertDualGmi: time gmi"] = stats_time_gmi + return stats +end + +function add_constraint_set_dual_v2(model::JuMP.Model, cs::ConstraintSet) + vars = all_variables(model) + nrows, ncols = size(cs.lhs) + + @timeit "Transpose LHS" begin + lhs_t = spzeros(ncols, nrows) + ftranspose!(lhs_t, cs.lhs, x -> x) + lhs_t_rows = rowvals(lhs_t) + lhs_t_vals = nonzeros(lhs_t) + end + + constrs = [] + gmi_exps = [] + for i = 1:nrows + c = nothing + gmi_exp = nothing + gmi_exp2 = nothing + @timeit "Build expr" begin + expr = AffExpr() + for k in nzrange(lhs_t, i) + add_to_expression!(expr, lhs_t_vals[k], vars[lhs_t_rows[k]]) + end + end + @timeit "Add constraints" begin + if isinf(cs.ub[i]) + c = @constraint(model, cs.lb[i] <= expr) + gmi_exp = cs.lb[i] - expr + elseif isinf(cs.lb[i]) + c = @constraint(model, expr <= cs.ub[i]) + gmi_exp = expr - cs.ub[i] + else + c = @constraint(model, cs.lb[i] <= expr <= cs.ub[i]) + gmi_exp = cs.lb[i] - expr + gmi_exp2 = expr - cs.ub[i] + end + end + @timeit "Update structs" begin + push!(constrs, c) + push!(gmi_exps, gmi_exp) + if !isnothing(gmi_exp2) + push!(gmi_exps, gmi_exp2) + end + end + end + return constrs, gmi_exps +end + +function _dualgmi_features(h5_filename, extractor) + h5 = H5File(h5_filename, "r") + try + return extractor.get_instance_features(h5) + finally + h5.close() + end +end + +function _dualgmi_generate(train_h5, model) + @timeit "Read problem data" begin + data = ProblemData(model) + end + @timeit "Convert to standard form" begin + data_s, transforms = convert_to_standard_form(data) + end + + @timeit "Collect cuts from H5 files" begin + vars_to_unique_basis_offset = Dict() + unique_basis_vars = nothing + unique_basis_sizes = nothing + unique_basis_rows = nothing + + for h5_filename in train_h5 + h5 = H5File(h5_filename, "r") + cut_basis_vars = h5.get_array("cuts_basis_vars") + cut_basis_sizes = h5.get_array("cuts_basis_sizes") + cut_rows = h5.get_array("cuts_rows") + ncuts, nvars = size(cut_basis_vars) + if unique_basis_vars === nothing + unique_basis_vars = Matrix{Int}(undef, 0, nvars) + unique_basis_sizes = Matrix{Int}(undef, 0, 4) + unique_basis_rows = Dict{Int,Set{Int}}() + end + for i in 1:ncuts + vars = cut_basis_vars[i, :] + sizes = cut_basis_sizes[i, :] + row = cut_rows[i] + if vars ∉ keys(vars_to_unique_basis_offset) + offset = size(unique_basis_vars)[1] + 1 + vars_to_unique_basis_offset[vars] = offset + unique_basis_vars = [unique_basis_vars; vars'] + unique_basis_sizes = [unique_basis_sizes; sizes'] + unique_basis_rows[offset] = Set() + end + offset = vars_to_unique_basis_offset[vars] + push!(unique_basis_rows[offset], row) + end + h5.close() + end + end + + @timeit "Compute tableaus and cuts" begin + all_cuts = nothing + for (offset, rows) in unique_basis_rows + try + vbb, vnn, cbb, cnn = unique_basis_sizes[offset, :] + current_basis = Basis(; + var_basic = unique_basis_vars[offset, 1:vbb], + var_nonbasic = unique_basis_vars[offset, vbb+1:vbb+vnn], + constr_basic = unique_basis_vars[offset, vbb+vnn+1:vbb+vnn+cbb], + constr_nonbasic = unique_basis_vars[offset, vbb+vnn+cbb+1:vbb+vnn+cbb+cnn], + ) + + tableau = compute_tableau(data_s, current_basis; 
rows=collect(rows)) + cuts_s = compute_gmi(data_s, tableau) + cuts = backwards(transforms, cuts_s) + if all_cuts === nothing + all_cuts = cuts + else + all_cuts.lhs = [all_cuts.lhs; cuts.lhs] + all_cuts.lb = [all_cuts.lb; cuts.lb] + all_cuts.ub = [all_cuts.ub; cuts.ub] + end + catch e + if isa(e, AssertionError) + @warn "Numerical error detected. Skipping cuts from current tableau." + continue + else + rethrow(e) + end + end + end + end + return all_cuts +end + +function _dualgmi_set_callback(model, all_cuts) + function cut_callback(cb_data) + if all_cuts !== nothing + constrs = build_constraints(model, all_cuts) + @info "Enforcing $(length(constrs)) cuts..." + for c in constrs + MOI.submit(model, MOI.UserCut(cb_data), c) + end + all_cuts = nothing + end + end + set_attribute(model, MOI.UserCutCallback(), cut_callback) +end + +function KnnDualGmiComponent_fit(data::_KnnDualGmiData, train_h5) + x = hcat([_dualgmi_features(filename, data.extractor) for filename in train_h5]...)' + model = pyimport("sklearn.neighbors").NearestNeighbors(n_neighbors = length(train_h5)) + model.fit(x) + data.model = model + data.train_h5 = train_h5 +end + + +function KnnDualGmiComponent_before_mip(data::_KnnDualGmiData, test_h5, model, _) + reset_timer!() + + @timeit "Extract features" begin + x = _dualgmi_features(test_h5, data.extractor) + x = reshape(x, 1, length(x)) + end + + @timeit "Find neighbors" begin + neigh_dist, neigh_ind = data.model.kneighbors(x, return_distance = true) + neigh_ind = neigh_ind .+ 1 + N = length(neigh_dist) + k = min(N, data.k) + + if data.strategy == "near" + selected = collect(1:k) + elseif data.strategy == "far" + selected = collect((N - k + 1) : N) + elseif data.strategy == "rand" + selected = shuffle(collect(1:N))[1:k] + else + error("unknown strategy: $(data.strategy)") + end + + @info "Dual GMI: Selected neighbors ($(data.strategy)):" + neigh_dist = neigh_dist[selected] + neigh_ind = neigh_ind[selected] + for i in 1:k + h5_filename = data.train_h5[neigh_ind[i]] + dist = neigh_dist[i] + @info " $(h5_filename) dist=$(dist)" + end + end + + @info "Dual GMI: Generating cuts..." 
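The `neigh_ind .+ 1` adjustment above is easy to miss: scikit-learn returns 0-based row indices, which must be shifted before indexing Julia arrays such as `data.train_h5`. A self-contained illustration with made-up data:

```julia
using PyCall

NearestNeighbors = pyimport("sklearn.neighbors").NearestNeighbors

X = [0.0 0.0; 1.0 0.0; 5.0 5.0]       # three training points, one per row
nn = NearestNeighbors(n_neighbors = 2)
nn.fit(X)

dist, ind = nn.kneighbors([0.2 0.0], return_distance = true)
ind = ind .+ 1                         # 0-based (Python) -> 1-based (Julia)
# ind == [1 2]: rows 1 and 2 of X are the two nearest neighbors
```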
+ @timeit "Generate cuts" begin + time_generate = @elapsed begin + cuts = _dualgmi_generate(data.train_h5[neigh_ind], model) + end + @info "Dual GMI: Generated $(length(cuts.lb)) unique cuts in $(time_generate) seconds" + end + + @timeit "Set callback" begin + _dualgmi_set_callback(model, cuts) + end + + print_timer() + + stats = Dict() + stats["KnnDualGmi: k"] = k + stats["KnnDualGmi: strategy"] = data.strategy + stats["KnnDualGmi: cuts"] = length(cuts.lb) + stats["KnnDualGmi: time generate"] = time_generate + return stats +end + +function __init_gmi_dual__() + @pydef mutable struct Class1 + function fit(_, _) end + function before_mip(self, test_h5, model, stats) + ExpertDualGmiComponent_before_mip(test_h5, model.inner, stats) + end + end + copy!(ExpertDualGmiComponent, Class1) + + @pydef mutable struct Class2 + function __init__(self; extractor, k = 3, strategy = "near") + self.data = _KnnDualGmiData(; extractor, k, strategy) + end + function fit(self, train_h5) + KnnDualGmiComponent_fit(self.data, train_h5) + end + function before_mip(self, test_h5, model, stats) + return @time KnnDualGmiComponent_before_mip(self.data, test_h5, model.inner, stats) + end + end + copy!(KnnDualGmiComponent, Class2) +end + +export collect_gmi_dual, expert_gmi_dual, ExpertDualGmiComponent, KnnDualGmiComponent + diff --git a/src/Cuts/tableau/moi.jl b/src/Cuts/tableau/moi.jl index ec9bdea..7e4b0de 100644 --- a/src/Cuts/tableau/moi.jl +++ b/src/Cuts/tableau/moi.jl @@ -9,6 +9,8 @@ function ProblemData(model::Model)::ProblemData # Objective function obj = objective_function(model) + obj_offset = obj.constant + obj_sense = objective_sense(model) obj = [v ∈ keys(obj.terms) ? obj.terms[v] : 0.0 for v in vars] # Variable types, lower bounds and upper bounds @@ -86,8 +88,9 @@ function ProblemData(model::Model)::ProblemData @assert length(constr_ub) == m return ProblemData( - obj_offset = 0.0; obj, + obj_offset, + obj_sense, constr_lb, constr_ub, constr_lhs, @@ -102,6 +105,7 @@ function to_model(data::ProblemData, tol = 1e-6)::Model model = Model() # Variables + obj_expr = AffExpr(data.obj_offset) nvars = length(data.obj) @variable(model, x[1:nvars]) for i = 1:nvars @@ -117,8 +121,9 @@ function to_model(data::ProblemData, tol = 1e-6)::Model if isfinite(data.var_ub[i]) set_upper_bound(x[i], data.var_ub[i]) end - set_objective_coefficient(model, x[i], data.obj[i]) + add_to_expression!(obj_expr, x[i], data.obj[i]) end + @objective(model, data.obj_sense, obj_expr) # Constraints lhs = data.constr_lhs * x @@ -140,28 +145,44 @@ function to_model(data::ProblemData, tol = 1e-6)::Model end function add_constraint_set(model::JuMP.Model, cs::ConstraintSet) + constrs = build_constraints(model, cs) + for c in constrs + add_constraint(model, c) + end + return constrs +end + +function set_warm_start(model::JuMP.Model, x::Vector{Float64}) + vars = all_variables(model) + for (i, xi) in enumerate(x) + set_start_value(vars[i], xi) + end +end + +function build_constraints(model::JuMP.Model, cs::ConstraintSet) vars = all_variables(model) nrows, _ = size(cs.lhs) constrs = [] for i = 1:nrows + # Build LHS expression + row = cs.lhs[i, :] + lhs_expr = AffExpr() + for (offset, val) in enumerate(row.nzval) + add_to_expression!(lhs_expr, vars[row.nzind[offset]], val) + end + + # Build JuMP constraint c = nothing if isinf(cs.ub[i]) - c = @constraint(model, cs.lb[i] <= dot(cs.lhs[i, :], vars)) + c = @build_constraint(cs.lb[i] <= lhs_expr) elseif isinf(cs.lb[i]) - c = @constraint(model, dot(cs.lhs[i, :], vars) <= cs.ub[i]) + c = 
@build_constraint(lhs_expr <= cs.ub[i]) else - c = @constraint(model, cs.lb[i] <= dot(cs.lhs[i, :], vars) <= cs.ub[i]) + c = @build_constraint(cs.lb[i] <= lhs_expr <= cs.ub[i]) end push!(constrs, c) end return constrs end -function set_warm_start(model::JuMP.Model, x::Vector{Float64}) - vars = all_variables(model) - for (i, xi) in enumerate(x) - set_start_value(vars[i], xi) - end -end - -export to_model, ProblemData, add_constraint_set, set_warm_start +export to_model, ProblemData, add_constraint_set, set_warm_start, build_constraints diff --git a/src/Cuts/tableau/numerics.jl b/src/Cuts/tableau/numerics.jl new file mode 100644 index 0000000..15c38e4 --- /dev/null +++ b/src/Cuts/tableau/numerics.jl @@ -0,0 +1,51 @@ +@inline frac(x::Float64) = x - floor(x) + +@inline frac2(x::Float64) = ceil(x) - x + +function assert_leq(a, b; atol = 0.01) + if !all(a .<= b .+ atol) + delta = a .- b + for i in eachindex(delta) + if delta[i] > atol + @info "Assertion failed: a[$i] = $(a[i]) <= $(b[i]) = b[$i]" + end + end + error("assert_leq failed") + end +end + +function assert_eq(a, b; atol = 1e-4) + if !all(abs.(a .- b) .<= atol) + delta = abs.(a .- b) + for i in eachindex(delta) + if delta[i] > atol + @info "Assertion failed: a[$i] = $(a[i]) == $(b[i]) = b[$i]" + end + end + error("assert_eq failed") + end +end + +function assert_cuts_off(cuts::ConstraintSet, x::Vector{Float64}, tol = 1e-6) + for i = 1:length(cuts.lb) + val = cuts.lhs[i, :]' * x + if (val <= cuts.ub[i] - tol) && (val >= cuts.lb[i] + tol) + throw(ErrorException("inequality fails to cut off fractional solution")) + end + end +end + +function assert_does_not_cut_off(cuts::ConstraintSet, x::Vector{Float64}; tol = 1e-6) + for i = 1:length(cuts.lb) + val = cuts.lhs[i, :]' * x + ub = cuts.ub[i] + lb = cuts.lb[i] + if (val >= ub) || (val <= lb) + throw( + ErrorException( + "inequality $i cuts off integer solution ($lb <= $val <= $ub)", + ), + ) + end + end +end diff --git a/src/Cuts/tableau/structs.jl b/src/Cuts/tableau/structs.jl index cbdab11..7a2bb07 100644 --- a/src/Cuts/tableau/structs.jl +++ b/src/Cuts/tableau/structs.jl @@ -7,6 +7,7 @@ using SparseArrays Base.@kwdef mutable struct ProblemData obj::Vector{Float64} obj_offset::Float64 + obj_sense::Any constr_lb::Vector{Float64} constr_ub::Vector{Float64} constr_lhs::SparseMatrixCSC diff --git a/src/Cuts/tableau/tableau.jl b/src/Cuts/tableau/tableau.jl index ee389c9..06bf3bd 100644 --- a/src/Cuts/tableau/tableau.jl +++ b/src/Cuts/tableau/tableau.jl @@ -54,8 +54,8 @@ end function compute_tableau( data::ProblemData, - basis::Basis, - x::Vector{Float64}; + basis::Basis; + x::Union{Nothing,Vector{Float64}} = nothing, rows::Union{Vector{Int},Nothing} = nothing, tol = 1e-8, )::Tableau @@ -73,11 +73,12 @@ function compute_tableau( factor = klu(sparse(lhs_b')) end - @timeit "Compute tableau LHS" begin - tableau_lhs_I = Int[] - tableau_lhs_J = Int[] - tableau_lhs_V = Float64[] - for k = 1:length(rows) + @timeit "Compute tableau" begin + @timeit "Initialize" begin + tableau_rhs = zeros(length(rows)) + tableau_lhs = zeros(length(rows), ncols) + end + for k in eachindex(1:length(rows)) @timeit "Prepare inputs" begin i = rows[k] e = zeros(nrows) @@ -87,25 +88,14 @@ function compute_tableau( sol = factor \ e end @timeit "Multiply" begin - row = sol' * data.constr_lhs - end - @timeit "Sparsify & copy" begin - for (j, v) in enumerate(row) - if abs(v) < tol - continue - end - push!(tableau_lhs_I, k) - push!(tableau_lhs_J, j) - push!(tableau_lhs_V, v) - end + tableau_lhs[k, :] = sol' * data.constr_lhs + 
tableau_rhs[k] = sol' * data.constr_ub end end - tableau_lhs = - sparse(tableau_lhs_I, tableau_lhs_J, tableau_lhs_V, length(rows), ncols) - end - - @timeit "Compute tableau RHS" begin - tableau_rhs = [x[basis.var_basic]; zeros(length(basis.constr_basic))][rows] + @timeit "Sparsify" begin + tableau_lhs[abs.(tableau_lhs) .<= tol] .= 0 + tableau_lhs = sparse(tableau_lhs) + end end @timeit "Compute tableau objective row" begin @@ -114,12 +104,13 @@ function compute_tableau( tableau_obj[abs.(tableau_obj). 1.0001 + push!(violations, sort(clique)) + end + end + return violations + end + + function cuts_enforce(violations) + @info "Adding $(length(violations)) clique cuts..." + for clique in violations + constr = @build_constraint(sum(x[i] for i in clique) <= 1) + submit(model, constr) + end + end + + return JumpModel(model, cuts_separate = cuts_separate, cuts_enforce = cuts_enforce) +end + +export MaxWeightStableSetData, MaxWeightStableSetGenerator, build_stab_model_jump diff --git a/src/problems/tsp.jl b/src/problems/tsp.jl new file mode 100644 index 0000000..1d2dc39 --- /dev/null +++ b/src/problems/tsp.jl @@ -0,0 +1,71 @@ +# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization +# Copyright (C) 2020-2024, UChicago Argonne, LLC. All rights reserved. +# Released under the modified BSD license. See COPYING.md for more details. + +using JuMP + +global TravelingSalesmanData = PyNULL() +global TravelingSalesmanGenerator = PyNULL() + +function __init_problems_tsp__() + copy!(TravelingSalesmanData, pyimport("miplearn.problems.tsp").TravelingSalesmanData) + copy!( + TravelingSalesmanGenerator, + pyimport("miplearn.problems.tsp").TravelingSalesmanGenerator, + ) +end + +function build_tsp_model_jump(data::Any; optimizer) + nx = pyimport("networkx") + + if data isa String + data = read_pkl_gz(data) + end + model = Model(optimizer) + edges = [(i, j) for i = 1:data.n_cities for j = (i+1):data.n_cities] + x = @variable(model, x[edges], Bin) + @objective(model, Min, sum(x[(i, j)] * data.distances[i, j] for (i, j) in edges)) + + # Eq: Must choose two edges adjacent to each node + @constraint( + model, + eq_degree[i in 1:data.n_cities], + sum(x[(min(i, j), max(i, j))] for j = 1:data.n_cities if i != j) == 2 + ) + + function lazy_separate(cb_data) + x_val = callback_value.(Ref(cb_data), x) + violations = [] + selected_edges = [e for e in edges if x_val[e] > 0.5] + graph = nx.Graph() + graph.add_edges_from(selected_edges) + for component in nx.connected_components(graph) + if length(component) < data.n_cities + cut_edges = [ + [e[1], e[2]] for + e in edges if (e[1] ∈ component && e[2] ∉ component) || + (e[1] ∉ component && e[2] ∈ component) + ] + push!(violations, cut_edges) + end + end + return violations + end + + function lazy_enforce(violations) + @info "Adding $(length(violations)) subtour elimination eqs..." 
+ for violation in violations + constr = @build_constraint(sum(x[(e[1], e[2])] for e in violation) >= 2) + submit(model, constr) + end + end + + return JumpModel( + model, + lazy_enforce = lazy_enforce, + lazy_separate = lazy_separate, + lp_optimizer = optimizer, + ) +end + +export TravelingSalesmanData, TravelingSalesmanGenerator, build_tsp_model_jump diff --git a/src/solvers/jump.jl b/src/solvers/jump.jl index 56160bc..b20cff0 100644 --- a/src/solvers/jump.jl +++ b/src/solvers/jump.jl @@ -4,9 +4,31 @@ using JuMP using HiGHS +using JSON global JumpModel = PyNULL() +Base.@kwdef mutable struct _JumpModelExtData + aot_cuts = nothing + cb_data = nothing + cuts = [] + lazy = [] + where::Symbol = :WHERE_DEFAULT + cuts_enforce::Union{Function,Nothing} = nothing + cuts_separate::Union{Function,Nothing} = nothing + lazy_enforce::Union{Function,Nothing} = nothing + lazy_separate::Union{Function,Nothing} = nothing + lp_optimizer::Any +end + +function JuMP.copy_extension_data( + old_ext::_JumpModelExtData, + new_model::AbstractModel, + ::AbstractModel, +) + new_model.ext[:miplearn] = _JumpModelExtData(lp_optimizer = old_ext.lp_optimizer) +end + # ----------------------------------------------------------------------------- function _add_constrs( @@ -35,6 +57,17 @@ function _add_constrs( end end +function submit(model::JuMP.Model, constr) + ext = model.ext[:miplearn] + if ext.where == :WHERE_CUTS + MOI.submit(model, MOI.UserCut(ext.cb_data), constr) + elseif ext.where == :WHERE_LAZY + MOI.submit(model, MOI.LazyConstraint(ext.cb_data), constr) + else + add_constraint(model, constr) + end +end + function _extract_after_load(model::JuMP.Model, h5) @info "_extract_after_load" if JuMP.objective_sense(model) == MOI.MIN_SENSE @@ -110,6 +143,9 @@ function _extract_after_load_constrs(model::JuMP.Model, h5) end end end + if isempty(names) + error("no model constraints found; note that MIPLearn ignores unnamed constraints") + end lhs = sparse(lhs_rows, lhs_cols, lhs_values, length(rhs), JuMP.num_variables(model)) h5.put_sparse("static_constr_lhs", lhs) h5.put_array("static_constr_rhs", rhs) @@ -252,6 +288,11 @@ function _extract_after_mip(model::JuMP.Model, h5) rhs = h5.get_array("static_constr_rhs") slacks = abs.(lhs * x - rhs) h5.put_array("mip_constr_slacks", slacks) + + # Cuts and lazy constraints + ext = model.ext[:miplearn] + h5.put_scalar("mip_cuts", JSON.json(ext.cuts)) + h5.put_scalar("mip_lazy", JSON.json(ext.lazy)) end function _fix_variables(model::JuMP.Model, var_names, var_values, stats) @@ -264,8 +305,53 @@ function _fix_variables(model::JuMP.Model, var_names, var_values, stats) end function _optimize(model::JuMP.Model) - @info "_optimize" + # Set up cut callbacks + ext = model.ext[:miplearn] + ext.cuts = [] + function cut_callback(cb_data) + ext.cb_data = cb_data + ext.where = :WHERE_CUTS + if ext.aot_cuts !== nothing + @info "Enforcing $(length(ext.aot_cuts)) cuts ahead-of-time..." 
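On the new `mip_cuts`/`mip_lazy` fields written by `_extract_after_mip` above: collected violations are plain nested arrays, so they survive the HDF5 round-trip as a JSON string scalar. A quick check of the encoding (the clique values are illustrative; `test_stab` later in this diff matches a prefix of exactly this format):

```julia
using JSON

violations = [[0, 8, 11, 13], [0, 8, 13, 20]]  # e.g., violated cliques
s = JSON.json(violations)                      # "[[0,8,11,13],[0,8,13,20]]"
@assert JSON.parse(s) == violations            # lossless for integer arrays
```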
+ violations = ext.aot_cuts + ext.aot_cuts = nothing + else + violations = ext.cuts_separate(cb_data) + for v in violations + push!(ext.cuts, v) + end + end + if !isempty(violations) + ext.cuts_enforce(violations) + end + end + if ext.cuts_separate !== nothing + set_attribute(model, MOI.UserCutCallback(), cut_callback) + end + + # Set up lazy constraint callbacks + ext.lazy = [] + function lazy_callback(cb_data) + ext.cb_data = cb_data + ext.where = :WHERE_LAZY + violations = ext.lazy_separate(cb_data) + for v in violations + push!(ext.lazy, v) + end + if !isempty(violations) + ext.lazy_enforce(violations) + end + end + if ext.lazy_separate !== nothing + set_attribute(model, MOI.LazyConstraintCallback(), lazy_callback) + end + + # Optimize optimize!(model) + + # Cleanup + ext.where = :WHERE_DEFAULT + ext.cb_data = nothing flush(stdout) Libc.flush_cstdio() end @@ -273,9 +359,8 @@ end function _relax(model::JuMP.Model) relaxed, _ = copy_model(model) relax_integrality(relaxed) - # FIXME: Remove hardcoded optimizer - set_optimizer(relaxed, HiGHS.Optimizer) - # set_silent(relaxed) + set_optimizer(relaxed, model.ext[:miplearn].lp_optimizer) + set_silent(relaxed) return relaxed end @@ -291,16 +376,39 @@ function _set_warm_starts(model::JuMP.Model, var_names, var_values, stats) end function _write(model::JuMP.Model, filename) + ext = model.ext[:miplearn] + if ext.lazy_separate !== nothing + set_attribute(model, MOI.LazyConstraintCallback(), nothing) + end + if ext.cuts_separate !== nothing + set_attribute(model, MOI.UserCutCallback(), nothing) + end write_to_file(model, filename) end # ----------------------------------------------------------------------------- function __init_solvers_jump__() - @pydef mutable struct Class + AbstractModel = pyimport("miplearn.solvers.abstract").AbstractModel + @pydef mutable struct Class <: AbstractModel - function __init__(self, inner) + function __init__( + self, + inner; + cuts_enforce::Union{Function,Nothing} = nothing, + cuts_separate::Union{Function,Nothing} = nothing, + lazy_enforce::Union{Function,Nothing} = nothing, + lazy_separate::Union{Function,Nothing} = nothing, + lp_optimizer = HiGHS.Optimizer, + ) self.inner = inner + self.inner.ext[:miplearn] = _JumpModelExtData( + cuts_enforce = cuts_enforce, + cuts_separate = cuts_separate, + lazy_enforce = lazy_enforce, + lazy_separate = lazy_separate, + lp_optimizer = lp_optimizer, + ) end add_constrs( @@ -336,6 +444,21 @@ function __init_solvers_jump__() _set_warm_starts(self.inner, from_str_array(var_names), var_values, stats) write(self, filename) = _write(self.inner, filename) + + function set_cuts(self, cuts) + self.inner.ext[:miplearn].aot_cuts = cuts + end + + function lazy_enforce(self, violations) + self.inner.ext[:miplearn].lazy_enforce(violations) + end + + function _lazy_enforce_collected(self) + ext = self.inner.ext[:miplearn] + if ext.lazy_enforce !== nothing + ext.lazy_enforce(ext.lazy) + end + end end copy!(JumpModel, Class) end diff --git a/test/Project.toml b/test/Project.toml index 9987a59..a3034bb 100644 --- a/test/Project.toml +++ b/test/Project.toml @@ -7,6 +7,7 @@ version = "0.1.0" CSV = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b" Clp = "e2554f3b-3117-50c0-817c-e040a3ddf72d" DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" +GLPK = "60bf3e95-4087-53dc-ae20-288a0d20c6a6" Glob = "c27321d9-0574-5035-807b-f59d2c89b15c" HDF5 = "f67ccb44-e63f-5c2f-98bd-6dc0ccc4ba2f" HiGHS = "87dc4568-4c63-4d18-b0c0-bb2238e4078b" @@ -17,6 +18,8 @@ Logging = "56ddb016-857b-54e1-b83d-db4d58db5568" MIPLearn = 
"2b1277c3-b477-4c49-a15e-7ba350325c68" PyCall = "438e738f-606a-5dbb-bf0a-cddfbfd45ab0" Revise = "295af30f-e4ad-537b-8983-00126c2a3abe" +SCIP = "82193955-e24f-5292-bf16-6f2c5261a85f" +SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [compat] diff --git a/test/fixtures/bell5.h5 b/test/fixtures/bell5.h5 index 73bc778..a52c61d 100644 Binary files a/test/fixtures/bell5.h5 and b/test/fixtures/bell5.h5 differ diff --git a/test/fixtures/stab-n50-00000.h5 b/test/fixtures/stab-n50-00000.h5 new file mode 100644 index 0000000..f8ae28a Binary files /dev/null and b/test/fixtures/stab-n50-00000.h5 differ diff --git a/test/fixtures/stab-n50-00000.pkl.gz b/test/fixtures/stab-n50-00000.pkl.gz new file mode 100644 index 0000000..19bddd8 Binary files /dev/null and b/test/fixtures/stab-n50-00000.pkl.gz differ diff --git a/test/fixtures/tsp-n20-00000.h5 b/test/fixtures/tsp-n20-00000.h5 new file mode 100644 index 0000000..d9f7a74 Binary files /dev/null and b/test/fixtures/tsp-n20-00000.h5 differ diff --git a/test/fixtures/tsp-n20-00000.mps.gz b/test/fixtures/tsp-n20-00000.mps.gz new file mode 100644 index 0000000..cb04ec5 Binary files /dev/null and b/test/fixtures/tsp-n20-00000.mps.gz differ diff --git a/test/fixtures/tsp-n20-00000.pkl.gz b/test/fixtures/tsp-n20-00000.pkl.gz new file mode 100644 index 0000000..98f3797 Binary files /dev/null and b/test/fixtures/tsp-n20-00000.pkl.gz differ diff --git a/test/fixtures/vpm2.h5 b/test/fixtures/vpm2.h5 index 9481679..fc4a159 100644 Binary files a/test/fixtures/vpm2.h5 and b/test/fixtures/vpm2.h5 differ diff --git a/test/src/BB/test_bb.jl b/test/src/BB/test_bb.jl index 7946e24..b1bc503 100644 --- a/test/src/BB/test_bb.jl +++ b/test/src/BB/test_bb.jl @@ -89,11 +89,7 @@ function bb_run(optimizer_name, optimizer; large=true) BB.ReliabilityBranching(aggregation=:min, collect=true), ] h5 = H5File("$FIXTURES/$instance.h5") - mip_lower_bound = h5.get_scalar("mip_lower_bound") - mip_upper_bound = h5.get_scalar("mip_upper_bound") - mip_sense = h5.get_scalar("mip_sense") - mip_primal_bound = - mip_sense == "min" ? mip_upper_bound : mip_lower_bound + mip_obj_bound = h5.get_scalar("mip_obj_bound") h5.file.close() mip = BB.init(optimizer) @@ -101,10 +97,10 @@ function bb_run(optimizer_name, optimizer; large=true) @info optimizer_name, branch_rule, instance @time BB.solve!( mip, - initial_primal_bound=mip_primal_bound, - print_interval=1, - node_limit=25, - branch_rule=branch_rule, + initial_primal_bound = mip_obj_bound, + print_interval = 1, + node_limit = 25, + branch_rule = branch_rule, ) end end diff --git a/test/src/Cuts/tableau/test_gmi.jl b/test/src/Cuts/tableau/test_gmi.jl new file mode 100644 index 0000000..af286c7 --- /dev/null +++ b/test/src/Cuts/tableau/test_gmi.jl @@ -0,0 +1,23 @@ +# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization +# Copyright (C) 2020-2024, UChicago Argonne, LLC. All rights reserved. +# Released under the modified BSD license. See COPYING.md for more details. 
+ +using HiGHS + +function test_cuts_tableau_gmi() + mps_filename = "$BASEDIR/../fixtures/bell5.mps.gz" + h5_filename = "$BASEDIR/../fixtures/bell5.h5" + collect_gmi(mps_filename, optimizer = HiGHS.Optimizer) + h5 = H5File(h5_filename, "r") + try + cuts_lb = h5.get_array("cuts_lb") + cuts_ub = h5.get_array("cuts_ub") + cuts_lhs = h5.get_sparse("cuts_lhs") + n_cuts = length(cuts_lb) + @test n_cuts > 0 + @test n_cuts == length(cuts_ub) + @test cuts_lhs.shape[1] == n_cuts + finally + h5.close() + end +end diff --git a/test/src/Cuts/tableau/test_gmi_dual.jl b/test/src/Cuts/tableau/test_gmi_dual.jl new file mode 100644 index 0000000..2b9bcf2 --- /dev/null +++ b/test/src/Cuts/tableau/test_gmi_dual.jl @@ -0,0 +1,70 @@ +# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization +# Copyright (C) 2020-2024, UChicago Argonne, LLC. All rights reserved. +# Released under the modified BSD license. See COPYING.md for more details. + +using SCIP +using HiGHS +using MIPLearn.Cuts + +function test_cuts_tableau_gmi_dual_collect() + mps_filename = "$BASEDIR/../fixtures/bell5.mps.gz" + h5_filename = "$BASEDIR/../fixtures/bell5.h5" + stats = collect_gmi_dual(mps_filename, optimizer = HiGHS.Optimizer) + h5 = H5File(h5_filename, "r") + try + cuts_basis_vars = h5.get_array("cuts_basis_vars") + cuts_basis_sizes = h5.get_array("cuts_basis_sizes") + cuts_rows = h5.get_array("cuts_rows") + @test size(cuts_basis_vars) == (15, 402) + @test size(cuts_basis_sizes) == (15, 4) + @test size(cuts_rows) == (15,) + finally + h5.close() + end +end + +function test_cuts_tableau_gmi_dual_usage() + function build_model(mps_filename) + model = read_from_file(mps_filename) + set_optimizer(model, SCIP.Optimizer) + return JumpModel(model) + end + + mps_filename = "$BASEDIR/../fixtures/bell5.mps.gz" + h5_filename = "$BASEDIR/../fixtures/bell5.h5" + rm(h5_filename, force=true) + + # Run basic collector + bc = BasicCollector(write_mps = false, skip_lp = true) + bc.collect([mps_filename], build_model) + + # Run dual GMI collector + @info "Running dual GMI collector..." 
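A note on `cuts_lhs.shape[1]` in `test_cuts_tableau_gmi` above: `h5.get_sparse` returns the Python-side scipy matrix, so `.shape` is the scipy attribute, and PyCall's 1-based tuple indexing makes `shape[1]` the row count. To work with the matrix natively, convert it, as `test_io.jl` later in this diff does:

```julia
using SparseArrays
using MIPLearn

h5 = H5File("bell5.h5", "r")
lhs_py = h5.get_sparse("cuts_lhs")               # scipy sparse matrix (PyObject)
lhs = MIPLearn.convert(SparseMatrixCSC, lhs_py)  # native Julia SparseMatrixCSC
h5.close()
```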
+ collect_gmi_dual(mps_filename, optimizer = HiGHS.Optimizer) + + # # Test expert component + # solver = LearningSolver( + # components = [ + # ExpertPrimalComponent(action = SetWarmStart()), + # ExpertDualGmiComponent(), + # ], + # skip_lp = true, + # ) + # solver.optimize(mps_filename, build_model) + + # Test kNN component + knn = KnnDualGmiComponent( + extractor = H5FieldsExtractor(instance_fields = ["static_var_obj_coeffs"]), + k = 2, + ) + knn.fit([h5_filename, h5_filename]) + solver = LearningSolver( + components = [ + ExpertPrimalComponent(action = SetWarmStart()), + knn, + ], + skip_lp = true, + ) + solver.optimize(mps_filename, build_model) + return +end diff --git a/test/src/MIPLearnT.jl b/test/src/MIPLearnT.jl index b14b883..e52e920 100644 --- a/test/src/MIPLearnT.jl +++ b/test/src/MIPLearnT.jl @@ -16,10 +16,16 @@ FIXTURES = "$BASEDIR/../fixtures" include("fixtures.jl") include("BB/test_bb.jl") +include("components/test_cuts.jl") +include("components/test_lazy.jl") include("Cuts/BlackBox/test_cplex.jl") +include("Cuts/tableau/test_gmi.jl") +include("Cuts/tableau/test_gmi_dual.jl") include("problems/test_setcover.jl") -include("test_io.jl") +include("problems/test_stab.jl") +include("problems/test_tsp.jl") include("solvers/test_jump.jl") +include("test_io.jl") include("test_usage.jl") function runtests() @@ -27,11 +33,14 @@ function runtests() @testset "BB" begin test_bb() end - # test_cuts_blackbox_cplex() test_io() test_problems_setcover() + test_problems_stab() + test_problems_tsp() test_solvers_jump() test_usage() + test_cuts() + test_lazy() end end diff --git a/test/src/components/test_cuts.jl b/test/src/components/test_cuts.jl new file mode 100644 index 0000000..9f88b3e --- /dev/null +++ b/test/src/components/test_cuts.jl @@ -0,0 +1,41 @@ +# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization +# Copyright (C) 2020-2024, UChicago Argonne, LLC. All rights reserved. +# Released under the modified BSD license. See COPYING.md for more details. + +using SCIP + +function gen_stab() + np = pyimport("numpy") + uniform = pyimport("scipy.stats").uniform + randint = pyimport("scipy.stats").randint + np.random.seed(42) + gen = MaxWeightStableSetGenerator( + w = uniform(10.0, scale = 1.0), + n = randint(low = 50, high = 51), + p = uniform(loc = 0.5, scale = 0.0), + fix_graph = true, + ) + data = gen.generate(1) + data_filenames = write_pkl_gz(data, "$BASEDIR/../fixtures", prefix = "stab-n50-") + collector = BasicCollector() + collector.collect( + data_filenames, + data -> build_stab_model_jump(data, optimizer = SCIP.Optimizer), + progress = true, + verbose = true, + ) +end + +function test_cuts() + data_filenames = ["$BASEDIR/../fixtures/stab-n50-00000.pkl.gz"] + clf = pyimport("sklearn.dummy").DummyClassifier() + extractor = H5FieldsExtractor(instance_fields = ["static_var_obj_coeffs"]) + comp = MemorizingCutsComponent(clf = clf, extractor = extractor) + solver = LearningSolver(components = [comp]) + solver.fit(data_filenames) + model, stats = solver.optimize( + data_filenames[1], + data -> build_stab_model_jump(data, optimizer = SCIP.Optimizer), + ) + @test stats["Cuts: AOT"] > 0 +end diff --git a/test/src/components/test_lazy.jl b/test/src/components/test_lazy.jl new file mode 100644 index 0000000..0cd0481 --- /dev/null +++ b/test/src/components/test_lazy.jl @@ -0,0 +1,44 @@ +# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization +# Copyright (C) 2020-2024, UChicago Argonne, LLC. All rights reserved. 
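The generators in `gen_stab` above and `gen_tsp` below are parameterized by frozen scipy distributions. For reviewers unfamiliar with the scipy convention: `uniform(loc = a, scale = s)` is uniform on `[a, a + s)`, and `randint(low = l, high = h)` draws from `{l, ..., h - 1}`, so `randint(low = 50, high = 51)` is the degenerate "always 50". A quick illustration:

```julia
using PyCall

uniform = pyimport("scipy.stats").uniform
randint = pyimport("scipy.stats").randint

w = uniform(loc = 10.0, scale = 1.0)  # weights ~ U[10, 11), as in gen_stab
n = randint(low = 50, high = 51)      # always 50: fixed instance size
w.rvs(3), n.rvs()
```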
+# Released under the modified BSD license. See COPYING.md for more details. + +using GLPK + +function gen_tsp() + np = pyimport("numpy") + uniform = pyimport("scipy.stats").uniform + randint = pyimport("scipy.stats").randint + np.random.seed(42) + + gen = TravelingSalesmanGenerator( + x = uniform(loc = 0.0, scale = 1000.0), + y = uniform(loc = 0.0, scale = 1000.0), + n = randint(low = 20, high = 21), + gamma = uniform(loc = 1.0, scale = 0.25), + fix_cities = true, + round = true, + ) + data = gen.generate(1) + data_filenames = write_pkl_gz(data, "$BASEDIR/../fixtures", prefix = "tsp-n20-") + collector = BasicCollector() + collector.collect( + data_filenames, + data -> build_tsp_model_jump(data, optimizer = GLPK.Optimizer), + progress = true, + verbose = true, + ) +end + +function test_lazy() + data_filenames = ["$BASEDIR/../fixtures/tsp-n20-00000.pkl.gz"] + clf = pyimport("sklearn.dummy").DummyClassifier() + extractor = H5FieldsExtractor(instance_fields = ["static_var_obj_coeffs"]) + comp = MemorizingLazyComponent(clf = clf, extractor = extractor) + solver = LearningSolver(components = [comp]) + solver.fit(data_filenames) + model, stats = solver.optimize( + data_filenames[1], + data -> build_tsp_model_jump(data, optimizer = GLPK.Optimizer), + ) + @test stats["Lazy Constraints: AOT"] > 0 +end diff --git a/test/src/fixtures.jl b/test/src/fixtures.jl index 56f4dec..b7cc084 100644 --- a/test/src/fixtures.jl +++ b/test/src/fixtures.jl @@ -14,5 +14,5 @@ function fixture_setcover_data() end function fixture_setcover_model() - return build_setcover_model(fixture_setcover_data()) + return build_setcover_model_jump(fixture_setcover_data()) end diff --git a/test/src/problems/test_setcover.jl b/test/src/problems/test_setcover.jl index 5f85418..9b92f19 100644 --- a/test/src/problems/test_setcover.jl +++ b/test/src/problems/test_setcover.jl @@ -51,7 +51,7 @@ function test_problems_setcover_model() ) h5 = H5File(tempname(), "w") - model = build_setcover_model(data) + model = build_setcover_model_jump(data) model.extract_after_load(h5) model.optimize() model.extract_after_mip(h5) diff --git a/test/src/problems/test_stab.jl b/test/src/problems/test_stab.jl new file mode 100644 index 0000000..1d5dfb8 --- /dev/null +++ b/test/src/problems/test_stab.jl @@ -0,0 +1,22 @@ +# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization +# Copyright (C) 2020-2024, UChicago Argonne, LLC. All rights reserved. +# Released under the modified BSD license. See COPYING.md for more details. + +using PyCall +using SCIP + +function test_problems_stab() + nx = pyimport("networkx") + data = MaxWeightStableSetData( + graph = nx.gnp_random_graph(25, 0.5, seed = 42), + weights = repeat([1.0], 25), + ) + h5 = H5File(tempname(), "w") + model = build_stab_model_jump(data, optimizer = SCIP.Optimizer) + model.extract_after_load(h5) + model.optimize() + model.extract_after_mip(h5) + @test h5.get_scalar("mip_obj_value") == -6 + @test h5.get_scalar("mip_cuts")[1:20] == "[[0,8,11,13],[0,8,13" + h5.close() +end diff --git a/test/src/problems/test_tsp.jl b/test/src/problems/test_tsp.jl new file mode 100644 index 0000000..56bc32f --- /dev/null +++ b/test/src/problems/test_tsp.jl @@ -0,0 +1,22 @@ +# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization +# Copyright (C) 2020-2024, UChicago Argonne, LLC. All rights reserved. +# Released under the modified BSD license. See COPYING.md for more details. 
+ +using GLPK +using JuMP + +function test_problems_tsp() + pdist = pyimport("scipy.spatial.distance").pdist + squareform = pyimport("scipy.spatial.distance").squareform + + data = TravelingSalesmanData( + n_cities = 6, + distances = squareform( + pdist([[0.0, 0.0], [1.0, 0.0], [2.0, 0.0], [3.0, 0.0], [0.0, 1.0], [3.0, 1.0]]), + ), + ) + model = build_tsp_model_jump(data, optimizer = GLPK.Optimizer) + model.optimize() + @test objective_value(model.inner) == 8.0 + return +end diff --git a/test/src/test_io.jl b/test/src/test_io.jl index 2dcd46d..88745f9 100644 --- a/test/src/test_io.jl +++ b/test/src/test_io.jl @@ -4,6 +4,7 @@ using MIPLearn using JLD2 +using SparseArrays struct _TestStruct n::Int @@ -35,6 +36,8 @@ function test_h5() _test_roundtrip_array(h5, [1, 2, 3]) _test_roundtrip_array(h5, [1.0, 2.0, 3.0]) _test_roundtrip_str_array(h5, ["A", "BB", "CCC"]) + _test_roundtrip_sparse(h5, sparse([1; 2; 3], [1; 2; 3], [1; 2; 3])) + # _test_roundtrip_sparse(h5, sparse([1; 2; 3], [1; 2; 3], [1; 2; 3], 4, 4)) @test h5.get_array("unknown-key") === nothing h5.close() end @@ -46,7 +49,7 @@ function test_jld2() _TestStruct(2, [1.0, 2.0, 3.0]), _TestStruct(3, [3.0, 3.0, 3.0]), ] - filenames = write_jld2(data, dirname, prefix="obj") + filenames = write_jld2(data, dirname, prefix = "obj") @test all( filenames .== ["$dirname/obj00001.jld2", "$dirname/obj00002.jld2", "$dirname/obj00003.jld2"], @@ -79,3 +82,11 @@ function _test_roundtrip_str_array(h5, original) @test recovered !== nothing @test all(original .== recovered) end + +function _test_roundtrip_sparse(h5, original) + h5.put_sparse("key", original) + recovered = MIPLearn.convert(SparseMatrixCSC, h5.get_sparse("key")) + @test recovered !== nothing + @test size(original) == size(recovered) + @test all(original .== recovered) +end diff --git a/test/src/test_usage.jl b/test/src/test_usage.jl index f02ff65..9a8cb7b 100644 --- a/test/src/test_usage.jl +++ b/test/src/test_usage.jl @@ -29,13 +29,13 @@ function test_usage() @debug "Collecting training data..." bc = BasicCollector() - bc.collect(data_filenames, build_setcover_model) + bc.collect(data_filenames, build_setcover_model_jump) @debug "Training models..." solver.fit(data_filenames) @debug "Solving model..." - solver.optimize(data_filenames[1], build_setcover_model) + solver.optimize(data_filenames[1], build_setcover_model_jump) @debug "Checking solution..." h5 = H5File(h5_filenames[1])
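A quick by-hand check of the `objective_value(model.inner) == 8.0` assertion in `test_problems_tsp` above, using the six city coordinates from the fixture (the tour order is one optimal choice, found by inspection):

```julia
pts = [(0.0, 0.0), (1.0, 0.0), (2.0, 0.0), (3.0, 0.0), (0.0, 1.0), (3.0, 1.0)]
tour = [1, 2, 3, 4, 6, 5, 1]  # along the bottom row, up at x = 3, back on top
dist(a, b) = hypot(a[1] - b[1], a[2] - b[2])
total = sum(dist(pts[tour[i]], pts[tour[i+1]]) for i in 1:6)
# total == 1 + 1 + 1 + 1 + 3 + 1 == 8.0
```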