Make lazy constraints compatible with JuMP

feature/replay^2
Alinson S. Xavier 2 years ago
parent 4d5b7e971c
commit 190c288203

@ -13,6 +13,7 @@ include("extractors.jl")
include("io.jl")
include("problems/setcover.jl")
include("problems/stab.jl")
include("problems/tsp.jl")
include("solvers/jump.jl")
include("solvers/learning.jl")
@ -23,6 +24,7 @@ function __init__()
__init_io__()
__init_problems_setcover__()
__init_problems_stab__()
__init_problems_tsp__()
__init_solvers_jump__()
__init_solvers_learning__()
end

@ -0,0 +1,71 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2024, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using JuMP
# Placeholders for the Python TSP classes; populated lazily by
# __init_problems_tsp__ once the Python runtime is available.
global TravelingSalesmanData = PyNULL()
global TravelingSalesmanGenerator = PyNULL()

"""
    __init_problems_tsp__()

Fill the module-level `TravelingSalesmanData` / `TravelingSalesmanGenerator`
placeholders with the corresponding classes from the Python package
`miplearn.problems.tsp`.
"""
function __init_problems_tsp__()
    tsp_mod = pyimport("miplearn.problems.tsp")
    copy!(TravelingSalesmanData, tsp_mod.TravelingSalesmanData)
    copy!(TravelingSalesmanGenerator, tsp_mod.TravelingSalesmanGenerator)
end
"""
    build_tsp_model_jump(data; optimizer=HiGHS.Optimizer)

Build a JuMP formulation of the traveling salesman problem described by
`data`. `data` may be a `TravelingSalesmanData` Python object or the path
(`String`) to a `.pkl.gz` file containing one.

Returns a `JumpModel` wrapping the JuMP model, with `lazy_separate` /
`lazy_enforce` callbacks that generate subtour elimination constraints
during the solve.
"""
function build_tsp_model_jump(data::Any; optimizer=HiGHS.Optimizer)
    nx = pyimport("networkx")
    if data isa String
        data = read_pkl_gz(data)
    end
    model = Model(optimizer)
    # One binary variable per undirected edge (i < j).
    edges = [(i, j) for i in 1:data.n_cities for j in (i+1):data.n_cities]
    x = @variable(model, x[edges], Bin)
    @objective(model, Min, sum(
        x[(i, j)] * data.distances[i, j] for (i, j) in edges
    ))
    # Eq: Must choose two edges adjacent to each node
    @constraint(
        model,
        eq_degree[i in 1:data.n_cities],
        sum(x[(min(i, j), max(i, j))] for j in 1:data.n_cities if i != j) == 2
    )
    # Detect subtours: build the graph of selected edges and inspect its
    # connected components. Each component smaller than the full city set
    # yields one violation, represented by its cut-set (edges with exactly
    # one endpoint inside the component).
    function lazy_separate(cb_data)
        x_val = callback_value.(Ref(cb_data), x)
        violations = []
        selected_edges = [e for e in edges if x_val[e] > 0.5]
        graph = nx.Graph()
        graph.add_edges_from(selected_edges)
        for component in nx.connected_components(graph)
            if length(component) < data.n_cities
                # BUGFIX: the membership operators were missing here
                # ("e[1] component" is not valid Julia). An edge belongs to
                # the cut-set iff exactly one of its endpoints is inside
                # `component`.
                cut_edges = [
                    [e[1], e[2]]
                    for e in edges
                    if (e[1] in component) != (e[2] in component)
                ]
                push!(violations, cut_edges)
            end
        end
        return violations
    end
    # Enforce: at least two cut-set edges must be used, which forbids the
    # corresponding subtour.
    function lazy_enforce(violations)
        @info "Adding $(length(violations)) subtour elimination eqs..."
        for violation in violations
            constr = @build_constraint(sum(x[(e[1], e[2])] for e in violation) >= 2)
            submit(model, constr)
        end
    end
    return JumpModel(
        model,
        lazy_enforce=lazy_enforce,
        lazy_separate=lazy_separate,
    )
end
export TravelingSalesmanData, TravelingSalesmanGenerator, build_tsp_model_jump

@ -12,9 +12,12 @@ Base.@kwdef mutable struct _JumpModelExtData
aot_cuts = nothing
cb_data = nothing
cuts = []
lazy = []
where::Symbol = :WHERE_DEFAULT
cuts_enforce::Union{Function,Nothing} = nothing
cuts_separate::Union{Function,Nothing} = nothing
lazy_enforce::Union{Function,Nothing} = nothing
lazy_separate::Union{Function,Nothing} = nothing
end
function JuMP.copy_extension_data(
@ -58,8 +61,10 @@ function submit(model::JuMP.Model, constr)
ext = model.ext[:miplearn]
if ext.where == :WHERE_CUTS
MOI.submit(model, MOI.UserCut(ext.cb_data), constr)
elseif ext.where == :WHERE_LAZY
MOI.submit(model, MOI.LazyConstraint(ext.cb_data), constr)
else
error("not implemented")
add_constraint(model, constr)
end
end
@ -281,9 +286,10 @@ function _extract_after_mip(model::JuMP.Model, h5)
slacks = abs.(lhs * x - rhs)
h5.put_array("mip_constr_slacks", slacks)
# Cuts
# Cuts and lazy constraints
ext = model.ext[:miplearn]
h5.put_scalar("mip_cuts", JSON.json(ext.cuts))
h5.put_scalar("mip_lazy", JSON.json(ext.lazy))
end
function _fix_variables(model::JuMP.Model, var_names, var_values, stats)
@ -318,6 +324,23 @@ function _optimize(model::JuMP.Model)
set_attribute(model, MOI.UserCutCallback(), cut_callback)
end
# Set up lazy constraint callbacks
ext.lazy = []
function lazy_callback(cb_data)
ext.cb_data = cb_data
ext.where = :WHERE_LAZY
violations = ext.lazy_separate(cb_data)
for v in violations
push!(ext.lazy, v)
end
if !isempty(violations)
ext.lazy_enforce(violations)
end
end
if ext.lazy_separate !== nothing
set_attribute(model, MOI.LazyConstraintCallback(), lazy_callback)
end
# Optimize
ext.where = :WHERE_DEFAULT
optimize!(model)
@ -363,12 +386,15 @@ function __init_solvers_jump__()
inner;
cuts_enforce::Union{Function,Nothing}=nothing,
cuts_separate::Union{Function,Nothing}=nothing,
lazy_enforce::Union{Function,Nothing}=nothing,
lazy_separate::Union{Function,Nothing}=nothing,
)
AbstractModel.__init__(self)
self.inner = inner
self.inner.ext[:miplearn] = _JumpModelExtData(
cuts_enforce=cuts_enforce,
cuts_separate=cuts_separate,
lazy_enforce=lazy_enforce,
lazy_separate=lazy_separate,
)
end
@ -409,6 +435,10 @@ function __init_solvers_jump__()
function set_cuts(self, cuts)
self.inner.ext[:miplearn].aot_cuts = cuts
end
function lazy_enforce(self, model, violations)
self.inner.ext[:miplearn].lazy_enforce(violations)
end
end
copy!(JumpModel, Class)
end

@ -5,6 +5,7 @@ version = "0.1.0"
[deps]
Clp = "e2554f3b-3117-50c0-817c-e040a3ddf72d"
GLPK = "60bf3e95-4087-53dc-ae20-288a0d20c6a6"
Glob = "c27321d9-0574-5035-807b-f59d2c89b15c"
HDF5 = "f67ccb44-e63f-5c2f-98bd-6dc0ccc4ba2f"
HiGHS = "87dc4568-4c63-4d18-b0c0-bb2238e4078b"

Binary file not shown.

Binary file not shown.

@ -17,9 +17,11 @@ include("fixtures.jl")
include("BB/test_bb.jl")
include("components/test_cuts.jl")
include("components/test_lazy.jl")
include("Cuts/BlackBox/test_cplex.jl")
include("problems/test_setcover.jl")
include("problems/test_stab.jl")
include("problems/test_tsp.jl")
include("solvers/test_jump.jl")
include("test_io.jl")
include("test_usage.jl")
@ -32,6 +34,7 @@ function runtests()
test_io()
test_problems_setcover()
test_problems_stab()
test_problems_tsp()
test_solvers_jump()
test_usage()
test_cuts()

@ -27,7 +27,7 @@ function gen_stab()
end
function test_cuts()
data_filenames = ["$BASEDIR/../fixtures/stab-n50-0000$i.pkl.gz" for i in 0:0]
data_filenames = ["$BASEDIR/../fixtures/stab-n50-00000.pkl.gz"]
clf = pyimport("sklearn.dummy").DummyClassifier()
extractor = H5FieldsExtractor(
instance_fields=["static_var_obj_coeffs"],

@ -0,0 +1,46 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2024, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using GLPK
# Generate one random 20-city TSP instance (fixed seed), write it to the
# test fixtures directory, and run the basic training-data collector on it
# using GLPK as the solver.
function gen_tsp()
    np = pyimport("numpy")
    stats = pyimport("scipy.stats")
    np.random.seed(42)
    generator = TravelingSalesmanGenerator(
        x=stats.uniform(loc=0.0, scale=1000.0),
        y=stats.uniform(loc=0.0, scale=1000.0),
        n=stats.randint(low=20, high=21),
        gamma=stats.uniform(loc=1.0, scale=0.25),
        fix_cities=true,
        round=true,
    )
    instances = generator.generate(1)
    filenames = write_pkl_gz(instances, "$BASEDIR/../fixtures", prefix="tsp-n20-")
    collector = BasicCollector(write_mps=false)
    collector.collect(
        filenames,
        data -> build_tsp_model_jump(data, optimizer=GLPK.Optimizer),
        progress=true,
        verbose=true,
    )
end
# Check that the memorizing lazy-constraint component re-applies learned
# lazy constraints ahead-of-time when re-solving a known TSP instance.
function test_lazy()
    filenames = ["$BASEDIR/../fixtures/tsp-n20-00000.pkl.gz"]
    dummy_clf = pyimport("sklearn.dummy").DummyClassifier()
    fields = H5FieldsExtractor(
        instance_fields=["static_var_obj_coeffs"],
    )
    component = MemorizingLazyComponent(clf=dummy_clf, extractor=fields)
    solver = LearningSolver(components=[component])
    solver.fit(filenames)
    stats = solver.optimize(
        filenames[1],
        data -> build_tsp_model_jump(data, optimizer=GLPK.Optimizer),
    )
    @test stats["Lazy Constraints: AOT"] > 0
end

@ -0,0 +1,27 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2024, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using GLPK
using JuMP
# Solve a small hand-built 6-city instance with GLPK and verify that the
# optimal tour has length 8.
function test_problems_tsp()
    spatial = pyimport("scipy.spatial.distance")
    coords = [
        [0.0, 0.0],
        [1.0, 0.0],
        [2.0, 0.0],
        [3.0, 0.0],
        [0.0, 1.0],
        [3.0, 1.0],
    ]
    data = TravelingSalesmanData(
        n_cities=6,
        distances=spatial.squareform(spatial.pdist(coords)),
    )
    model = build_tsp_model_jump(data, optimizer=GLPK.Optimizer)
    model.optimize()
    @test objective_value(model.inner) == 8.0
    return
end

@ -13,22 +13,22 @@ function test_usage()
@debug "Setting up LearningSolver..."
solver = LearningSolver(
components = [
components=[
IndependentVarsPrimalComponent(
base_clf = SingleClassFix(
base_clf=SingleClassFix(
MinProbabilityClassifier(
base_clf = LogisticRegression(),
thresholds = [0.95, 0.95],
base_clf=LogisticRegression(),
thresholds=[0.95, 0.95],
),
),
extractor = AlvLouWeh2017Extractor(),
action = SetWarmStart(),
extractor=AlvLouWeh2017Extractor(),
action=SetWarmStart(),
),
],
)
@debug "Collecting training data..."
bc = BasicCollector()
bc = BasicCollector(write_mps=false)
bc.collect(data_filenames, build_setcover_model_jump)
@debug "Training models..."

Loading…
Cancel
Save