Make lazy constraints compatible with JuMP

This commit is contained in:
2024-02-01 16:56:45 -06:00
parent 4d5b7e971c
commit 190c288203
11 changed files with 191 additions and 11 deletions

View File

@@ -5,6 +5,7 @@ version = "0.1.0"
[deps]
Clp = "e2554f3b-3117-50c0-817c-e040a3ddf72d"
GLPK = "60bf3e95-4087-53dc-ae20-288a0d20c6a6"
Glob = "c27321d9-0574-5035-807b-f59d2c89b15c"
HDF5 = "f67ccb44-e63f-5c2f-98bd-6dc0ccc4ba2f"
HiGHS = "87dc4568-4c63-4d18-b0c0-bb2238e4078b"

BIN
test/fixtures/tsp-n20-00000.h5 vendored Normal file

Binary file not shown.

BIN
test/fixtures/tsp-n20-00000.pkl.gz vendored Normal file

Binary file not shown.

View File

@@ -17,9 +17,11 @@ include("fixtures.jl")
include("BB/test_bb.jl")
include("components/test_cuts.jl")
include("components/test_lazy.jl")
include("Cuts/BlackBox/test_cplex.jl")
include("problems/test_setcover.jl")
include("problems/test_stab.jl")
include("problems/test_tsp.jl")
include("solvers/test_jump.jl")
include("test_io.jl")
include("test_usage.jl")
@@ -32,6 +34,7 @@ function runtests()
test_io()
test_problems_setcover()
test_problems_stab()
test_problems_tsp()
test_solvers_jump()
test_usage()
test_cuts()

View File

@@ -27,7 +27,7 @@ function gen_stab()
end
function test_cuts()
data_filenames = ["$BASEDIR/../fixtures/stab-n50-0000$i.pkl.gz" for i in 0:0]
data_filenames = ["$BASEDIR/../fixtures/stab-n50-00000.pkl.gz"]
clf = pyimport("sklearn.dummy").DummyClassifier()
extractor = H5FieldsExtractor(
instance_fields=["static_var_obj_coeffs"],

View File

@@ -0,0 +1,46 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2024, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using GLPK
function gen_tsp()
    # Generate a single 20-city TSP fixture instance (seeded, hence
    # reproducible) and run the basic training-data collector over it.
    numpy = pyimport("numpy")
    uniform = pyimport("scipy.stats").uniform
    randint = pyimport("scipy.stats").randint
    # Seed NumPy *before* sampling so fixture generation is deterministic.
    numpy.random.seed(42)
    generator = TravelingSalesmanGenerator(
        x=uniform(loc=0.0, scale=1000.0),
        y=uniform(loc=0.0, scale=1000.0),
        n=randint(low=20, high=21),
        gamma=uniform(loc=1.0, scale=0.25),
        fix_cities=true,
        round=true,
    )
    instances = generator.generate(1)
    filenames = write_pkl_gz(instances, "$BASEDIR/../fixtures", prefix="tsp-n20-")
    # Collect training data with GLPK; MPS files are not needed for the tests.
    collector = BasicCollector(write_mps=false)
    collector.collect(
        filenames,
        data -> build_tsp_model_jump(data, optimizer=GLPK.Optimizer),
        progress=true,
        verbose=true,
    )
end
function test_lazy()
    # Train a memorizing lazy-constraint component on the TSP fixture and
    # check that at least one lazy constraint is enforced ahead-of-time (AOT).
    filenames = ["$BASEDIR/../fixtures/tsp-n20-00000.pkl.gz"]
    dummy_clf = pyimport("sklearn.dummy").DummyClassifier()
    feature_extractor = H5FieldsExtractor(
        instance_fields=["static_var_obj_coeffs"],
    )
    component = MemorizingLazyComponent(clf=dummy_clf, extractor=feature_extractor)
    learner = LearningSolver(components=[component])
    learner.fit(filenames)
    stats = learner.optimize(
        filenames[1],
        data -> build_tsp_model_jump(data, optimizer=GLPK.Optimizer),
    )
    @test stats["Lazy Constraints: AOT"] > 0
end

View File

@@ -0,0 +1,27 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2024, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using GLPK
using JuMP
function test_problems_tsp()
    # Solve a hand-crafted 6-city instance whose optimal tour length is 8.0.
    scipy_distance = pyimport("scipy.spatial.distance")
    # Cities laid out on a 3x1 rectangle; pairwise Euclidean distances are
    # computed by SciPy and expanded into a square matrix.
    city_coords = [
        [0.0, 0.0],
        [1.0, 0.0],
        [2.0, 0.0],
        [3.0, 0.0],
        [0.0, 1.0],
        [3.0, 1.0],
    ]
    data = TravelingSalesmanData(
        n_cities=6,
        distances=scipy_distance.squareform(scipy_distance.pdist(city_coords)),
    )
    model = build_tsp_model_jump(data, optimizer=GLPK.Optimizer)
    model.optimize()
    @test objective_value(model.inner) == 8.0
    return
end

View File

@@ -13,22 +13,22 @@ function test_usage()
@debug "Setting up LearningSolver..."
solver = LearningSolver(
components = [
components=[
IndependentVarsPrimalComponent(
base_clf = SingleClassFix(
base_clf=SingleClassFix(
MinProbabilityClassifier(
base_clf = LogisticRegression(),
thresholds = [0.95, 0.95],
base_clf=LogisticRegression(),
thresholds=[0.95, 0.95],
),
),
extractor = AlvLouWeh2017Extractor(),
action = SetWarmStart(),
extractor=AlvLouWeh2017Extractor(),
action=SetWarmStart(),
),
],
)
@debug "Collecting training data..."
bc = BasicCollector()
bc = BasicCollector(write_mps=false)
bc.collect(data_filenames, build_setcover_model_jump)
@debug "Training models..."