Mirror of https://github.com/ANL-CEEESA/MIPLearn.git (synced 2025-12-06 01:18:52 -06:00)
Implement MemorizingCutsComponent; STAB: switch to edge formulation
  0  tests/components/cuts/__init__.py   Normal file
 80  tests/components/cuts/test_mem.py   Normal file
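As context for the diff below, here is a minimal end-to-end usage sketch of the new cuts component, assembled from the test_usage_stab test added in this commit. The H5FieldsExtractor arguments are an assumption (the tests rely on a default_extractor fixture instead), and the data files refer to the stab-n50 fixtures added by this commit; any .pkl.gz/.h5 pairs previously produced by BasicCollector for the STAB problem would work.

from sklearn.neighbors import KNeighborsClassifier

from miplearn.components.cuts.mem import MemorizingCutsComponent
from miplearn.extractors.fields import H5FieldsExtractor
from miplearn.problems.stab import build_stab_model
from miplearn.solvers.learning import LearningSolver

# Training instances collected beforehand with BasicCollector, so that each
# .pkl.gz file has a companion .h5 file (see tests/fixtures/gen_stab.py below).
data_filenames = [
    "tests/fixtures/stab-n50-00000.pkl.gz",
    "tests/fixtures/stab-n50-00001.pkl.gz",
    "tests/fixtures/stab-n50-00002.pkl.gz",
]

# Assumed extractor configuration; the test suite uses its default_extractor fixture.
extractor = H5FieldsExtractor(instance_fields=["static_var_obj_coeffs"])

# Memorize which cuts each training instance required, then re-add the
# predicted cuts ahead-of-time ("AOT") when solving a similar instance.
comp = MemorizingCutsComponent(clf=KNeighborsClassifier(n_neighbors=1), extractor=extractor)
solver = LearningSolver(components=[comp])
solver.fit(data_filenames)
stats = solver.optimize(data_filenames[0], build_stab_model)
assert stats["Cuts: AOT"] > 0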
@@ -0,0 +1,80 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2023, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

from typing import Any, List, Hashable, Dict
from unittest.mock import Mock

import gurobipy as gp
import networkx as nx
from gurobipy import GRB, quicksum
from sklearn.dummy import DummyClassifier
from sklearn.neighbors import KNeighborsClassifier

from miplearn.components.cuts.mem import MemorizingCutsComponent
from miplearn.extractors.abstract import FeaturesExtractor
from miplearn.problems.stab import build_stab_model
from miplearn.solvers.gurobi import GurobiModel
from miplearn.solvers.learning import LearningSolver
import numpy as np


# def test_usage() -> None:
#     model = _build_cut_model()
#     solver = LearningSolver(components=[])
#     solver.optimize(model)
#     assert model.cuts_ is not None
#     assert len(model.cuts_) > 0
#     assert False


def test_mem_component(
    stab_h5: List[str],
    default_extractor: FeaturesExtractor,
) -> None:
    clf = Mock(wraps=DummyClassifier())
    comp = MemorizingCutsComponent(clf=clf, extractor=default_extractor)
    comp.fit(stab_h5)

    # Should call fit method with correct arguments
    clf.fit.assert_called()
    x, y = clf.fit.call_args.args
    assert x.shape == (3, 50)
    assert y.shape == (3, 388)
    y = y.tolist()
    assert y[0][:20] == [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
    assert y[1][:20] == [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1]
    assert y[2][:20] == [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1]

    # Should store violations
    assert comp.constrs_ is not None
    assert comp.n_features_ == 50
    assert comp.n_targets_ == 388
    assert len(comp.constrs_) == 388

    # Call before-mip
    stats: Dict[str, Any] = {}
    model = Mock()
    comp.before_mip(stab_h5[0], model, stats)

    # Should call predict with correct args
    clf.predict.assert_called()
    (x_test,) = clf.predict.call_args.args
    assert x_test.shape == (1, 50)

    # Should set cuts_aot_
    assert model.cuts_aot_ is not None
    assert len(model.cuts_aot_) == 243


def test_usage_stab(
    stab_h5: List[str],
    default_extractor: FeaturesExtractor,
) -> None:
    data_filenames = [f.replace(".h5", ".pkl.gz") for f in stab_h5]
    clf = KNeighborsClassifier(n_neighbors=1)
    comp = MemorizingCutsComponent(clf=clf, extractor=default_extractor)
    solver = LearningSolver(components=[comp])
    solver.fit(data_filenames)
    stats = solver.optimize(data_filenames[0], build_stab_model)
    assert stats["Cuts: AOT"] > 0
@@ -8,7 +8,7 @@ from unittest.mock import Mock
from sklearn.dummy import DummyClassifier
from sklearn.neighbors import KNeighborsClassifier

-from miplearn.components.lazy.mem import MemorizingLazyConstrComponent
+from miplearn.components.lazy.mem import MemorizingLazyComponent
from miplearn.extractors.abstract import FeaturesExtractor
from miplearn.problems.tsp import build_tsp_model
from miplearn.solvers.learning import LearningSolver
@@ -19,7 +19,7 @@ def test_mem_component(
    default_extractor: FeaturesExtractor,
) -> None:
    clf = Mock(wraps=DummyClassifier())
-    comp = MemorizingLazyConstrComponent(clf=clf, extractor=default_extractor)
+    comp = MemorizingLazyComponent(clf=clf, extractor=default_extractor)
    comp.fit(tsp_h5)

    # Should call fit method with correct arguments
@@ -56,7 +56,7 @@ def test_usage_tsp(
    # Should not crash
    data_filenames = [f.replace(".h5", ".pkl.gz") for f in tsp_h5]
    clf = KNeighborsClassifier(n_neighbors=1)
-    comp = MemorizingLazyConstrComponent(clf=clf, extractor=default_extractor)
+    comp = MemorizingLazyComponent(clf=clf, extractor=default_extractor)
    solver = LearningSolver(components=[comp])
    solver.fit(data_filenames)
    solver.optimize(data_filenames[0], build_tsp_model)
@@ -1,10 +1,13 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

+import os
+import shutil
+import tempfile
from glob import glob
-from os.path import dirname
-from typing import List
+from os.path import dirname, basename, isfile
+from tempfile import NamedTemporaryFile
+from typing import List, Any

import pytest
@@ -12,14 +15,45 @@ from miplearn.extractors.abstract import FeaturesExtractor
from miplearn.extractors.fields import H5FieldsExtractor


@pytest.fixture()
-def multiknapsack_h5() -> List[str]:
-    return sorted(glob(f"{dirname(__file__)}/fixtures/multiknapsack-n100*.h5"))
+def _h5_fixture(pattern: str, request: Any) -> List[str]:
+    """
+    Create a temporary copy of the provided .h5 files, along with the companion
+    .pkl.gz files, and return the path to the copy. Also register a finalizer,
+    so that the temporary folder is removed after the tests.
+    """
+    filenames = glob(f"{dirname(__file__)}/fixtures/{pattern}")
+    print(filenames)
+    tmpdir = tempfile.mkdtemp()

+    def cleanup() -> None:
+        shutil.rmtree(tmpdir)

+    request.addfinalizer(cleanup)

+    print(tmpdir)
+    for f in filenames:
+        fbase, _ = os.path.splitext(f)
+        for ext in [".h5", ".pkl.gz"]:
+            dest = os.path.join(tmpdir, f"{basename(fbase)}{ext}")
+            print(dest)
+            shutil.copy(f"{fbase}{ext}", dest)
+            assert isfile(dest)
+    return sorted(glob(f"{tmpdir}/*.h5"))


@pytest.fixture()
-def tsp_h5() -> List[str]:
-    return sorted(glob(f"{dirname(__file__)}/fixtures/tsp-n20*.h5"))
+def multiknapsack_h5(request: Any) -> List[str]:
+    return _h5_fixture("multiknapsack*.h5", request)


+@pytest.fixture()
+def tsp_h5(request: Any) -> List[str]:
+    return _h5_fixture("tsp*.h5", request)


+@pytest.fixture()
+def stab_h5(request: Any) -> List[str]:
+    return _h5_fixture("stab*.h5", request)


@pytest.fixture()
 23  tests/fixtures/gen_stab.py   vendored   Normal file
@@ -0,0 +1,23 @@
from os.path import dirname

import numpy as np
from scipy.stats import uniform, randint

from miplearn.collectors.basic import BasicCollector
from miplearn.io import write_pkl_gz
from miplearn.problems.stab import (
    MaxWeightStableSetGenerator,
    build_stab_model,
)

np.random.seed(42)
gen = MaxWeightStableSetGenerator(
    w=uniform(10.0, scale=1.0),
    n=randint(low=50, high=51),
    p=uniform(loc=0.5, scale=0.0),
    fix_graph=True,
)
data = gen.generate(3)
data_filenames = write_pkl_gz(data, dirname(__file__), prefix="stab-n50-")
collector = BasicCollector()
collector.collect(data_filenames, build_stab_model)
BIN  tests/fixtures/stab-n50-00000.h5       vendored   Normal file   Binary file not shown.
BIN  tests/fixtures/stab-n50-00000.mps.gz   vendored   Normal file   Binary file not shown.
BIN  tests/fixtures/stab-n50-00000.pkl.gz   vendored   Normal file   Binary file not shown.
BIN  tests/fixtures/stab-n50-00001.h5       vendored   Normal file   Binary file not shown.
BIN  tests/fixtures/stab-n50-00001.mps.gz   vendored   Normal file   Binary file not shown.
BIN  tests/fixtures/stab-n50-00001.pkl.gz   vendored   Normal file   Binary file not shown.
BIN  tests/fixtures/stab-n50-00002.h5       vendored   Normal file   Binary file not shown.
BIN  tests/fixtures/stab-n50-00002.mps.gz   vendored   Normal file   Binary file not shown.
BIN  tests/fixtures/stab-n50-00002.pkl.gz   vendored   Normal file   Binary file not shown.
@@ -9,8 +9,7 @@ import numpy as np
from miplearn.h5 import H5File
from miplearn.problems.stab import (
    MaxWeightStableSetData,
-    build_stab_model_pyomo,
-    build_stab_model_gurobipy,
+    build_stab_model,
)
from miplearn.solvers.abstract import AbstractModel
@@ -21,8 +20,7 @@ def test_stab() -> None:
        weights=np.array([1.0, 1.0, 1.0, 1.0, 1.0]),
    )
    for model in [
-        build_stab_model_pyomo(data),
-        build_stab_model_gurobipy(data),
+        build_stab_model(data),
    ]:
        assert isinstance(model, AbstractModel)
        with NamedTemporaryFile() as tempfile:
@@ -39,6 +39,6 @@ def _build_model() -> PyomoModel:
def test_pyomo_callback() -> None:
    model = _build_model()
    model.optimize()
-    assert model.lazy_constrs_ is not None
-    assert len(model.lazy_constrs_) > 0
+    assert model.lazy_ is not None
+    assert len(model.lazy_) > 0
    assert model.inner.x.value == 0.0