Move tests to separate folder

2021-01-22 07:42:28 -06:00
parent e2048fc659
commit f90d78f802
25 changed files with 6 additions and 6 deletions

@@ -1,123 +0,0 @@
from unittest.mock import Mock
from miplearn.classifiers import Classifier
from miplearn.components.steps.convert_tight import ConvertTightIneqsIntoEqsStep
from miplearn.components.steps.relax_integrality import RelaxIntegralityStep
from miplearn.instance import Instance
from miplearn.problems.knapsack import GurobiKnapsackInstance
from miplearn.solvers.gurobi import GurobiSolver
from miplearn.solvers.learning import LearningSolver

def test_convert_tight_usage():
instance = GurobiKnapsackInstance(
weights=[3.0, 5.0, 10.0],
prices=[1.0, 1.0, 1.0],
capacity=16.0,
)
solver = LearningSolver(
solver=GurobiSolver,
components=[
RelaxIntegralityStep(),
ConvertTightIneqsIntoEqsStep(),
],
)
# Solve original problem
stats = solver.solve(instance)
original_upper_bound = stats["Upper bound"]
# Should collect training data
assert instance.training_data[0]["slacks"]["eq_capacity"] == 0.0
# Fit and resolve
solver.fit([instance])
stats = solver.solve(instance)
# Objective value should be the same
assert stats["Upper bound"] == original_upper_bound
assert stats["ConvertTight: Inf iterations"] == 0
assert stats["ConvertTight: Subopt iterations"] == 0

class SampleInstance(Instance):
def to_model(self):
import gurobipy as grb
m = grb.Model("model")
x1 = m.addVar(name="x1")
x2 = m.addVar(name="x2")
m.setObjective(x1 + 2 * x2, grb.GRB.MAXIMIZE)
m.addConstr(x1 <= 2, name="c1")
m.addConstr(x2 <= 2, name="c2")
m.addConstr(x1 + x2 <= 3, name="c3")
return m

def test_convert_tight_infeasibility():
comp = ConvertTightIneqsIntoEqsStep()
comp.classifiers = {
"c1": Mock(spec=Classifier),
"c2": Mock(spec=Classifier),
"c3": Mock(spec=Classifier),
}
comp.classifiers["c1"].predict_proba = Mock(return_value=[[0, 1]])
comp.classifiers["c2"].predict_proba = Mock(return_value=[[0, 1]])
comp.classifiers["c3"].predict_proba = Mock(return_value=[[1, 0]])
solver = LearningSolver(
solver=GurobiSolver,
components=[comp],
solve_lp_first=False,
)
instance = SampleInstance()
stats = solver.solve(instance)
assert stats["Upper bound"] == 5.0
assert stats["ConvertTight: Inf iterations"] == 1
assert stats["ConvertTight: Subopt iterations"] == 0

def test_convert_tight_suboptimality():
comp = ConvertTightIneqsIntoEqsStep(check_optimality=True)
comp.classifiers = {
"c1": Mock(spec=Classifier),
"c2": Mock(spec=Classifier),
"c3": Mock(spec=Classifier),
}
comp.classifiers["c1"].predict_proba = Mock(return_value=[[0, 1]])
comp.classifiers["c2"].predict_proba = Mock(return_value=[[1, 0]])
comp.classifiers["c3"].predict_proba = Mock(return_value=[[0, 1]])
solver = LearningSolver(
solver=GurobiSolver,
components=[comp],
solve_lp_first=False,
)
instance = SampleInstance()
stats = solver.solve(instance)
assert stats["Upper bound"] == 5.0
assert stats["ConvertTight: Inf iterations"] == 0
assert stats["ConvertTight: Subopt iterations"] == 1

def test_convert_tight_optimal():
comp = ConvertTightIneqsIntoEqsStep()
comp.classifiers = {
"c1": Mock(spec=Classifier),
"c2": Mock(spec=Classifier),
"c3": Mock(spec=Classifier),
}
comp.classifiers["c1"].predict_proba = Mock(return_value=[[1, 0]])
comp.classifiers["c2"].predict_proba = Mock(return_value=[[0, 1]])
comp.classifiers["c3"].predict_proba = Mock(return_value=[[0, 1]])
solver = LearningSolver(
solver=GurobiSolver,
components=[comp],
solve_lp_first=False,
)
instance = SampleInstance()
stats = solver.solve(instance)
assert stats["Upper bound"] == 5.0
assert stats["ConvertTight: Inf iterations"] == 0
assert stats["ConvertTight: Subopt iterations"] == 0

@@ -1,364 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
from unittest.mock import Mock, call
import numpy as np
from miplearn.classifiers import Classifier
from miplearn.components.relaxation import DropRedundantInequalitiesStep
from miplearn.instance import Instance
from miplearn.solvers.internal import InternalSolver
from miplearn.solvers.learning import LearningSolver

def _setup():
solver = Mock(spec=LearningSolver)
internal = solver.internal_solver = Mock(spec=InternalSolver)
internal.get_constraint_ids = Mock(return_value=["c1", "c2", "c3", "c4"])
internal.get_inequality_slacks = Mock(
side_effect=lambda: {
"c1": 0.5,
"c2": 0.0,
"c3": 0.0,
"c4": 1.4,
}
)
internal.extract_constraint = Mock(side_effect=lambda cid: "<%s>" % cid)
internal.is_constraint_satisfied = Mock(return_value=False)
instance = Mock(spec=Instance)
instance.get_constraint_features = Mock(
side_effect=lambda cid: {
"c2": np.array([1.0, 0.0]),
"c3": np.array([0.5, 0.5]),
"c4": np.array([1.0]),
}[cid]
)
instance.get_constraint_category = Mock(
side_effect=lambda cid: {
"c1": None,
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
}[cid]
)
classifiers = {
"type-a": Mock(spec=Classifier),
"type-b": Mock(spec=Classifier),
}
classifiers["type-a"].predict_proba = Mock(
return_value=np.array(
[
[0.20, 0.80],
[0.05, 0.95],
]
)
)
classifiers["type-b"].predict_proba = Mock(
return_value=np.array(
[
[0.02, 0.98],
]
)
)
return solver, internal, instance, classifiers

def test_drop_redundant():
solver, internal, instance, classifiers = _setup()
component = DropRedundantInequalitiesStep()
component.classifiers = classifiers
# LearningSolver calls before_solve
component.before_solve(solver, instance, None)
# Should query list of constraints
internal.get_constraint_ids.assert_called_once()
# Should query category and features for each constraint in the model
assert instance.get_constraint_category.call_count == 4
instance.get_constraint_category.assert_has_calls(
[
call("c1"),
call("c2"),
call("c3"),
call("c4"),
]
)
# For constraints with non-null categories, should ask for features
assert instance.get_constraint_features.call_count == 3
instance.get_constraint_features.assert_has_calls(
[
call("c2"),
call("c3"),
call("c4"),
]
)
# Should ask ML to predict whether constraint should be removed
type_a_actual = component.classifiers["type-a"].predict_proba.call_args[0][0]
type_b_actual = component.classifiers["type-b"].predict_proba.call_args[0][0]
np.testing.assert_array_equal(type_a_actual, np.array([[1.0, 0.0], [0.5, 0.5]]))
np.testing.assert_array_equal(type_b_actual, np.array([[1.0]]))
# Should ask internal solver to remove constraints predicted as redundant
assert internal.extract_constraint.call_count == 2
internal.extract_constraint.assert_has_calls(
[
call("c3"),
call("c4"),
]
)
# LearningSolver calls after_solve
training_data = {}
component.after_solve(solver, instance, None, {}, training_data)
# Should query slack for all inequalities
internal.get_inequality_slacks.assert_called_once()
# Should store constraint slacks in instance object
assert training_data["slacks"] == {
"c1": 0.5,
"c2": 0.0,
"c3": 0.0,
"c4": 1.4,
}

def test_drop_redundant_with_check_feasibility():
solver, internal, instance, classifiers = _setup()
component = DropRedundantInequalitiesStep(
check_feasibility=True,
violation_tolerance=1e-3,
)
component.classifiers = classifiers
# LearningSolver calls before_solve
component.before_solve(solver, instance, None)
# Assert constraints are extracted
assert internal.extract_constraint.call_count == 2
internal.extract_constraint.assert_has_calls(
[
call("c3"),
call("c4"),
]
)
# LearningSolver calls iteration_cb (first time)
should_repeat = component.iteration_cb(solver, instance, None)
# Should ask LearningSolver to repeat
assert should_repeat
# Should ask solver if removed constraints are satisfied (mock always returns false)
internal.is_constraint_satisfied.assert_has_calls(
[
call("<c3>", 1e-3),
call("<c4>", 1e-3),
]
)
# Should add constraints back to LP relaxation
internal.add_constraint.assert_has_calls([call("<c3>"), call("<c4>")])
# LearningSolver calls iteration_cb (second time)
should_repeat = component.iteration_cb(solver, instance, None)
assert not should_repeat

def test_x_y_fit_predict_evaluate():
instances = [Mock(spec=Instance), Mock(spec=Instance)]
component = DropRedundantInequalitiesStep(slack_tolerance=0.05, threshold=0.80)
component.classifiers = {
"type-a": Mock(spec=Classifier),
"type-b": Mock(spec=Classifier),
}
component.classifiers["type-a"].predict_proba = Mock(
return_value=[
np.array([0.20, 0.80]),
]
)
component.classifiers["type-b"].predict_proba = Mock(
return_value=np.array(
[
[0.50, 0.50],
[0.05, 0.95],
]
)
)
# First mock instance
instances[0].training_data = [
{
"slacks": {
"c1": 0.00,
"c2": 0.05,
"c3": 0.00,
"c4": 30.0,
}
}
]
instances[0].get_constraint_category = Mock(
side_effect=lambda cid: {
"c1": None,
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
}[cid]
)
instances[0].get_constraint_features = Mock(
side_effect=lambda cid: {
"c2": np.array([1.0, 0.0]),
"c3": np.array([0.5, 0.5]),
"c4": np.array([1.0]),
}[cid]
)
# Second mock instance
instances[1].training_data = [
{
"slacks": {
"c1": 0.00,
"c3": 0.30,
"c4": 0.00,
"c5": 0.00,
}
}
]
instances[1].get_constraint_category = Mock(
side_effect=lambda cid: {
"c1": None,
"c3": "type-a",
"c4": "type-b",
"c5": "type-b",
}[cid]
)
instances[1].get_constraint_features = Mock(
side_effect=lambda cid: {
"c3": np.array([0.3, 0.4]),
"c4": np.array([0.7]),
"c5": np.array([0.8]),
}[cid]
)
expected_x = {
"type-a": np.array(
[
[1.0, 0.0],
[0.5, 0.5],
[0.3, 0.4],
]
),
"type-b": np.array(
[
[1.0],
[0.7],
[0.8],
]
),
}
expected_y = {
"type-a": np.array([[0], [0], [1]]),
"type-b": np.array([[1], [0], [0]]),
}
# Should build X and Y matrices correctly
actual_x = component.x(instances)
actual_y = component.y(instances)
for category in ["type-a", "type-b"]:
np.testing.assert_array_equal(actual_x[category], expected_x[category])
np.testing.assert_array_equal(actual_y[category], expected_y[category])
# Should pass along X and Y matrices to classifiers
component.fit(instances)
for category in ["type-a", "type-b"]:
actual_x = component.classifiers[category].fit.call_args[0][0]
actual_y = component.classifiers[category].fit.call_args[0][1]
np.testing.assert_array_equal(actual_x, expected_x[category])
np.testing.assert_array_equal(actual_y, expected_y[category])
assert component.predict(expected_x) == {"type-a": [[1]], "type-b": [[0], [1]]}
ev = component.evaluate(instances[1])
assert ev["True positive"] == 1
assert ev["True negative"] == 1
assert ev["False positive"] == 1
assert ev["False negative"] == 0

def test_x_multiple_solves():
instance = Mock(spec=Instance)
instance.training_data = [
{
"slacks": {
"c1": 0.00,
"c2": 0.05,
"c3": 0.00,
"c4": 30.0,
}
},
{
"slacks": {
"c1": 0.00,
"c2": 0.00,
"c3": 1.00,
"c4": 0.0,
}
},
]
instance.get_constraint_category = Mock(
side_effect=lambda cid: {
"c1": None,
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
}[cid]
)
instance.get_constraint_features = Mock(
side_effect=lambda cid: {
"c2": np.array([1.0, 0.0]),
"c3": np.array([0.5, 0.5]),
"c4": np.array([1.0]),
}[cid]
)
expected_x = {
"type-a": np.array(
[
[1.0, 0.0],
[0.5, 0.5],
[1.0, 0.0],
[0.5, 0.5],
]
),
"type-b": np.array(
[
[1.0],
[1.0],
]
),
}
expected_y = {
"type-a": np.array([[1], [0], [0], [1]]),
"type-b": np.array([[1], [0]]),
}
# Should build X and Y matrices correctly
component = DropRedundantInequalitiesStep()
actual_x = component.x([instance])
actual_y = component.y([instance])
for category in ["type-a", "type-b"]:
np.testing.assert_array_equal(actual_x[category], expected_x[category])
np.testing.assert_array_equal(actual_y[category], expected_y[category])
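
The expected_y fixtures in both tests above are consistent with one labeling rule: a constraint is marked droppable (label 1) when its recorded slack strictly exceeds the slack tolerance, and constraints whose category is None are skipped. A sketch of that rule as inferred from the fixtures — build_y is a hypothetical helper, not MIPLearn's implementation:

import numpy as np

def build_y(slacks_by_solve, categories, slack_tolerance=0.05):
    # Hypothetical reconstruction: label 1 (droppable) iff slack > tolerance.
    y = {}
    for slacks in slacks_by_solve:
        for cid, slack in slacks.items():
            category = categories[cid]
            if category is None:
                continue  # uncategorized constraints contribute no training row
            y.setdefault(category, []).append([1 if slack > slack_tolerance else 0])
    return {cat: np.array(rows) for cat, rows in y.items()}

# Reproduces the first instance's contribution to expected_y in
# test_x_y_fit_predict_evaluate (slack_tolerance=0.05):
categories = {"c1": None, "c2": "type-a", "c3": "type-a", "c4": "type-b"}
slacks = [{"c1": 0.00, "c2": 0.05, "c3": 0.00, "c4": 30.0}]
assert build_y(slacks, categories)["type-a"].tolist() == [[0], [0]]
assert build_y(slacks, categories)["type-b"].tolist() == [[1]]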

@@ -1,3 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

@@ -1,57 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
from unittest.mock import Mock, call
from miplearn.components.component import Component
from miplearn.components.composite import CompositeComponent
from miplearn.instance import Instance
from miplearn.solvers.learning import LearningSolver

def test_composite():
solver, instance, model = (
Mock(spec=LearningSolver),
Mock(spec=Instance),
Mock(),
)
c1 = Mock(spec=Component)
c2 = Mock(spec=Component)
cc = CompositeComponent([c1, c2])
# Should broadcast before_solve
cc.before_solve(solver, instance, model)
c1.before_solve.assert_has_calls([call(solver, instance, model)])
c2.before_solve.assert_has_calls([call(solver, instance, model)])
# Should broadcast after_solve
cc.after_solve(solver, instance, model, {}, {})
c1.after_solve.assert_has_calls([call(solver, instance, model, {}, {})])
c2.after_solve.assert_has_calls([call(solver, instance, model, {}, {})])
# Should broadcast fit
cc.fit([1, 2, 3])
c1.fit.assert_has_calls([call([1, 2, 3])])
c2.fit.assert_has_calls([call([1, 2, 3])])
# Should broadcast lazy_cb
cc.lazy_cb(solver, instance, model)
c1.lazy_cb.assert_has_calls([call(solver, instance, model)])
c2.lazy_cb.assert_has_calls([call(solver, instance, model)])
# Should broadcast iteration_cb
cc.iteration_cb(solver, instance, model)
c1.iteration_cb.assert_has_calls([call(solver, instance, model)])
c2.iteration_cb.assert_has_calls([call(solver, instance, model)])
# If at least one child component returns true, iteration_cb should return True
c1.iteration_cb = Mock(return_value=True)
c2.iteration_cb = Mock(return_value=False)
assert cc.iteration_cb(solver, instance, model)
# If all children return False, iteration_cb should return False
c1.iteration_cb = Mock(return_value=False)
c2.iteration_cb = Mock(return_value=False)
assert not cc.iteration_cb(solver, instance, model)
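
The contract exercised here is plain broadcasting: each callback is forwarded to every child in order, and iteration_cb answers True when at least one child does. A minimal sketch of that behavior, written only from the assertions above (not MIPLearn's actual CompositeComponent code):

class BroadcastSketch:
    def __init__(self, children):
        self.children = children

    def before_solve(self, solver, instance, model):
        # after_solve, fit, and lazy_cb broadcast the same way.
        for child in self.children:
            child.before_solve(solver, instance, model)

    def iteration_cb(self, solver, instance, model):
        # Call every child, then repeat if at least one asked to repeat.
        results = [child.iteration_cb(solver, instance, model) for child in self.children]
        return any(results)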

@@ -1,143 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
from unittest.mock import Mock
import numpy as np
from numpy.linalg import norm
from miplearn.classifiers import Classifier
from miplearn.components.lazy_dynamic import DynamicLazyConstraintsComponent
from miplearn.solvers.internal import InternalSolver
from miplearn.solvers.learning import LearningSolver
from miplearn.tests import get_test_pyomo_instances

E = 0.1

def test_lazy_fit():
instances, models = get_test_pyomo_instances()
instances[0].found_violated_lazy_constraints = ["a", "b"]
instances[1].found_violated_lazy_constraints = ["b", "c"]
classifier = Mock(spec=Classifier)
component = DynamicLazyConstraintsComponent(classifier=classifier)
component.fit(instances)
# Should create one classifier for each violation
assert "a" in component.classifiers
assert "b" in component.classifiers
assert "c" in component.classifiers
# Should provide correct x_train to each classifier
expected_x_train_a = np.array([[67.0, 21.75, 1287.92], [70.0, 23.75, 1199.83]])
expected_x_train_b = np.array([[67.0, 21.75, 1287.92], [70.0, 23.75, 1199.83]])
expected_x_train_c = np.array([[67.0, 21.75, 1287.92], [70.0, 23.75, 1199.83]])
actual_x_train_a = component.classifiers["a"].fit.call_args[0][0]
actual_x_train_b = component.classifiers["b"].fit.call_args[0][0]
actual_x_train_c = component.classifiers["c"].fit.call_args[0][0]
assert norm(expected_x_train_a - actual_x_train_a) < E
assert norm(expected_x_train_b - actual_x_train_b) < E
assert norm(expected_x_train_c - actual_x_train_c) < E
# Should provide correct y_train to each classifier
expected_y_train_a = np.array([1.0, 0.0])
expected_y_train_b = np.array([1.0, 1.0])
expected_y_train_c = np.array([0.0, 1.0])
actual_y_train_a = component.classifiers["a"].fit.call_args[0][1]
actual_y_train_b = component.classifiers["b"].fit.call_args[0][1]
actual_y_train_c = component.classifiers["c"].fit.call_args[0][1]
assert norm(expected_y_train_a - actual_y_train_a) < E
assert norm(expected_y_train_b - actual_y_train_b) < E
assert norm(expected_y_train_c - actual_y_train_c) < E

def test_lazy_before():
instances, models = get_test_pyomo_instances()
instances[0].build_lazy_constraint = Mock(return_value="c1")
solver = LearningSolver()
solver.internal_solver = Mock(spec=InternalSolver)
component = DynamicLazyConstraintsComponent(threshold=0.10)
component.classifiers = {"a": Mock(spec=Classifier), "b": Mock(spec=Classifier)}
component.classifiers["a"].predict_proba = Mock(return_value=[[0.95, 0.05]])
component.classifiers["b"].predict_proba = Mock(return_value=[[0.02, 0.80]])
component.before_solve(solver, instances[0], models[0])
# Should ask classifier likelihood of each constraint being violated
expected_x_test_a = np.array([[67.0, 21.75, 1287.92]])
expected_x_test_b = np.array([[67.0, 21.75, 1287.92]])
actual_x_test_a = component.classifiers["a"].predict_proba.call_args[0][0]
actual_x_test_b = component.classifiers["b"].predict_proba.call_args[0][0]
assert norm(expected_x_test_a - actual_x_test_a) < E
assert norm(expected_x_test_b - actual_x_test_b) < E
# Should ask instance to generate cut for constraints whose likelihood
# of being violated exceeds the threshold
instances[0].build_lazy_constraint.assert_called_once_with(models[0], "b")
# Should ask internal solver to add generated constraint
solver.internal_solver.add_constraint.assert_called_once_with("c1")

def test_lazy_evaluate():
instances, models = get_test_pyomo_instances()
component = DynamicLazyConstraintsComponent()
component.classifiers = {
"a": Mock(spec=Classifier),
"b": Mock(spec=Classifier),
"c": Mock(spec=Classifier),
}
component.classifiers["a"].predict_proba = Mock(return_value=[[1.0, 0.0]])
component.classifiers["b"].predict_proba = Mock(return_value=[[0.0, 1.0]])
component.classifiers["c"].predict_proba = Mock(return_value=[[0.0, 1.0]])
instances[0].found_violated_lazy_constraints = ["a", "b", "c"]
instances[1].found_violated_lazy_constraints = ["b", "d"]
assert component.evaluate(instances) == {
0: {
"Accuracy": 0.75,
"F1 score": 0.8,
"Precision": 1.0,
"Recall": 2 / 3.0,
"Predicted positive": 2,
"Predicted negative": 2,
"Condition positive": 3,
"Condition negative": 1,
"False negative": 1,
"False positive": 0,
"True negative": 1,
"True positive": 2,
"Predicted positive (%)": 50.0,
"Predicted negative (%)": 50.0,
"Condition positive (%)": 75.0,
"Condition negative (%)": 25.0,
"False negative (%)": 25.0,
"False positive (%)": 0,
"True negative (%)": 25.0,
"True positive (%)": 50.0,
},
1: {
"Accuracy": 0.5,
"F1 score": 0.5,
"Precision": 0.5,
"Recall": 0.5,
"Predicted positive": 2,
"Predicted negative": 2,
"Condition positive": 2,
"Condition negative": 2,
"False negative": 1,
"False positive": 1,
"True negative": 1,
"True positive": 1,
"Predicted positive (%)": 50.0,
"Predicted negative (%)": 50.0,
"Condition positive (%)": 50.0,
"Condition negative (%)": 50.0,
"False negative (%)": 25.0,
"False positive (%)": 25.0,
"True negative (%)": 25.0,
"True positive (%)": 25.0,
},
}
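
Both dictionaries are ordinary confusion-matrix arithmetic over the set of known violations {a, b, c, d}. For instance 0 the classifiers predict {b, c} while {a, b, c} were actually violated, so TP=2, FP=0, FN=1, TN=1; hence precision 1.0, recall 2/3, F1 score 0.8, and accuracy 3/4. A short check of that arithmetic (confusion is a hypothetical helper):

def confusion(predicted, actual, universe):
    tp = len(predicted & actual)
    fp = len(predicted - actual)
    fn = len(actual - predicted)
    tn = len(universe - predicted - actual)
    return tp, fp, fn, tn

# Instance 0: classifiers "b" and "c" fire; "a", "b", "c" were violated.
tp, fp, fn, tn = confusion({"b", "c"}, {"a", "b", "c"}, {"a", "b", "c", "d"})
assert (tp, fp, fn, tn) == (2, 0, 1, 1)
precision, recall = tp / (tp + fp), tp / (tp + fn)
assert round(2 * precision * recall / (precision + recall), 3) == 0.8  # F1
assert (tp + tn) / 4 == 0.75  # accuracy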

@@ -1,232 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
from unittest.mock import Mock, call
from miplearn.classifiers import Classifier
from miplearn.components.lazy_static import StaticLazyConstraintsComponent
from miplearn.instance import Instance
from miplearn.solvers.internal import InternalSolver
from miplearn.solvers.learning import LearningSolver

def test_usage_with_solver():
solver = Mock(spec=LearningSolver)
solver.use_lazy_cb = False
solver.gap_tolerance = 1e-4
internal = solver.internal_solver = Mock(spec=InternalSolver)
internal.get_constraint_ids = Mock(return_value=["c1", "c2", "c3", "c4"])
internal.extract_constraint = Mock(side_effect=lambda cid: "<%s>" % cid)
internal.is_constraint_satisfied = Mock(return_value=False)
instance = Mock(spec=Instance)
instance.has_static_lazy_constraints = Mock(return_value=True)
instance.is_constraint_lazy = Mock(
side_effect=lambda cid: {
"c1": False,
"c2": True,
"c3": True,
"c4": True,
}[cid]
)
instance.get_constraint_features = Mock(
side_effect=lambda cid: {
"c2": [1.0, 0.0],
"c3": [0.5, 0.5],
"c4": [1.0],
}[cid]
)
instance.get_constraint_category = Mock(
side_effect=lambda cid: {
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
}[cid]
)
component = StaticLazyConstraintsComponent(
threshold=0.90,
use_two_phase_gap=False,
violation_tolerance=1.0,
)
component.classifiers = {
"type-a": Mock(spec=Classifier),
"type-b": Mock(spec=Classifier),
}
component.classifiers["type-a"].predict_proba = Mock(
return_value=[
[0.20, 0.80],
[0.05, 0.95],
]
)
component.classifiers["type-b"].predict_proba = Mock(
return_value=[
[0.02, 0.98],
]
)
# LearningSolver calls before_solve
component.before_solve(solver, instance, None)
# Should ask if instance has static lazy constraints
instance.has_static_lazy_constraints.assert_called_once()
# Should ask internal solver for a list of constraints in the model
internal.get_constraint_ids.assert_called_once()
# Should ask if each constraint in the model is lazy
instance.is_constraint_lazy.assert_has_calls(
[
call("c1"),
call("c2"),
call("c3"),
call("c4"),
]
)
# For the lazy ones, should ask for features
instance.get_constraint_features.assert_has_calls(
[
call("c2"),
call("c3"),
call("c4"),
]
)
# Should also ask for categories
assert instance.get_constraint_category.call_count == 3
instance.get_constraint_category.assert_has_calls(
[
call("c2"),
call("c3"),
call("c4"),
]
)
# Should ask internal solver to remove constraints identified as lazy
assert internal.extract_constraint.call_count == 3
internal.extract_constraint.assert_has_calls(
[
call("c2"),
call("c3"),
call("c4"),
]
)
# Should ask ML to predict whether each lazy constraint should be enforced
component.classifiers["type-a"].predict_proba.assert_called_once_with(
[[1.0, 0.0], [0.5, 0.5]]
)
component.classifiers["type-b"].predict_proba.assert_called_once_with([[1.0]])
# For the ones that should be enforced, should ask solver to re-add them
# to the formulation. The remaining ones should remain in the pool.
assert internal.add_constraint.call_count == 2
internal.add_constraint.assert_has_calls(
[
call("<c3>"),
call("<c4>"),
]
)
internal.add_constraint.reset_mock()
# LearningSolver calls iteration_cb (first time)
should_repeat = component.iteration_cb(solver, instance, None)
assert should_repeat
# Should ask internal solver to verify if constraints in the pool are
# satisfied and add the ones that are not
internal.is_constraint_satisfied.assert_called_once_with("<c2>", tol=1.0)
internal.is_constraint_satisfied.reset_mock()
internal.add_constraint.assert_called_once_with("<c2>")
internal.add_constraint.reset_mock()
# LearningSolver calls iteration_cb (second time)
should_repeat = component.iteration_cb(solver, instance, None)
assert not should_repeat
# The lazy constraint pool should be empty by now, so no calls should be made
internal.is_constraint_satisfied.assert_not_called()
internal.add_constraint.assert_not_called()
# Should update instance object
assert instance.found_violated_lazy_constraints == ["c3", "c4", "c2"]

def test_fit():
instance_1 = Mock(spec=Instance)
instance_1.found_violated_lazy_constraints = ["c1", "c2", "c4", "c5"]
instance_1.get_constraint_category = Mock(
side_effect=lambda cid: {
"c1": "type-a",
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
"c5": "type-b",
}[cid]
)
instance_1.get_constraint_features = Mock(
side_effect=lambda cid: {
"c1": [1, 1],
"c2": [1, 2],
"c3": [1, 3],
"c4": [1, 4, 0],
"c5": [1, 5, 0],
}[cid]
)
instance_2 = Mock(spec=Instance)
instance_2.found_violated_lazy_constraints = ["c2", "c3", "c4"]
instance_2.get_constraint_category = Mock(
side_effect=lambda cid: {
"c1": "type-a",
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
"c5": "type-b",
}[cid]
)
instance_2.get_constraint_features = Mock(
side_effect=lambda cid: {
"c1": [2, 1],
"c2": [2, 2],
"c3": [2, 3],
"c4": [2, 4, 0],
"c5": [2, 5, 0],
}[cid]
)
instances = [instance_1, instance_2]
component = StaticLazyConstraintsComponent()
component.classifiers = {
"type-a": Mock(spec=Classifier),
"type-b": Mock(spec=Classifier),
}
expected_constraints = {
"type-a": ["c1", "c2", "c3"],
"type-b": ["c4", "c5"],
}
expected_x = {
"type-a": [[1, 1], [1, 2], [1, 3], [2, 1], [2, 2], [2, 3]],
"type-b": [[1, 4, 0], [1, 5, 0], [2, 4, 0], [2, 5, 0]],
}
expected_y = {
"type-a": [[0, 1], [0, 1], [1, 0], [1, 0], [0, 1], [0, 1]],
"type-b": [[0, 1], [0, 1], [0, 1], [1, 0]],
}
assert component._collect_constraints(instances) == expected_constraints
assert component.x(instances) == expected_x
assert component.y(instances) == expected_y
component.fit(instances)
component.classifiers["type-a"].fit.assert_called_once_with(
expected_x["type-a"],
expected_y["type-a"],
)
component.classifiers["type-b"].fit.assert_called_once_with(
expected_x["type-b"],
expected_y["type-b"],
)
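
expected_y encodes the training labels one-hot: [0, 1] when the constraint appears in found_violated_lazy_constraints (it had to be enforced), [1, 0] when it could stay in the pool. A sketch of that encoding as inferred from the fixtures — one_hot_labels is a hypothetical helper:

def one_hot_labels(constraint_ids, violated):
    # [0, 1] = constraint was violated and must be enforced; [1, 0] = safe to keep lazy.
    return [[0, 1] if cid in violated else [1, 0] for cid in constraint_ids]

# Reproduces the "type-b" rows of expected_y above:
assert (
    one_hot_labels(["c4", "c5"], {"c1", "c2", "c4", "c5"})  # instance_1
    + one_hot_labels(["c4", "c5"], {"c2", "c3", "c4"})      # instance_2
) == [[0, 1], [0, 1], [0, 1], [1, 0]]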

@@ -1,50 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
from unittest.mock import Mock
import numpy as np
from miplearn.classifiers import Regressor
from miplearn.components.objective import ObjectiveValueComponent
from miplearn.tests import get_test_pyomo_instances

def test_usage():
instances, models = get_test_pyomo_instances()
comp = ObjectiveValueComponent()
comp.fit(instances)
assert instances[0].training_data[0]["Lower bound"] == 1183.0
assert instances[0].training_data[0]["Upper bound"] == 1183.0
assert np.round(comp.predict(instances), 2).tolist() == [
[1183.0, 1183.0],
[1070.0, 1070.0],
]

def test_obj_evaluate():
instances, models = get_test_pyomo_instances()
reg = Mock(spec=Regressor)
reg.predict = Mock(return_value=np.array([1000.0, 1000.0]))
comp = ObjectiveValueComponent(regressor=reg)
comp.fit(instances)
ev = comp.evaluate(instances)
assert ev == {
"Lower bound": {
"Explained variance": 0.0,
"Max error": 183.0,
"Mean absolute error": 126.5,
"Mean squared error": 19194.5,
"Median absolute error": 126.5,
"R2": -5.012843605607331,
},
"Upper bound": {
"Explained variance": 0.0,
"Max error": 183.0,
"Mean absolute error": 126.5,
"Mean squared error": 19194.5,
"Median absolute error": 126.5,
"R2": -5.012843605607331,
},
}
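
Each figure here follows from the mocked predictions [1000, 1000] against the true bounds [1183, 1070]: the absolute errors are 183 and 70, so the mean and median absolute errors are 126.5, the MSE is (183^2 + 70^2) / 2 = 19194.5, and R2 = 1 - 38389 / 6384.5 ≈ -5.0128. A check with scikit-learn, assuming these are the metrics behind the report:

import numpy as np
from sklearn.metrics import (
    explained_variance_score, max_error, mean_absolute_error,
    mean_squared_error, median_absolute_error, r2_score,
)

y_true = np.array([1183.0, 1070.0])  # bounds recorded during training
y_pred = np.array([1000.0, 1000.0])  # mocked regressor output
assert max_error(y_true, y_pred) == 183.0
assert mean_absolute_error(y_true, y_pred) == 126.5
assert mean_squared_error(y_true, y_pred) == 19194.5
assert median_absolute_error(y_true, y_pred) == 126.5
assert round(r2_score(y_true, y_pred), 6) == -5.012844
assert explained_variance_score(y_true, y_pred) == 0.0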

@@ -1,111 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
from unittest.mock import Mock
import numpy as np
from miplearn.classifiers import Classifier
from miplearn.components.primal import PrimalSolutionComponent
from miplearn.tests import get_test_pyomo_instances

def test_predict():
instances, models = get_test_pyomo_instances()
comp = PrimalSolutionComponent()
comp.fit(instances)
solution = comp.predict(instances[0])
assert "x" in solution
assert 0 in solution["x"]
assert 1 in solution["x"]
assert 2 in solution["x"]
assert 3 in solution["x"]

def test_evaluate():
instances, models = get_test_pyomo_instances()
clf_zero = Mock(spec=Classifier)
clf_zero.predict_proba = Mock(
return_value=np.array(
[
[0.0, 1.0], # x[0]
[0.0, 1.0], # x[1]
[1.0, 0.0], # x[2]
[1.0, 0.0], # x[3]
]
)
)
clf_one = Mock(spec=Classifier)
clf_one.predict_proba = Mock(
return_value=np.array(
[
[1.0, 0.0], # x[0] instances[0]
[1.0, 0.0], # x[1] instances[0]
[0.0, 1.0], # x[2] instances[0]
[1.0, 0.0], # x[3] instances[0]
]
)
)
comp = PrimalSolutionComponent(classifier=[clf_zero, clf_one], threshold=0.50)
comp.fit(instances[:1])
assert comp.predict(instances[0]) == {"x": {0: 0, 1: 0, 2: 1, 3: None}}
assert instances[0].training_data[0]["Solution"] == {"x": {0: 1, 1: 0, 2: 1, 3: 1}}
ev = comp.evaluate(instances[:1])
assert ev == {
"Fix one": {
0: {
"Accuracy": 0.5,
"Condition negative": 1,
"Condition negative (%)": 25.0,
"Condition positive": 3,
"Condition positive (%)": 75.0,
"F1 score": 0.5,
"False negative": 2,
"False negative (%)": 50.0,
"False positive": 0,
"False positive (%)": 0.0,
"Precision": 1.0,
"Predicted negative": 3,
"Predicted negative (%)": 75.0,
"Predicted positive": 1,
"Predicted positive (%)": 25.0,
"Recall": 0.3333333333333333,
"True negative": 1,
"True negative (%)": 25.0,
"True positive": 1,
"True positive (%)": 25.0,
}
},
"Fix zero": {
0: {
"Accuracy": 0.75,
"Condition negative": 3,
"Condition negative (%)": 75.0,
"Condition positive": 1,
"Condition positive (%)": 25.0,
"F1 score": 0.6666666666666666,
"False negative": 0,
"False negative (%)": 0.0,
"False positive": 1,
"False positive (%)": 25.0,
"Precision": 0.5,
"Predicted negative": 2,
"Predicted negative (%)": 50.0,
"Predicted positive": 2,
"Predicted positive (%)": 50.0,
"Recall": 1.0,
"True negative": 2,
"True negative (%)": 50.0,
"True positive": 1,
"True positive (%)": 25.0,
}
},
}

def test_primal_parallel_fit():
instances, models = get_test_pyomo_instances()
comp = PrimalSolutionComponent()
comp.fit(instances, n_jobs=2)
assert len(comp.classifiers) == 2
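
test_evaluate above pins down the thresholding rule: with one classifier per fixing direction, a variable is fixed to 0 or 1 when the corresponding positive-class probability reaches the 0.50 threshold, and left free (None) when neither does. A sketch of that rule as inferred from the mocks — predict_fixings is a hypothetical helper:

import numpy as np

def predict_fixings(proba_zero, proba_one, threshold=0.50):
    # Column 1 of each predict_proba output is read as P(fix); inferred from the mocks.
    fixings = {}
    for i, (p0, p1) in enumerate(zip(proba_zero[:, 1], proba_one[:, 1])):
        if p0 >= threshold:
            fixings[i] = 0
        elif p1 >= threshold:
            fixings[i] = 1
        else:
            fixings[i] = None  # leave the variable free
    return fixings

proba_zero = np.array([[0.0, 1.0], [0.0, 1.0], [1.0, 0.0], [1.0, 0.0]])
proba_one = np.array([[1.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 0.0]])
assert predict_fixings(proba_zero, proba_one) == {0: 0, 1: 0, 2: 1, 3: None}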