parent b555d5739c
commit 86b6ac93c3

@@ -0,0 +1,27 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

import numpy as np

from miplearn.classifiers import Classifier


class CountingClassifier(Classifier):
    """
    A classifier that generates constant predictions based only on the
    frequency of the training labels. For example, if y_train is
    [1.0, 0.0, 0.0], this classifier always returns [0.66, 0.33] for any
    x_test. It essentially counts how many times each label appeared,
    hence the name.
    """

    def __init__(self):
        self.mean = None

    def fit(self, x_train, y_train):
        # The fraction of positive labels doubles as P(label == 1).
        self.mean = np.mean(y_train)

    def predict_proba(self, x_test):
        # Constant prediction [P(0), P(1)], regardless of x_test.
        return np.array([[1 - self.mean, self.mean]])

    def __repr__(self):
        return "CountingClassifier(mean=%.3f)" % self.mean

@@ -0,0 +1,3 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

@@ -0,0 +1,17 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

import numpy as np
from numpy.linalg import norm

from miplearn.classifiers.counting import CountingClassifier

E = 0.1


def test_counting():
    clf = CountingClassifier()
    # Five of the eight labels are 1.0, so the expected P(1) is 5/8 = 0.625.
    clf.fit(np.zeros((8, 25)), [0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0])
    expected_proba = np.array([[0.375, 0.625]])
    actual_proba = clf.predict_proba(np.zeros((1, 25)))
    assert norm(actual_proba - expected_proba) < E

@@ -0,0 +1,79 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

from unittest.mock import Mock

import numpy as np
from numpy.linalg import norm

from miplearn import LazyConstraintsComponent, LearningSolver, InternalSolver
from miplearn.classifiers import Classifier
from miplearn.tests import get_training_instances_and_models

E = 0.1


def test_lazy_fit():
    instances, models = get_training_instances_and_models()
    instances[0].found_violations = ["a", "b"]
    instances[1].found_violations = ["b", "c"]
    classifier = Mock(spec=Classifier)
    component = LazyConstraintsComponent(classifier=classifier)

    component.fit(instances)

    # Should create one classifier for each violation
    assert "a" in component.classifiers
    assert "b" in component.classifiers
    assert "c" in component.classifiers

    # Should provide the correct x_train to each classifier: one feature row
    # per training instance (e.g., capacity 67. and mean weight 21.75 for the
    # first knapsack)
    expected_x_train_a = np.array([[67., 21.75, 1287.92], [70., 23.75, 1199.83]])
    expected_x_train_b = np.array([[67., 21.75, 1287.92], [70., 23.75, 1199.83]])
    expected_x_train_c = np.array([[67., 21.75, 1287.92], [70., 23.75, 1199.83]])
    actual_x_train_a = component.classifiers["a"].fit.call_args[0][0]
    actual_x_train_b = component.classifiers["b"].fit.call_args[0][0]
    actual_x_train_c = component.classifiers["c"].fit.call_args[0][0]
    assert norm(expected_x_train_a - actual_x_train_a) < E
    assert norm(expected_x_train_b - actual_x_train_b) < E
    assert norm(expected_x_train_c - actual_x_train_c) < E

    # Should provide the correct y_train to each classifier: 1.0 when the
    # violation was found in that instance, 0.0 otherwise
    expected_y_train_a = np.array([1.0, 0.0])
    expected_y_train_b = np.array([1.0, 1.0])
    expected_y_train_c = np.array([0.0, 1.0])
    actual_y_train_a = component.classifiers["a"].fit.call_args[0][1]
    actual_y_train_b = component.classifiers["b"].fit.call_args[0][1]
    actual_y_train_c = component.classifiers["c"].fit.call_args[0][1]
    assert norm(expected_y_train_a - actual_y_train_a) < E
    assert norm(expected_y_train_b - actual_y_train_b) < E
    assert norm(expected_y_train_c - actual_y_train_c) < E


def test_lazy_before():
    instances, models = get_training_instances_and_models()
    instances[0].build_lazy_constraint = Mock(return_value="c1")
    solver = LearningSolver()
    solver.internal_solver = Mock(spec=InternalSolver)
    component = LazyConstraintsComponent(threshold=0.10)
    component.classifiers = {"a": Mock(spec=Classifier),
                             "b": Mock(spec=Classifier)}
    component.classifiers["a"].predict_proba = Mock(return_value=[[0.95, 0.05]])
    component.classifiers["b"].predict_proba = Mock(return_value=[[0.02, 0.80]])

    component.before_solve(solver, instances[0], models[0])

    # Should ask each classifier for the likelihood of its constraint being
    # violated
    expected_x_test_a = np.array([[67., 21.75, 1287.92]])
    expected_x_test_b = np.array([[67., 21.75, 1287.92]])
    actual_x_test_a = component.classifiers["a"].predict_proba.call_args[0][0]
    actual_x_test_b = component.classifiers["b"].predict_proba.call_args[0][0]
    assert norm(expected_x_test_a - actual_x_test_a) < E
    assert norm(expected_x_test_b - actual_x_test_b) < E

    # Should ask the instance to generate lazy constraints only for those
    # whose predicted likelihood of violation exceeds the threshold
    instances[0].build_lazy_constraint.assert_called_once_with(models[0], "b")

    # Should ask the internal solver to add the generated constraint
    solver.internal_solver.add_constraint.assert_called_once_with("c1")
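To make the threshold behavior explicit, here is a hedged sketch of the decision rule the test encodes; it mirrors the assertions above, not MIPLearn's internal implementation:

probas = {"a": [[0.95, 0.05]], "b": [[0.02, 0.80]]}   # from the mocks above
threshold = 0.10
# A lazy constraint is built only when the predicted probability of violation
# (the second column of predict_proba) exceeds the threshold.
violated = [c for c, p in probas.items() if p[0][1] > threshold]
assert violated == ["b"]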

@@ -1,4 +1,25 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

from miplearn import LearningSolver
from miplearn.problems.knapsack import KnapsackInstance


def get_training_instances_and_models():
    # Two small knapsack instances used as training data throughout the tests.
    instances = [
        KnapsackInstance(
            weights=[23., 26., 20., 18.],
            prices=[505., 352., 458., 220.],
            capacity=67.,
        ),
        KnapsackInstance(
            weights=[25., 30., 22., 18.],
            prices=[500., 365., 420., 150.],
            capacity=70.,
        ),
    ]
    models = [instance.to_model() for instance in instances]
    # Solve each instance once, so that features and training labels are
    # available to the components under test.
    solver = LearningSolver()
    for instance, model in zip(instances, models):
        solver.solve(instance, model)
    return instances, models
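For reference, a sketch of how the tests above consume this helper (illustration only):

instances, models = get_training_instances_and_models()
assert len(instances) == len(models) == 2
# Instances come back already solved, so tests can attach labels directly:
instances[0].found_violations = ["a"]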