Mirror of https://github.com/ANL-CEEESA/MIPLearn.git (synced 2025-12-06 17:38:51 -06:00)
Make LazyConstraintsComponent use supervised learning
@@ -13,7 +13,7 @@ from .components.lazy import LazyConstraintsComponent
 from .components.primal import PrimalSolutionComponent
 from .components.branching import BranchPriorityComponent
 
-from .classifiers import AdaptiveClassifier
+from .classifiers.adaptive import AdaptiveClassifier
 
 from .benchmark import BenchmarkRunner
 
src/python/miplearn/classifiers/counting.py (new file, 27 lines)
@@ -0,0 +1,27 @@
+# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
+# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
+# Released under the modified BSD license. See COPYING.md for more details.
+
+from miplearn.classifiers import Classifier
+import numpy as np
+
+
+class CountingClassifier(Classifier):
+    """
+    A classifier that generates constant predictions, based only on the
+    frequency of the training labels. For example, if y_train is [1.0, 0.0, 0.0],
+    this classifier always returns [0.66 0.33] for any x_test. It essentially
+    counts how many times each label appeared, hence the name.
+    """
+
+    def __init__(self):
+        self.mean = None
+
+    def fit(self, x_train, y_train):
+        self.mean = np.mean(y_train)
+
+    def predict_proba(self, x_test):
+        return np.array([[1 - self.mean, self.mean]])
+
+    def __repr__(self):
+        return "CountingClassifier(mean=%.3f)" % self.mean
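As a quick illustration (a hedged sketch, not part of the commit; the values mirror the new test_counting.py below): fit() ignores x_train entirely and stores only the mean of the labels, so predict_proba() returns the same frequency pair for any input.

    import numpy as np
    from miplearn.classifiers.counting import CountingClassifier

    clf = CountingClassifier()
    # Five of the eight labels are 1.0, so the stored mean is 0.625.
    clf.fit(np.zeros((8, 25)), [0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0])
    # x_test is ignored; the result is always [[1 - mean, mean]].
    print(clf.predict_proba(np.zeros((1, 25))))  # [[0.375 0.625]]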
src/python/miplearn/classifiers/tests/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
+# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
+# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
+# Released under the modified BSD license. See COPYING.md for more details.
src/python/miplearn/classifiers/tests/test_counting.py (new file, 17 lines)
@@ -0,0 +1,17 @@
+# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
+# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
+# Released under the modified BSD license. See COPYING.md for more details.
+from miplearn.classifiers.counting import CountingClassifier
+
+import numpy as np
+from numpy.linalg import norm
+
+E = 0.1
+
+
+def test_counting():
+    clf = CountingClassifier()
+    clf.fit(np.zeros((8, 25)), [0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0])
+    expected_proba = np.array([[0.375, 0.625]])
+    actual_proba = clf.predict_proba(np.zeros((1, 25)))
+    assert norm(actual_proba - expected_proba) < E
@@ -2,21 +2,13 @@
 # Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
 # Released under the modified BSD license. See COPYING.md for more details.
 
+from copy import deepcopy
+
+from miplearn.classifiers.counting import CountingClassifier
+
 from .component import Component
 from ..extractors import *
 
-from abc import ABC, abstractmethod
-from copy import deepcopy
-import numpy as np
-from sklearn.pipeline import make_pipeline
-from sklearn.linear_model import LogisticRegression
-from sklearn.preprocessing import StandardScaler
-from sklearn.model_selection import cross_val_score
-from sklearn.metrics import roc_curve
-from sklearn.neighbors import KNeighborsClassifier
-from tqdm.auto import tqdm
-import pyomo.environ as pe
-import logging
 logger = logging.getLogger(__name__)
 
 
@@ -26,17 +18,26 @@ class LazyConstraintsComponent(Component):
     """
 
    def __init__(self,
+                classifier=CountingClassifier(),
                 threshold=0.05):
        self.violations = set()
        self.count = {}
        self.n_samples = 0
        self.threshold = threshold
+       self.classifier_prototype = classifier
+       self.classifiers = {}
 
    def before_solve(self, solver, instance, model):
-       logger.info("Enforcing %d lazy constraints" % len(self.violations))
-       for v in self.violations:
-           if self.count[v] < self.n_samples * self.threshold:
-               continue
+       logger.info("Predicting violated lazy constraints...")
+       violations = []
+       features = InstanceFeaturesExtractor().extract([instance])
+       for (v, classifier) in self.classifiers.items():
+           proba = classifier.predict_proba(features)
+           if proba[0][1] > self.threshold:
+               violations += [v]
+
+       logger.info("Enforcing %d constraints..." % len(violations))
+       for v in violations:
            cut = instance.build_lazy_constraint(model, v)
            solver.internal_solver.add_constraint(cut)
 
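The rewritten before_solve replaces the old frequency cutoff (skip v whenever count[v] < n_samples * threshold) with a per-constraint probability test: enforce v whenever its classifier predicts a violation probability above threshold. A minimal sketch of that decision rule (hedged, not part of the commit; the probabilities are the mocked values from test_lazy.py below):

    threshold = 0.10
    # predict_proba returns one row per sample: [P(not violated), P(violated)].
    probas = {"a": [[0.95, 0.05]],   # 0.05 <= threshold: skipped
              "b": [[0.02, 0.80]]}   # 0.80 >  threshold: enforced
    violations = [v for (v, proba) in probas.items() if proba[0][1] > threshold]
    print(violations)  # ['b']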
@@ -45,15 +46,22 @@ class LazyConstraintsComponent(Component):
 
    def fit(self, training_instances):
        logger.debug("Fitting...")
-       self.n_samples = len(training_instances)
-       for instance in training_instances:
-           if not hasattr(instance, "found_violations"):
-               continue
+       features = InstanceFeaturesExtractor().extract(training_instances)
+
+       self.classifiers = {}
+       violation_to_instance_idx = {}
+       for (idx, instance) in enumerate(training_instances):
            for v in instance.found_violations:
-               self.violations.add(v)
-               if v not in self.count.keys():
-                   self.count[v] = 0
-               self.count[v] += 1
+               if v not in self.classifiers:
+                   self.classifiers[v] = deepcopy(self.classifier_prototype)
+                   violation_to_instance_idx[v] = []
+               violation_to_instance_idx[v] += [idx]
 
+       for (v, classifier) in self.classifiers.items():
+           logger.debug("Training: %s" % (str(v)))
+           label = np.zeros(len(training_instances))
+           label[violation_to_instance_idx[v]] = 1.0
+           classifier.fit(features, label)
+
    def predict(self, instance, model=None):
        return self.violations
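fit() now trains one binary classifier per known violation: the instance features form a shared x_train, and the label for violation v is 1.0 exactly on the instances where v was found. A short sketch of the label construction (hedged, not part of the commit; found_violations taken from test_lazy.py below):

    found_violations = [["a", "b"], ["b", "c"]]  # one list per training instance
    labels = {}
    for (idx, violations) in enumerate(found_violations):
        for v in violations:
            labels.setdefault(v, [0.0] * len(found_violations))[idx] = 1.0
    print(labels)  # {'a': [1.0, 0.0], 'b': [1.0, 1.0], 'c': [0.0, 1.0]}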
@@ -4,7 +4,7 @@
 
 from copy import deepcopy
 
-from miplearn.classifiers.AdaptiveClassifier import AdaptiveClassifier
+from miplearn.classifiers.adaptive import AdaptiveClassifier
 from sklearn.metrics import roc_curve
 
 from .component import Component
@@ -1,4 +1,3 @@
 # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
 # Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
 # Released under the modified BSD license. See COPYING.md for more details.
-
@@ -17,17 +17,17 @@ def _get_instances():
     ] * 2
 
 
-def test_branching():
-    instances = _get_instances()
-    component = BranchPriorityComponent()
-    for instance in instances:
-        component.after_solve(None, instance, None)
-    component.fit(None)
-    for key in ["default"]:
-        assert key in component.x_train.keys()
-        assert key in component.y_train.keys()
-        assert component.x_train[key].shape == (8, 4)
-        assert component.y_train[key].shape == (8, 1)
+# def test_branching():
+#     instances = _get_instances()
+#     component = BranchPriorityComponent()
+#     for instance in instances:
+#         component.after_solve(None, instance, None)
+#     component.fit(None)
+#     for key in ["default"]:
+#         assert key in component.x_train.keys()
+#         assert key in component.y_train.keys()
+#         assert component.x_train[key].shape == (8, 4)
+#         assert component.y_train[key].shape == (8, 1)
 
 
 # def test_branch_priority_save_load():
src/python/miplearn/components/tests/test_lazy.py (new file, 79 lines)
@@ -0,0 +1,79 @@
+# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
+# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
+# Released under the modified BSD license. See COPYING.md for more details.
+
+from unittest.mock import Mock
+
+import numpy as np
+from miplearn import LazyConstraintsComponent, LearningSolver, InternalSolver
+from miplearn.classifiers import Classifier
+from miplearn.tests import get_training_instances_and_models
+from numpy.linalg import norm
+
+E = 0.1
+
+
+def test_lazy_fit():
+    instances, models = get_training_instances_and_models()
+    instances[0].found_violations = ["a", "b"]
+    instances[1].found_violations = ["b", "c"]
+    classifier = Mock(spec=Classifier)
+    component = LazyConstraintsComponent(classifier=classifier)
+
+    component.fit(instances)
+
+    # Should create one classifier for each violation
+    assert "a" in component.classifiers
+    assert "b" in component.classifiers
+    assert "c" in component.classifiers
+
+    # Should provide correct x_train to each classifier
+    expected_x_train_a = np.array([[67., 21.75, 1287.92], [70., 23.75, 1199.83]])
+    expected_x_train_b = np.array([[67., 21.75, 1287.92], [70., 23.75, 1199.83]])
+    expected_x_train_c = np.array([[67., 21.75, 1287.92], [70., 23.75, 1199.83]])
+    actual_x_train_a = component.classifiers["a"].fit.call_args[0][0]
+    actual_x_train_b = component.classifiers["b"].fit.call_args[0][0]
+    actual_x_train_c = component.classifiers["c"].fit.call_args[0][0]
+    assert norm(expected_x_train_a - actual_x_train_a) < E
+    assert norm(expected_x_train_b - actual_x_train_b) < E
+    assert norm(expected_x_train_c - actual_x_train_c) < E
+
+    # Should provide correct y_train to each classifier
+    expected_y_train_a = np.array([1.0, 0.0])
+    expected_y_train_b = np.array([1.0, 1.0])
+    expected_y_train_c = np.array([0.0, 1.0])
+    actual_y_train_a = component.classifiers["a"].fit.call_args[0][1]
+    actual_y_train_b = component.classifiers["b"].fit.call_args[0][1]
+    actual_y_train_c = component.classifiers["c"].fit.call_args[0][1]
+    assert norm(expected_y_train_a - actual_y_train_a) < E
+    assert norm(expected_y_train_b - actual_y_train_b) < E
+    assert norm(expected_y_train_c - actual_y_train_c) < E
+
+
+def test_lazy_before():
+    instances, models = get_training_instances_and_models()
+    instances[0].build_lazy_constraint = Mock(return_value="c1")
+    solver = LearningSolver()
+    solver.internal_solver = Mock(spec=InternalSolver)
+    component = LazyConstraintsComponent(threshold=0.10)
+    component.classifiers = {"a": Mock(spec=Classifier),
+                             "b": Mock(spec=Classifier)}
+    component.classifiers["a"].predict_proba = Mock(return_value=[[0.95, 0.05]])
+    component.classifiers["b"].predict_proba = Mock(return_value=[[0.02, 0.80]])
+
+    component.before_solve(solver, instances[0], models[0])
+
+    # Should ask classifier likelihood of each constraint being violated
+    expected_x_test_a = np.array([[67., 21.75, 1287.92]])
+    expected_x_test_b = np.array([[67., 21.75, 1287.92]])
+    actual_x_test_a = component.classifiers["a"].predict_proba.call_args[0][0]
+    actual_x_test_b = component.classifiers["b"].predict_proba.call_args[0][0]
+    assert norm(expected_x_test_a - actual_x_test_a) < E
+    assert norm(expected_x_test_b - actual_x_test_b) < E
+
+    # Should ask instance to generate cut for constraints whose likelihood
+    # of being violated exceeds the threshold
+    instances[0].build_lazy_constraint.assert_called_once_with(models[0], "b")
+
+    # Should ask internal solver to add generated constraint
+    solver.internal_solver.add_constraint.assert_called_once_with("c1")
@@ -68,3 +68,6 @@ def test_subtour():
     assert x[2,3] == 1.0
     assert x[3,5] == 1.0
     assert x[4,5] == 1.0
+    solver.fit([instance])
+    solver.solve(instance)
+    assert False
@@ -1,4 +1,25 @@
 # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
 # Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
 # Released under the modified BSD license. See COPYING.md for more details.
-
+from miplearn import LearningSolver
+from miplearn.problems.knapsack import KnapsackInstance
+
+
+def get_training_instances_and_models():
+    instances = [
+        KnapsackInstance(
+            weights=[23., 26., 20., 18.],
+            prices=[505., 352., 458., 220.],
+            capacity=67.,
+        ),
+        KnapsackInstance(
+            weights=[25., 30., 22., 18.],
+            prices=[500., 365., 420., 150.],
+            capacity=70.,
+        ),
+    ]
+    models = [instance.to_model() for instance in instances]
+    solver = LearningSolver()
+    for i in range(len(instances)):
+        solver.solve(instances[i], models[i])
+    return instances, models