Reformat source code with Black; add pre-commit hooks and CI checks

Date:   2020-12-05 10:59:33 -06:00
Parent: 3823931382
Commit: d99600f101
49 changed files with 1291 additions and 972 deletions
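Note: the pre-commit hook and CI check mentioned in the commit title are not part of the hunks excerpted below, which only show the Black reformatting. As a rough sketch only, a typical setup of that era would pin Black in `.pre-commit-config.yaml`; the exact file contents, Black version, and CI system used in this repository are assumptions.

    # .pre-commit-config.yaml (illustrative sketch; the version pin is an assumption)
    repos:
      - repo: https://github.com/psf/black
        rev: 20.8b1
        hooks:
          - id: black

On the CI side, a step such as `black --check .` fails the build when any file is left unformatted, while `pre-commit install` enables the same check locally before each commit.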


@@ -9,15 +9,15 @@ class Component(ABC):
"""
A Component is an object which adds functionality to a LearningSolver.
"""
@abstractmethod
def before_solve(self, solver, instance, model):
pass
@abstractmethod
def after_solve(self, solver, instance, model, results):
pass
@abstractmethod
def fit(self, training_instances):
pass


@@ -18,10 +18,12 @@ class UserCutsComponent(Component):
"""
A component that predicts which user cuts to enforce.
"""
def __init__(self,
classifier=CountingClassifier(),
threshold=0.05):
def __init__(
self,
classifier=CountingClassifier(),
threshold=0.05,
):
self.violations = set()
self.count = {}
self.n_samples = 0
@@ -40,7 +42,7 @@ class UserCutsComponent(Component):
def after_solve(self, solver, instance, model, results):
pass
def fit(self, training_instances):
logger.debug("Fitting...")
features = InstanceFeaturesExtractor().extract(training_instances)
@@ -56,10 +58,11 @@ class UserCutsComponent(Component):
violation_to_instance_idx[v] = []
violation_to_instance_idx[v] += [idx]
for (v, classifier) in tqdm(self.classifiers.items(),
desc="Fit (user cuts)",
disable=not sys.stdout.isatty(),
):
for (v, classifier) in tqdm(
self.classifiers.items(),
desc="Fit (user cuts)",
disable=not sys.stdout.isatty(),
):
logger.debug("Training: %s" % (str(v)))
label = np.zeros(len(training_instances))
label[violation_to_instance_idx[v]] = 1.0
@@ -79,10 +82,11 @@ class UserCutsComponent(Component):
all_violations = set()
for instance in instances:
all_violations |= set(instance.found_violated_user_cuts)
for idx in tqdm(range(len(instances)),
desc="Evaluate (lazy)",
disable=not sys.stdout.isatty(),
):
for idx in tqdm(
range(len(instances)),
desc="Evaluate (lazy)",
disable=not sys.stdout.isatty(),
):
instance = instances[idx]
condition_positive = set(instance.found_violated_user_cuts)
condition_negative = all_violations - condition_positive


@@ -18,10 +18,12 @@ class DynamicLazyConstraintsComponent(Component):
"""
A component that predicts which lazy constraints to enforce.
"""
def __init__(self,
classifier=CountingClassifier(),
threshold=0.05):
def __init__(
self,
classifier=CountingClassifier(),
threshold=0.05,
):
self.violations = set()
self.count = {}
self.n_samples = 0
@@ -52,7 +54,7 @@ class DynamicLazyConstraintsComponent(Component):
def after_solve(self, solver, instance, model, results):
pass
def fit(self, training_instances):
logger.debug("Fitting...")
features = InstanceFeaturesExtractor().extract(training_instances)
@@ -68,10 +70,11 @@ class DynamicLazyConstraintsComponent(Component):
violation_to_instance_idx[v] = []
violation_to_instance_idx[v] += [idx]
for (v, classifier) in tqdm(self.classifiers.items(),
desc="Fit (lazy)",
disable=not sys.stdout.isatty(),
):
for (v, classifier) in tqdm(
self.classifiers.items(),
desc="Fit (lazy)",
disable=not sys.stdout.isatty(),
):
logger.debug("Training: %s" % (str(v)))
label = np.zeros(len(training_instances))
label[violation_to_instance_idx[v]] = 1.0
@@ -91,10 +94,11 @@ class DynamicLazyConstraintsComponent(Component):
all_violations = set()
for instance in instances:
all_violations |= set(instance.found_violated_lazy_constraints)
for idx in tqdm(range(len(instances)),
desc="Evaluate (lazy)",
disable=not sys.stdout.isatty(),
):
for idx in tqdm(
range(len(instances)),
desc="Evaluate (lazy)",
disable=not sys.stdout.isatty(),
):
instance = instances[idx]
condition_positive = set(instance.found_violated_lazy_constraints)
condition_negative = all_violations - condition_positive


@@ -19,13 +19,14 @@ class LazyConstraint:
class StaticLazyConstraintsComponent(Component):
def __init__(self,
classifier=CountingClassifier(),
threshold=0.05,
use_two_phase_gap=True,
large_gap=1e-2,
violation_tolerance=-0.5,
):
def __init__(
self,
classifier=CountingClassifier(),
threshold=0.05,
use_two_phase_gap=True,
large_gap=1e-2,
violation_tolerance=-0.5,
):
self.threshold = threshold
self.classifier_prototype = classifier
self.classifiers = {}
@@ -74,32 +75,38 @@ class StaticLazyConstraintsComponent(Component):
logger.debug("Finding violated lazy constraints...")
constraints_to_add = []
for c in self.pool:
if not solver.internal_solver.is_constraint_satisfied(c.obj,
tol=self.violation_tolerance):
if not solver.internal_solver.is_constraint_satisfied(
c.obj, tol=self.violation_tolerance
):
constraints_to_add.append(c)
for c in constraints_to_add:
self.pool.remove(c)
solver.internal_solver.add_constraint(c.obj)
instance.found_violated_lazy_constraints += [c.cid]
if len(constraints_to_add) > 0:
logger.info("%8d lazy constraints added %8d in the pool" % (len(constraints_to_add), len(self.pool)))
logger.info(
"%8d lazy constraints added %8d in the pool"
% (len(constraints_to_add), len(self.pool))
)
return True
else:
return False
def fit(self, training_instances):
training_instances = [t
for t in training_instances
if hasattr(t, "found_violated_lazy_constraints")]
training_instances = [
t
for t in training_instances
if hasattr(t, "found_violated_lazy_constraints")
]
logger.debug("Extracting x and y...")
x = self.x(training_instances)
y = self.y(training_instances)
logger.debug("Fitting...")
for category in tqdm(x.keys(),
desc="Fit (lazy)",
disable=not sys.stdout.isatty()):
for category in tqdm(
x.keys(), desc="Fit (lazy)", disable=not sys.stdout.isatty()
):
if category not in self.classifiers:
self.classifiers[category] = deepcopy(self.classifier_prototype)
self.classifiers[category].fit(x[category], y[category])
@@ -121,8 +128,10 @@ class StaticLazyConstraintsComponent(Component):
x[category] = []
constraints[category] = []
x[category] += [instance.get_constraint_features(cid)]
c = LazyConstraint(cid=cid,
obj=solver.internal_solver.extract_constraint(cid))
c = LazyConstraint(
cid=cid,
obj=solver.internal_solver.extract_constraint(cid),
)
constraints[category] += [c]
self.pool.append(c)
logger.info("%8d lazy constraints extracted" % len(self.pool))
@@ -141,7 +150,13 @@ class StaticLazyConstraintsComponent(Component):
self.pool.remove(c)
solver.internal_solver.add_constraint(c.obj)
instance.found_violated_lazy_constraints += [c.cid]
logger.info("%8d lazy constraints added %8d in the pool" % (n_added, len(self.pool)))
logger.info(
"%8d lazy constraints added %8d in the pool"
% (
n_added,
len(self.pool),
)
)
def _collect_constraints(self, train_instances):
constraints = {}


@@ -1,13 +1,20 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
from sklearn.metrics import mean_squared_error, explained_variance_score, max_error, mean_absolute_error, r2_score
from sklearn.metrics import (
mean_squared_error,
explained_variance_score,
max_error,
mean_absolute_error,
r2_score,
)
from .. import Component, InstanceFeaturesExtractor, ObjectiveValueExtractor
from sklearn.linear_model import LinearRegression
from copy import deepcopy
import numpy as np
import logging
logger = logging.getLogger(__name__)
@@ -15,12 +22,12 @@ class ObjectiveValueComponent(Component):
"""
A Component which predicts the optimal objective value of the problem.
"""
def __init__(self,
regressor=LinearRegression()):
def __init__(self, regressor=LinearRegression()):
self.ub_regressor = None
self.lb_regressor = None
self.regressor_prototype = regressor
def before_solve(self, solver, instance, model):
if self.ub_regressor is not None:
logger.info("Predicting optimal value...")
@@ -28,7 +35,7 @@ class ObjectiveValueComponent(Component):
instance.predicted_ub = ub
instance.predicted_lb = lb
logger.info("Predicted values: lb=%.2f, ub=%.2f" % (lb, ub))
def after_solve(self, solver, instance, model, results):
if self.ub_regressor is not None:
results["Predicted UB"] = instance.predicted_ub
@@ -36,7 +43,7 @@ class ObjectiveValueComponent(Component):
else:
results["Predicted UB"] = None
results["Predicted LB"] = None
def fit(self, training_instances):
logger.debug("Extracting features...")
features = InstanceFeaturesExtractor().extract(training_instances)
@@ -50,7 +57,7 @@ class ObjectiveValueComponent(Component):
self.ub_regressor.fit(features, ub.ravel())
logger.debug("Fitting ub_regressor...")
self.lb_regressor.fit(features, lb.ravel())
def predict(self, instances):
features = InstanceFeaturesExtractor().extract(instances)
lb = self.lb_regressor.predict(features)


@@ -19,10 +19,12 @@ class PrimalSolutionComponent(Component):
A component that predicts primal solutions.
"""
def __init__(self,
classifier=AdaptiveClassifier(),
mode="exact",
threshold=MinPrecisionThreshold(0.98)):
def __init__(
self,
classifier=AdaptiveClassifier(),
mode="exact",
threshold=MinPrecisionThreshold(0.98),
):
self.mode = mode
self.classifiers = {}
self.thresholds = {}
@@ -51,9 +53,10 @@ class PrimalSolutionComponent(Component):
features = VariableFeaturesExtractor().extract(training_instances)
solutions = SolutionExtractor().extract(training_instances)
for category in tqdm(features.keys(),
desc="Fit (primal)",
):
for category in tqdm(
features.keys(),
desc="Fit (primal)",
):
x_train = features[category]
for label in [0, 1]:
y_train = solutions[category][:, label].astype(int)
@@ -74,9 +77,15 @@ class PrimalSolutionComponent(Component):
# Find threshold (dynamic or static)
if isinstance(self.threshold_prototype, DynamicThreshold):
self.thresholds[category, label] = self.threshold_prototype.find(clf, x_train, y_train)
self.thresholds[category, label] = self.threshold_prototype.find(
clf,
x_train,
y_train,
)
else:
self.thresholds[category, label] = deepcopy(self.threshold_prototype)
self.thresholds[category, label] = deepcopy(
self.threshold_prototype
)
self.classifiers[category, label] = clf
@@ -98,18 +107,21 @@ class PrimalSolutionComponent(Component):
ws = np.array([[1 - clf, clf] for _ in range(n)])
else:
ws = clf.predict_proba(x_test[category])
assert ws.shape == (n, 2), "ws.shape should be (%d, 2) not %s" % (n, ws.shape)
assert ws.shape == (n, 2), "ws.shape should be (%d, 2) not %s" % (
n,
ws.shape,
)
for (i, (var, index)) in enumerate(var_split[category]):
if ws[i, 1] >= self.thresholds[category, label]:
solution[var][index] = label
return solution
def evaluate(self, instances):
ev = {"Fix zero": {},
"Fix one": {}}
for instance_idx in tqdm(range(len(instances)),
desc="Evaluate (primal)",
):
ev = {"Fix zero": {}, "Fix one": {}}
for instance_idx in tqdm(
range(len(instances)),
desc="Evaluate (primal)",
):
instance = instances[instance_idx]
solution_actual = instance.solution
solution_pred = self.predict(instance)
@@ -143,6 +155,10 @@ class PrimalSolutionComponent(Component):
tn_one = len(pred_one_negative & vars_zero)
fn_one = len(pred_one_negative & vars_one)
ev["Fix zero"][instance_idx] = classifier_evaluation_dict(tp_zero, tn_zero, fp_zero, fn_zero)
ev["Fix one"][instance_idx] = classifier_evaluation_dict(tp_one, tn_one, fp_one, fn_one)
ev["Fix zero"][instance_idx] = classifier_evaluation_dict(
tp_zero, tn_zero, fp_zero, fn_zero
)
ev["Fix one"][instance_idx] = classifier_evaluation_dict(
tp_one, tn_one, fp_one, fn_one
)
return ev


@@ -51,14 +51,15 @@ class RelaxationComponent(Component):
If `check_dropped` is true, set the maximum number of iterations in the lazy constraint loop.
"""
def __init__(self,
classifier=CountingClassifier(),
threshold=0.95,
slack_tolerance=1e-5,
check_dropped=False,
violation_tolerance=1e-5,
max_iterations=3,
):
def __init__(
self,
classifier=CountingClassifier(),
threshold=0.95,
slack_tolerance=1e-5,
check_dropped=False,
violation_tolerance=1e-5,
max_iterations=3,
):
self.classifiers = {}
self.classifier_prototype = classifier
self.threshold = threshold
@@ -77,16 +78,20 @@ class RelaxationComponent(Component):
logger.info("Predicting redundant LP constraints...")
cids = solver.internal_solver.get_constraint_ids()
x, constraints = self.x([instance],
constraint_ids=cids,
return_constraints=True)
x, constraints = self.x(
[instance],
constraint_ids=cids,
return_constraints=True,
)
y = self.predict(x)
for category in y.keys():
for i in range(len(y[category])):
if y[category][i][0] == 1:
cid = constraints[category][i]
c = LazyConstraint(cid=cid,
obj=solver.internal_solver.extract_constraint(cid))
c = LazyConstraint(
cid=cid,
obj=solver.internal_solver.extract_constraint(cid),
)
self.pool += [c]
logger.info("Extracted %d predicted constraints" % len(self.pool))
@@ -98,21 +103,19 @@ class RelaxationComponent(Component):
x = self.x(training_instances)
y = self.y(training_instances)
logger.debug("Fitting...")
for category in tqdm(x.keys(),
desc="Fit (relaxation)"):
for category in tqdm(x.keys(), desc="Fit (relaxation)"):
if category not in self.classifiers:
self.classifiers[category] = deepcopy(self.classifier_prototype)
self.classifiers[category].fit(x[category], y[category])
def x(self,
instances,
constraint_ids=None,
return_constraints=False):
def x(self, instances, constraint_ids=None, return_constraints=False):
x = {}
constraints = {}
for instance in tqdm(InstanceIterator(instances),
desc="Extract (relaxation:x)",
disable=len(instances) < 5):
for instance in tqdm(
InstanceIterator(instances),
desc="Extract (relaxation:x)",
disable=len(instances) < 5,
):
if constraint_ids is not None:
cids = constraint_ids
else:
@@ -133,9 +136,11 @@ class RelaxationComponent(Component):
def y(self, instances):
y = {}
for instance in tqdm(InstanceIterator(instances),
desc="Extract (relaxation:y)",
disable=len(instances) < 5):
for instance in tqdm(
InstanceIterator(instances),
desc="Extract (relaxation:y)",
disable=len(instances) < 5,
):
for (cid, slack) in instance.slacks.items():
category = instance.get_constraint_category(cid)
if category is None:
@@ -154,7 +159,7 @@ class RelaxationComponent(Component):
if category not in self.classifiers:
continue
y[category] = []
#x_cat = np.array(x_cat)
# x_cat = np.array(x_cat)
proba = self.classifiers[category].predict_proba(x_cat)
for i in range(len(proba)):
if proba[i][1] >= self.threshold:
@@ -191,13 +196,19 @@ class RelaxationComponent(Component):
logger.debug("Checking that dropped constraints are satisfied...")
constraints_to_add = []
for c in self.pool:
if not solver.internal_solver.is_constraint_satisfied(c.obj, self.violation_tolerance):
if not solver.internal_solver.is_constraint_satisfied(
c.obj,
self.violation_tolerance,
):
constraints_to_add.append(c)
for c in constraints_to_add:
self.pool.remove(c)
solver.internal_solver.add_constraint(c.obj)
if len(constraints_to_add) > 0:
logger.info("%8d constraints %8d in the pool" % (len(constraints_to_add), len(self.pool)))
logger.info(
"%8d constraints %8d in the pool"
% (len(constraints_to_add), len(self.pool))
)
return True
else:
return False


@@ -28,9 +28,9 @@ def test_lazy_fit():
assert "c" in component.classifiers
# Should provide correct x_train to each classifier
expected_x_train_a = np.array([[67., 21.75, 1287.92], [70., 23.75, 1199.83]])
expected_x_train_b = np.array([[67., 21.75, 1287.92], [70., 23.75, 1199.83]])
expected_x_train_c = np.array([[67., 21.75, 1287.92], [70., 23.75, 1199.83]])
expected_x_train_a = np.array([[67.0, 21.75, 1287.92], [70.0, 23.75, 1199.83]])
expected_x_train_b = np.array([[67.0, 21.75, 1287.92], [70.0, 23.75, 1199.83]])
expected_x_train_c = np.array([[67.0, 21.75, 1287.92], [70.0, 23.75, 1199.83]])
actual_x_train_a = component.classifiers["a"].fit.call_args[0][0]
actual_x_train_b = component.classifiers["b"].fit.call_args[0][0]
actual_x_train_c = component.classifiers["c"].fit.call_args[0][0]
@@ -56,16 +56,15 @@ def test_lazy_before():
solver = LearningSolver()
solver.internal_solver = Mock(spec=InternalSolver)
component = DynamicLazyConstraintsComponent(threshold=0.10)
component.classifiers = {"a": Mock(spec=Classifier),
"b": Mock(spec=Classifier)}
component.classifiers = {"a": Mock(spec=Classifier), "b": Mock(spec=Classifier)}
component.classifiers["a"].predict_proba = Mock(return_value=[[0.95, 0.05]])
component.classifiers["b"].predict_proba = Mock(return_value=[[0.02, 0.80]])
component.before_solve(solver, instances[0], models[0])
# Should ask classifier likelihood of each constraint being violated
expected_x_test_a = np.array([[67., 21.75, 1287.92]])
expected_x_test_b = np.array([[67., 21.75, 1287.92]])
expected_x_test_a = np.array([[67.0, 21.75, 1287.92]])
expected_x_test_b = np.array([[67.0, 21.75, 1287.92]])
actual_x_test_a = component.classifiers["a"].predict_proba.call_args[0][0]
actual_x_test_b = component.classifiers["b"].predict_proba.call_args[0][0]
assert norm(expected_x_test_a - actual_x_test_a) < E
@@ -82,13 +81,15 @@ def test_lazy_before():
def test_lazy_evaluate():
instances, models = get_test_pyomo_instances()
component = DynamicLazyConstraintsComponent()
component.classifiers = {"a": Mock(spec=Classifier),
"b": Mock(spec=Classifier),
"c": Mock(spec=Classifier)}
component.classifiers = {
"a": Mock(spec=Classifier),
"b": Mock(spec=Classifier),
"c": Mock(spec=Classifier),
}
component.classifiers["a"].predict_proba = Mock(return_value=[[1.0, 0.0]])
component.classifiers["b"].predict_proba = Mock(return_value=[[0.0, 1.0]])
component.classifiers["c"].predict_proba = Mock(return_value=[[0.0, 1.0]])
instances[0].found_violated_lazy_constraints = ["a", "b", "c"]
instances[1].found_violated_lazy_constraints = ["b", "d"]
assert component.evaluate(instances) == {
@@ -96,7 +97,7 @@ def test_lazy_evaluate():
"Accuracy": 0.75,
"F1 score": 0.8,
"Precision": 1.0,
"Recall": 2/3.,
"Recall": 2 / 3.0,
"Predicted positive": 2,
"Predicted negative": 2,
"Condition positive": 3,
@@ -135,6 +136,5 @@ def test_lazy_evaluate():
"False positive (%)": 25.0,
"True negative (%)": 25.0,
"True positive (%)": 25.0,
}
},
}


@@ -4,10 +4,12 @@
from unittest.mock import Mock, call
from miplearn import (StaticLazyConstraintsComponent,
LearningSolver,
Instance,
InternalSolver)
from miplearn import (
StaticLazyConstraintsComponent,
LearningSolver,
Instance,
InternalSolver,
)
from miplearn.classifiers import Classifier
@@ -23,39 +25,47 @@ def test_usage_with_solver():
instance = Mock(spec=Instance)
instance.has_static_lazy_constraints = Mock(return_value=True)
instance.is_constraint_lazy = Mock(side_effect=lambda cid: {
"c1": False,
"c2": True,
"c3": True,
"c4": True,
}[cid])
instance.get_constraint_features = Mock(side_effect=lambda cid: {
"c2": [1.0, 0.0],
"c3": [0.5, 0.5],
"c4": [1.0],
}[cid])
instance.get_constraint_category = Mock(side_effect=lambda cid: {
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
}[cid])
instance.is_constraint_lazy = Mock(
side_effect=lambda cid: {
"c1": False,
"c2": True,
"c3": True,
"c4": True,
}[cid]
)
instance.get_constraint_features = Mock(
side_effect=lambda cid: {
"c2": [1.0, 0.0],
"c3": [0.5, 0.5],
"c4": [1.0],
}[cid]
)
instance.get_constraint_category = Mock(
side_effect=lambda cid: {
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
}[cid]
)
component = StaticLazyConstraintsComponent(threshold=0.90,
use_two_phase_gap=False,
violation_tolerance=1.0)
component = StaticLazyConstraintsComponent(
threshold=0.90, use_two_phase_gap=False, violation_tolerance=1.0
)
component.classifiers = {
"type-a": Mock(spec=Classifier),
"type-b": Mock(spec=Classifier),
}
component.classifiers["type-a"].predict_proba = \
Mock(return_value=[
component.classifiers["type-a"].predict_proba = Mock(
return_value=[
[0.20, 0.80],
[0.05, 0.95],
])
component.classifiers["type-b"].predict_proba = \
Mock(return_value=[
]
)
component.classifiers["type-b"].predict_proba = Mock(
return_value=[
[0.02, 0.98],
])
]
)
# LearningSolver calls before_solve
component.before_solve(solver, instance, None)
@@ -67,37 +77,59 @@ def test_usage_with_solver():
internal.get_constraint_ids.assert_called_once()
# Should ask if each constraint in the model is lazy
instance.is_constraint_lazy.assert_has_calls([
call("c1"), call("c2"), call("c3"), call("c4"),
])
instance.is_constraint_lazy.assert_has_calls(
[
call("c1"),
call("c2"),
call("c3"),
call("c4"),
]
)
# For the lazy ones, should ask for features
instance.get_constraint_features.assert_has_calls([
call("c2"), call("c3"), call("c4"),
])
instance.get_constraint_features.assert_has_calls(
[
call("c2"),
call("c3"),
call("c4"),
]
)
# Should also ask for categories
assert instance.get_constraint_category.call_count == 3
instance.get_constraint_category.assert_has_calls([
call("c2"), call("c3"), call("c4"),
])
instance.get_constraint_category.assert_has_calls(
[
call("c2"),
call("c3"),
call("c4"),
]
)
# Should ask internal solver to remove constraints identified as lazy
assert internal.extract_constraint.call_count == 3
internal.extract_constraint.assert_has_calls([
call("c2"), call("c3"), call("c4"),
])
internal.extract_constraint.assert_has_calls(
[
call("c2"),
call("c3"),
call("c4"),
]
)
# Should ask ML to predict whether each lazy constraint should be enforced
component.classifiers["type-a"].predict_proba.assert_called_once_with([[1.0, 0.0], [0.5, 0.5]])
component.classifiers["type-a"].predict_proba.assert_called_once_with(
[[1.0, 0.0], [0.5, 0.5]]
)
component.classifiers["type-b"].predict_proba.assert_called_once_with([[1.0]])
# For the ones that should be enforced, should ask solver to re-add them
# to the formulation. The remaining ones should remain in the pool.
assert internal.add_constraint.call_count == 2
internal.add_constraint.assert_has_calls([
call("<c3>"), call("<c4>"),
])
internal.add_constraint.assert_has_calls(
[
call("<c3>"),
call("<c4>"),
]
)
internal.add_constraint.reset_mock()
# LearningSolver calls after_iteration (first time)
@@ -126,37 +158,45 @@ def test_usage_with_solver():
def test_fit():
instance_1 = Mock(spec=Instance)
instance_1.found_violated_lazy_constraints = ["c1", "c2", "c4", "c5"]
instance_1.get_constraint_category = Mock(side_effect=lambda cid: {
"c1": "type-a",
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
"c5": "type-b",
}[cid])
instance_1.get_constraint_features = Mock(side_effect=lambda cid: {
"c1": [1, 1],
"c2": [1, 2],
"c3": [1, 3],
"c4": [1, 4, 0],
"c5": [1, 5, 0],
}[cid])
instance_1.get_constraint_category = Mock(
side_effect=lambda cid: {
"c1": "type-a",
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
"c5": "type-b",
}[cid]
)
instance_1.get_constraint_features = Mock(
side_effect=lambda cid: {
"c1": [1, 1],
"c2": [1, 2],
"c3": [1, 3],
"c4": [1, 4, 0],
"c5": [1, 5, 0],
}[cid]
)
instance_2 = Mock(spec=Instance)
instance_2.found_violated_lazy_constraints = ["c2", "c3", "c4"]
instance_2.get_constraint_category = Mock(side_effect=lambda cid: {
"c1": "type-a",
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
"c5": "type-b",
}[cid])
instance_2.get_constraint_features = Mock(side_effect=lambda cid: {
"c1": [2, 1],
"c2": [2, 2],
"c3": [2, 3],
"c4": [2, 4, 0],
"c5": [2, 5, 0],
}[cid])
instance_2.get_constraint_category = Mock(
side_effect=lambda cid: {
"c1": "type-a",
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
"c5": "type-b",
}[cid]
)
instance_2.get_constraint_features = Mock(
side_effect=lambda cid: {
"c1": [2, 1],
"c2": [2, 2],
"c3": [2, 3],
"c4": [2, 4, 0],
"c5": [2, 5, 0],
}[cid]
)
instances = [instance_1, instance_2]
component = StaticLazyConstraintsComponent()
@@ -171,18 +211,22 @@ def test_fit():
}
expected_x = {
"type-a": [[1, 1], [1, 2], [1, 3], [2, 1], [2, 2], [2, 3]],
"type-b": [[1, 4, 0], [1, 5, 0], [2, 4, 0], [2, 5, 0]]
"type-b": [[1, 4, 0], [1, 5, 0], [2, 4, 0], [2, 5, 0]],
}
expected_y = {
"type-a": [[0, 1], [0, 1], [1, 0], [1, 0], [0, 1], [0, 1]],
"type-b": [[0, 1], [0, 1], [0, 1], [1, 0]]
"type-b": [[0, 1], [0, 1], [0, 1], [1, 0]],
}
assert component._collect_constraints(instances) == expected_constraints
assert component.x(instances) == expected_x
assert component.y(instances) == expected_y
component.fit(instances)
component.classifiers["type-a"].fit.assert_called_once_with(expected_x["type-a"],
expected_y["type-a"])
component.classifiers["type-b"].fit.assert_called_once_with(expected_x["type-b"],
expected_y["type-b"])
component.classifiers["type-a"].fit.assert_called_once_with(
expected_x["type-a"],
expected_y["type-a"],
)
component.classifiers["type-b"].fit.assert_called_once_with(
expected_x["type-b"],
expected_y["type-b"],
)


@@ -16,8 +16,10 @@ def test_usage():
comp.fit(instances)
assert instances[0].lower_bound == 1183.0
assert instances[0].upper_bound == 1183.0
assert np.round(comp.predict(instances), 2).tolist() == [[1183.0, 1183.0],
[1070.0, 1070.0]]
assert np.round(comp.predict(instances), 2).tolist() == [
[1183.0, 1183.0],
[1070.0, 1070.0],
]
def test_obj_evaluate():
@@ -28,20 +30,20 @@ def test_obj_evaluate():
comp.fit(instances)
ev = comp.evaluate(instances)
assert ev == {
'Lower bound': {
'Explained variance': 0.0,
'Max error': 183.0,
'Mean absolute error': 126.5,
'Mean squared error': 19194.5,
'Median absolute error': 126.5,
'R2': -5.012843605607331,
"Lower bound": {
"Explained variance": 0.0,
"Max error": 183.0,
"Mean absolute error": 126.5,
"Mean squared error": 19194.5,
"Median absolute error": 126.5,
"R2": -5.012843605607331,
},
"Upper bound": {
"Explained variance": 0.0,
"Max error": 183.0,
"Mean absolute error": 126.5,
"Mean squared error": 19194.5,
"Median absolute error": 126.5,
"R2": -5.012843605607331,
},
'Upper bound': {
'Explained variance': 0.0,
'Max error': 183.0,
'Mean absolute error': 126.5,
'Mean squared error': 19194.5,
'Median absolute error': 126.5,
'R2': -5.012843605607331,
}
}


@@ -25,71 +25,82 @@ def test_predict():
def test_evaluate():
instances, models = get_test_pyomo_instances()
clf_zero = Mock(spec=Classifier)
clf_zero.predict_proba = Mock(return_value=np.array([
[0., 1.], # x[0]
[0., 1.], # x[1]
[1., 0.], # x[2]
[1., 0.], # x[3]
]))
clf_zero.predict_proba = Mock(
return_value=np.array(
[
[0.0, 1.0], # x[0]
[0.0, 1.0], # x[1]
[1.0, 0.0], # x[2]
[1.0, 0.0], # x[3]
]
)
)
clf_one = Mock(spec=Classifier)
clf_one.predict_proba = Mock(return_value=np.array([
[1., 0.], # x[0] instances[0]
[1., 0.], # x[1] instances[0]
[0., 1.], # x[2] instances[0]
[1., 0.], # x[3] instances[0]
]))
comp = PrimalSolutionComponent(classifier=[clf_zero, clf_one],
threshold=0.50)
clf_one.predict_proba = Mock(
return_value=np.array(
[
[1.0, 0.0], # x[0] instances[0]
[1.0, 0.0], # x[1] instances[0]
[0.0, 1.0], # x[2] instances[0]
[1.0, 0.0], # x[3] instances[0]
]
)
)
comp = PrimalSolutionComponent(classifier=[clf_zero, clf_one], threshold=0.50)
comp.fit(instances[:1])
assert comp.predict(instances[0]) == {"x": {0: 0,
1: 0,
2: 1,
3: None}}
assert instances[0].solution == {"x": {0: 1,
1: 0,
2: 1,
3: 1}}
assert comp.predict(instances[0]) == {"x": {0: 0, 1: 0, 2: 1, 3: None}}
assert instances[0].solution == {"x": {0: 1, 1: 0, 2: 1, 3: 1}}
ev = comp.evaluate(instances[:1])
assert ev == {'Fix one': {0: {'Accuracy': 0.5,
'Condition negative': 1,
'Condition negative (%)': 25.0,
'Condition positive': 3,
'Condition positive (%)': 75.0,
'F1 score': 0.5,
'False negative': 2,
'False negative (%)': 50.0,
'False positive': 0,
'False positive (%)': 0.0,
'Precision': 1.0,
'Predicted negative': 3,
'Predicted negative (%)': 75.0,
'Predicted positive': 1,
'Predicted positive (%)': 25.0,
'Recall': 0.3333333333333333,
'True negative': 1,
'True negative (%)': 25.0,
'True positive': 1,
'True positive (%)': 25.0}},
'Fix zero': {0: {'Accuracy': 0.75,
'Condition negative': 3,
'Condition negative (%)': 75.0,
'Condition positive': 1,
'Condition positive (%)': 25.0,
'F1 score': 0.6666666666666666,
'False negative': 0,
'False negative (%)': 0.0,
'False positive': 1,
'False positive (%)': 25.0,
'Precision': 0.5,
'Predicted negative': 2,
'Predicted negative (%)': 50.0,
'Predicted positive': 2,
'Predicted positive (%)': 50.0,
'Recall': 1.0,
'True negative': 2,
'True negative (%)': 50.0,
'True positive': 1,
'True positive (%)': 25.0}}}
assert ev == {
"Fix one": {
0: {
"Accuracy": 0.5,
"Condition negative": 1,
"Condition negative (%)": 25.0,
"Condition positive": 3,
"Condition positive (%)": 75.0,
"F1 score": 0.5,
"False negative": 2,
"False negative (%)": 50.0,
"False positive": 0,
"False positive (%)": 0.0,
"Precision": 1.0,
"Predicted negative": 3,
"Predicted negative (%)": 75.0,
"Predicted positive": 1,
"Predicted positive (%)": 25.0,
"Recall": 0.3333333333333333,
"True negative": 1,
"True negative (%)": 25.0,
"True positive": 1,
"True positive (%)": 25.0,
}
},
"Fix zero": {
0: {
"Accuracy": 0.75,
"Condition negative": 3,
"Condition negative (%)": 75.0,
"Condition positive": 1,
"Condition positive (%)": 25.0,
"F1 score": 0.6666666666666666,
"False negative": 0,
"False negative (%)": 0.0,
"False positive": 1,
"False positive (%)": 25.0,
"Precision": 0.5,
"Predicted negative": 2,
"Predicted negative (%)": 50.0,
"Predicted positive": 2,
"Predicted positive (%)": 50.0,
"Recall": 1.0,
"True negative": 2,
"True negative (%)": 50.0,
"True positive": 1,
"True positive (%)": 25.0,
}
},
}
def test_primal_parallel_fit():


@@ -4,10 +4,7 @@
from unittest.mock import Mock, call
from miplearn import (RelaxationComponent,
LearningSolver,
Instance,
InternalSolver)
from miplearn import RelaxationComponent, LearningSolver, Instance, InternalSolver
from miplearn.classifiers import Classifier
@@ -16,41 +13,49 @@ def _setup():
internal = solver.internal_solver = Mock(spec=InternalSolver)
internal.get_constraint_ids = Mock(return_value=["c1", "c2", "c3", "c4"])
internal.get_constraint_slacks = Mock(side_effect=lambda: {
"c1": 0.5,
"c2": 0.0,
"c3": 0.0,
"c4": 1.4,
})
internal.get_constraint_slacks = Mock(
side_effect=lambda: {
"c1": 0.5,
"c2": 0.0,
"c3": 0.0,
"c4": 1.4,
}
)
internal.extract_constraint = Mock(side_effect=lambda cid: "<%s>" % cid)
internal.is_constraint_satisfied = Mock(return_value=False)
instance = Mock(spec=Instance)
instance.get_constraint_features = Mock(side_effect=lambda cid: {
"c2": [1.0, 0.0],
"c3": [0.5, 0.5],
"c4": [1.0],
}[cid])
instance.get_constraint_category = Mock(side_effect=lambda cid: {
"c1": None,
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
}[cid])
instance.get_constraint_features = Mock(
side_effect=lambda cid: {
"c2": [1.0, 0.0],
"c3": [0.5, 0.5],
"c4": [1.0],
}[cid]
)
instance.get_constraint_category = Mock(
side_effect=lambda cid: {
"c1": None,
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
}[cid]
)
classifiers = {
"type-a": Mock(spec=Classifier),
"type-b": Mock(spec=Classifier),
}
classifiers["type-a"].predict_proba = \
Mock(return_value=[
classifiers["type-a"].predict_proba = Mock(
return_value=[
[0.20, 0.80],
[0.05, 0.95],
])
classifiers["type-b"].predict_proba = \
Mock(return_value=[
]
)
classifiers["type-b"].predict_proba = Mock(
return_value=[
[0.02, 0.98],
])
]
)
return solver, internal, instance, classifiers
@@ -72,25 +77,39 @@ def test_usage():
# Should query category and features for each constraint in the model
assert instance.get_constraint_category.call_count == 4
instance.get_constraint_category.assert_has_calls([
call("c1"), call("c2"), call("c3"), call("c4"),
])
instance.get_constraint_category.assert_has_calls(
[
call("c1"),
call("c2"),
call("c3"),
call("c4"),
]
)
# For constraint with non-null categories, should ask for features
assert instance.get_constraint_features.call_count == 3
instance.get_constraint_features.assert_has_calls([
call("c2"), call("c3"), call("c4"),
])
instance.get_constraint_features.assert_has_calls(
[
call("c2"),
call("c3"),
call("c4"),
]
)
# Should ask ML to predict whether constraint should be removed
component.classifiers["type-a"].predict_proba.assert_called_once_with([[1.0, 0.0], [0.5, 0.5]])
component.classifiers["type-a"].predict_proba.assert_called_once_with(
[[1.0, 0.0], [0.5, 0.5]]
)
component.classifiers["type-b"].predict_proba.assert_called_once_with([[1.0]])
# Should ask internal solver to remove constraints predicted as redundant
assert internal.extract_constraint.call_count == 2
internal.extract_constraint.assert_has_calls([
call("c3"), call("c4"),
])
internal.extract_constraint.assert_has_calls(
[
call("c3"),
call("c4"),
]
)
# LearningSolver calls after_solve
component.after_solve(solver, instance, None, None)
@@ -111,8 +130,7 @@ def test_usage():
def test_usage_with_check_dropped():
solver, internal, instance, classifiers = _setup()
component = RelaxationComponent(check_dropped=True,
violation_tolerance=1e-3)
component = RelaxationComponent(check_dropped=True, violation_tolerance=1e-3)
component.classifiers = classifiers
# LearningSolver call before_solve
@@ -120,9 +138,12 @@ def test_usage_with_check_dropped():
# Assert constraints are extracted
assert internal.extract_constraint.call_count == 2
internal.extract_constraint.assert_has_calls([
call("c3"), call("c4"),
])
internal.extract_constraint.assert_has_calls(
[
call("c3"),
call("c4"),
]
)
# LearningSolver calls iteration_cb (first time)
should_repeat = component.iteration_cb(solver, instance, None)
@@ -131,15 +152,15 @@ def test_usage_with_check_dropped():
assert should_repeat
# Should ask solver if removed constraints are satisfied (mock always returns false)
internal.is_constraint_satisfied.assert_has_calls([
call("<c3>", 1e-3),
call("<c4>", 1e-3),
])
internal.is_constraint_satisfied.assert_has_calls(
[
call("<c3>", 1e-3),
call("<c4>", 1e-3),
]
)
# Should add constraints back to LP relaxation
internal.add_constraint.assert_has_calls([
call("<c3>"), call("<c4>")
])
internal.add_constraint.assert_has_calls([call("<c3>"), call("<c4>")])
# LearningSolver calls iteration_cb (second time)
should_repeat = component.iteration_cb(solver, instance, None)
@@ -148,21 +169,22 @@ def test_usage_with_check_dropped():
def test_x_y_fit_predict_evaluate():
instances = [Mock(spec=Instance), Mock(spec=Instance)]
component = RelaxationComponent(slack_tolerance=0.05,
threshold=0.80)
component = RelaxationComponent(slack_tolerance=0.05, threshold=0.80)
component.classifiers = {
"type-a": Mock(spec=Classifier),
"type-b": Mock(spec=Classifier),
}
component.classifiers["type-a"].predict_proba = \
Mock(return_value=[
component.classifiers["type-a"].predict_proba = Mock(
return_value=[
[0.20, 0.80],
])
component.classifiers["type-b"].predict_proba = \
Mock(return_value=[
]
)
component.classifiers["type-b"].predict_proba = Mock(
return_value=[
[0.50, 0.50],
[0.05, 0.95],
])
]
)
# First mock instance
instances[0].slacks = {
@@ -171,17 +193,21 @@ def test_x_y_fit_predict_evaluate():
"c3": 0.00,
"c4": 30.0,
}
instances[0].get_constraint_category = Mock(side_effect=lambda cid: {
"c1": None,
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
}[cid])
instances[0].get_constraint_features = Mock(side_effect=lambda cid: {
"c2": [1.0, 0.0],
"c3": [0.5, 0.5],
"c4": [1.0],
}[cid])
instances[0].get_constraint_category = Mock(
side_effect=lambda cid: {
"c1": None,
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
}[cid]
)
instances[0].get_constraint_features = Mock(
side_effect=lambda cid: {
"c2": [1.0, 0.0],
"c3": [0.5, 0.5],
"c4": [1.0],
}[cid]
)
# Second mock instance
instances[1].slacks = {
@@ -190,26 +216,27 @@ def test_x_y_fit_predict_evaluate():
"c4": 0.00,
"c5": 0.00,
}
instances[1].get_constraint_category = Mock(side_effect=lambda cid: {
"c1": None,
"c3": "type-a",
"c4": "type-b",
"c5": "type-b",
}[cid])
instances[1].get_constraint_features = Mock(side_effect=lambda cid: {
"c3": [0.3, 0.4],
"c4": [0.7],
"c5": [0.8],
}[cid])
instances[1].get_constraint_category = Mock(
side_effect=lambda cid: {
"c1": None,
"c3": "type-a",
"c4": "type-b",
"c5": "type-b",
}[cid]
)
instances[1].get_constraint_features = Mock(
side_effect=lambda cid: {
"c3": [0.3, 0.4],
"c4": [0.7],
"c5": [0.8],
}[cid]
)
expected_x = {
"type-a": [[1.0, 0.0], [0.5, 0.5], [0.3, 0.4]],
"type-b": [[1.0], [0.7], [0.8]],
}
expected_y = {
"type-a": [[0], [0], [1]],
"type-b": [[1], [0], [0]]
}
expected_y = {"type-a": [[0], [0], [1]], "type-b": [[1], [0], [0]]}
# Should build X and Y matrices correctly
assert component.x(instances) == expected_x
@@ -217,13 +244,16 @@ def test_x_y_fit_predict_evaluate():
# Should pass along X and Y matrices to classifiers
component.fit(instances)
component.classifiers["type-a"].fit.assert_called_with(expected_x["type-a"], expected_y["type-a"])
component.classifiers["type-b"].fit.assert_called_with(expected_x["type-b"], expected_y["type-b"])
component.classifiers["type-a"].fit.assert_called_with(
expected_x["type-a"],
expected_y["type-a"],
)
component.classifiers["type-b"].fit.assert_called_with(
expected_x["type-b"],
expected_y["type-b"],
)
assert component.predict(expected_x) == {
"type-a": [[1]],
"type-b": [[0], [1]]
}
assert component.predict(expected_x) == {"type-a": [[1]], "type-b": [[0], [1]]}
ev = component.evaluate(instances[1])
assert ev["True positive"] == 1