RelaxationComponent: Implement check_dropped

2020-12-04 09:33:46 -06:00
parent 51b5d8e549
commit 54d80bfa85
2 changed files with 105 additions and 23 deletions

View File

@@ -6,36 +6,59 @@ import logging
import sys
from copy import deepcopy
import numpy as np
from miplearn.components import classifier_evaluation_dict
from tqdm import tqdm
from miplearn import Component
from miplearn.classifiers.counting import CountingClassifier
from miplearn.components import classifier_evaluation_dict
from miplearn.components.lazy_static import LazyConstraint
logger = logging.getLogger(__name__)
class RelaxationComponent(Component):
"""
A Component which builds a relaxation of the problem by dropping constraints.
A Component that tries to build a relaxation that is simultaneously strong and easy to solve.
Currently, this component drops all integrality constraints, as well as
all inequality constraints which are not likely binding in the LP relaxation.
In a future version of MIPLearn, this component may decide to keep some
integrality constraints if it determines that they have a small impact on
running time but a large impact on the dual bound.
Currently, this component performs the following operations:
- Drops all integrality constraints
- Drops all inequality constraints that are not likely to be binding.
In future versions of MIPLearn, this component may keep some integrality constraints and perform other operations.
Parameters
----------
classifier : Classifier, optional
Classifier used to predict whether each constraint is binding or not. One deep copy of this classifier
is made for each constraint category.
threshold : float, optional
If the predicted probability that a constraint is loose (non-binding) exceeds this threshold, the constraint
is dropped from the linear relaxation.
slack_tolerance : float, optional
If a constraint has slack greater than this tolerance, the constraint is considered loose. By default,
this tolerance is set to a small positive number to compensate for numerical issues.
check_dropped : bool, optional
If `check_dropped` is true, then, after the problem is solved, the component verifies that all dropped
constraints are still satisfied and re-adds the ones that are not.
violation_tolerance : float, optional
If `check_dropped` is true, a constraint is considered satisfied during the check if its violation is smaller
than this tolerance.
"""
def __init__(self,
classifier=CountingClassifier(),
threshold=0.95,
slack_tolerance=1e-5,
check_dropped=False,
violation_tolerance=1e-5,
):
self.classifiers = {}
self.classifier_prototype = classifier
self.threshold = threshold
self.slack_tolerance = slack_tolerance
self.pool = []
self.check_dropped = check_dropped
self.violation_tolerance = violation_tolerance
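As a rough configuration sketch (not part of this diff; it assumes LearningSolver accepts a components list, which is how other MIPLearn components are attached), the parameters above would be used along these lines:

# Hypothetical usage sketch only; parameter values are illustrative.
from miplearn import LearningSolver, RelaxationComponent

component = RelaxationComponent(
    threshold=0.95,            # drop a constraint when P(loose) >= 0.95
    slack_tolerance=1e-5,      # training label: slack above this marks the constraint as loose
    check_dropped=True,        # after each solve, verify the dropped constraints
    violation_tolerance=1e-3,  # violations below this are ignored during the check
)
solver = LearningSolver(components=[component])  # 'components' keyword is an assumption
# solver.solve(instance) would then relax integrality, drop predicted-loose rows,
# and re-add any dropped constraint found to be violated.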
def before_solve(self, solver, instance, _):
logger.info("Relaxing integrality...")
@@ -47,14 +70,14 @@ class RelaxationComponent(Component):
constraint_ids=cids,
return_constraints=True)
y = self.predict(x)
n_removed = 0
for category in y.keys():
for i in range(len(y[category])):
if y[category][i][0] == 1:
cid = constraints[category][i]
solver.internal_solver.extract_constraint(cid)
n_removed += 1
logger.info("Removed %d predicted redundant LP constraints" % n_removed)
c = LazyConstraint(cid=cid,
obj=solver.internal_solver.extract_constraint(cid))
self.pool += [c]
logger.info("Extracted %d predicted constraints" % len(self.pool))
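The pool built above stores, for each dropped row, its id together with the solver object returned by extract_constraint. A minimal stand-in for the imported LazyConstraint (the real definition lives in miplearn.components.lazy_static and may differ) would be:

# Illustrative stand-in only; the actual class is defined in lazy_static.
from dataclasses import dataclass
from typing import Any

@dataclass
class LazyConstraint:
    cid: str   # constraint identifier, e.g. "c3"
    obj: Any   # solver-specific handle returned by extract_constraint(cid)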
def after_solve(self, solver, instance, model, results):
instance.slacks = solver.internal_solver.get_constraint_slacks()
@@ -148,4 +171,19 @@ class RelaxationComponent(Component):
tn += 1
return classifier_evaluation_dict(tp, tn, fp, fn)
def iteration_cb(self, solver, instance, model):
if not self.check_dropped:
return False
logger.debug("Checking that dropped constraints are satisfied...")
constraints_to_add = []
for c in self.pool:
if not solver.internal_solver.is_constraint_satisfied(c.obj, self.violation_tolerance):
constraints_to_add.append(c)
for c in constraints_to_add:
self.pool.remove(c)
solver.internal_solver.add_constraint(c.obj)
if len(constraints_to_add) > 0:
logger.info("%8d constraints re-added, %8d still in the pool" % (len(constraints_to_add), len(self.pool)))
return True
else:
return False
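For context, iteration_cb returning True asks the solver to re-solve; a sketch of the loop it plugs into, under the assumption that LearningSolver polls every component after each solve (mirroring the lazy constraint mechanism):

# Sketch under stated assumptions; names other than iteration_cb are illustrative.
def solve_with_callbacks(solver, instance, model, components):
    while True:
        solver.internal_solver.solve(model)   # re-solve the current relaxation
        # Ask every component whether it re-added constraints and wants another pass.
        repeats = [c.iteration_cb(solver, instance, model) for c in components]
        if not any(repeats):
            break                             # nothing re-added; accept the solution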

View File

@@ -11,7 +11,7 @@ from miplearn import (RelaxationComponent,
from miplearn.classifiers import Classifier
def test_usage_with_solver():
def _setup():
solver = Mock(spec=LearningSolver)
internal = solver.internal_solver = Mock(spec=InternalSolver)
@@ -22,6 +22,8 @@ def test_usage_with_solver():
"c3": 0.0,
"c4": 1.4,
})
internal.extract_constraint = Mock(side_effect=lambda cid: "<%s>" % cid)
internal.is_constraint_satisfied = Mock(return_value=False)
instance = Mock(spec=Instance)
instance.get_constraint_features = Mock(side_effect=lambda cid: {
@@ -36,21 +38,29 @@ def test_usage_with_solver():
"c4": "type-b",
}[cid])
component = RelaxationComponent()
component.classifiers = {
classifiers = {
"type-a": Mock(spec=Classifier),
"type-b": Mock(spec=Classifier),
}
component.classifiers["type-a"].predict_proba = \
classifiers["type-a"].predict_proba = \
Mock(return_value=[
[0.20, 0.80],
[0.05, 0.95],
])
component.classifiers["type-b"].predict_proba = \
classifiers["type-b"].predict_proba = \
Mock(return_value=[
[0.02, 0.98],
])
return solver, internal, instance, classifiers
def test_usage():
solver, internal, instance, classifiers = _setup()
component = RelaxationComponent()
component.classifiers = classifiers
# LearningSolver calls before_solve
component.before_solve(solver, instance, None)
@@ -98,6 +108,44 @@ def test_usage_with_solver():
}
def test_usage_with_check_dropped():
solver, internal, instance, classifiers = _setup()
component = RelaxationComponent(check_dropped=True,
violation_tolerance=1e-3)
component.classifiers = classifiers
# LearningSolver calls before_solve
component.before_solve(solver, instance, None)
# Assert constraints are extracted
assert internal.extract_constraint.call_count == 2
internal.extract_constraint.assert_has_calls([
call("c3"), call("c4"),
])
# LearningSolver calls iteration_cb (first time)
should_repeat = component.iteration_cb(solver, instance, None)
# Should ask LearningSolver to repeat
assert should_repeat
# Should ask solver if removed constraints are satisfied (mock always returns false)
internal.is_constraint_satisfied.assert_has_calls([
call("<c3>", 1e-3),
call("<c4>", 1e-3),
])
# Should add constraints back to LP relaxation
internal.add_constraint.assert_has_calls([
call("<c3>"), call("<c4>")
])
# LearningSolver calls iteration_cb (second time)
should_repeat = component.iteration_cb(solver, instance, None)
assert not should_repeat
def test_x_y_fit_predict_evaluate():
instances = [Mock(spec=Instance), Mock(spec=Instance)]
component = RelaxationComponent(slack_tolerance=0.05,
@@ -182,7 +230,3 @@ def test_x_y_fit_predict_evaluate():
assert ev["True negative"] == 1
assert ev["False positive"] == 1
assert ev["False negative"] == 0
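For reference, the four counts asserted above are exactly the inputs to classifier_evaluation_dict used by evaluate earlier in this diff; the standard ratios derived from them can be written as a small helper (the helper is illustrative and not part of the miplearn API):

# Illustrative helper; classifier_evaluation_dict may report different keys.
def summarize(tp, tn, fp, fn):
    total = tp + tn + fp + fn
    return {
        "Precision": tp / (tp + fp) if (tp + fp) else 0.0,  # correct drops / predicted drops
        "Recall": tp / (tp + fn) if (tp + fn) else 0.0,     # correct drops / actually-loose rows
        "Accuracy": (tp + tn) / total if total else 0.0,
    }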