RelaxationComponent: Convert tight inequalities into equalities

Branch: master
Alinson S. Xavier, 5 years ago
parent 5b5f4b7671
commit 4a26de5ff1

@@ -3,9 +3,6 @@
# Released under the modified BSD license. See COPYING.md for more details.
import logging
import sys
import numpy as np
from copy import deepcopy
from tqdm import tqdm
@@ -27,19 +24,28 @@ class RelaxationComponent(Component):
    Currently, this component performs the following operations:
        - Drops all integrality constraints
-        - Drops all inequality constraints that are not likely to be binding.
+        - Drops all inequality constraints that are likely redundant, and optionally
+          double-checks that all dropped constraints are actually satisfied.
+        - Converts inequalities that are likely binding into equalities, and
+          double-checks that all resulting equalities have zero marginal costs.

    In future versions of MIPLearn, this component may keep some integrality constraints
    and perform other operations.

    Parameters
    ----------
-    classifier : Classifier, optional
-        Classifier used to predict whether each constraint is binding or not. One deep
+    redundant_classifier : Classifier, optional
+        Classifier used to predict if a constraint is likely redundant. One deep
        copy of this classifier is made for each constraint category.
-    threshold : float, optional
-        If the probability that a constraint is binding exceeds this threshold, the
+    redundant_threshold : float, optional
+        If the probability that a constraint is redundant exceeds this threshold, the
        constraint is dropped from the linear relaxation.
+    tight_classifier : Classifier, optional
+        Classifier used to predict if a constraint is likely to be tight. One deep
+        copy of this classifier is made for each constraint category.
+    tight_threshold : float, optional
+        If the probability that a constraint is tight exceeds this threshold, the
+        constraint is converted into an equality constraint.
    slack_tolerance : float, optional
        If a constraint has slack greater than this threshold, then the constraint is
        considered loose. By default, this threshold equals a small positive number to
@@ -52,30 +58,37 @@ class RelaxationComponent(Component):
    violation_tolerance : float, optional
        If `check_dropped` is true, a constraint is considered satisfied during the
        check if its violation is smaller than this tolerance.
-    max_iterations : int
+    max_check_iterations : int
        If `check_dropped` is true, set the maximum number of iterations in the lazy
        constraint loop.
    """

    def __init__(
        self,
-        classifier=CountingClassifier(),
-        threshold=0.95,
+        redundant_classifier=CountingClassifier(),
+        redundant_threshold=0.95,
+        tight_classifier=CountingClassifier(),
+        tight_threshold=0.95,
        slack_tolerance=1e-5,
        check_dropped=False,
        violation_tolerance=1e-5,
-        max_iterations=3,
+        max_check_iterations=3,
    ):
        self.steps = [
            RelaxIntegralityStep(),
            DropRedundantInequalitiesStep(
-                classifier=classifier,
-                threshold=threshold,
+                classifier=redundant_classifier,
+                threshold=redundant_threshold,
                slack_tolerance=slack_tolerance,
                violation_tolerance=violation_tolerance,
-                max_iterations=max_iterations,
+                max_iterations=max_check_iterations,
                check_dropped=check_dropped,
            ),
+            ConvertTightIneqsIntoEqsStep(
+                classifier=tight_classifier,
+                threshold=tight_threshold,
+                slack_tolerance=slack_tolerance,
+            ),
        ]
        self.composite = CompositeComponent(self.steps)
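
For context, a rough usage sketch of the renamed parameters (an editor's illustration, not part of the commit): it assumes the LearningSolver interface used elsewhere in MIPLearn, and the instance variables are placeholders.

# Editor's sketch, not part of this commit. Import paths and the
# LearningSolver API are assumed from the surrounding project;
# `training_instances` and `test_instance` are placeholders.
from miplearn import LearningSolver

component = RelaxationComponent(
    redundant_threshold=0.99,  # drop a constraint only when very likely redundant
    tight_threshold=0.99,      # convert to equality only when very likely tight
    check_dropped=True,        # re-add any dropped constraint found to be violated
)
solver = LearningSolver(components=[component])
solver.fit(training_instances)  # learn from previously solved instances
solver.solve(test_instance)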
@@ -257,3 +270,126 @@ class DropRedundantInequalitiesStep(Component):
            return True
        else:
            return False


class ConvertTightIneqsIntoEqsStep(Component):
    def __init__(
        self,
        classifier=CountingClassifier(),
        threshold=0.95,
        slack_tolerance=1e-5,
    ):
        self.classifiers = {}
        self.classifier_prototype = classifier
        self.threshold = threshold
        self.slack_tolerance = slack_tolerance

    def before_solve(self, solver, instance, _):
        logger.info("Predicting tight LP constraints...")
        cids = solver.internal_solver.get_constraint_ids()
        x, constraints = self.x(
            [instance],
            constraint_ids=cids,
            return_constraints=True,
        )
        y = self.predict(x)

        # Convert each constraint predicted to be tight into an equality
        n_converted = 0
        for category in y.keys():
            for i in range(len(y[category])):
                if y[category][i][0] == 1:
                    cid = constraints[category][i]
                    solver.internal_solver.set_constraint_sense(cid, "=")
                    n_converted += 1
        logger.info(f"Converted {n_converted} inequalities into equalities")

    def after_solve(self, solver, instance, model, results):
        instance.slacks = solver.internal_solver.get_constraint_slacks()

    def fit(self, training_instances):
        logger.debug("Extracting x and y...")
        x = self.x(training_instances)
        y = self.y(training_instances)

        logger.debug("Fitting...")
        for category in tqdm(x.keys(), desc="Fit (rlx:conv_ineqs)"):
            if category not in self.classifiers:
                self.classifiers[category] = deepcopy(self.classifier_prototype)
            self.classifiers[category].fit(x[category], y[category])

    def x(self, instances, constraint_ids=None, return_constraints=False):
        x = {}
        constraints = {}
        for instance in tqdm(
            InstanceIterator(instances),
            desc="Extract (rlx:conv_ineqs:x)",
            disable=len(instances) < 5,
        ):
            if constraint_ids is not None:
                cids = constraint_ids
            else:
                cids = instance.slacks.keys()
            for cid in cids:
                category = instance.get_constraint_category(cid)
                if category is None:
                    continue
                if category not in x:
                    x[category] = []
                    constraints[category] = []
                x[category] += [instance.get_constraint_features(cid)]
                constraints[category] += [cid]
        if return_constraints:
            return x, constraints
        else:
            return x

    def y(self, instances):
        y = {}
        for instance in tqdm(
            InstanceIterator(instances),
            desc="Extract (rlx:conv_ineqs:y)",
            disable=len(instances) < 5,
        ):
            for (cid, slack) in instance.slacks.items():
                category = instance.get_constraint_category(cid)
                if category is None:
                    continue
                if category not in y:
                    y[category] = []
                # Label [1] if the constraint was tight (zero slack, up to
                # tolerance) in the training solution, [0] otherwise
                if slack <= self.slack_tolerance:
                    y[category] += [[1]]
                else:
                    y[category] += [[0]]
        return y

    def predict(self, x):
        y = {}
        for (category, x_cat) in x.items():
            if category not in self.classifiers:
                continue
            y[category] = []
            # x_cat = np.array(x_cat)
            proba = self.classifiers[category].predict_proba(x_cat)
            for i in range(len(proba)):
                # Predict conversion only when the probability of being tight
                # reaches the threshold
                if proba[i][1] >= self.threshold:
                    y[category] += [[1]]
                else:
                    y[category] += [[0]]
        return y

    def evaluate(self, instance):
        x = self.x([instance])
        y_true = self.y([instance])
        y_pred = self.predict(x)
        tp, tn, fp, fn = 0, 0, 0, 0
        for category in y_true.keys():
            for i in range(len(y_true[category])):
                if y_pred[category][i][0] == 1:
                    if y_true[category][i][0] == 1:
                        tp += 1
                    else:
                        fp += 1
                else:
                    if y_true[category][i][0] == 1:
                        fn += 1
                    else:
                        tn += 1
        return classifier_evaluation_dict(tp, tn, fp, fn)
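
To make the label convention concrete: y() marks a training constraint [1] when its slack is within slack_tolerance, and predict() flags a constraint for conversion when the predicted probability of that label reaches the threshold. Below is a small self-contained sketch of the same logic (editor's illustration, not part of the commit; constraint ids and probabilities are made up).

# Editor's illustration of the y()/predict() convention above (not part of
# the commit). Labels are wrapped in single-element lists, matching the code.
slack_tolerance = 1e-5
threshold = 0.95

# Training side: slacks observed in a solved instance -> labels
slacks = {"c1": 0.0, "c2": 3.7, "c3": 1e-7}
labels = {cid: [1] if s <= slack_tolerance else [0] for cid, s in slacks.items()}
# labels == {"c1": [1], "c2": [0], "c3": [1]}

# Solve side: predict_proba yields one [p_not_tight, p_tight] row per
# constraint; only rows with p_tight >= threshold are converted to equalities.
proba = [[0.02, 0.98], [0.60, 0.40], [0.01, 0.99]]
convert = [[1] if p[1] >= threshold else [0] for p in proba]
# convert == [[1], [0], [1]]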
