ConvertTight: Always check feasibility

master
Alinson S. Xavier 5 years ago
parent d67af4a26b
commit b01d97cc2b

@@ -20,8 +20,10 @@ class ConvertTightIneqsIntoEqsStep(Component):
"""
Component that predicts which inequality constraints are likely to be binding in
the LP relaxation of the problem and converts them into equality constraints.
Optionally double checks that the conversion process did not affect feasibility
or optimality of the problem.
This component always makes sure that the conversion process does not affect the
feasibility of the problem. It can also, optionally, make sure that it does not affect
the optimality, but this may be expensive.
This component does not work on MIPs. All integrality constraints must be relaxed
before this component is used.
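
For context, a minimal usage sketch of the updated component. LearningSolver, GurobiSolver, ConvertTightIneqsIntoEqsStep, fit, solve and the two stats keys all appear in the tests in this commit; the import path, the instance variables and the inline comments are assumptions.

from miplearn import LearningSolver, GurobiSolver, ConvertTightIneqsIntoEqsStep  # assumed import path

comp = ConvertTightIneqsIntoEqsStep(
    threshold=0.95,         # convert only when the classifier is at least this confident
    slack_tolerance=0.0,    # assumption: how much slack still counts as "tight" when building training labels
    check_optimality=True,  # optional and possibly expensive; feasibility is always checked
)
solver = LearningSolver(
    solver=GurobiSolver(params={}),
    components=[comp],
    solve_lp_first=False,   # as in the tests in this commit
)
solver.fit([train_instance])    # train_instance: a previously solved Instance (placeholder name)
stats = solver.solve(instance)  # instance: the Instance to solve (placeholder name)
print(stats["ConvertTight: Inf iterations"], stats["ConvertTight: Subopt iterations"])
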
@@ -32,13 +34,13 @@ class ConvertTightIneqsIntoEqsStep(Component):
classifier=CountingClassifier(),
threshold=0.95,
slack_tolerance=0.0,
check_converted=False,
check_optimality=False,
):
self.classifiers = {}
self.classifier_prototype = classifier
self.threshold = threshold
self.slack_tolerance = slack_tolerance
self.check_converted = check_converted
self.check_optimality = check_optimality
self.converted = []
self.original_sense = {}
@@ -66,8 +68,10 @@ class ConvertTightIneqsIntoEqsStep(Component):
solver.internal_solver.set_constraint_sense(cid, "=")
self.converted += [cid]
self.n_converted += 1
print(cid)
else:
self.n_kept += 1
logger.info(f"Converted {self.n_converted} inequalities")
def after_solve(self, solver, instance, model, results):
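
The counters updated in this hunk summarize a per-constraint decision made before solving. A sketch of that decision with a hypothetical helper; the indexing of predict_proba follows the mocked classifiers in the tests below, where the second probability is treated as "will be tight", and slack_tolerance is presumed to matter only when training labels are built, not here.

def should_convert(classifier, features, threshold):
    # Hypothetical helper, not the component's actual method.
    # predict_proba([x]) is assumed to return [[P(not tight), P(tight)]] for one sample.
    p_tight = classifier.predict_proba([features])[0][1]
    return p_tight >= threshold

# Constraints passing this test get
#     solver.internal_solver.set_constraint_sense(cid, "=")
# and are counted in n_converted; the remaining ones are counted in n_kept.
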
@@ -173,10 +177,6 @@ class ConvertTightIneqsIntoEqsStep(Component):
return classifier_evaluation_dict(tp, tn, fp, fn)
def iteration_cb(self, solver, instance, model):
if not self.check_converted:
return False
logger.debug("Checking converted inequalities...")
is_infeasible, is_suboptimal = False, False
restored = []
@@ -206,7 +206,7 @@ class ConvertTightIneqsIntoEqsStep(Component):
if abs(pi) > 0:
is_infeasible = True
restore(cid)
else:
elif self.check_optimality:
for cid in self.converted:
pi = solver.internal_solver.get_dual(cid)
csense = self.original_sense[cid]
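
With this change, any nonzero dual on a converted row in the infeasible case always triggers a restore, while the optimality check becomes opt-in via check_optimality. A hedged sketch of the sign test the new elif branch performs; constraint senses are assumed to be encoded as "<" / ">" (matching the "=" used earlier in the diff), and the exact dual sign convention depends on the solver and objective sense, so this mirrors the shape of the loop rather than its verbatim body.

def conversion_looks_suboptimal(pi, original_sense, tol=1e-8):
    # Hypothetical helper. A "<=" row converted to "=" should only carry a dual of the
    # sign the original inequality could have produced; the opposite sign means the
    # equality binds in a direction the inequality never allowed, i.e. the conversion
    # may be cutting off a better solution.
    if original_sense == "<":
        return pi < -tol
    if original_sense == ">":
        return pi > tol
    return False
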

@@ -32,7 +32,7 @@ class DropRedundantInequalitiesStep(Component):
classifier=CountingClassifier(),
threshold=0.95,
slack_tolerance=1e-5,
check_dropped=False,
check_feasibility=False,
violation_tolerance=1e-5,
max_iterations=3,
):
@@ -41,7 +41,7 @@ class DropRedundantInequalitiesStep(Component):
self.threshold = threshold
self.slack_tolerance = slack_tolerance
self.pool = []
self.check_dropped = check_dropped
self.check_feasibility = check_feasibility
self.violation_tolerance = violation_tolerance
self.max_iterations = max_iterations
self.current_iteration = 0
@@ -175,7 +175,7 @@ class DropRedundantInequalitiesStep(Component):
return classifier_evaluation_dict(tp, tn, fp, fn)
def iteration_cb(self, solver, instance, model):
if not self.check_dropped:
if not self.check_feasibility:
return False
if self.current_iteration >= self.max_iterations:
return False
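
For the drop step, the rename to check_feasibility describes what the guard actually does: after each solve, dropped inequalities are re-checked against the incumbent and any violated ones are re-added, for at most max_iterations extra solves. A rough sketch under assumed internal-solver method names (is_constraint_satisfied and add_constraint are assumptions here, as is the shape of the pool entries):

def restore_violated(solver, pool, violation_tolerance):
    # Hypothetical helper mirroring the guard in iteration_cb above.
    restored = []
    for constraint in pool:
        if not solver.internal_solver.is_constraint_satisfied(     # assumed API
            constraint.obj, tol=violation_tolerance
        ):
            solver.internal_solver.add_constraint(constraint.obj)  # assumed API
            restored.append(constraint)
    for constraint in restored:
        pool.remove(constraint)
    return len(restored) > 0  # True asks LearningSolver for another iteration
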

@@ -30,10 +30,12 @@ def test_convert_tight_usage():
# Fit and resolve
solver.fit([instance])
solver.solve(instance)
stats = solver.solve(instance)
# Objective value should be the same
assert instance.upper_bound == original_upper_bound
assert stats["ConvertTight: Inf iterations"] == 0
assert stats["ConvertTight: Subopt iterations"] == 0
class TestInstance(Instance):
@@ -51,6 +53,21 @@ class TestInstance(Instance):
return m
class TestInstanceMin(Instance):
def to_model(self):
import gurobipy as grb
from gurobipy import GRB
m = grb.Model("model")
x1 = m.addVar(name="x1")
x2 = m.addVar(name="x2")
m.setObjective(x1 + 2 * x2, grb.GRB.MAXIMIZE)
m.addConstr(x1 <= 2, name="c1")
m.addConstr(x2 <= 2, name="c2")
m.addConstr(x1 + x2 <= 3, name="c3")
return m
def test_convert_tight_infeasibility():
comp = ConvertTightIneqsIntoEqsStep(
check_converted=True,
@@ -70,8 +87,10 @@ def test_convert_tight_infeasibility():
solve_lp_first=False,
)
instance = TestInstance()
solver.solve(instance)
stats = solver.solve(instance)
assert instance.lower_bound == 5.0
assert stats["ConvertTight: Inf iterations"] == 1
assert stats["ConvertTight: Subopt iterations"] == 0
def test_convert_tight_suboptimality():
@@ -93,5 +112,32 @@ def test_convert_tight_suboptimality():
solve_lp_first=False,
)
instance = TestInstance()
solver.solve(instance)
stats = solver.solve(instance)
assert instance.lower_bound == 5.0
assert stats["ConvertTight: Inf iterations"] == 0
assert stats["ConvertTight: Subopt iterations"] == 1
def test_convert_tight_optimal():
comp = ConvertTightIneqsIntoEqsStep(
check_converted=True,
)
comp.classifiers = {
"c1": Mock(spec=Classifier),
"c2": Mock(spec=Classifier),
"c3": Mock(spec=Classifier),
}
comp.classifiers["c1"].predict_proba = Mock(return_value=[[1, 0]])
comp.classifiers["c2"].predict_proba = Mock(return_value=[[0, 1]])
comp.classifiers["c3"].predict_proba = Mock(return_value=[[0, 1]])
solver = LearningSolver(
solver=GurobiSolver(params={}),
components=[comp],
solve_lp_first=False,
)
instance = TestInstance()
stats = solver.solve(instance)
assert instance.lower_bound == 5.0
assert stats["ConvertTight: Inf iterations"] == 0
assert stats["ConvertTight: Subopt iterations"] == 0
