diff --git a/miplearn/__init__.py b/miplearn/__init__.py
index b1b6f7c..54801fe 100644
--- a/miplearn/__init__.py
+++ b/miplearn/__init__.py
@@ -20,6 +20,7 @@
 from .components.steps.convert_tight import ConvertTightIneqsIntoEqsStep
 from .components.steps.relax_integrality import RelaxIntegralityStep
 from .components.steps.drop_redundant import DropRedundantInequalitiesStep
+from .classifiers import Classifier, Regressor
 from .classifiers.adaptive import AdaptiveClassifier
 from .classifiers.threshold import MinPrecisionThreshold
diff --git a/miplearn/components/steps/convert_tight.py b/miplearn/components/steps/convert_tight.py
index bfd1bf6..b66fffe 100644
--- a/miplearn/components/steps/convert_tight.py
+++ b/miplearn/components/steps/convert_tight.py
@@ -32,11 +32,15 @@ class ConvertTightIneqsIntoEqsStep(Component):
         classifier=CountingClassifier(),
         threshold=0.95,
         slack_tolerance=0.0,
+        check_converted=False,
     ):
         self.classifiers = {}
         self.classifier_prototype = classifier
         self.threshold = threshold
         self.slack_tolerance = slack_tolerance
+        self.check_converted = check_converted
+        self.converted = []
+        self.original_sense = {}
 
     def before_solve(self, solver, instance, _):
         logger.info("Predicting tight LP constraints...")
@@ -47,14 +51,15 @@ class ConvertTightIneqsIntoEqsStep(Component):
             return_constraints=True,
         )
         y = self.predict(x)
-        n_converted = 0
         for category in y.keys():
             for i in range(len(y[category])):
                 if y[category][i][0] == 1:
                     cid = constraints[category][i]
+                    s = solver.internal_solver.get_constraint_sense(cid)
+                    self.original_sense[cid] = s
                     solver.internal_solver.set_constraint_sense(cid, "=")
-                    n_converted += 1
-        logger.info(f"Converted {n_converted} inequalities into equalities")
+                    self.converted += [cid]
+        logger.info(f"Converted {len(self.converted)} inequalities")
 
     def after_solve(self, solver, instance, model, results):
         instance.slacks = solver.internal_solver.get_inequality_slacks()
@@ -152,3 +157,23 @@ class ConvertTightIneqsIntoEqsStep(Component):
                     else:
                         tn += 1
         return classifier_evaluation_dict(tp, tn, fp, fn)
+
+    def iteration_cb(self, solver, instance, model):
+        if not self.check_converted:
+            return False
+        logger.debug("Checking converted inequalities...")
+        restored = []
+        if solver.internal_solver.is_infeasible():
+            for cid in self.converted:
+                f = solver.internal_solver.get_farkas_dual(cid)
+                if abs(f) > 0:
+                    s = self.original_sense[cid]
+                    solver.internal_solver.set_constraint_sense(cid, s)
+                    restored += [cid]
+            for cid in restored:
+                self.converted.remove(cid)
+            if len(restored) > 0:
+                logger.info(f"Restored {len(restored)} inequalities")
+            return True
+        else:
+            return False
diff --git a/miplearn/components/steps/tests/convert_tight_test.py b/miplearn/components/steps/tests/convert_tight_test.py
index d64bba0..5e06e82 100644
--- a/miplearn/components/steps/tests/convert_tight_test.py
+++ b/miplearn/components/steps/tests/convert_tight_test.py
@@ -1,8 +1,10 @@
-from miplearn import LearningSolver, GurobiSolver
+from miplearn import LearningSolver, GurobiSolver, Instance, Classifier
 from miplearn.components.steps.convert_tight import ConvertTightIneqsIntoEqsStep
 from miplearn.components.steps.relax_integrality import RelaxIntegralityStep
 from miplearn.problems.knapsack import GurobiKnapsackInstance
+
+from unittest.mock import Mock
+
 
 def test_convert_tight_usage():
     instance = GurobiKnapsackInstance(
@@ -32,3 +34,41 @@
 
     # Objective value should be the same
     assert instance.upper_bound == original_upper_bound
+
+
+class TestInstance(Instance):
+    def to_model(self):
+        import gurobipy as grb
+        from gurobipy import GRB
+
+        m = grb.Model("model")
+        x1 = m.addVar(name="x1")
+        x2 = m.addVar(name="x2")
+        m.setObjective(x1 + 2 * x2, grb.GRB.MAXIMIZE)
+        m.addConstr(x1 <= 2, name="c1")
+        m.addConstr(x2 <= 2, name="c2")
+        m.addConstr(x1 + x2 <= 3, name="c3")
+        return m
+
+
+def test_convert_tight_infeasibility():
+    comp = ConvertTightIneqsIntoEqsStep(
+        check_converted=True,
+    )
+    comp.classifiers = {
+        "c1": Mock(spec=Classifier),
+        "c2": Mock(spec=Classifier),
+        "c3": Mock(spec=Classifier),
+    }
+    comp.classifiers["c1"].predict_proba = Mock(return_value=[[0, 1]])
+    comp.classifiers["c2"].predict_proba = Mock(return_value=[[0, 1]])
+    comp.classifiers["c3"].predict_proba = Mock(return_value=[[1, 0]])
+
+    solver = LearningSolver(
+        solver=GurobiSolver(params={}),
+        components=[comp],
+        solve_lp_first=False,
+    )
+    instance = TestInstance()
+    solver.solve(instance)
+    assert instance.lower_bound == 5.0
\ No newline at end of file
diff --git a/miplearn/solvers/gurobi.py b/miplearn/solvers/gurobi.py
index 4aaebd7..67de9e6 100644
--- a/miplearn/solvers/gurobi.py
+++ b/miplearn/solvers/gurobi.py
@@ -33,6 +33,7 @@ class GurobiSolver(InternalSolver):
         """
         if params is None:
             params = {}
+        params["InfUnbdInfo"] = True
 
         from gurobipy import GRB
         self.GRB = GRB
@@ -132,7 +133,6 @@ class GurobiSolver(InternalSolver):
         if iteration_cb is None:
             iteration_cb = lambda: False
         while True:
-            logger.debug("Solving MIP...")
             with RedirectOutput(streams):
                 if lazy_cb is None:
                     self.model.optimize()
@@ -176,6 +176,13 @@ class GurobiSolver(InternalSolver):
         var = self._all_vars[var_name][index]
         return self._get_value(var)
 
+    def is_infeasible(self):
+        return self.model.status in [self.GRB.INFEASIBLE, self.GRB.INF_OR_UNBD]
+
+    def get_farkas_dual(self, cid):
+        c = self.model.getConstrByName(cid)
+        return c.farkasDual
+
     def _get_value(self, var):
         if self.cb_where == self.GRB.Callback.MIPSOL:
             return self.model.cbGetSolution(var)
@@ -280,6 +287,10 @@ class GurobiSolver(InternalSolver):
         c = self.model.getConstrByName(cid)
         c.Sense = sense
 
+    def get_constraint_sense(self, cid):
+        c = self.model.getConstrByName(cid)
+        return c.Sense
+
     def set_constraint_rhs(self, cid, rhs):
         c = self.model.getConstrByName(cid)
         c.RHS = rhs
diff --git a/miplearn/solvers/internal.py b/miplearn/solvers/internal.py
index 334175a..218b969 100644
--- a/miplearn/solvers/internal.py
+++ b/miplearn/solvers/internal.py
@@ -191,6 +191,23 @@ class InternalSolver(ABC):
         """
         pass
 
+    @abstractmethod
+    def is_infeasible(self):
+        """
+        Returns True if the model has been proved to be infeasible.
+        Must be called after solve.
+        """
+        pass
+
+    @abstractmethod
+    def get_farkas_dual(self, cid):
+        """
+        If the model is infeasible, returns a portion of the infeasibility certificate
+        corresponding to the given constraint. If the model is feasible, calling this
+        function raises an error.
+ """ + pass + @abstractmethod def is_constraint_satisfied(self, cobj): pass @@ -199,6 +216,10 @@ class InternalSolver(ABC): def set_constraint_sense(self, cid, sense): pass + @abstractmethod + def get_constraint_sense(self, cid): + pass + @abstractmethod def set_constraint_rhs(self, cid, rhs): pass diff --git a/miplearn/solvers/pyomo/base.py b/miplearn/solvers/pyomo/base.py index eca4903..dda7949 100644 --- a/miplearn/solvers/pyomo/base.py +++ b/miplearn/solvers/pyomo/base.py @@ -258,5 +258,14 @@ class BasePyomoSolver(InternalSolver): def set_constraint_sense(self, cid, sense): raise Exception("Not implemented") + def get_constraint_sense(self, cid): + raise Exception("Not implemented") + def set_constraint_rhs(self, cid, rhs): raise Exception("Not implemented") + + def is_infeasible(self): + raise Exception("Not implemented") + + def get_farkas_dual(self, cid): + raise Exception("Not implemented") \ No newline at end of file