Small fixes to lazy constraints
@@ -28,6 +28,7 @@ class StaticLazyConstraintsComponent(Component):
         self.pool = []
 
     def before_solve(self, solver, instance, model):
+        self.pool = []
         instance.found_violated_lazy_constraints = []
         if instance.has_static_lazy_constraints():
             self._extract_and_predict_static(solver, instance)
@@ -36,21 +37,28 @@ class StaticLazyConstraintsComponent(Component):
         pass
 
     def after_iteration(self, solver, instance, model):
-        logger.debug("Finding violated (static) lazy constraints...")
-        n_added = 0
+        logger.info("Finding violated lazy constraints...")
+        constraints_to_add = []
         for c in self.pool:
             if not solver.internal_solver.is_constraint_satisfied(c.obj):
-                self.pool.remove(c)
-                solver.internal_solver.add_constraint(c.obj)
-                instance.found_violated_lazy_constraints += [c.cid]
-                n_added += 1
-        if n_added > 0:
-            logger.debug(" %d violations found" % n_added)
+                constraints_to_add.append(c)
+        for c in constraints_to_add:
+            self.pool.remove(c)
+            solver.internal_solver.add_constraint(c.obj)
+            instance.found_violated_lazy_constraints += [c.cid]
+        if len(constraints_to_add) > 0:
+            logger.info("Added %d lazy constraints back into the model" % len(constraints_to_add))
+            logger.info("Lazy constraint pool has %d constraints" % len(self.pool))
             return True
         else:
+            logger.info("Found no violated lazy constraints")
             return False
 
     def fit(self, training_instances):
+        training_instances = [t
+                              for t in training_instances
+                              if hasattr(t, "found_violated_lazy_constraints")]
+
         logger.debug("Extracting x and y...")
         x = self.x(training_instances)
         y = self.y(training_instances)
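Note: the switch to constraints_to_add avoids removing items from self.pool while iterating over it; mutating a list during iteration makes Python's list iterator skip elements, so some violated constraints could be missed. A minimal standalone sketch of the difference (the names below are illustrative, not MIPLearn API):

pool = ["c1", "c2", "c3", "c4"]

# Old pattern: mutate the list while iterating over it.
visited = []
for c in pool:
    visited.append(c)
    pool.remove(c)
print(visited)   # ['c1', 'c3']  -- 'c2' and 'c4' were never visited
print(pool)      # ['c2', 'c4']  -- they silently stay behind

# New pattern: collect first, then mutate in a second pass.
pool = ["c1", "c2", "c3", "c4"]
to_add = [c for c in pool]       # in the component: the violated constraints
for c in to_add:
    pool.remove(c)
print(pool)      # []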
@@ -72,11 +80,10 @@ class StaticLazyConstraintsComponent(Component):
     def _extract_and_predict_static(self, solver, instance):
         x = {}
         constraints = {}
-        for cid in solver.internal_solver.get_constraint_names():
+        logger.info("Extracting lazy constraints...")
+        for cid in solver.internal_solver.get_constraint_ids():
             if instance.is_constraint_lazy(cid):
                 category = instance.get_lazy_constraint_category(cid)
-                if category not in self.classifiers:
-                    continue
                 if category not in x:
                     x[category] = []
                     constraints[category] = []
@@ -85,16 +92,24 @@ class StaticLazyConstraintsComponent(Component):
                                    obj=solver.internal_solver.extract_constraint(cid))
                 constraints[category] += [c]
                 self.pool.append(c)
+        logger.info("Extracted %d lazy constraints" % len(self.pool))
+        logger.info("Predicting required lazy constraints...")
+        n_added = 0
         for (category, x_values) in x.items():
+            if category not in self.classifiers:
+                continue
             if isinstance(x_values[0], np.ndarray):
                 x[category] = np.array(x_values)
             proba = self.classifiers[category].predict_proba(x[category])
             for i in range(len(proba)):
                 if proba[i][1] > self.threshold:
+                    n_added += 1
                     c = constraints[category][i]
                     self.pool.remove(c)
                     solver.internal_solver.add_constraint(c.obj)
                     instance.found_violated_lazy_constraints += [c.cid]
+        logger.info("Added %d lazy constraints back into the model" % n_added)
+        logger.info("Lazy constraint pool has %d constraints" % len(self.pool))
 
     def _collect_constraints(self, train_instances):
         constraints = {}
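Note: the prediction step above keeps only the constraints whose predicted probability of being necessary exceeds self.threshold (column 1 of predict_proba, i.e. the positive class). A minimal sketch of that selection with a scikit-learn-style classifier; the features, labels and the 0.5 threshold below are made up for illustration:

import numpy as np
from sklearn.dummy import DummyClassifier

# Toy training data: label 1 means "this lazy constraint ended up being needed".
x_train = np.array([[0.0], [1.0], [2.0], [3.0]])
y_train = np.array([0, 1, 1, 1])
clf = DummyClassifier(strategy="prior").fit(x_train, y_train)

threshold = 0.50
x_new = np.array([[0.5], [2.5]])
proba = clf.predict_proba(x_new)   # shape (n_samples, 2); column 1 = P(class 1)
selected = [i for i in range(len(proba)) if proba[i][1] > threshold]
print(selected)                    # [0, 1] here, since the class-1 prior is 0.75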
@@ -14,7 +14,7 @@ from miplearn.classifiers import Classifier
 def test_usage_with_solver():
     solver = Mock(spec=LearningSolver)
     internal = solver.internal_solver = Mock(spec=InternalSolver)
-    internal.get_constraint_names = Mock(return_value=["c1", "c2", "c3", "c4"])
+    internal.get_constraint_ids = Mock(return_value=["c1", "c2", "c3", "c4"])
     internal.extract_constraint = Mock(side_effect=lambda cid: "<%s>" % cid)
     internal.is_constraint_satisfied = Mock(return_value=False)
 
@@ -59,7 +59,7 @@ def test_usage_with_solver():
     instance.has_static_lazy_constraints.assert_called_once()
 
     # Should ask internal solver for a list of constraints in the model
-    internal.get_constraint_names.assert_called_once()
+    internal.get_constraint_ids.assert_called_once()
 
     # Should ask if each constraint in the model is lazy
     instance.is_constraint_lazy.assert_has_calls([
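Note: because the test doubles are built with Mock(spec=InternalSolver), the rename is enforced in the tests as well; a spec'd mock raises AttributeError when an attribute that does not exist on the spec class is accessed. Standalone illustration (the InternalSolver stub below is simplified):

from unittest.mock import Mock

class InternalSolver:
    def get_constraint_ids(self):
        raise NotImplementedError

internal = Mock(spec=InternalSolver)
internal.get_constraint_ids.return_value = ["c1", "c2"]
print(internal.get_constraint_ids())   # ['c1', 'c2']

try:
    internal.get_constraint_names()    # old name, no longer on the spec
except AttributeError as exc:
    print("rejected:", exc)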
@@ -15,10 +15,11 @@ logger = logging.getLogger(__name__)
 class GurobiSolver(InternalSolver):
     def __init__(self, params=None):
         if params is None:
-            params = {
-                "LazyConstraints": 1,
-                "PreCrush": 1,
-            }
+            params = {}
+            # params = {
+            #     "LazyConstraints": 1,
+            #     "PreCrush": 1,
+            # }
         from gurobipy import GRB
         self.GRB = GRB
         self.instance = None
@@ -83,6 +84,7 @@ class GurobiSolver(InternalSolver):
         }
 
     def solve(self, tee=False, iteration_cb=None):
+        self._apply_params()
         total_wallclock_time = 0
         total_nodes = 0
         streams = [StringIO()]
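Note: the body of _apply_params is not part of this diff; the sketch below only illustrates the deferred-parameter pattern that the new call implies (store the params dict in __init__, apply it when solve() runs). The class and attribute names are hypothetical, and with gurobipy the application step would forward to something like model.setParam(name, value):

class SolverSketch:
    """Hypothetical stand-in; not the real GurobiSolver."""

    def __init__(self, params=None):
        self.params = params if params is not None else {}   # stored, not applied
        self.applied = {}

    def _apply_params(self):
        for (name, value) in self.params.items():
            # real code would forward to the underlying solver here
            self.applied[name] = value

    def solve(self):
        self._apply_params()          # parameters take effect at solve time
        return self.applied

print(SolverSketch({"TimeLimit": 60}).solve())   # {'TimeLimit': 60}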
@@ -122,7 +124,7 @@ class GurobiSolver(InternalSolver):
     def get_variables(self):
         variables = {}
         for (varname, vardict) in self._all_vars.items():
-            variables[varname] = {}
+            variables[varname] = []
             for (idx, var) in vardict.items():
                 variables[varname] += [idx]
         return variables
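Note: the one-character change above ({} to []) is a real fix, because += with a list on the right-hand side works for a list but raises TypeError for a dict. Standalone illustration:

variables = {}

variables["x"] = []        # new code path
variables["x"] += [0]
variables["x"] += [1]
print(variables)           # {'x': [0, 1]}

variables["y"] = {}        # old code path
try:
    variables["y"] += [0]
except TypeError as exc:
    print("TypeError:", exc)   # dict does not support += with a list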
@@ -161,7 +163,7 @@ class GurobiSolver(InternalSolver):
         var.lb = value
         var.ub = value
 
-    def get_constraints_ids(self):
+    def get_constraint_ids(self):
         self.model.update()
         return [c.ConstrName for c in self.model.getConstrs()]
 
@@ -145,7 +145,7 @@ class InternalSolver(ABC):
         pass
 
     @abstractmethod
-    def get_constraints_ids(self):
+    def get_constraint_ids(self):
         """
         Returns a list of ids, which uniquely identify each constraint in the model.
         """
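Note: since get_constraint_ids is declared with @abstractmethod, the rename has to be applied to every concrete solver in the same commit (GurobiSolver above, BasePyomoSolver below); a subclass that only defines the old spelling can no longer be instantiated. Minimal standalone illustration:

from abc import ABC, abstractmethod

class InternalSolver(ABC):
    @abstractmethod
    def get_constraint_ids(self):
        pass

class OldSolver(InternalSolver):
    def get_constraints_ids(self):   # old spelling only
        return []

try:
    OldSolver()
except TypeError as exc:
    print(exc)   # can't instantiate abstract class OldSolver ...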
@@ -40,6 +40,14 @@ class LearningSolver:
     Mixed-Integer Linear Programming (MIP) solver that extracts information
     from previous runs, using Machine Learning methods, to accelerate the
     solution of new (yet unseen) instances.
+
+    Parameters
+    ----------
+    solve_lp_first: bool
+        If true, solve LP relaxation first, then solve original MILP. This
+        option should be activated if the LP relaxation is not very
+        expensive to solve and if it provides good hints for the integer
+        solution.
     """
 
     def __init__(self,
@@ -49,7 +57,8 @@ class LearningSolver:
                  solver="gurobi",
                  threads=None,
                  time_limit=None,
-                 node_limit=None):
+                 node_limit=None,
+                 solve_lp_first=True):
        self.components = {}
        self.mode = mode
        self.internal_solver = None
@@ -59,6 +68,7 @@ class LearningSolver:
         self.gap_tolerance = gap_tolerance
         self.tee = False
         self.node_limit = node_limit
+        self.solve_lp_first = solve_lp_first
 
         if components is not None:
             for comp in components:
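Note: with this change, solve_lp_first is configured once on the solver object instead of being passed to every solve() call (the old keyword is removed from solve() further down). A usage sketch, assuming the usual "from miplearn import LearningSolver" import path and an already-built instance:

from miplearn import LearningSolver

# Old call style (removed below): solver.solve(instance, solve_lp_first=False)
solver = LearningSolver(solve_lp_first=False)
# solver.solve(instance)   # instance construction omitted in this sketch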
@@ -84,21 +94,23 @@ class LearningSolver:
         else:
             solver = self.internal_solver_factory
         if self.threads is not None:
+            logger.info("Setting threads to %d" % self.threads)
             solver.set_threads(self.threads)
         if self.time_limit is not None:
+            logger.info("Setting time limit to %f" % self.time_limit)
             solver.set_time_limit(self.time_limit)
         if self.gap_tolerance is not None:
+            logger.info("Setting gap tolerance to %f" % self.gap_tolerance)
             solver.set_gap_tolerance(self.gap_tolerance)
         if self.node_limit is not None:
+            logger.info("Setting node limit to %d" % self.node_limit)
             solver.set_node_limit(self.node_limit)
         return solver
 
     def solve(self,
               instance,
               model=None,
-              tee=False,
-              relaxation_only=False,
-              solve_lp_first=True):
+              tee=False):
         """
         Solves the given instance. If trained machine-learning models are
         available, they will be used to accelerate the solution process.
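Note: several messages in this commit move from logger.debug to logger.info, and new info-level messages are added. They only appear if the application configures logging; since the modules use logging.getLogger(__name__), enabling INFO on the miplearn package logger (or on the root logger) is enough. Sketch:

import logging

logging.basicConfig(format="%(levelname)-7s %(name)s: %(message)s")
logging.getLogger("miplearn").setLevel(logging.INFO)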
@@ -127,13 +139,6 @@ class LearningSolver:
             The corresponding Pyomo model. If not provided, it will be created.
         tee: bool
             If true, prints solver log to screen.
-        relaxation_only: bool
-            If true, solve only the root LP relaxation.
-        solve_lp_first: bool
-            If true, solve LP relaxation first, then solve original MILP. This
-            option should be activated if the LP relaxation is not very
-            expensive to solve and if it provides good hints for the integer
-            solution.
 
         Returns
         -------
@@ -155,7 +160,7 @@ class LearningSolver:
         self.internal_solver = self._create_internal_solver()
         self.internal_solver.set_instance(instance, model)
 
-        if solve_lp_first:
+        if self.solve_lp_first:
             logger.info("Solving LP relaxation...")
             results = self.internal_solver.solve_lp(tee=tee)
             instance.lp_solution = self.internal_solver.get_solution()
@@ -168,9 +173,6 @@ class LearningSolver:
         for component in self.components.values():
             component.before_solve(self, instance, model)
 
-        if relaxation_only:
-            return results
-
         def iteration_cb():
             should_repeat = False
             for component in self.components.values():
@@ -203,7 +203,7 @@ class BasePyomoSolver(InternalSolver):
         key = self._get_gap_tolerance_option_name()
         self._pyomo_solver.options[key] = gap_tolerance
 
-    def get_constraints_ids(self):
+    def get_constraint_ids(self):
         return list(self._cname_to_constr.keys())
 
     def extract_constraint(self, cid):
@@ -110,7 +110,7 @@ def test_internal_solver():
 
     # New constraint should affect solution and should be listed in
     # constraint ids
-    assert solver.get_constraints_ids() == ["eq_capacity", "cut"]
+    assert solver.get_constraint_ids() == ["eq_capacity", "cut"]
     stats = solver.solve()
     assert stats["Lower bound"] == 1030.0
 
@@ -120,7 +120,7 @@ def test_internal_solver():
 
     # New constraint should no longer affect solution and should no longer
     # be listed in constraint ids
-    assert solver.get_constraints_ids() == ["eq_capacity"]
+    assert solver.get_constraint_ids() == ["eq_capacity"]
     stats = solver.solve()
     assert stats["Lower bound"] == 1183.0
 
@@ -131,7 +131,7 @@ def test_internal_solver():
     solver.add_constraint(cobj)
 
     # Constraint should affect solution again
-    assert solver.get_constraints_ids() == ["eq_capacity", "cut"]
+    assert solver.get_constraint_ids() == ["eq_capacity", "cut"]
     stats = solver.solve()
     assert stats["Lower bound"] == 1030.0
 