Refactor StaticLazy; remove old constraint methods

master
Alinson S. Xavier 4 years ago
parent 53d3e9d98a
commit 91c8db2225

@ -12,7 +12,7 @@ from miplearn.classifiers import Classifier
from miplearn.classifiers.counting import CountingClassifier
from miplearn.classifiers.threshold import MinProbabilityThreshold, Threshold
from miplearn.components.component import Component
from miplearn.features import Constraint, Sample
from miplearn.features import Constraint, Sample, ConstraintFeatures
from miplearn.instance.base import Instance
from miplearn.types import LearningSolveStats
@ -45,7 +45,8 @@ class StaticLazyConstraintsComponent(Component):
self.threshold_prototype: Threshold = threshold
self.classifiers: Dict[Hashable, Classifier] = {}
self.thresholds: Dict[Hashable, Threshold] = {}
self.pool: Dict[str, Constraint] = {}
self.pool_old: Dict[str, Constraint] = {}
self.pool: ConstraintFeatures = ConstraintFeatures()
self.violation_tolerance: float = violation_tolerance
self.enforced_cids: Set[Hashable] = set()
self.n_restored: int = 0
@ -78,24 +79,28 @@ class StaticLazyConstraintsComponent(Component):
assert solver.internal_solver is not None
assert sample.after_load is not None
assert sample.after_load.instance is not None
assert sample.after_load.constraints_old is not None
logger.info("Predicting violated (static) lazy constraints...")
if sample.after_load.instance.lazy_constraint_count == 0:
logger.info("Instance does not have static lazy constraints. Skipping.")
self.enforced_cids = set(self.sample_predict(sample))
logger.info("Moving lazy constraints to the pool...")
self.pool = {}
for (cid, cdict) in sample.after_load.constraints_old.items():
if cdict.lazy and cid not in self.enforced_cids:
self.pool[cid] = cdict
solver.internal_solver.remove_constraint(cid)
logger.info(
f"{len(self.enforced_cids)} lazy constraints kept; "
f"{len(self.pool)} moved to the pool"
constraints = sample.after_load.constraints
assert constraints is not None
assert constraints.lazy is not None
assert constraints.names is not None
selected = tuple(
(constraints.lazy[i] and constraints.names[i] not in self.enforced_cids)
for i in range(len(constraints.lazy))
)
stats["LazyStatic: Removed"] = len(self.pool)
stats["LazyStatic: Kept"] = len(self.enforced_cids)
n_removed = sum(selected)
n_kept = sum(constraints.lazy) - n_removed
self.pool = constraints[selected]
assert self.pool.names is not None
solver.internal_solver.remove_constraints(self.pool.names)
logger.info(f"{n_kept} lazy constraints kept; {n_removed} moved to the pool")
stats["LazyStatic: Removed"] = n_removed
stats["LazyStatic: Kept"] = n_kept
stats["LazyStatic: Restored"] = 0
self.n_restored = 0
self.n_iterations = 0
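The pool is now a single ConstraintFeatures object, and the `constraints[selected]` expression above filters it with a tuple of booleans. A minimal sketch of that filtering semantics, using an illustrative stand-in rather than the real `miplearn.features.ConstraintFeatures` (which carries many more fields and may implement selection differently):

from dataclasses import dataclass
from typing import Optional, Tuple

@dataclass
class MiniConstraintFeatures:
    # Illustrative stand-in for miplearn.features.ConstraintFeatures.
    names: Optional[Tuple[str, ...]] = None
    lazy: Optional[Tuple[bool, ...]] = None

    def __getitem__(self, selected: Tuple[bool, ...]) -> "MiniConstraintFeatures":
        # Keep only the entries whose mask value is True, field by field.
        def keep(values):
            if values is None:
                return None
            return tuple(v for v, flag in zip(values, selected) if flag)
        return MiniConstraintFeatures(names=keep(self.names), lazy=keep(self.lazy))

cf = MiniConstraintFeatures(names=("c1", "c2", "c3"), lazy=(True, False, True))
assert cf[(True, False, True)].names == ("c1", "c3")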
@ -160,25 +165,34 @@ class StaticLazyConstraintsComponent(Component):
def _check_and_add(self, solver: "LearningSolver") -> bool:
assert solver.internal_solver is not None
assert self.pool.names is not None
if len(self.pool.names) == 0:
logger.info("Lazy constraint pool is empty. Skipping violation check.")
return False
self.n_iterations += 1
logger.info("Finding violated lazy constraints...")
enforced: Dict[str, Constraint] = {}
for (cid, c) in self.pool.items():
if not solver.internal_solver.is_constraint_satisfied_old(
c,
is_satisfied = solver.internal_solver.are_constraints_satisfied(
self.pool,
tol=self.violation_tolerance,
):
enforced[cid] = c
logger.info(f"{len(enforced)} violations found")
for (cid, c) in enforced.items():
del self.pool[cid]
solver.internal_solver.add_constraint(c, name=cid)
self.enforced_cids.add(cid)
self.n_restored += 1
)
is_violated = tuple(not i for i in is_satisfied)
violated_constraints = self.pool[is_violated]
satisfied_constraints = self.pool[is_satisfied]
self.pool = satisfied_constraints
assert violated_constraints.names is not None
assert satisfied_constraints.names is not None
n_violated = len(violated_constraints.names)
n_satisfied = len(satisfied_constraints.names)
logger.info(f"Found {n_violated} violated lazy constraints found")
if n_violated > 0:
logger.info(
f"{len(enforced)} constraints restored; {len(self.pool)} in the pool"
"Enforcing {n_violated} lazy constraints; "
f"{n_satisfied} left in the pool..."
)
if len(enforced) > 0:
self.n_iterations += 1
solver.internal_solver.add_constraints(violated_constraints)
for (i, name) in enumerate(violated_constraints.names):
self.enforced_cids.add(name)
self.n_restored += 1
return True
else:
return False
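Pooled constraints are restored through the component's iteration callback: after each MIP solve, `_check_and_add` re-adds any violated pool members and requests another solve. A hedged sketch of that outer loop, with a simplified driver standing in for LearningSolver's `iteration_cb` machinery:

def enforce_lazy_loop(solver, component) -> None:
    # Sketch only: solve, restore violated pooled constraints, repeat until clean.
    while True:
        solver.internal_solver.solve()
        if not component._check_and_add(solver):
            break  # pool is empty or fully satisfied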
@ -194,12 +208,16 @@ class StaticLazyConstraintsComponent(Component):
y: Dict[Hashable, List[List[float]]] = {}
cids: Dict[Hashable, List[str]] = {}
assert sample.after_load is not None
assert sample.after_load.constraints_old is not None
for (cid, constr) in sample.after_load.constraints_old.items():
constraints = sample.after_load.constraints
assert constraints is not None
assert constraints.names is not None
assert constraints.lazy is not None
assert constraints.categories is not None
for (cidx, cname) in enumerate(constraints.names):
# Initialize categories
if not constr.lazy:
if not constraints.lazy[cidx]:
continue
category = constr.category
category = constraints.categories[cidx]
if category is None:
continue
if category not in x:
@ -212,12 +230,11 @@ class StaticLazyConstraintsComponent(Component):
if sample.after_lp is not None:
sf = sample.after_lp
assert sf.instance is not None
assert sf.constraints is not None
features = list(sf.instance.to_list())
assert sf.constraints_old is not None
assert sf.constraints_old[cid] is not None
features.extend(sf.constraints_old[cid].to_list())
features.extend(sf.constraints.to_list(cidx))
x[category].append(features)
cids[category].append(cid)
cids[category].append(cname)
# Labels
if (
@ -225,7 +242,7 @@ class StaticLazyConstraintsComponent(Component):
and (sample.after_mip.extra is not None)
and ("lazy_enforced" in sample.after_mip.extra)
):
if cid in sample.after_mip.extra["lazy_enforced"]:
if cname in sample.after_mip.extra["lazy_enforced"]:
y[category] += [[False, True]]
else:
y[category] += [[True, False]]
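For the five-constraint fixture defined in the tests further down this diff, each feature row is the instance features followed by that constraint's LP-stage features; the non-lazy c5 and any uncategorized constraints are skipped. A sketch of the expected x matrices (the y labels are one [not enforced, enforced] row per entry and depend on the elided contents of `sample.after_mip.extra["lazy_enforced"]`):

# Assuming instance features [5.0] and the mocked per-constraint LP features:
x_expected = {
    "type-a": [[5.0, 1.0, 1.0], [5.0, 1.0, 2.0], [5.0, 1.0, 3.0]],
    "type-b": [[5.0, 1.0, 4.0, 0.0]],
}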

@ -174,7 +174,6 @@ class Features:
instance: Optional[InstanceFeatures] = None
variables: Optional[VariableFeatures] = None
constraints: Optional[ConstraintFeatures] = None
constraints_old: Optional[Dict[str, Constraint]] = None
lp_solve: Optional["LPSolveStats"] = None
mip_solve: Optional["MIPSolveStats"] = None
extra: Optional[Dict] = None
@ -212,9 +211,6 @@ class FeaturesExtractor:
with_sa=self.with_sa,
with_lhs=self.with_lhs,
)
features.constraints_old = solver.get_constraints_old(
with_static=with_static,
)
if with_static:
self._extract_user_features_vars(instance, features)
self._extract_user_features_constrs(instance, features)
@ -266,38 +262,50 @@ class FeaturesExtractor:
instance: "Instance",
features: Features,
) -> None:
assert features.constraints_old is not None
assert features.constraints is not None
assert features.constraints.names is not None
has_static_lazy = instance.has_static_lazy_constraints()
for (cid, constr) in features.constraints_old.items():
user_features = None
category = instance.get_constraint_category(cid)
user_features: List[Optional[Tuple[float, ...]]] = []
categories: List[Optional[Hashable]] = []
lazy: List[bool] = []
for (cidx, cname) in enumerate(features.constraints.names):
cf: Optional[List[float]] = None
category: Optional[Hashable] = instance.get_constraint_category(cname)
if category is not None:
categories.append(category)
assert isinstance(category, collections.Hashable), (
f"Constraint category must be hashable. "
f"Found {type(category).__name__} instead for cid={cid}.",
f"Found {type(category).__name__} instead for cname={cname}.",
)
user_features = instance.get_constraint_features(cid)
if isinstance(user_features, np.ndarray):
user_features = user_features.tolist()
assert isinstance(user_features, list), (
cf = instance.get_constraint_features(cname)
if isinstance(cf, np.ndarray):
cf = cf.tolist()
assert isinstance(cf, list), (
f"Constraint features must be a list. "
f"Found {type(user_features).__name__} instead for cid={cid}."
f"Found {type(cf).__name__} instead for cname={cname}."
)
assert isinstance(user_features[0], float), (
f"Constraint features must be a list of floats. "
f"Found {type(user_features[0]).__name__} instead for cid={cid}."
for f in cf:
assert isinstance(f, numbers.Real), (
f"Constraint features must be a list of numbers. "
f"Found {type(f).__name__} instead for cname={cname}."
)
user_features.append(tuple(cf))
else:
user_features.append(None)
categories.append(None)
if has_static_lazy:
constr.lazy = instance.is_constraint_lazy(cid)
constr.category = category
constr.user_features = user_features
lazy.append(instance.is_constraint_lazy(cname))
else:
lazy.append(False)
features.constraints.user_features = tuple(user_features)
features.constraints.lazy = tuple(lazy)
features.constraints.categories = tuple(categories)
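The loop above consumes three per-constraint callbacks on Instance. A hedged sketch of a class fragment that satisfies the asserted contract (hashable category or None, list of numbers for features, boolean laziness); it is illustrative only, not a real Instance subclass:

from typing import Hashable, List, Optional

class ToyConstraintCallbacks:
    # Returns the value kinds that _extract_user_features_constrs asserts on.
    def get_constraint_category(self, cname: str) -> Optional[Hashable]:
        # Returning None skips the constraint entirely.
        return "eq_capacity" if cname.startswith("eq_") else None

    def get_constraint_features(self, cname: str) -> List[float]:
        # Must be a list of numbers for every categorized constraint.
        return [0.0]

    def is_constraint_lazy(self, cname: str) -> bool:
        return False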
def _extract_user_features_instance(
self,
instance: "Instance",
features: Features,
) -> None:
assert features.constraints_old is not None
user_features = instance.get_instance_features()
if isinstance(user_features, np.ndarray):
user_features = user_features.tolist()
@ -310,13 +318,11 @@ class FeaturesExtractor:
f"Instance features must be a list of numbers. "
f"Found {type(v).__name__} instead."
)
lazy_count = 0
for (cid, cdict) in features.constraints_old.items():
if cdict.lazy:
lazy_count += 1
assert features.constraints is not None
assert features.constraints.lazy is not None
features.instance = InstanceFeatures(
user_features=user_features,
lazy_constraint_count=lazy_count,
lazy_constraint_count=sum(features.constraints.lazy),
)
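The removed counting loop is replaced by summing the boolean `lazy` tuple directly, since Python counts each True as 1:

# Equivalent counting on a literal tuple:
assert sum((True, True, False, True)) == 3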
def _extract_alvarez_2017(self, features: Features) -> None:

@ -117,7 +117,7 @@ class TravelingSalesmanInstance(Instance):
or (e[0] not in component and e[1] in component)
]
constr = model.eq_subtour.add(expr=sum(model.x[e] for e in cut_edges) >= 2)
solver.add_constraint(constr, name="")
solver.add_constraint(constr)
class TravelingSalesmanGenerator:

@ -91,26 +91,6 @@ class GurobiSolver(InternalSolver):
self.gp.GRB.Callback.MIPNODE,
]
@overrides
def add_constraint(self, constr: Constraint, name: str) -> None:
assert self.model is not None
assert self._varname_to_var is not None
assert constr.lhs is not None
lhs = self.gp.quicksum(
self._varname_to_var[varname] * coeff
for (varname, coeff) in constr.lhs.items()
)
if constr.sense == "=":
self.model.addConstr(lhs == constr.rhs, name=name)
elif constr.sense == "<":
self.model.addConstr(lhs <= constr.rhs, name=name)
else:
self.model.addConstr(lhs >= constr.rhs, name=name)
self._dirty = True
self._has_lp_solution = False
self._has_mip_solution = False
@overrides
def add_constraints(self, cf: ConstraintFeatures) -> None:
assert cf.names is not None
@ -143,7 +123,7 @@ class GurobiSolver(InternalSolver):
self,
cf: ConstraintFeatures,
tol: float = 1e-5,
) -> List[bool]:
) -> Tuple[bool, ...]:
assert cf.names is not None
assert cf.senses is not None
assert cf.lhs is not None
@ -162,7 +142,7 @@ class GurobiSolver(InternalSolver):
result.append(lhs >= cf.rhs[i] - tol)
else:
result.append(abs(cf.rhs[i] - lhs) <= tol)
return result
return tuple(result)
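Each entry of the returned tuple is a tolerance comparison on the evaluated left-hand side. A small sketch of the per-sense check (the LHS evaluation itself, elided from this hunk, uses the current solution values):

def satisfied(lhs: float, sense: str, rhs: float, tol: float = 1e-5) -> bool:
    # Mirrors the sense handling in are_constraints_satisfied (sketch only).
    if sense == "<":
        return lhs <= rhs + tol
    if sense == ">":
        return lhs >= rhs - tol
    return abs(rhs - lhs) <= tol  # "=" case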
@overrides
def build_test_instance_infeasible(self) -> Instance:
@ -289,76 +269,6 @@ class GurobiSolver(InternalSolver):
slacks=slacks,
)
@overrides
def get_constraints_old(self, with_static: bool = True) -> Dict[str, Constraint]:
model = self.model
assert model is not None
self._raise_if_callback()
if self._dirty:
model.update()
self._dirty = False
gp_constrs = model.getConstrs()
constr_names = model.getAttr("constrName", gp_constrs)
lhs: Optional[List[Dict]] = None
rhs = None
sense = None
dual_value = None
sa_rhs_up = None
sa_rhs_down = None
slack = None
basis_status = None
if with_static:
var_names = model.getAttr("varName", model.getVars())
rhs = model.getAttr("rhs", gp_constrs)
sense = model.getAttr("sense", gp_constrs)
lhs = []
for (i, gp_constr) in enumerate(gp_constrs):
expr = model.getRow(gp_constr)
lhsi = {}
for j in range(expr.size()):
lhsi[var_names[expr.getVar(j).index]] = expr.getCoeff(j)
lhs.append(lhsi)
if self._has_lp_solution:
dual_value = model.getAttr("pi", gp_constrs)
sa_rhs_up = model.getAttr("saRhsUp", gp_constrs)
sa_rhs_down = model.getAttr("saRhsLow", gp_constrs)
basis_status = model.getAttr("cbasis", gp_constrs)
if self._has_lp_solution or self._has_mip_solution:
slack = model.getAttr("slack", gp_constrs)
constraints: Dict[str, Constraint] = {}
for (i, gp_constr) in enumerate(gp_constrs):
assert (
constr_names[i] not in constraints
), f"Duplicated constraint name detected: {constr_names[i]}"
constraint = Constraint()
if with_static:
assert lhs is not None
assert rhs is not None
assert sense is not None
constraint.lhs = lhs[i]
constraint.rhs = rhs[i]
constraint.sense = sense[i]
if dual_value is not None:
assert sa_rhs_up is not None
assert sa_rhs_down is not None
assert basis_status is not None
constraint.dual_value = dual_value[i]
constraint.sa_rhs_up = sa_rhs_up[i]
constraint.sa_rhs_down = sa_rhs_down[i]
if gp_constr.cbasis == 0:
constraint.basis_status = "B"
elif gp_constr.cbasis == -1:
constraint.basis_status = "N"
else:
raise Exception(f"unknown cbasis: {gp_constr.cbasis}")
if slack is not None:
constraint.slack = slack[i]
constraints[constr_names[i]] = constraint
return constraints
@overrides
def get_solution(self) -> Optional[Solution]:
assert self.model is not None
@ -470,42 +380,6 @@ class GurobiSolver(InternalSolver):
values=values,
)
def is_constraint_satisfied(
self,
names: List[str],
tol: float = 1e-6,
) -> List[bool]:
def _check(c: Tuple) -> bool:
lhs, sense, rhs = c
lhs_value = lhs.getValue()
if sense == "=":
return abs(lhs_value - rhs) < tol
elif sense == ">":
return lhs_value > rhs - tol
else:
return lhs_value < rhs - tol
constrs = [self._relaxed_constrs[n] for n in names]
return list(map(_check, constrs))
@overrides
def is_constraint_satisfied_old(
self,
constr: Constraint,
tol: float = 1e-6,
) -> bool:
assert constr.lhs is not None
lhs = 0.0
for (varname, coeff) in constr.lhs.items():
var = self._varname_to_var[varname]
lhs += self._get_value(var) * coeff
if constr.sense == "<":
return lhs <= constr.rhs + tol
elif constr.sense == ">":
return lhs >= constr.rhs - tol
else:
return abs(constr.rhs - lhs) < abs(tol)
@overrides
def is_infeasible(self) -> bool:
assert self.model is not None
@ -521,13 +395,7 @@ class GurobiSolver(InternalSolver):
self.model.update()
@overrides
def remove_constraint(self, name: str) -> None:
assert self.model is not None
constr = self.model.getConstrByName(name)
self.model.remove(constr)
@overrides
def remove_constraints(self, names: List[str]) -> None:
def remove_constraints(self, names: Tuple[str, ...]) -> None:
assert self.model is not None
constrs = [self.model.getConstrByName(n) for n in names]
self.model.remove(constrs)

@ -5,7 +5,7 @@
import logging
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Any, Dict, List, Optional
from typing import Any, Dict, List, Optional, Tuple
from miplearn.features import Constraint, VariableFeatures, ConstraintFeatures
from miplearn.instance.base import Instance
@ -51,98 +51,53 @@ class InternalSolver(ABC):
"""
@abstractmethod
def solve_lp(
self,
tee: bool = False,
) -> LPSolveStats:
"""
Solves the LP relaxation of the currently loaded instance. After this
method finishes, the solution can be retrieved by calling `get_solution`.
This method should not permanently modify the problem. That is, subsequent
calls to `solve` should solve the original MIP, not the LP relaxation.
Parameters
----------
tee
If true, prints the solver log to the screen.
"""
def add_constraints(self, cf: ConstraintFeatures) -> None:
"""Adds the given constraints to the model."""
pass
@abstractmethod
def solve(
def are_constraints_satisfied(
self,
tee: bool = False,
iteration_cb: Optional[IterationCallback] = None,
lazy_cb: Optional[LazyCallback] = None,
user_cut_cb: Optional[UserCutCallback] = None,
) -> MIPSolveStats:
cf: ConstraintFeatures,
tol: float = 1e-5,
) -> Tuple[bool, ...]:
"""
Solves the currently loaded instance. After this method finishes,
the best solution found can be retrieved by calling `get_solution`.
Parameters
----------
iteration_cb: IterationCallback
By default, InternalSolver makes a single call to the native `solve`
method and returns the result. If an iteration callback is provided
instead, InternalSolver enters a loop, where `solve` and `iteration_cb`
are called alternately. To stop the loop, `iteration_cb` should return
False. Any other result causes the solver to loop again.
lazy_cb: LazyCallback
This function is called whenever the solver finds a new candidate
solution and can be used to add lazy constraints to the model. Only the
following operations within the callback are allowed:
- Querying the value of a variable
- Querying if a constraint is satisfied
- Adding a new constraint to the problem
Additional operations may be allowed by specific subclasses.
user_cut_cb: UserCutCallback
This function is called whenever the solver finds a new integer-infeasible
solution and needs to generate cutting planes to cut it off.
tee: bool
If true, prints the solver log to the screen.
Checks whether the current solution satisfies the given constraints.
"""
pass
@abstractmethod
def get_solution(self) -> Optional[Solution]:
def are_callbacks_supported(self) -> bool:
"""
Returns current solution found by the solver.
If called after `solve`, returns the best primal solution found during
the search. If called after `solve_lp`, returns the optimal solution
to the LP relaxation. If no primal solution is available, return None.
Returns True if this solver supports native callbacks, such as lazy constraints
callback or user cuts callback.
"""
return False
@abstractmethod
def build_test_instance_infeasible(self) -> Instance:
pass
@abstractmethod
def set_warm_start(self, solution: Solution) -> None:
def build_test_instance_knapsack(self) -> Instance:
"""
Sets the warm start to be used by the solver.
Returns an instance corresponding to the following MIP, for testing purposes:
Only one warm start is supported. Calling this function when a warm start
already exists will remove the previous warm start.
maximize 505 x0 + 352 x1 + 458 x2 + 220 x3
s.t. eq_capacity: z = 23 x0 + 26 x1 + 20 x2 + 18 x3
x0, x1, x2, x3 binary
0 <= z <= 67 continuous
"""
pass
@abstractmethod
def set_instance(
self,
instance: Instance,
model: Any = None,
) -> None:
"""
Loads the given instance into the solver.
def build_test_instance_redundancy(self) -> Instance:
pass
Parameters
----------
instance: Instance
The instance to be loaded.
model: Any
The concrete optimization model corresponding to this instance
(e.g. JuMP.Model or pyomo.core.ConcreteModel). If not provided,
it will be generated by calling `instance.to_model()`.
@abstractmethod
def clone(self) -> "InternalSolver":
"""
Returns a new copy of this solver with identical parameters, but otherwise
completely uninitialized.
"""
pass
@ -154,17 +109,25 @@ class InternalSolver(ABC):
"""
pass
def set_branching_priorities(self, priorities: BranchPriorities) -> None:
@abstractmethod
def get_solution(self) -> Optional[Solution]:
"""
Sets the branching priorities for the given decision variables.
Returns current solution found by the solver.
When the MIP solver needs to decide on which variable to branch, variables
with higher priority are picked first, given that they are fractional.
Ties are solved arbitrarily. By default, all variables have priority zero.
If called after `solve`, returns the best primal solution found during
the search. If called after `solve_lp`, returns the optimal solution
to the LP relaxation. If no primal solution is available, return None.
"""
pass
Missing values indicate variables whose priorities should not be modified.
@abstractmethod
def get_constraint_attrs(self) -> List[str]:
"""
Returns a list of constraint attributes supported by this solver. Used for
testing purposes only.
"""
raise NotImplementedError()
pass
@abstractmethod
def get_constraints(
@ -176,52 +139,48 @@ class InternalSolver(ABC):
pass
@abstractmethod
def get_constraints_old(self, with_static: bool = True) -> Dict[str, Constraint]:
pass
@abstractmethod
def add_constraint(self, constr: Constraint, name: str) -> None:
def get_variable_attrs(self) -> List[str]:
"""
Adds a given constraint to the model.
Returns a list of variable attributes supported by this solver. Used for
testing purposes only.
"""
pass
@abstractmethod
def add_constraints(self, cf: ConstraintFeatures) -> None:
"""Adds the given constraints to the model."""
pass
@abstractmethod
def are_constraints_satisfied(
def get_variables(
self,
cf: ConstraintFeatures,
tol: float = 1e-5,
) -> List[bool]:
"""
Checks whether the current solution satisfies the given constraints.
with_static: bool = True,
with_sa: bool = True,
) -> VariableFeatures:
"""
pass
Returns a description of the decision variables in the problem.
@abstractmethod
def remove_constraint(self, name: str) -> None:
"""
Removes the constraint that has a given name from the model.
Parameters
----------
with_static: bool
If True, include features that do not change during the solution process,
such as variable types and names. This parameter is used to reduce the
amount of duplicated data collected by LearningSolver. Features that do
not change are only collected once.
with_sa: bool
If True, collect sensitivity analysis information. For large models,
collecting this information may be expensive, so this parameter is useful
for reducing running times.
"""
pass
@abstractmethod
def remove_constraints(self, names: List[str]) -> None:
def is_infeasible(self) -> bool:
"""
Removes the given constraints from the model.
Returns True if the model has been proved to be infeasible.
Must be called after solve.
"""
pass
@abstractmethod
def is_constraint_satisfied_old(
self, constr: Constraint, tol: float = 1e-6
) -> bool:
def remove_constraints(self, names: Tuple[str, ...]) -> None:
"""
Returns True if the current solution satisfies the given constraint.
Removes the given constraints from the model.
"""
pass
@ -232,85 +191,99 @@ class InternalSolver(ABC):
"""
pass
@abstractmethod
def is_infeasible(self) -> bool:
"""
Returns True if the model has been proved to be infeasible.
Must be called after solve.
def set_branching_priorities(self, priorities: BranchPriorities) -> None:
"""
pass
Sets the branching priorities for the given decision variables.
@abstractmethod
def clone(self) -> "InternalSolver":
"""
Returns a new copy of this solver with identical parameters, but otherwise
completely uninitialized.
When the MIP solver needs to decide on which variable to branch, variables
with higher priority are picked first, given that they are fractional.
Ties are solved arbitrarily. By default, all variables have priority zero.
Missing values indicate variables whose priorities should not be modified.
"""
pass
raise NotImplementedError()
@abstractmethod
def build_test_instance_infeasible(self) -> Instance:
pass
def set_instance(
self,
instance: Instance,
model: Any = None,
) -> None:
"""
Loads the given instance into the solver.
@abstractmethod
def build_test_instance_redundancy(self) -> Instance:
Parameters
----------
instance: Instance
The instance to be loaded.
model: Any
The concrete optimization model corresponding to this instance
(e.g. JuMP.Model or pyomo.core.ConcreteModel). If not provided,
it will be generated by calling `instance.to_model()`.
"""
pass
@abstractmethod
def build_test_instance_knapsack(self) -> Instance:
def set_warm_start(self, solution: Solution) -> None:
"""
Returns an instance corresponding to the following MIP, for testing purposes:
Sets the warm start to be used by the solver.
maximize 505 x0 + 352 x1 + 458 x2 + 220 x3
s.t. eq_capacity: z = 23 x0 + 26 x1 + 20 x2 + 18 x3
x0, x1, x2, x3 binary
0 <= z <= 67 continuous
Only one warm start is supported. Calling this function when a warm start
already exists will remove the previous warm start.
"""
pass
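The build_test_instance_knapsack docstring above pins down the test MIP exactly. For reference, a hedged gurobipy sketch of that model (each solver builds it internally; the model name and variable handles below are assumptions):

import gurobipy as gp
from gurobipy import GRB

m = gp.Model("knapsack-test")
x = m.addVars(4, vtype=GRB.BINARY, name="x")
z = m.addVar(lb=0.0, ub=67.0, name="z")
m.setObjective(505 * x[0] + 352 * x[1] + 458 * x[2] + 220 * x[3], GRB.MAXIMIZE)
m.addConstr(z == 23 * x[0] + 26 * x[1] + 20 * x[2] + 18 * x[3], name="eq_capacity")
m.optimize()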
def are_callbacks_supported(self) -> bool:
"""
Returns True if this solver supports native callbacks, such as lazy constraints
callback or user cuts callback.
"""
return False
@abstractmethod
def get_variables(
def solve(
self,
with_static: bool = True,
with_sa: bool = True,
) -> VariableFeatures:
tee: bool = False,
iteration_cb: Optional[IterationCallback] = None,
lazy_cb: Optional[LazyCallback] = None,
user_cut_cb: Optional[UserCutCallback] = None,
) -> MIPSolveStats:
"""
Returns a description of the decision variables in the problem.
Solves the currently loaded instance. After this method finishes,
the best solution found can be retrieved by calling `get_solution`.
Parameters
----------
with_static: bool
If True, include features that do not change during the solution process,
such as variable types and names. This parameter is used to reduce the
amount of duplicated data collected by LearningSolver. Features that do
not change are only collected once.
with_sa: bool
If True, collect sensitivity analysis information. For large models,
collecting this information may be expensive, so this parameter is useful
for reducing running times.
iteration_cb: IterationCallback
By default, InternalSolver makes a single call to the native `solve`
method and returns the result. If an iteration callback is provided
instead, InternalSolver enters a loop, where `solve` and `iteration_cb`
are called alternately. To stop the loop, `iteration_cb` should return
False. Any other result causes the solver to loop again.
lazy_cb: LazyCallback
This function is called whenever the solver finds a new candidate
solution and can be used to add lazy constraints to the model. Only the
following operations within the callback are allowed:
- Querying the value of a variable
- Querying if a constraint is satisfied
- Adding a new constraint to the problem
Additional operations may be allowed by specific subclasses.
user_cut_cb: UserCutCallback
This function is called whenever the solver finds a new integer-infeasible
solution and needs to generate cutting planes to cut it off.
tee: bool
If true, prints the solver log to the screen.
"""
pass
@abstractmethod
def get_constraint_attrs(self) -> List[str]:
"""
Returns a list of constraint attributes supported by this solver. Used for
testing purposes only.
def solve_lp(
self,
tee: bool = False,
) -> LPSolveStats:
"""
Solves the LP relaxation of the currently loaded instance. After this
method finishes, the solution can be retrieved by calling `get_solution`.
pass
This method should not permanently modify the problem. That is, subsequent
calls to `solve` should solve the original MIP, not the LP relaxation.
@abstractmethod
def get_variable_attrs(self) -> List[str]:
"""
Returns a list of variable attributes supported by this solver. Used for
testing purposes only.
Parameters
----------
tee
If true, prints the solver log to the screen.
"""
pass

@ -28,7 +28,6 @@ from miplearn.solvers.internal import (
IterationCallback,
LazyCallback,
MIPSolveStats,
Constraint,
)
from miplearn.types import (
SolverParams,
@ -69,30 +68,11 @@ class BasePyomoSolver(InternalSolver):
for (key, value) in params.items():
self._pyomo_solver.options[key] = value
@overrides
def add_constraint(
self,
constr: Any,
name: str,
) -> None:
assert self.model is not None
if isinstance(constr, Constraint):
assert constr.lhs is not None
lhs = 0.0
for (varname, coeff) in constr.lhs.items():
var = self._varname_to_var[varname]
lhs += var * coeff
if constr.sense == "=":
expr = lhs == constr.rhs
elif constr.sense == "<":
expr = lhs <= constr.rhs
else:
expr = lhs >= constr.rhs
cl = pe.Constraint(expr=expr, name=name)
self.model.add_component(name, cl)
self._pyomo_solver.add_constraint(cl)
self._cname_to_constr[name] = cl
else:
self._pyomo_solver.add_constraint(constr)
self._termination_condition = ""
self._has_lp_solution = False
@ -133,7 +113,7 @@ class BasePyomoSolver(InternalSolver):
self,
cf: ConstraintFeatures,
tol: float = 1e-5,
) -> List[bool]:
) -> Tuple[bool, ...]:
assert cf.names is not None
assert cf.lhs is not None
assert cf.rhs is not None
@ -150,7 +130,7 @@ class BasePyomoSolver(InternalSolver):
result.append(lhs >= cf.rhs[i] - tol)
else:
result.append(abs(cf.rhs[i] - lhs) < tol)
return result
return tuple(result)
@overrides
def build_test_instance_infeasible(self) -> Instance:
@ -277,30 +257,6 @@ class BasePyomoSolver(InternalSolver):
dual_values=dual_values_t,
)
@overrides
def get_constraints_old(self, with_static: bool = True) -> Dict[str, Constraint]:
assert self.model is not None
constraints = {}
for constr in self.model.component_objects(pyomo.core.Constraint):
if isinstance(constr, pe.ConstraintList):
for idx in constr:
name = f"{constr.name}[{idx}]"
assert name not in constraints
constraints[name] = self._parse_pyomo_constraint(
constr[idx],
with_static=with_static,
)
else:
name = constr.name
assert name not in constraints
constraints[name] = self._parse_pyomo_constraint(
constr,
with_static=with_static,
)
return constraints
@overrides
def get_constraint_attrs(self) -> List[str]:
return [
@ -435,36 +391,12 @@ class BasePyomoSolver(InternalSolver):
"values",
]
@overrides
def is_constraint_satisfied_old(
self, constr: Constraint, tol: float = 1e-6
) -> bool:
lhs = 0.0
assert constr.lhs is not None
for (varname, coeff) in constr.lhs.items():
var = self._varname_to_var[varname]
lhs += var.value * coeff
if constr.sense == "<":
return lhs <= constr.rhs + tol
elif constr.sense == ">":
return lhs >= constr.rhs - tol
else:
return abs(constr.rhs - lhs) < abs(tol)
@overrides
def is_infeasible(self) -> bool:
return self._termination_condition == TerminationCondition.infeasible
@overrides
def remove_constraint(self, name: str) -> None:
assert self.model is not None
constr = self._cname_to_constr[name]
del self._cname_to_constr[name]
self.model.del_component(constr)
self._pyomo_solver.remove_constraint(constr)
@overrides
def remove_constraints(self, names: List[str]) -> None:
def remove_constraints(self, names: Tuple[str, ...]) -> None:
assert self.model is not None
for name in names:
constr = self._cname_to_constr[name]
@ -627,46 +559,6 @@ class BasePyomoSolver(InternalSolver):
def _get_warm_start_regexp(self) -> Optional[str]:
return None
def _parse_pyomo_constraint(
self,
pyomo_constr: pyomo.core.Constraint,
with_static: bool = True,
) -> Constraint:
assert self.model is not None
constr = Constraint()
if with_static:
# Extract RHS and sense
has_ub = pyomo_constr.has_ub()
has_lb = pyomo_constr.has_lb()
assert (
(not has_lb)
or (not has_ub)
or pyomo_constr.upper() == pyomo_constr.lower()
), "range constraints not supported"
if not has_ub:
constr.sense = ">"
constr.rhs = pyomo_constr.lower()
elif not has_lb:
constr.sense = "<"
constr.rhs = pyomo_constr.upper()
else:
constr.sense = "="
constr.rhs = pyomo_constr.upper()
# Extract LHS
constr.lhs = self._parse_pyomo_expr(pyomo_constr.body)
# Extract solution attributes
if self._has_lp_solution:
constr.dual_value = self.model.dual[pyomo_constr]
if self._has_mip_solution or self._has_lp_solution:
constr.slack = self.model.slack[pyomo_constr]
# Build constraint
return constr
def _parse_pyomo_expr(self, expr: Any) -> Dict[str, float]:
lhs = {}
if isinstance(expr, SumExpression):

@ -200,7 +200,7 @@ def run_basic_usage_tests(solver: InternalSolver) -> None:
rhs=(0.0,),
senses=("<",),
)
assert_equals(solver.are_constraints_satisfied(cf), [False])
assert_equals(solver.are_constraints_satisfied(cf), (False,))
# Add constraint and verify it affects solution
solver.add_constraints(cf)
@ -227,10 +227,10 @@ def run_basic_usage_tests(solver: InternalSolver) -> None:
)
stats = solver.solve()
assert_equals(stats.mip_lower_bound, 1030.0)
assert_equals(solver.are_constraints_satisfied(cf), [True])
assert_equals(solver.are_constraints_satisfied(cf), (True,))
# Remove the new constraint
solver.remove_constraints(["cut"])
solver.remove_constraints(("cut",))
# New constraint should no longer affect solution
stats = solver.solve()
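The basic-usage test above exercises the new tuple-based constraint API end to end: describe a cut as a ConstraintFeatures, check it, add it, re-solve, then remove it by name. A hedged sketch of that pattern; the cut's real coefficients are elided from this hunk, so the lhs below is a placeholder, and the setup lines are assumptions:

from miplearn.features import ConstraintFeatures

# Assumed setup (exact import paths and calls may differ):
# solver = GurobiSolver()
# solver.set_instance(solver.build_test_instance_knapsack())
# solver.solve()
cf = ConstraintFeatures(
    names=("cut",),
    lhs=((("x[0]", 1.0),),),  # placeholder coefficients
    rhs=(0.0,),
    senses=("<",),
)
assert solver.are_constraints_satisfied(cf) == (False,)
solver.add_constraints(cf)
stats = solver.solve()
assert solver.are_constraints_satisfied(cf) == (True,)
solver.remove_constraints(("cut",))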

@ -16,6 +16,7 @@ from miplearn.features import (
Features,
Constraint,
Sample,
ConstraintFeatures,
)
from miplearn.instance.base import Instance
from miplearn.solvers.internal import InternalSolver
@ -32,23 +33,21 @@ def sample() -> Sample:
instance=InstanceFeatures(
lazy_constraint_count=4,
),
constraints_old={
"c1": Constraint(category="type-a", lazy=True),
"c2": Constraint(category="type-a", lazy=True),
"c3": Constraint(category="type-a", lazy=True),
"c4": Constraint(category="type-b", lazy=True),
"c5": Constraint(category="type-b", lazy=False),
},
constraints=ConstraintFeatures(
names=("c1", "c2", "c3", "c4", "c5"),
categories=(
"type-a",
"type-a",
"type-a",
"type-b",
"type-b",
),
lazy=(True, True, True, True, False),
),
),
after_lp=Features(
instance=InstanceFeatures(),
constraints_old={
"c1": Constraint(),
"c2": Constraint(),
"c3": Constraint(),
"c4": Constraint(),
"c5": Constraint(),
},
constraints=ConstraintFeatures(names=("c1", "c2", "c3", "c4", "c5")),
),
after_mip=Features(
extra={
@ -57,17 +56,14 @@ def sample() -> Sample:
),
)
sample.after_lp.instance.to_list = Mock(return_value=[5.0]) # type: ignore
sample.after_lp.constraints_old["c1"].to_list = Mock( # type: ignore
return_value=[1.0, 1.0]
)
sample.after_lp.constraints_old["c2"].to_list = Mock( # type: ignore
return_value=[1.0, 2.0]
)
sample.after_lp.constraints_old["c3"].to_list = Mock( # type: ignore
return_value=[1.0, 3.0]
)
sample.after_lp.constraints_old["c4"].to_list = Mock( # type: ignore
return_value=[1.0, 4.0, 0.0]
sample.after_lp.constraints.to_list = Mock( # type: ignore
side_effect=lambda idx: {
0: [1.0, 1.0],
1: [1.0, 2.0],
2: [1.0, 3.0],
3: [1.0, 4.0, 0.0],
4: None,
}[idx]
)
return sample
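With the `side_effect` mock above, per-index feature lookups behave as sample_xy expects, for example:

assert sample.after_lp.constraints.to_list(1) == [1.0, 2.0]
assert sample.after_lp.constraints.to_list(4) is None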
@ -87,6 +83,9 @@ def test_usage_with_solver(instance: Instance) -> None:
internal = solver.internal_solver = Mock(spec=InternalSolver)
internal.is_constraint_satisfied_old = Mock(return_value=False)
internal.are_constraints_satisfied = Mock(
side_effect=lambda cf, tol=1.0: [False for i in range(len(cf.names))]
)
component = StaticLazyConstraintsComponent(violation_tolerance=1.0)
component.thresholds["type-a"] = MinProbabilityThreshold([0.5, 0.5])
@ -115,7 +114,6 @@ def test_usage_with_solver(instance: Instance) -> None:
stats: LearningSolveStats = {}
sample = instance.samples[0]
assert sample.after_load is not None
assert sample.after_load.constraints_old is not None
assert sample.after_mip is not None
assert sample.after_mip.extra is not None
del sample.after_mip.extra["lazy_enforced"]
@ -134,8 +132,8 @@ def test_usage_with_solver(instance: Instance) -> None:
component.classifiers["type-b"].predict_proba.assert_called_once()
# Should ask internal solver to remove some constraints
assert internal.remove_constraint.call_count == 1
internal.remove_constraint.assert_has_calls([call("c3")])
assert internal.remove_constraints.call_count == 1
internal.remove_constraints.assert_has_calls([call(("c3",))])
# LearningSolver calls after_iteration (first time)
should_repeat = component.iteration_cb(solver, instance, None)
@ -143,19 +141,20 @@ def test_usage_with_solver(instance: Instance) -> None:
# Should ask internal solver to verify if constraints in the pool are
# satisfied and add the ones that are not
c3 = sample.after_load.constraints_old["c3"]
internal.is_constraint_satisfied_old.assert_called_once_with(c3, tol=1.0)
internal.is_constraint_satisfied_old.reset_mock()
internal.add_constraint.assert_called_once_with(c3, name="c3")
internal.add_constraint.reset_mock()
assert sample.after_load.constraints is not None
c = sample.after_load.constraints[False, False, True, False, False]
internal.are_constraints_satisfied.assert_called_once_with(c, tol=1.0)
internal.are_constraints_satisfied.reset_mock()
internal.add_constraints.assert_called_once_with(c)
internal.add_constraints.reset_mock()
# LearningSolver calls after_iteration (second time)
should_repeat = component.iteration_cb(solver, instance, None)
assert not should_repeat
# The lazy constraint pool should be empty by now, so no calls should be made
internal.is_constraint_satisfied_old.assert_not_called()
internal.add_constraint.assert_not_called()
internal.are_constraints_satisfied.assert_not_called()
internal.add_constraints.assert_not_called()
# LearningSolver calls after_solve_mip
component.after_solve_mip(

@ -50,7 +50,6 @@ def test_redundancy() -> None:
solver.relax_constraints(["c1"])
stats = solver.solve_lp()
assert stats.lp_value == 2.0
assert solver.is_constraint_satisfied(["c1"]) == [False]
solver.enforce_constraints(["c1"])
stats = solver.solve_lp()

@ -28,7 +28,6 @@ def test_knapsack() -> None:
features = FeaturesExtractor().extract(instance, solver)
assert features.variables is not None
assert features.constraints_old is not None
assert features.instance is not None
assert_equals(
@ -66,22 +65,29 @@ def test_knapsack() -> None:
),
)
assert_equals(
_round_constraints(features.constraints_old),
{
"eq_capacity": Constraint(
basis_status="N",
category="eq_capacity",
dual_value=13.538462,
lazy=False,
lhs={"x[0]": 23.0, "x[1]": 26.0, "x[2]": 20.0, "x[3]": 18.0, "z": -1.0},
rhs=0.0,
sa_rhs_down=-24.0,
sa_rhs_up=1.9999999999999987,
sense="=",
slack=0.0,
user_features=[0.0],
)
},
_round(features.constraints),
ConstraintFeatures(
basis_status=("N",),
categories=("eq_capacity",),
dual_values=(13.538462,),
names=("eq_capacity",),
lazy=(False,),
lhs=(
(
("x[0]", 23.0),
("x[1]", 26.0),
("x[2]", 20.0),
("x[3]", 18.0),
("z", -1.0),
),
),
rhs=(0.0,),
sa_rhs_down=(-24.0,),
sa_rhs_up=(2.0,),
senses=("=",),
slacks=(0.0,),
user_features=((0.0,),),
),
)
assert_equals(
features.instance,
