Remove experimental LP components

commit f495297168
parent f90f295620
2021-04-06 16:36:14 -05:00
16 changed files with 49 additions and 1125 deletions

View File

@@ -85,8 +85,8 @@ class DynamicConstraintsComponent(Component):
         self,
         instance: "Instance",
         sample: TrainingSample,
-    ) -> List[str]:
-        pred: List[str] = []
+    ) -> List[Hashable]:
+        pred: List[Hashable] = []
         x, _, cids = self.sample_xy_with_cids(instance, sample)
         for category in x.keys():
             assert category in self.classifiers
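The return type is widened from List[str] to List[Hashable] because constraint ids produced by problem instances need only be hashable, not strings. A minimal sketch of what the wider annotation admits (the ids below are made up, for illustration only):

from typing import Hashable, List

pred: List[Hashable] = []
pred.append("c42")              # plain string ids still work
pred.append(("subtour", 3, 7))  # tuple ids are hashable but are not str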

View File

@@ -3,21 +3,23 @@
 # Released under the modified BSD license. See COPYING.md for more details.
 import logging
-from typing import Dict, List, TYPE_CHECKING, Hashable, Tuple
+from typing import Dict, List, TYPE_CHECKING, Hashable, Tuple, Any
 import numpy as np
+from miplearn.instance.base import Instance
 from miplearn.classifiers import Classifier
 from miplearn.classifiers.counting import CountingClassifier
 from miplearn.classifiers.threshold import MinProbabilityThreshold, Threshold
 from miplearn.components.component import Component
 from miplearn.components.dynamic_common import DynamicConstraintsComponent
-from miplearn.features import TrainingSample
+from miplearn.features import TrainingSample, Features
 from miplearn.types import LearningSolveStats
 logger = logging.getLogger(__name__)
 if TYPE_CHECKING:
-    from miplearn.solvers.learning import Instance
+    from miplearn.solvers.learning import LearningSolver
 class DynamicLazyConstraintsComponent(Component):
@@ -40,34 +42,47 @@ class DynamicLazyConstraintsComponent(Component):
         self.known_cids = self.dynamic.known_cids
     @staticmethod
-    def enforce(cids, instance, model, solver):
+    def enforce(
+        cids: List[Hashable],
+        instance: Instance,
+        model: Any,
+        solver: "LearningSolver",
+    ) -> None:
+        assert solver.internal_solver is not None
         for cid in cids:
             cobj = instance.build_lazy_constraint(model, cid)
             solver.internal_solver.add_constraint(cobj)
     def before_solve_mip(
         self,
-        solver,
-        instance,
-        model,
-        stats,
-        features,
-        training_data,
-    ):
+        solver: "LearningSolver",
+        instance: Instance,
+        model: Any,
+        stats: LearningSolveStats,
+        features: Features,
+        training_data: TrainingSample,
+    ) -> None:
         training_data.lazy_enforced = set()
         logger.info("Predicting violated lazy constraints...")
         cids = self.dynamic.sample_predict(instance, training_data)
         logger.info("Enforcing %d lazy constraints..." % len(cids))
         self.enforce(cids, instance, model, solver)
-    def iteration_cb(self, solver, instance, model):
+    def iteration_cb(
+        self,
+        solver: "LearningSolver",
+        instance: Instance,
+        model: Any,
+    ) -> bool:
         logger.debug("Finding violated lazy constraints...")
         cids = instance.find_violated_lazy_constraints(model)
         if len(cids) == 0:
             logger.debug("No violations found")
             return False
         else:
-            instance.training_data[-1].lazy_enforced |= set(cids)
+            sample = instance.training_data[-1]
+            assert sample.lazy_enforced is not None
+            sample.lazy_enforced |= set(cids)
             logger.debug(" %d violations found" % len(cids))
             self.enforce(cids, instance, model, solver)
             return True
@@ -85,7 +100,7 @@ class DynamicLazyConstraintsComponent(Component):
         self,
         instance: "Instance",
         sample: TrainingSample,
-    ) -> List[str]:
+    ) -> List[Hashable]:
         return self.dynamic.sample_predict(instance, sample)
     def fit(self, training_instances: List["Instance"]) -> None:
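For context, the callbacks annotated above rely on two hooks that problem classes provide: find_violated_lazy_constraints, called by iteration_cb after each solve (an empty result ends the resolve loop), and build_lazy_constraint, called by enforce() to turn an id back into a solver constraint. A minimal sketch, assuming a hypothetical instance class and omitting the other Instance methods; the subtour id is made up for illustration:

from typing import Any, Hashable, List

from miplearn.instance.base import Instance

class MyInstance(Instance):  # hypothetical subclass, sketch only
    def find_violated_lazy_constraints(self, model: Any) -> List[Hashable]:
        # Inspect the current solution; return ids of violated constraints.
        return [("subtour", 0, 4)]  # made-up violation id

    def build_lazy_constraint(self, model: Any, cid: Hashable) -> Any:
        # Translate an id into a constraint object that
        # solver.internal_solver.add_constraint() accepts.
        ...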

View File

@@ -104,7 +104,7 @@ class UserCutsComponent(Component):
         self,
         instance: "Instance",
         sample: TrainingSample,
-    ) -> List[str]:
+    ) -> List[Hashable]:
         return self.dynamic.sample_predict(instance, sample)
     def fit(self, training_instances: List["Instance"]) -> None:

View File

@@ -45,7 +45,7 @@ class StaticLazyConstraintsComponent(Component):
         self.thresholds: Dict[Hashable, Threshold] = {}
         self.pool: Dict[str, LazyConstraint] = {}
         self.violation_tolerance: float = violation_tolerance
-        self.enforced_cids: Set[str] = set()
+        self.enforced_cids: Set[Hashable] = set()
         self.n_restored: int = 0
         self.n_iterations: int = 0
@@ -145,11 +145,11 @@ class StaticLazyConstraintsComponent(Component):
         self,
         instance: "Instance",
         sample: TrainingSample,
-    ) -> List[str]:
+    ) -> List[Hashable]:
         assert instance.features.constraints is not None
         x, y = self.sample_xy(instance, sample)
-        category_to_cids: Dict[Hashable, List[str]] = {}
+        category_to_cids: Dict[Hashable, List[Hashable]] = {}
         for (cid, cfeatures) in instance.features.constraints.items():
             if cfeatures.category is None:
                 continue
@@ -157,7 +157,7 @@ class StaticLazyConstraintsComponent(Component):
             if category not in category_to_cids:
                 category_to_cids[category] = []
             category_to_cids[category] += [cid]
-        enforced_cids: List[str] = []
+        enforced_cids: List[Hashable] = []
         for category in x.keys():
             if category not in self.classifiers:
                 continue

View File

@@ -1,3 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

View File

@@ -1,249 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

import logging
import random
from copy import deepcopy

import numpy as np
from tqdm import tqdm

from miplearn.classifiers.counting import CountingClassifier
from miplearn.components import classifier_evaluation_dict
from miplearn.components.component import Component
from miplearn.components.steps.drop_redundant import DropRedundantInequalitiesStep

logger = logging.getLogger(__name__)


class ConvertTightIneqsIntoEqsStep(Component):
    """
    Component that predicts which inequality constraints are likely to be binding in
    the LP relaxation of the problem and converts them into equality constraints.

    This component always makes sure that the conversion process does not affect the
    feasibility of the problem. It can also, optionally, make sure that it does not
    affect the optimality, but this may be expensive.

    This component does not work on MIPs. All integrality constraints must be relaxed
    before this component is used.
    """

    def __init__(
        self,
        classifier=CountingClassifier(),
        threshold=0.95,
        slack_tolerance=0.0,
        check_optimality=False,
    ):
        self.classifiers = {}
        self.classifier_prototype = classifier
        self.threshold = threshold
        self.slack_tolerance = slack_tolerance
        self.check_optimality = check_optimality
        self.converted = []
        self.original_sense = {}
        self.n_restored = 0
        self.n_infeasible_iterations = 0
        self.n_suboptimal_iterations = 0

    def before_solve_mip(
        self,
        solver,
        instance,
        model,
        stats,
        features,
        training_data,
    ):
        self.n_restored = 0
        self.n_infeasible_iterations = 0
        self.n_suboptimal_iterations = 0
        logger.info("Predicting tight LP constraints...")
        x, constraints = DropRedundantInequalitiesStep.x(
            instance,
            constraint_ids=solver.internal_solver.get_constraint_ids(),
        )
        y = self.predict(x)
        n_converted = 0
        n_kept = 0
        for category in y.keys():
            for i in range(len(y[category])):
                if y[category][i][0] == 1:
                    cid = constraints[category][i]
                    s = solver.internal_solver.get_constraint_sense(cid)
                    self.original_sense[cid] = s
                    solver.internal_solver.set_constraint_sense(cid, "=")
                    self.converted += [cid]
                    n_converted += 1
                else:
                    n_kept += 1
        stats["ConvertTight: Kept"] = n_kept
        stats["ConvertTight: Converted"] = n_converted
        logger.info(f"Converted {n_converted} inequalities")

    def after_solve_mip(
        self,
        solver,
        instance,
        model,
        stats,
        features,
        training_data,
    ):
        if training_data.slacks is None:
            training_data.slacks = solver.internal_solver.get_inequality_slacks()
        stats["ConvertTight: Restored"] = self.n_restored
        stats["ConvertTight: Inf iterations"] = self.n_infeasible_iterations
        stats["ConvertTight: Subopt iterations"] = self.n_suboptimal_iterations

    def fit(self, training_instances):
        logger.debug("Extracting x and y...")
        x = self.x(training_instances)
        y = self.y(training_instances)
        logger.debug("Fitting...")
        for category in tqdm(x.keys(), desc="Fit (rlx:conv_ineqs)"):
            if category not in self.classifiers:
                self.classifiers[category] = deepcopy(self.classifier_prototype)
            self.classifiers[category].fit(x[category], y[category])

    @staticmethod
    def _x_train(instances):
        x = {}
        for instance in tqdm(
            instances,
            desc="Extract (drop:x)",
            disable=len(instances) < 5,
        ):
            for training_data in instance.training_data:
                cids = training_data.slacks.keys()
                for cid in cids:
                    category = instance.get_constraint_category(cid)
                    if category is None:
                        continue
                    if category not in x:
                        x[category] = []
                    x[category] += [instance.get_constraint_features(cid)]
        for category in x.keys():
            x[category] = np.array(x[category])
        return x

    def x(self, instances):
        return self._x_train(instances)

    def y(self, instances):
        y = {}
        for instance in tqdm(
            instances,
            desc="Extract (rlx:conv_ineqs:y)",
            disable=len(instances) < 5,
        ):
            for (cid, slack) in instance.training_data[0].slacks.items():
                category = instance.get_constraint_category(cid)
                if category is None:
                    continue
                if category not in y:
                    y[category] = []
                if 0 <= slack <= self.slack_tolerance:
                    y[category] += [[False, True]]
                else:
                    y[category] += [[True, False]]
        for category in y.keys():
            y[category] = np.array(y[category], dtype=np.bool8)
        return y

    def predict(self, x):
        y = {}
        for (category, x_cat) in x.items():
            if category not in self.classifiers:
                continue
            y[category] = []
            x_cat = np.array(x_cat)
            proba = self.classifiers[category].predict_proba(x_cat)
            for i in range(len(proba)):
                if proba[i][1] >= self.threshold:
                    y[category] += [[1]]
                else:
                    y[category] += [[0]]
        return y

    def evaluate(self, instance):
        x = self.x([instance])
        y_true = self.y([instance])
        y_pred = self.predict(x)
        tp, tn, fp, fn = 0, 0, 0, 0
        for category in y_true.keys():
            for i in range(len(y_true[category])):
                if y_pred[category][i][0] == 1:
                    if y_true[category][i][0] == 1:
                        tp += 1
                    else:
                        fp += 1
                else:
                    if y_true[category][i][0] == 1:
                        fn += 1
                    else:
                        tn += 1
        return classifier_evaluation_dict(tp, tn, fp, fn)

    def iteration_cb(self, solver, instance, model):
        is_infeasible, is_suboptimal = False, False
        restored = []

        def check_pi(msense, csense, pi):
            if csense == "=":
                return True
            if msense == "max":
                if csense == "<":
                    return pi >= 0
                else:
                    return pi <= 0
            else:
                if csense == ">":
                    return pi >= 0
                else:
                    return pi <= 0

        def restore(cid):
            nonlocal restored
            csense = self.original_sense[cid]
            solver.internal_solver.set_constraint_sense(cid, csense)
            restored += [cid]

        if solver.internal_solver.is_infeasible():
            for cid in self.converted:
                pi = solver.internal_solver.get_dual(cid)
                if abs(pi) > 0:
                    is_infeasible = True
                    restore(cid)
        elif self.check_optimality:
            random.shuffle(self.converted)
            n_restored = 0
            for cid in self.converted:
                if n_restored >= 100:
                    break
                pi = solver.internal_solver.get_dual(cid)
                csense = self.original_sense[cid]
                msense = solver.internal_solver.get_sense()
                if not check_pi(msense, csense, pi):
                    is_suboptimal = True
                    restore(cid)
                    n_restored += 1
        for cid in restored:
            self.converted.remove(cid)
        if len(restored) > 0:
            self.n_restored += len(restored)
            if is_infeasible:
                self.n_infeasible_iterations += 1
            if is_suboptimal:
                self.n_suboptimal_iterations += 1
            logger.info(f"Restored {len(restored)} inequalities")
            return True
        else:
            return False
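The check_pi test above encodes the standard LP dual-sign convention: at an optimum of a maximization problem, binding <= constraints carry nonnegative duals and >= constraints nonpositive ones (reversed for minimization), so a converted equality whose dual has the wrong sign may be cutting off the original optimum. A standalone restatement of the same test, with made-up sample values:

def dual_sign_ok(msense: str, csense: str, pi: float) -> bool:
    # Mirrors check_pi above; equalities pass unconditionally.
    if csense == "=":
        return True
    if msense == "max":
        return pi >= 0 if csense == "<" else pi <= 0
    return pi >= 0 if csense == ">" else pi <= 0

assert dual_sign_ok("max", "<", 0.5)        # expected sign: keep as equality
assert not dual_sign_ok("max", "<", -0.5)   # wrong sign: restore inequality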

View File

@@ -1,240 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

import logging
from copy import deepcopy

import numpy as np
from p_tqdm import p_umap
from tqdm import tqdm

from miplearn.classifiers.counting import CountingClassifier
from miplearn.components import classifier_evaluation_dict
from miplearn.components.component import Component
from miplearn.components.static_lazy import LazyConstraint

logger = logging.getLogger(__name__)


class DropRedundantInequalitiesStep(Component):
    """
    Component that predicts which inequalities are likely loose in the LP and removes
    them. Optionally, double checks after the problem is solved that all dropped
    inequalities were in fact redundant, and, if not, re-adds them to the problem.

    This component does not work on MIPs. All integrality constraints must be relaxed
    before this component is used.
    """

    def __init__(
        self,
        classifier=CountingClassifier(),
        threshold=0.95,
        slack_tolerance=1e-5,
        check_feasibility=True,
        violation_tolerance=1e-5,
        max_iterations=3,
    ):
        self.classifiers = {}
        self.classifier_prototype = classifier
        self.threshold = threshold
        self.slack_tolerance = slack_tolerance
        self.pool = []
        self.check_feasibility = check_feasibility
        self.violation_tolerance = violation_tolerance
        self.max_iterations = max_iterations
        self.current_iteration = 0
        self.n_iterations = 0
        self.n_restored = 0

    def before_solve_mip(
        self,
        solver,
        instance,
        model,
        stats,
        features,
        training_data,
    ):
        self.n_iterations = 0
        self.n_restored = 0
        self.current_iteration = 0
        logger.info("Predicting redundant LP constraints...")
        x, constraints = self.x(
            instance,
            constraint_ids=solver.internal_solver.get_constraint_ids(),
        )
        y = self.predict(x)
        self.pool = []
        n_dropped = 0
        n_kept = 0
        for category in y.keys():
            for i in range(len(y[category])):
                if y[category][i][1] == 1:
                    cid = constraints[category][i]
                    c = LazyConstraint(
                        cid=cid,
                        obj=solver.internal_solver.extract_constraint(cid),
                    )
                    self.pool += [c]
                    n_dropped += 1
                else:
                    n_kept += 1
        stats["DropRedundant: Kept"] = n_kept
        stats["DropRedundant: Dropped"] = n_dropped
        logger.info(f"Extracted {n_dropped} predicted constraints")

    def after_solve_mip(
        self,
        solver,
        instance,
        model,
        stats,
        features,
        training_data,
    ):
        if training_data.slacks is None:
            training_data.slacks = solver.internal_solver.get_inequality_slacks()
        stats["DropRedundant: Iterations"] = self.n_iterations
        stats["DropRedundant: Restored"] = self.n_restored

    def fit(self, training_instances, n_jobs=1):
        x, y = self.x_y(training_instances, n_jobs=n_jobs)
        for category in tqdm(x.keys(), desc="Fit (drop)"):
            if category not in self.classifiers:
                self.classifiers[category] = deepcopy(self.classifier_prototype)
            self.classifiers[category].fit(x[category], np.array(y[category]))

    @staticmethod
    def x(instance, constraint_ids):
        x = {}
        constraints = {}
        cids = constraint_ids
        for cid in cids:
            category = instance.get_constraint_category(cid)
            if category is None:
                continue
            if category not in x:
                x[category] = []
                constraints[category] = []
            x[category] += [instance.get_constraint_features(cid)]
            constraints[category] += [cid]
        for category in x.keys():
            x[category] = np.array(x[category])
        return x, constraints

    def x_y(self, instances, n_jobs=1):
        def _extract(instance):
            x = {}
            y = {}
            for training_data in instance.training_data:
                for (cid, slack) in training_data.slacks.items():
                    category = instance.get_constraint_category(cid)
                    if category is None:
                        continue
                    if category not in x:
                        x[category] = []
                    if category not in y:
                        y[category] = []
                    if slack > self.slack_tolerance:
                        y[category] += [[False, True]]
                    else:
                        y[category] += [[True, False]]
                    x[category] += [instance.get_constraint_features(cid)]
            return x, y

        if n_jobs == 1:
            results = [_extract(i) for i in tqdm(instances, desc="Extract (drop 1/3)")]
        else:
            results = p_umap(
                _extract,
                instances,
                num_cpus=n_jobs,
                desc="Extract (drop 1/3)",
            )
        x_combined = {}
        y_combined = {}
        for (x, y) in tqdm(results, desc="Extract (drop 2/3)"):
            for category in x.keys():
                if category not in x_combined:
                    x_combined[category] = []
                    y_combined[category] = []
                x_combined[category] += x[category]
                y_combined[category] += y[category]
        for category in tqdm(x_combined.keys(), desc="Extract (drop 3/3)"):
            x_combined[category] = np.array(x_combined[category])
            y_combined[category] = np.array(y_combined[category])
        return x_combined, y_combined

    def predict(self, x):
        y = {}
        for (category, x_cat) in x.items():
            if category not in self.classifiers:
                continue
            y[category] = []
            x_cat = np.array(x_cat)
            proba = self.classifiers[category].predict_proba(x_cat)
            for i in range(len(proba)):
                if proba[i][1] >= self.threshold:
                    y[category] += [[False, True]]
                else:
                    y[category] += [[True, False]]
        return y

    def evaluate(self, instance, n_jobs=1):
        x, y_true = self.x_y([instance], n_jobs=n_jobs)
        y_pred = self.predict(x)
        tp, tn, fp, fn = 0, 0, 0, 0
        for category in tqdm(
            y_true.keys(),
            disable=len(y_true) < 100,
            desc="Eval (drop)",
        ):
            for i in range(len(y_true[category])):
                if (category in y_pred) and (y_pred[category][i][1] == 1):
                    if y_true[category][i][1] == 1:
                        tp += 1
                    else:
                        fp += 1
                else:
                    if y_true[category][i][1] == 1:
                        fn += 1
                    else:
                        tn += 1
        return classifier_evaluation_dict(tp, tn, fp, fn)

    def iteration_cb(self, solver, instance, model):
        if not self.check_feasibility:
            return False
        if self.current_iteration >= self.max_iterations:
            return False
        if solver.internal_solver.is_infeasible():
            return False
        self.current_iteration += 1
        logger.debug("Checking that dropped constraints are satisfied...")
        constraints_to_add = []
        for c in self.pool:
            if not solver.internal_solver.is_constraint_satisfied(
                c.obj,
                self.violation_tolerance,
            ):
                constraints_to_add.append(c)
        for c in constraints_to_add:
            self.pool.remove(c)
            solver.internal_solver.add_constraint(c.obj)
        if len(constraints_to_add) > 0:
            self.n_restored += len(constraints_to_add)
            logger.info(
                "%8d constraints %8d in the pool"
                % (len(constraints_to_add), len(self.pool))
            )
            self.n_iterations += 1
            return True
        else:
            return False
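This component implements a predict-drop-verify loop: likely-loose rows are extracted into a pool before the solve, and iteration_cb re-adds any pool member the solution violates, which triggers a resolve. A condensed sketch of the loop, against a hypothetical solver object; is_constraint_satisfied and add_constraint match the method names used in the diff, while solve() stands in for LearningSolver's resolve cycle:

def solve_with_drops(solver, pool, violation_tolerance=1e-5, max_iterations=3):
    # pool: constraints that were removed from the model before solving.
    for _ in range(max_iterations):
        solver.solve()  # hypothetical resolve entry point
        violated = [
            c for c in pool
            if not solver.is_constraint_satisfied(c.obj, violation_tolerance)
        ]
        if not violated:
            return  # every dropped constraint was indeed redundant
        for c in violated:
            pool.remove(c)
            solver.add_constraint(c.obj)  # restore, then resolve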

View File

@@ -1,27 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

import logging

from miplearn.components.component import Component

logger = logging.getLogger(__name__)


class RelaxIntegralityStep(Component):
    """
    Component that relaxes all integrality constraints before the problem is solved.
    """

    def before_solve_mip(
        self,
        solver,
        instance,
        model,
        stats,
        features,
        training_data,
    ):
        logger.info("Relaxing integrality...")
        solver.internal_solver.relax()
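Relaxing integrality means widening each integer variable's domain to the continuous interval between its bounds; internal_solver.relax() delegates this to the underlying solver. A minimal illustration against a hypothetical variable representation (the vtype codes below are assumptions, not MIPLearn's API):

def relax(variables):
    # Hypothetical vtype codes: "B" binary, "I" integer, "C" continuous.
    for var in variables:
        if var.vtype in ("B", "I"):
            var.vtype = "C"  # bounds are kept; integrality is dropped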