Update DynamicLazyConstraintsComponent

2021-04-13 08:42:06 -05:00
parent b5411b8950
commit a4433916e5
8 changed files with 144 additions and 63 deletions

View File

@@ -196,7 +196,7 @@ class Component(EnforceOverrides):
) -> None:
x, y = self.xy_instances(training_instances)
for cat in x.keys():
- x[cat] = np.array(x[cat])
+ x[cat] = np.array(x[cat], dtype=np.float32)
y[cat] = np.array(y[cat])
self.fit_xy(x, y)
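
The dtype change above affects how the per-category feature lists are materialized; a small sketch with made-up feature values shows the effect before the matrices are handed to fit_xy:

import numpy as np

# Hypothetical per-category feature lists, roughly what xy_instances might
# collect: plain Python bools, ints and floats mixed together.
x_cat = [[1, 0.5, True], [0, 2.0, False]]

# Without an explicit dtype NumPy would infer float64 here; forcing float32
# keeps the training matrices compact and gives every category the same dtype
# before they reach fit_xy.
arr = np.array(x_cat, dtype=np.float32)
print(arr.dtype, arr.shape)  # float32 (2, 3)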

View File

@@ -105,7 +105,10 @@ class DynamicConstraintsComponent(Component):
features.extend(sample.after_lp.instance.to_list())
features.extend(instance.get_constraint_features(cid))
for ci in features:
- assert isinstance(ci, float)
+ assert isinstance(ci, float), (
+ f"Constraint features must be a list of floats. "
+ f"Found {ci.__class__.__name__} instead."
+ )
x[category].append(features)
cids[category].append(cid)
@@ -137,7 +140,7 @@ class DynamicConstraintsComponent(Component):
x, y, _ = self.sample_xy_with_cids(instance, sample)
return x, y
- def sample_predict(
+ def sample_predict_old(
self,
instance: Instance,
sample: TrainingSample,
@@ -160,6 +163,29 @@ class DynamicConstraintsComponent(Component):
pred += [cids[category][i]]
return pred
+ def sample_predict(
+ self,
+ instance: Instance,
+ sample: Sample,
+ ) -> List[Hashable]:
+ pred: List[Hashable] = []
+ if len(self.known_cids) == 0:
+ logger.info("Classifiers not fitted. Skipping.")
+ return pred
+ x, _, cids = self.sample_xy_with_cids(instance, sample)
+ for category in x.keys():
+ assert category in self.classifiers
+ assert category in self.thresholds
+ clf = self.classifiers[category]
+ thr = self.thresholds[category]
+ nx = np.array(x[category])
+ proba = clf.predict_proba(nx)
+ t = thr.predict(nx)
+ for i in range(proba.shape[0]):
+ if proba[i][1] > t[1]:
+ pred += [cids[category][i]]
+ return pred
@overrides
def fit_old(self, training_instances: List[Instance]) -> None:
collected_cids = set()
@@ -174,6 +200,24 @@ class DynamicConstraintsComponent(Component):
self.known_cids.extend(sorted(collected_cids))
super().fit_old(training_instances)
+ @overrides
+ def fit(self, training_instances: List[Instance]) -> None:
+ collected_cids = set()
+ for instance in training_instances:
+ instance.load()
+ for sample in instance.samples:
+ if (
+ sample.after_mip is None
+ or sample.after_mip.extra is None
+ or sample.after_mip.extra[self.attr] is None
+ ):
+ continue
+ collected_cids |= sample.after_mip.extra[self.attr]
+ instance.free()
+ self.known_cids.clear()
+ self.known_cids.extend(sorted(collected_cids))
+ super().fit(training_instances)
@overrides
def fit_xy(
self,
@@ -189,12 +233,15 @@ class DynamicConstraintsComponent(Component):
self.thresholds[category].fit(self.classifiers[category], npx, npy)
@overrides
- def sample_evaluate_old(
+ def sample_evaluate(
self,
instance: Instance,
- sample: TrainingSample,
+ sample: Sample,
) -> Dict[Hashable, Dict[str, float]]:
- assert getattr(sample, self.attr) is not None
+ assert sample.after_mip is not None
+ assert sample.after_mip.extra is not None
+ assert self.attr in sample.after_mip.extra
+ actual = sample.after_mip.extra[self.attr]
pred = set(self.sample_predict(instance, sample))
tp: Dict[Hashable, int] = {}
tn: Dict[Hashable, int] = {}
@@ -210,12 +257,12 @@ class DynamicConstraintsComponent(Component):
fp[category] = 0
fn[category] = 0
if cid in pred:
- if cid in getattr(sample, self.attr):
+ if cid in actual:
tp[category] += 1
else:
fp[category] += 1
else:
- if cid in getattr(sample, self.attr):
+ if cid in actual:
fn[category] += 1
else:
tn[category] += 1
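
For context, the decision rule in the new sample_predict above can be illustrated with dummy stand-ins for the per-category classifier and threshold objects (hypothetical classes and values; the real objects come from self.classifiers[category] and self.thresholds[category]):

import numpy as np

class DummyClassifier:
    def predict_proba(self, x: np.ndarray) -> np.ndarray:
        # Column 1 plays the role of P(constraint is violated).
        p = 1.0 / (1.0 + np.exp(-x[:, 0]))
        return np.column_stack([1.0 - p, p])

class DummyThreshold:
    def predict(self, x: np.ndarray) -> list:
        # Per-class thresholds; index 1 is compared against P(violated),
        # mirroring `proba[i][1] > t[1]` above.
        return [0.0, 0.5]

x = np.array([[2.0], [-1.0], [0.8]], dtype=np.float32)
cids = ["cut[1]", "cut[2]", "cut[3]"]

clf, thr = DummyClassifier(), DummyThreshold()
proba, t = clf.predict_proba(x), thr.predict(x)
pred = [cids[i] for i in range(proba.shape[0]) if proba[i][1] > t[1]]
print(pred)  # ['cut[1]', 'cut[3]']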

View File

@@ -3,7 +3,7 @@
# Released under the modified BSD license. See COPYING.md for more details.
import logging
- from typing import Dict, List, TYPE_CHECKING, Hashable, Tuple, Any, Optional
+ from typing import Dict, List, TYPE_CHECKING, Hashable, Tuple, Any, Optional, Set
import numpy as np
from overrides import overrides
@@ -41,6 +41,7 @@ class DynamicLazyConstraintsComponent(Component):
self.classifiers = self.dynamic.classifiers
self.thresholds = self.dynamic.thresholds
self.known_cids = self.dynamic.known_cids
+ self.lazy_enforced: Set[str] = set()
@staticmethod
def enforce(
@@ -54,21 +55,33 @@ class DynamicLazyConstraintsComponent(Component):
instance.enforce_lazy_constraint(solver.internal_solver, model, cid)
@overrides
- def before_solve_mip_old(
+ def before_solve_mip(
self,
solver: "LearningSolver",
instance: Instance,
model: Any,
stats: LearningSolveStats,
- features: Features,
- training_data: TrainingSample,
+ sample: Sample,
) -> None:
- training_data.lazy_enforced = set()
+ self.lazy_enforced.clear()
logger.info("Predicting violated (dynamic) lazy constraints...")
- cids = self.dynamic.sample_predict(instance, training_data)
+ cids = self.dynamic.sample_predict(instance, sample)
logger.info("Enforcing %d lazy constraints..." % len(cids))
self.enforce(cids, instance, model, solver)
+ @overrides
+ def after_solve_mip(
+ self,
+ solver: "LearningSolver",
+ instance: Instance,
+ model: Any,
+ stats: LearningSolveStats,
+ sample: Sample,
+ ) -> None:
+ assert sample.after_mip is not None
+ assert sample.after_mip.extra is not None
+ sample.after_mip.extra["lazy_enforced"] = set(self.lazy_enforced)
@overrides
def iteration_cb(
self,
@@ -83,23 +96,13 @@ class DynamicLazyConstraintsComponent(Component):
logger.debug("No violations found")
return False
else:
- sample = instance.training_data[-1]
- assert sample.lazy_enforced is not None
- sample.lazy_enforced |= set(cids)
+ self.lazy_enforced |= set(cids)
logger.debug(" %d violations found" % len(cids))
self.enforce(cids, instance, model, solver)
return True
# Delegate ML methods to self.dynamic
# -------------------------------------------------------------------
- @overrides
- def sample_xy_old(
- self,
- instance: Instance,
- sample: TrainingSample,
- ) -> Tuple[Dict, Dict]:
- return self.dynamic.sample_xy_old(instance, sample)
@overrides
def sample_xy(
self,
@@ -111,13 +114,13 @@ class DynamicLazyConstraintsComponent(Component):
def sample_predict(
self,
instance: Instance,
- sample: TrainingSample,
+ sample: Sample,
) -> List[Hashable]:
return self.dynamic.sample_predict(instance, sample)
@overrides
- def fit_old(self, training_instances: List[Instance]) -> None:
- self.dynamic.fit_old(training_instances)
+ def fit(self, training_instances: List[Instance]) -> None:
+ self.dynamic.fit(training_instances)
@overrides
def fit_xy(
@@ -128,9 +131,9 @@ class DynamicLazyConstraintsComponent(Component):
self.dynamic.fit_xy(x, y)
@overrides
- def sample_evaluate_old(
+ def sample_evaluate(
self,
instance: Instance,
- sample: TrainingSample,
+ sample: Sample,
) -> Dict[Hashable, Dict[str, float]]:
- return self.dynamic.sample_evaluate_old(instance, sample)
+ return self.dynamic.sample_evaluate(instance, sample)
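
The lazy_enforced bookkeeping introduced above follows a simple lifecycle: cleared in before_solve_mip, grown in iteration_cb as violations are found, and copied into sample.after_mip.extra in after_solve_mip. A minimal sketch with a hypothetical stand-in class (the constraint ids are made up):

from typing import Set

class LazyEnforcedTracker:
    def __init__(self) -> None:
        self.lazy_enforced: Set[str] = set()

    def before_solve_mip(self) -> None:
        # Reset state at the start of each solve.
        self.lazy_enforced.clear()

    def iteration_cb(self, violated_cids: Set[str]) -> bool:
        if not violated_cids:
            return False  # nothing violated: stop resolving
        self.lazy_enforced |= violated_cids
        return True  # constraints enforced: solve again

    def after_solve_mip(self, extra: dict) -> None:
        # Persist a copy so the training sample records what was enforced.
        extra["lazy_enforced"] = set(self.lazy_enforced)

tracker = LazyEnforcedTracker()
tracker.before_solve_mip()
tracker.iteration_cb({"eq_capacity[1]", "eq_capacity[2]"})
tracker.iteration_cb({"eq_capacity[3]"})
extra: dict = {}
tracker.after_solve_mip(extra)
print(extra["lazy_enforced"])  # all three ids (set order may vary)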

View File

@@ -51,7 +51,7 @@ class UserCutsComponent(Component):
self.enforced.clear()
self.n_added_in_callback = 0
logger.info("Predicting violated user cuts...")
- cids = self.dynamic.sample_predict(instance, training_data)
+ cids = self.dynamic.sample_predict_old(instance, training_data)
logger.info("Enforcing %d user cuts ahead-of-time..." % len(cids))
for cid in cids:
instance.enforce_user_cut(solver.internal_solver, model, cid)