From cb62345acf8501fa4a6c01518b0e11ec909f95ab Mon Sep 17 00:00:00 2001
From: "Alinson S. Xavier"
Date: Mon, 12 Apr 2021 10:05:17 -0500
Subject: [PATCH] Refactor StaticLazy

---
 miplearn/components/static_lazy.py   | 167 +++++++++++----------------
 tests/components/test_static_lazy.py |  93 ++++-----------
 2 files changed, 91 insertions(+), 169 deletions(-)

diff --git a/miplearn/components/static_lazy.py b/miplearn/components/static_lazy.py
index 09f4ca2..bb65763 100644
--- a/miplearn/components/static_lazy.py
+++ b/miplearn/components/static_lazy.py
@@ -52,26 +52,35 @@ class StaticLazyConstraintsComponent(Component):
         self.n_iterations: int = 0
 
     @overrides
-    def before_solve_mip_old(
+    def after_solve_mip(
         self,
         solver: "LearningSolver",
         instance: "Instance",
         model: Any,
         stats: LearningSolveStats,
-        features: Features,
-        training_data: TrainingSample,
+        sample: Sample,
     ) -> None:
-        assert solver.internal_solver is not None
-        assert features.instance is not None
-        assert features.constraints is not None
+        sample.after_mip.extra["lazy_enforced"] = self.enforced_cids
+        stats["LazyStatic: Restored"] = self.n_restored
+        stats["LazyStatic: Iterations"] = self.n_iterations
 
+    @overrides
+    def before_solve_mip(
+        self,
+        solver: "LearningSolver",
+        instance: "Instance",
+        model: Any,
+        stats: LearningSolveStats,
+        sample: Sample,
+    ) -> None:
+        assert solver.internal_solver is not None
         logger.info("Predicting violated (static) lazy constraints...")
-        if features.instance.lazy_constraint_count == 0:
+        if sample.after_load.instance.lazy_constraint_count == 0:
             logger.info("Instance does not have static lazy constraints. Skipping.")
-        self.enforced_cids = set(self.sample_predict(instance, training_data))
+        self.enforced_cids = set(self.sample_predict(sample))
         logger.info("Moving lazy constraints to the pool...")
         self.pool = {}
-        for (cid, cdict) in features.constraints.items():
+        for (cid, cdict) in sample.after_load.constraints.items():
             if cdict.lazy and cid not in self.enforced_cids:
                 self.pool[cid] = cdict
                 solver.internal_solver.remove_constraint(cid)
@@ -86,18 +95,17 @@ class StaticLazyConstraintsComponent(Component):
         self.n_iterations = 0
 
     @overrides
-    def after_solve_mip_old(
+    def fit_xy(
         self,
-        solver: "LearningSolver",
-        instance: "Instance",
-        model: Any,
-        stats: LearningSolveStats,
-        features: Features,
-        training_data: TrainingSample,
+        x: Dict[Hashable, np.ndarray],
+        y: Dict[Hashable, np.ndarray],
     ) -> None:
-        training_data.lazy_enforced = self.enforced_cids
-        stats["LazyStatic: Restored"] = self.n_restored
-        stats["LazyStatic: Iterations"] = self.n_iterations
+        for c in y.keys():
+            assert c in x
+            self.classifiers[c] = self.classifier_prototype.clone()
+            self.thresholds[c] = self.threshold_prototype.clone()
+            self.classifiers[c].fit(x[c], y[c])
+            self.thresholds[c].fit(self.classifiers[c], x[c], y[c])
 
     @overrides
     def iteration_cb(
@@ -120,6 +128,30 @@
     ) -> None:
         self._check_and_add(solver)
 
+    def sample_predict(self, sample: Sample) -> List[Hashable]:
+        x, y, cids = self._sample_xy_with_cids(sample)
+        enforced_cids: List[Hashable] = []
+        for category in x.keys():
+            if category not in self.classifiers:
+                continue
+            npx = np.array(x[category])
+            proba = self.classifiers[category].predict_proba(npx)
+            thr = self.thresholds[category].predict(npx)
+            pred = list(proba[:, 1] > thr[1])
+            for (i, is_selected) in enumerate(pred):
+                if is_selected:
+                    enforced_cids += [cids[category][i]]
+        return enforced_cids
+
+    @overrides
+    def sample_xy(
+        self,
+        _: Optional[Instance],
+        sample: Sample,
+    ) -> Tuple[Dict[Hashable, List[List[float]]], Dict[Hashable, List[List[float]]]]:
+        x, y, _ = self._sample_xy_with_cids(sample)
+        return x, y
+
     def _check_and_add(self, solver: "LearningSolver") -> bool:
         assert solver.internal_solver is not None
         logger.info("Finding violated lazy constraints...")
@@ -145,69 +177,16 @@
         else:
             return False
 
-    def sample_predict(
-        self,
-        instance: "Instance",
-        sample: TrainingSample,
-    ) -> List[Hashable]:
-        assert instance.features.constraints is not None
-
-        x, y = self.sample_xy_old(instance, sample)
-        category_to_cids: Dict[Hashable, List[Hashable]] = {}
-        for (cid, cfeatures) in instance.features.constraints.items():
-            if cfeatures.category is None:
-                continue
-            category = cfeatures.category
-            if category not in category_to_cids:
-                category_to_cids[category] = []
-            category_to_cids[category] += [cid]
-        enforced_cids: List[Hashable] = []
-        for category in x.keys():
-            if category not in self.classifiers:
-                continue
-            npx = np.array(x[category])
-            proba = self.classifiers[category].predict_proba(npx)
-            thr = self.thresholds[category].predict(npx)
-            pred = list(proba[:, 1] > thr[1])
-            for (i, is_selected) in enumerate(pred):
-                if is_selected:
-                    enforced_cids += [category_to_cids[category][i]]
-        return enforced_cids
-
-    @overrides
-    def sample_xy_old(
-        self,
-        instance: "Instance",
-        sample: TrainingSample,
-    ) -> Tuple[Dict[Hashable, List[List[float]]], Dict[Hashable, List[List[float]]]]:
-        assert instance.features.constraints is not None
-        x: Dict = {}
-        y: Dict = {}
-        for (cid, cfeatures) in instance.features.constraints.items():
-            if not cfeatures.lazy:
-                continue
-            category = cfeatures.category
-            if category is None:
-                continue
-            if category not in x:
-                x[category] = []
-                y[category] = []
-            x[category] += [cfeatures.user_features]
-            if sample.lazy_enforced is not None:
-                if cid in sample.lazy_enforced:
-                    y[category] += [[False, True]]
-                else:
-                    y[category] += [[True, False]]
-        return x, y
-
-    @overrides
-    def sample_xy(
-        self,
-        _: Optional[Instance],
-        sample: Sample,
-    ) -> Tuple[Dict[Hashable, List[List[float]]], Dict[Hashable, List[List[float]]]]:
-        x: Dict = {}
-        y: Dict = {}
+    def _sample_xy_with_cids(
+        self, sample: Sample
+    ) -> Tuple[
+        Dict[Hashable, List[List[float]]],
+        Dict[Hashable, List[List[float]]],
+        Dict[Hashable, List[str]],
+    ]:
+        x: Dict[Hashable, List[List[float]]] = {}
+        y: Dict[Hashable, List[List[float]]] = {}
+        cids: Dict[Hashable, List[str]] = {}
         assert sample.after_load is not None
         assert sample.after_load.constraints is not None
         for (cid, constr) in sample.after_load.constraints.items():
@@ -220,6 +199,7 @@
             if category not in x:
                 x[category] = []
                 y[category] = []
+                cids[category] = []
 
             # Features
             sf = sample.after_load
@@ -231,25 +211,16 @@
             assert sf.constraints[cid] is not None
             features.extend(sf.constraints[cid].to_list())
             x[category].append(features)
+            cids[category].append(cid)
 
             # Labels
-            if sample.after_mip is not None:
-                assert sample.after_mip.extra is not None
+            if (
+                (sample.after_mip is not None)
+                and (sample.after_mip.extra is not None)
+                and ("lazy_enforced" in sample.after_mip.extra)
+            ):
                 if cid in sample.after_mip.extra["lazy_enforced"]:
                     y[category] += [[False, True]]
                 else:
                     y[category] += [[True, False]]
-        return x, y
-
-    @overrides
-    def fit_xy(
-        self,
-        x: Dict[Hashable, np.ndarray],
-        y: Dict[Hashable, np.ndarray],
-    ) -> None:
-        for c in y.keys():
-            assert c in x
-            self.classifiers[c] = self.classifier_prototype.clone()
-            self.thresholds[c] = self.threshold_prototype.clone()
-            self.classifiers[c].fit(x[c], y[c])
-            self.thresholds[c].fit(self.classifiers[c], x[c], y[c])
+        return x, y, cids
diff --git a/tests/components/test_static_lazy.py b/tests/components/test_static_lazy.py
index ac650e5..047b578 100644
--- a/tests/components/test_static_lazy.py
+++ b/tests/components/test_static_lazy.py
@@ -12,7 +12,6 @@ from miplearn.classifiers import Classifier
 from miplearn.classifiers.threshold import Threshold, MinProbabilityThreshold
 from miplearn.components.static_lazy import StaticLazyConstraintsComponent
 from miplearn.features import (
-    TrainingSample,
     InstanceFeatures,
     Features,
     Constraint,
@@ -30,13 +29,16 @@ from miplearn.types import (
 def sample() -> Sample:
     sample = Sample(
         after_load=Features(
+            instance=InstanceFeatures(
+                lazy_constraint_count=4,
+            ),
             constraints={
                 "c1": Constraint(category="type-a", lazy=True),
                 "c2": Constraint(category="type-a", lazy=True),
                 "c3": Constraint(category="type-a", lazy=True),
                 "c4": Constraint(category="type-b", lazy=True),
                 "c5": Constraint(category="type-b", lazy=False),
-            }
+            },
         ),
         after_lp=Features(
             instance=InstanceFeatures(),
@@ -71,61 +73,14 @@
 
 
 @pytest.fixture
-def instance_old(features: Features) -> Instance:
+def instance(sample: Sample) -> Instance:
     instance = Mock(spec=Instance)
-    instance.features = features
+    instance.samples = [sample]
     instance.has_static_lazy_constraints = Mock(return_value=True)
     return instance
 
 
-@pytest.fixture
-def sample_old() -> TrainingSample:
-    return TrainingSample(
-        lazy_enforced={"c1", "c2", "c4"},
-    )
-
-
-@pytest.fixture
-def features() -> Features:
-    return Features(
-        instance=InstanceFeatures(
-            user_features=[0],
-            lazy_constraint_count=4,
-        ),
-        constraints={
-            "c1": Constraint(
-                category="type-a",
-                user_features=[1.0, 1.0],
-                lazy=True,
-            ),
-            "c2": Constraint(
-                category="type-a",
-                user_features=[1.0, 2.0],
-                lazy=True,
-            ),
-            "c3": Constraint(
-                category="type-a",
-                user_features=[1.0, 3.0],
-                lazy=True,
-            ),
-            "c4": Constraint(
-                category="type-b",
-                user_features=[1.0, 4.0, 0.0],
-                lazy=True,
-            ),
-            "c5": Constraint(
-                category="type-b",
-                user_features=[1.0, 5.0, 0.0],
-                lazy=False,
-            ),
-        },
-    )
-
-
-def test_usage_with_solver(instance_old: Instance) -> None:
-    assert instance_old.features is not None
-    assert instance_old.features.constraints is not None
-
+def test_usage_with_solver(instance: Instance) -> None:
     solver = Mock(spec=LearningSolver)
     solver.use_lazy_cb = False
     solver.gap_tolerance = 1e-4
@@ -157,17 +112,17 @@
         )
     )
 
-    sample_old: TrainingSample = TrainingSample()
     stats: LearningSolveStats = {}
+    sample = instance.samples[0]
+    del sample.after_mip.extra["lazy_enforced"]
 
     # LearningSolver calls before_solve_mip
-    component.before_solve_mip_old(
+    component.before_solve_mip(
         solver=solver,
-        instance=instance_old,
+        instance=instance,
         model=None,
         stats=stats,
-        features=instance_old.features,
-        training_data=sample_old,
+        sample=sample,
    )
 
     # Should ask ML to predict whether each lazy constraint should be enforced
@@ -179,19 +134,19 @@
     internal.remove_constraint.assert_has_calls([call("c3")])
 
     # LearningSolver calls after_iteration (first time)
-    should_repeat = component.iteration_cb(solver, instance_old, None)
+    should_repeat = component.iteration_cb(solver, instance, None)
     assert should_repeat
 
     # Should ask internal solver to verify if constraints in the pool are
     # satisfied and add the ones that are not
-    c3 = instance_old.features.constraints["c3"]
+    c3 = sample.after_load.constraints["c3"]
     internal.is_constraint_satisfied.assert_called_once_with(c3, tol=1.0)
     internal.is_constraint_satisfied.reset_mock()
     internal.add_constraint.assert_called_once_with(c3, name="c3")
     internal.add_constraint.reset_mock()
 
     # LearningSolver calls after_iteration (second time)
-    should_repeat = component.iteration_cb(solver, instance_old, None)
+    should_repeat = component.iteration_cb(solver, instance, None)
     assert not should_repeat
 
     # The lazy constraint pool should be empty by now, so no calls should be made
@@ -199,18 +154,17 @@
     internal.add_constraint.assert_not_called()
 
     # LearningSolver calls after_solve_mip
-    component.after_solve_mip_old(
+    component.after_solve_mip(
         solver=solver,
-        instance=instance_old,
+        instance=instance,
         model=None,
         stats=stats,
-        features=instance_old.features,
-        training_data=sample_old,
+        sample=sample,
     )
 
     # Should update training sample
-    assert sample_old.lazy_enforced == {"c1", "c2", "c3", "c4"}
-
+    assert sample.after_mip.extra["lazy_enforced"] == {"c1", "c2", "c3", "c4"}
+
     # Should update stats
     assert stats["LazyStatic: Removed"] == 1
     assert stats["LazyStatic: Kept"] == 3
@@ -218,10 +172,7 @@
     assert stats["LazyStatic: Iterations"] == 1
 
 
-def test_sample_predict(
-    instance_old: Instance,
-    sample_old: TrainingSample,
-) -> None:
+def test_sample_predict(sample: Sample) -> None:
     comp = StaticLazyConstraintsComponent()
     comp.thresholds["type-a"] = MinProbabilityThreshold([0.5, 0.5])
     comp.thresholds["type-b"] = MinProbabilityThreshold([0.5, 0.5])
@@ -239,7 +190,7 @@ def test_sample_predict(
             ]
         )
     )
-    pred = comp.sample_predict(instance_old, sample_old)
+    pred = comp.sample_predict(sample)
     assert pred == ["c1", "c2", "c4"]
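
Illustration (reviewer sketch, not part of the patch): after this refactor the component is trained from per-category dictionaries and queried through Sample objects. Below is a minimal sketch of the training side, assuming the no-argument constructor used in test_sample_predict above and that the default classifier/threshold prototypes accept plain numpy arrays; the feature values are made up.

    import numpy as np

    from miplearn.components.static_lazy import StaticLazyConstraintsComponent

    comp = StaticLazyConstraintsComponent()

    # One entry per constraint category. Labels are one-hot pairs
    # [not enforced, enforced], the same layout sample_xy() builds from a
    # solved Sample whose after_mip.extra["lazy_enforced"] set is filled in.
    x = {"type-a": np.array([[1.0, 1.0], [1.0, 2.0], [1.0, 3.0]])}
    y = {"type-a": np.array([[False, True], [False, True], [True, False]])}

    # fit_xy() clones the classifier/threshold prototypes and fits one pair per
    # category. At solve time, before_solve_mip() calls sample_predict(sample),
    # which rebuilds x from sample.after_load (or after_lp) and returns the cids
    # whose predicted "enforce" probability clears the per-category threshold.
    comp.fit_xy(x, y)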