LazyDynamic: Rewrite fit method

master
Alinson S. Xavier 5 years ago
parent 6e326d5d6e
commit bb91c83187
No known key found for this signature in database
GPG Key ID: DCA0DAD4D2F58624

@@ -4,7 +4,7 @@
import logging
import sys
from typing import Any, Dict, List, TYPE_CHECKING, Set, Hashable
import numpy as np
from tqdm.auto import tqdm
@@ -14,9 +14,13 @@ from miplearn.classifiers.counting import CountingClassifier
from miplearn.components import classifier_evaluation_dict
from miplearn.components.component import Component
from miplearn.extractors import InstanceFeaturesExtractor
from miplearn.features import TrainingSample

logger = logging.getLogger(__name__)

if TYPE_CHECKING:
    from miplearn.solvers.learning import LearningSolver, Instance

class DynamicLazyConstraintsComponent(Component):
    """
@@ -32,6 +36,7 @@ class DynamicLazyConstraintsComponent(Component):
        self.threshold: float = threshold
        self.classifier_prototype: Classifier = classifier
        self.classifiers: Dict[Any, Classifier] = {}
        self.known_cids: List[str] = []

    def before_solve_mip(
        self,
@@ -119,3 +124,50 @@
        fn = len(pred_negative & condition_positive)
        results[idx] = classifier_evaluation_dict(tp, tn, fp, fn)
        return results
def fit_new(self, training_instances: List["Instance"]) -> None:
    """
    Train one classifier per constraint category from solved instances.

    First rebuilds ``self.known_cids`` as the sorted union of every lazy
    constraint id (cid) enforced in any training sample. Then, for each
    (sample, cid) pair, produces one training row per category:

    - x row: the instance's user features followed by the constraint's
      features;
    - y row: ``[False, True]`` if the cid was enforced in that sample,
      ``[True, False]`` otherwise.

    Constraints whose category is None are skipped. Finally, a fresh clone
    of ``self.classifier_prototype`` is fitted for each category seen and
    stored in ``self.classifiers``.

    Parameters
    ----------
    training_instances
        Previously-solved instances; only samples with a non-None
        ``lazy_enforced`` set contribute rows.
    """
    # Rebuild known_cids: sorted union of enforced cids across all samples.
    all_cids: Set[str] = set()
    for instance in training_instances:
        for sample in instance.training_data:
            if sample.lazy_enforced is not None:
                all_cids |= set(sample.lazy_enforced)
    self.known_cids.clear()
    self.known_cids.extend(sorted(all_cids))

    # Build x and y matrices, keyed by constraint category.
    x: Dict[Hashable, List[List[float]]] = {}
    y: Dict[Hashable, List[List[bool]]] = {}
    for instance in training_instances:
        # Only samples that recorded enforced lazy constraints contribute.
        samples = [s for s in instance.training_data if s.lazy_enforced is not None]
        if not samples:
            continue
        # Category and constraint features depend only on the instance,
        # not on the sample; compute and validate them once per instance
        # instead of once per (sample, cid) pair.
        per_cid = []
        for cid in self.known_cids:
            category = instance.get_constraint_category(cid)
            if category is None:
                continue
            cfeatures = instance.get_constraint_features(cid)
            assert cfeatures is not None
            assert isinstance(cfeatures, list)
            for ci in cfeatures:
                assert isinstance(ci, float)
            per_cid.append((cid, category, cfeatures))
        if not per_cid:
            continue
        assert instance.features.instance is not None
        assert instance.features.instance.user_features is not None
        user_features = instance.features.instance.user_features
        for sample in samples:
            for (cid, category, cfeatures) in per_cid:
                x.setdefault(category, []).append(list(user_features) + cfeatures)
                enforced = cid in sample.lazy_enforced
                y.setdefault(category, []).append([not enforced, enforced])

    # Train one fresh classifier per category.
    for category in x.keys():
        self.classifiers[category] = self.classifier_prototype.clone()
        self.classifiers[category].fit(
            np.array(x[category]),
            np.array(y[category]),
        )

@@ -119,7 +119,7 @@ class Instance(ABC):
    def get_constraint_features(self, cid: str) -> Optional[List[float]]:
        return [0.0]

    def get_constraint_category(self, cid: str) -> Optional[Hashable]:
        return cid

    def has_static_lazy_constraints(self) -> bool:
@@ -243,7 +243,7 @@ class PickleGzInstance(Instance):
        return self.instance.get_constraint_features(cid)

    @lazy_load
    def get_constraint_category(self, cid: str) -> Optional[Hashable]:
        assert self.instance is not None
        return self.instance.get_constraint_category(cid)

@@ -1,15 +1,23 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
from typing import List, cast
from unittest.mock import Mock

import numpy as np
import pytest
from numpy.linalg import norm
from numpy.testing import assert_array_equal

from miplearn import Instance
from miplearn.classifiers import Classifier
from miplearn.components.lazy_dynamic import DynamicLazyConstraintsComponent
from miplearn.features import (
    TrainingSample,
    Features,
    ConstraintFeatures,
    InstanceFeatures,
)
from miplearn.solvers.internal import InternalSolver
from miplearn.solvers.learning import LearningSolver
from tests.fixtures.knapsack import get_test_pyomo_instances
@@ -171,3 +179,123 @@ def test_lazy_evaluate():
        "True positive (%)": 25.0,
    },
}
@pytest.fixture
def training_instances() -> List[Instance]:
    """Build two mocked instances with fixed categories/features for fit tests."""
    first = cast(Instance, Mock(spec=Instance))
    second = cast(Instance, Mock(spec=Instance))

    # First instance: user feature [50.0], two samples.
    first.features = Features(
        instance=InstanceFeatures(
            user_features=[50.0],
        ),
    )
    first.training_data = [
        TrainingSample(lazy_enforced={"c1", "c2"}),
        TrainingSample(lazy_enforced={"c2", "c3"}),
    ]
    categories_1 = {
        "c1": "type-a",
        "c2": "type-a",
        "c3": "type-b",
        "c4": "type-b",
    }
    cfeatures_1 = {
        "c1": [1.0, 2.0, 3.0],
        "c2": [4.0, 5.0, 6.0],
        "c3": [1.0, 2.0],
        "c4": [3.0, 4.0],
    }
    first.get_constraint_category = Mock(  # type: ignore
        side_effect=categories_1.__getitem__
    )
    first.get_constraint_features = Mock(  # type: ignore
        side_effect=cfeatures_1.__getitem__
    )

    # Second instance: user feature [80.0], one sample; "c1" has no category.
    second.features = Features(
        instance=InstanceFeatures(
            user_features=[80.0],
        ),
    )
    second.training_data = [
        TrainingSample(lazy_enforced={"c3", "c4"}),
    ]
    categories_2 = {
        "c1": None,
        "c2": "type-a",
        "c3": "type-b",
        "c4": "type-b",
    }
    cfeatures_2 = {
        "c2": [7.0, 8.0, 9.0],
        "c3": [5.0, 6.0],
        "c4": [7.0, 8.0],
    }
    second.get_constraint_category = Mock(  # type: ignore
        side_effect=categories_2.__getitem__
    )
    second.get_constraint_features = Mock(  # type: ignore
        side_effect=cfeatures_2.__getitem__
    )

    return [first, second]
def test_fit_new(training_instances: List[Instance]) -> None:
    """fit_new should train one cloned classifier per constraint category."""
    clf = Mock(spec=Classifier)
    clf.clone = Mock(side_effect=lambda: Mock(spec=Classifier))
    comp = DynamicLazyConstraintsComponent(classifier=clf)
    comp.fit_new(training_instances)

    # One clone per category ("type-a" and "type-b").
    assert clf.clone.call_count == 2

    # Expected (x, y) training matrices for each category.
    expected = {
        "type-a": (
            [
                [50.0, 1.0, 2.0, 3.0],
                [50.0, 4.0, 5.0, 6.0],
                [50.0, 1.0, 2.0, 3.0],
                [50.0, 4.0, 5.0, 6.0],
                [80.0, 7.0, 8.0, 9.0],
            ],
            [
                [False, True],
                [False, True],
                [True, False],
                [False, True],
                [True, False],
            ],
        ),
        "type-b": (
            [
                [50.0, 1.0, 2.0],
                [50.0, 3.0, 4.0],
                [50.0, 1.0, 2.0],
                [50.0, 3.0, 4.0],
                [80.0, 5.0, 6.0],
                [80.0, 7.0, 8.0],
            ],
            [
                [True, False],
                [True, False],
                [False, True],
                [True, False],
                [False, True],
                [False, True],
            ],
        ),
    }

    for category, (x_expected, y_expected) in expected.items():
        assert category in comp.classifiers
        fitted = comp.classifiers[category]
        assert fitted.fit.call_count == 1  # type: ignore
        x_actual, y_actual = fitted.fit.call_args[0]  # type: ignore
        assert_array_equal(x_actual, np.array(x_expected))
        assert_array_equal(y_actual, np.array(y_expected))

Loading…
Cancel
Save