Rewrite DynamicLazy.sample_xy

master
Alinson S. Xavier 5 years ago
parent bccf0e9860
commit 6f6cd3018b
No known key found for this signature in database
GPG Key ID: DCA0DAD4D2F58624

@@ -2,7 +2,7 @@
 # Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
 # Released under the modified BSD license. See COPYING.md for more details.
-from typing import Any, List, TYPE_CHECKING, Tuple, Dict, Hashable
+from typing import Any, List, TYPE_CHECKING, Tuple, Dict, Hashable, Optional

 import numpy as np
 from overrides import EnforceOverrides
@@ -119,7 +119,11 @@ class Component:
         """
         pass

-    def sample_xy(self, sample: Sample) -> Tuple[Dict, Dict]:
+    def sample_xy(
+        self,
+        instance: Optional[Instance],
+        sample: Sample,
+    ) -> Tuple[Dict, Dict]:
         """
         Returns a pair of x and y dictionaries containing, respectively, the matrices
         of ML features and the labels for the sample. If the training sample does not

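Note on the signature change above: Component.sample_xy now receives the instance alongside the sample, so callers pass both, or None when a component ignores the instance. A minimal illustrative sketch, assuming `comp` is an already-constructed Component subclass and `instance` holds at least one collected sample; the variable names are placeholders, not part of this commit:

# Illustrative sketch only; `comp`, `instance`, and `sample` are placeholders.
# Any Component subclass works here, e.g. DynamicLazyConstraintsComponent.
sample = instance.samples[0]
x, y = comp.sample_xy(instance, sample)

# Components whose sample_xy ignores the instance argument
# (ObjectiveValueComponent, PrimalSolutionComponent,
# StaticLazyConstraintsComponent below) accept None in its place:
x, y = comp.sample_xy(None, sample)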
@@ -2,7 +2,8 @@
 # Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
 # Released under the modified BSD license. See COPYING.md for more details.

-from typing import Dict, Hashable, List, Tuple, TYPE_CHECKING
+import logging
+from typing import Dict, Hashable, List, Tuple, Optional

 import numpy as np
 from overrides import overrides
@@ -11,15 +12,11 @@ from miplearn.classifiers import Classifier
 from miplearn.classifiers.threshold import Threshold
 from miplearn.components import classifier_evaluation_dict
 from miplearn.components.component import Component
-from miplearn.features import TrainingSample
-
-import logging
+from miplearn.features import TrainingSample, Sample
+from miplearn.instance.base import Instance

 logger = logging.getLogger(__name__)

-if TYPE_CHECKING:
-    from miplearn.solvers.learning import Instance


 class DynamicConstraintsComponent(Component):
     """
@@ -40,9 +37,9 @@ class DynamicConstraintsComponent(Component):
         self.known_cids: List[str] = []
         self.attr = attr

-    def sample_xy_with_cids(
+    def sample_xy_with_cids_old(
         self,
-        instance: "Instance",
+        instance: Instance,
         sample: TrainingSample,
     ) -> Tuple[
         Dict[Hashable, List[List[float]]],
@@ -78,25 +75,78 @@ class DynamicConstraintsComponent(Component):
                     y[category] += [[True, False]]
         return x, y, cids

+    def sample_xy_with_cids(
+        self,
+        instance: Optional[Instance],
+        sample: Sample,
+    ) -> Tuple[
+        Dict[Hashable, List[List[float]]],
+        Dict[Hashable, List[List[bool]]],
+        Dict[Hashable, List[str]],
+    ]:
+        assert instance is not None
+        x: Dict[Hashable, List[List[float]]] = {}
+        y: Dict[Hashable, List[List[bool]]] = {}
+        cids: Dict[Hashable, List[str]] = {}
+        for cid in self.known_cids:
+            # Initialize categories
+            category = instance.get_constraint_category(cid)
+            if category is None:
+                continue
+            if category not in x:
+                x[category] = []
+                y[category] = []
+                cids[category] = []
+            # Features
+            features = []
+            assert sample.after_lp is not None
+            assert sample.after_lp.instance is not None
+            features.extend(sample.after_lp.instance.to_list())
+            features.extend(instance.get_constraint_features(cid))
+            for ci in features:
+                assert isinstance(ci, float)
+            x[category].append(features)
+            cids[category].append(cid)
+            # Labels
+            if sample.after_mip is not None:
+                assert sample.after_mip.extra is not None
+                if sample.after_mip.extra[self.attr] is not None:
+                    if cid in sample.after_mip.extra[self.attr]:
+                        y[category] += [[False, True]]
+                    else:
+                        y[category] += [[True, False]]
+        return x, y, cids
+
     @overrides
     def sample_xy_old(
         self,
-        instance: "Instance",
+        instance: Instance,
         sample: TrainingSample,
+    ) -> Tuple[Dict, Dict]:
+        x, y, _ = self.sample_xy_with_cids_old(instance, sample)
+        return x, y
+
+    @overrides
+    def sample_xy(
+        self,
+        instance: Optional[Instance],
+        sample: Sample,
     ) -> Tuple[Dict, Dict]:
         x, y, _ = self.sample_xy_with_cids(instance, sample)
         return x, y

     def sample_predict(
         self,
-        instance: "Instance",
+        instance: Instance,
         sample: TrainingSample,
     ) -> List[Hashable]:
         pred: List[Hashable] = []
         if len(self.known_cids) == 0:
             logger.info("Classifiers not fitted. Skipping.")
             return pred
-        x, _, cids = self.sample_xy_with_cids(instance, sample)
+        x, _, cids = self.sample_xy_with_cids_old(instance, sample)
         for category in x.keys():
             assert category in self.classifiers
             assert category in self.thresholds
@@ -111,7 +161,7 @@ class DynamicConstraintsComponent(Component):
         return pred

     @overrides
-    def fit(self, training_instances: List["Instance"]) -> None:
+    def fit(self, training_instances: List[Instance]) -> None:
         collected_cids = set()
         for instance in training_instances:
             instance.load()
@@ -141,7 +191,7 @@ class DynamicConstraintsComponent(Component):
     @overrides
     def sample_evaluate_old(
         self,
-        instance: "Instance",
+        instance: Instance,
         sample: TrainingSample,
     ) -> Dict[Hashable, Dict[str, float]]:
         assert getattr(sample, self.attr) is not None

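For orientation, the new DynamicConstraintsComponent.sample_xy_with_cids above concatenates the instance features stored in sample.after_lp with each constraint's own features, grouped by constraint category, and labels a constraint [False, True] when its cid appears in sample.after_mip.extra[self.attr] and [True, False] otherwise. Using the values from the test fixture added further down (instance features [5.0]; c1/c2 in category "type-a" with features [1,2,3]/[4,5,6], both enforced; c3/c4 in "type-b", not enforced), the returned structures look like this sketch (the cids mapping is inferred from the code, not asserted in the test):

# Data layout sketch, mirroring the new test_sample_xy fixture below.
x = {
    "type-a": [[5.0, 1.0, 2.0, 3.0], [5.0, 4.0, 5.0, 6.0]],
    "type-b": [[5.0, 1.0, 2.0], [5.0, 3.0, 4.0]],
}
y = {
    "type-a": [[False, True], [False, True]],  # c1, c2 were enforced
    "type-b": [[True, False], [True, False]],  # c3, c4 were not
}
cids = {"type-a": ["c1", "c2"], "type-b": ["c3", "c4"]}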
@@ -3,7 +3,7 @@
 # Released under the modified BSD license. See COPYING.md for more details.
 import logging
-from typing import Dict, List, TYPE_CHECKING, Hashable, Tuple, Any
+from typing import Dict, List, TYPE_CHECKING, Hashable, Tuple, Any, Optional

 import numpy as np
 from overrides import overrides
@@ -14,7 +14,7 @@ from miplearn.classifiers.counting import CountingClassifier
 from miplearn.classifiers.threshold import MinProbabilityThreshold, Threshold
 from miplearn.components.component import Component
 from miplearn.components.dynamic_common import DynamicConstraintsComponent
-from miplearn.features import TrainingSample, Features
+from miplearn.features import TrainingSample, Features, Sample
 from miplearn.types import LearningSolveStats

 logger = logging.getLogger(__name__)
@@ -95,20 +95,28 @@ class DynamicLazyConstraintsComponent(Component):
     @overrides
     def sample_xy_old(
         self,
-        instance: "Instance",
+        instance: Instance,
         sample: TrainingSample,
     ) -> Tuple[Dict, Dict]:
         return self.dynamic.sample_xy_old(instance, sample)

+    @overrides
+    def sample_xy(
+        self,
+        instance: Optional[Instance],
+        sample: Sample,
+    ) -> Tuple[Dict, Dict]:
+        return self.dynamic.sample_xy(instance, sample)
+
     def sample_predict(
         self,
-        instance: "Instance",
+        instance: Instance,
         sample: TrainingSample,
     ) -> List[Hashable]:
         return self.dynamic.sample_predict(instance, sample)

     @overrides
-    def fit(self, training_instances: List["Instance"]) -> None:
+    def fit(self, training_instances: List[Instance]) -> None:
         self.dynamic.fit(training_instances)

     @overrides
@@ -122,7 +130,7 @@ class DynamicLazyConstraintsComponent(Component):
     @overrides
     def sample_evaluate_old(
         self,
-        instance: "Instance",
+        instance: Instance,
         sample: TrainingSample,
     ) -> Dict[Hashable, Dict[str, float]]:
         return self.dynamic.sample_evaluate_old(instance, sample)

@@ -3,23 +3,24 @@
 # Released under the modified BSD license. See COPYING.md for more details.
 import logging
-from typing import Any, TYPE_CHECKING, Hashable, Set, Tuple, Dict, List
+from typing import Any, TYPE_CHECKING, Hashable, Set, Tuple, Dict, List, Optional

 import numpy as np
 from overrides import overrides

+from miplearn.instance.base import Instance
 from miplearn.classifiers import Classifier
 from miplearn.classifiers.counting import CountingClassifier
 from miplearn.classifiers.threshold import Threshold, MinProbabilityThreshold
 from miplearn.components.component import Component
 from miplearn.components.dynamic_common import DynamicConstraintsComponent
-from miplearn.features import Features, TrainingSample
+from miplearn.features import Features, TrainingSample, Sample
 from miplearn.types import LearningSolveStats

 logger = logging.getLogger(__name__)

 if TYPE_CHECKING:
-    from miplearn.solvers.learning import LearningSolver, Instance
+    from miplearn.solvers.learning import LearningSolver


 class UserCutsComponent(Component):
@@ -103,6 +104,14 @@ class UserCutsComponent(Component):
     ) -> Tuple[Dict, Dict]:
         return self.dynamic.sample_xy_old(instance, sample)

+    @overrides
+    def sample_xy(
+        self,
+        instance: Optional[Instance],
+        sample: Sample,
+    ) -> Tuple[Dict, Dict]:
+        return self.dynamic.sample_xy(instance, sample)
+
     def sample_predict(
         self,
         instance: "Instance",

@@ -3,7 +3,7 @@
 # Released under the modified BSD license. See COPYING.md for more details.
 import logging
-from typing import List, Dict, Any, TYPE_CHECKING, Tuple, Hashable
+from typing import List, Dict, Any, TYPE_CHECKING, Tuple, Hashable, Optional

 import numpy as np
 from overrides import overrides
@@ -101,6 +101,7 @@ class ObjectiveValueComponent(Component):
     @overrides
     def sample_xy(
         self,
+        _: Optional[Instance],
         sample: Sample,
     ) -> Tuple[Dict[Hashable, List[List[float]]], Dict[Hashable, List[List[float]]]]:
         # Instance features

@@ -10,6 +10,7 @@ from typing import (
     Any,
     TYPE_CHECKING,
     Tuple,
+    Optional,
 )

 import numpy as np
@@ -182,6 +183,7 @@ class PrimalSolutionComponent(Component):
     @overrides
     def sample_xy(
         self,
+        _: Optional[Instance],
         sample: Sample,
     ) -> Tuple[Dict[Category, List[List[float]]], Dict[Category, List[List[float]]]]:
         x: Dict = {}

@@ -3,11 +3,12 @@
 # Released under the modified BSD license. See COPYING.md for more details.
 import logging
-from typing import Dict, Tuple, List, Hashable, Any, TYPE_CHECKING, Set
+from typing import Dict, Tuple, List, Hashable, Any, TYPE_CHECKING, Set, Optional

 import numpy as np
 from overrides import overrides

+from miplearn.instance.base import Instance
 from miplearn.classifiers import Classifier
 from miplearn.classifiers.counting import CountingClassifier
 from miplearn.classifiers.threshold import MinProbabilityThreshold, Threshold
@@ -18,7 +19,7 @@ from miplearn.types import LearningSolveStats
 logger = logging.getLogger(__name__)

 if TYPE_CHECKING:
-    from miplearn.solvers.learning import LearningSolver, Instance
+    from miplearn.solvers.learning import LearningSolver


 class LazyConstraint:
@@ -202,6 +203,7 @@ class StaticLazyConstraintsComponent(Component):
     @overrides
     def sample_xy(
         self,
+        _: Optional[Instance],
         sample: Sample,
     ) -> Tuple[Dict[Hashable, List[List[float]]], Dict[Hashable, List[List[float]]]]:
         x: Dict = {}

@ -98,7 +98,7 @@ class Instance(ABC, EnforceOverrides):
""" """
return "default" return "default"
def get_constraint_features(self, cid: str) -> Optional[List[float]]: def get_constraint_features(self, cid: str) -> List[float]:
return [0.0] return [0.0]
def get_constraint_category(self, cid: str) -> Optional[Hashable]: def get_constraint_category(self, cid: str) -> Optional[Hashable]:

@@ -16,14 +16,16 @@ from miplearn.features import (
     TrainingSample,
     Features,
     InstanceFeatures,
+    Sample,
 )
 from miplearn.instance.base import Instance
+from miplearn.solvers.tests import assert_equals

 E = 0.1


 @pytest.fixture
-def training_instances() -> List[Instance]:
+def training_instances2() -> List[Instance]:
     instances = [cast(Instance, Mock(spec=Instance)) for _ in range(2)]
     instances[0].features = Features(
         instance=InstanceFeatures(
@@ -76,11 +78,64 @@ def training_instances() -> List[Instance]:
     return instances


-def test_fit(training_instances: List[Instance]) -> None:
+@pytest.fixture
+def training_instances() -> List[Instance]:
+    instances = [cast(Instance, Mock(spec=Instance)) for _ in range(2)]
+    instances[0].samples = [
+        Sample(
+            after_lp=Features(
+                instance=InstanceFeatures(),
+            ),
+            after_mip=Features(extra={"lazy_enforced": {"c1", "c2"}}),
+        )
+    ]
+    instances[0].samples[0].after_lp.instance.to_list = Mock(  # type: ignore
+        return_value=[5.0]
+    )
+    instances[0].get_constraint_category = Mock(  # type: ignore
+        side_effect=lambda cid: {
+            "c1": "type-a",
+            "c2": "type-a",
+            "c3": "type-b",
+            "c4": "type-b",
+        }[cid]
+    )
+    instances[0].get_constraint_features = Mock(  # type: ignore
+        side_effect=lambda cid: {
+            "c1": [1.0, 2.0, 3.0],
+            "c2": [4.0, 5.0, 6.0],
+            "c3": [1.0, 2.0],
+            "c4": [3.0, 4.0],
+        }[cid]
+    )
+    return instances
+
+
+def test_sample_xy(training_instances: List[Instance]) -> None:
+    comp = DynamicLazyConstraintsComponent()
+    comp.dynamic.known_cids = ["c1", "c2", "c3", "c4"]
+    x_expected = {
+        "type-a": [[5.0, 1.0, 2.0, 3.0], [5.0, 4.0, 5.0, 6.0]],
+        "type-b": [[5.0, 1.0, 2.0], [5.0, 3.0, 4.0]],
+    }
+    y_expected = {
+        "type-a": [[False, True], [False, True]],
+        "type-b": [[True, False], [True, False]],
+    }
+    x_actual, y_actual = comp.sample_xy(
+        training_instances[0],
+        training_instances[0].samples[0],
+    )
+    assert_equals(x_actual, x_expected)
+    assert_equals(y_actual, y_expected)
+
+
+def test_fit(training_instances2: List[Instance]) -> None:
     clf = Mock(spec=Classifier)
     clf.clone = Mock(side_effect=lambda: Mock(spec=Classifier))
     comp = DynamicLazyConstraintsComponent(classifier=clf)
-    comp.fit(training_instances)
+    comp.fit(training_instances2)
     assert clf.clone.call_count == 2

     assert "type-a" in comp.classifiers
@@ -142,7 +197,7 @@ def test_fit(training_instances: List[Instance]) -> None:
     )


-def test_sample_predict_evaluate(training_instances: List[Instance]) -> None:
+def test_sample_predict_evaluate(training_instances2: List[Instance]) -> None:
     comp = DynamicLazyConstraintsComponent()
     comp.known_cids.extend(["c1", "c2", "c3", "c4"])
     comp.thresholds["type-a"] = MinProbabilityThreshold([0.5, 0.5])
@@ -156,13 +211,13 @@ def test_sample_predict_evaluate(training_instances: List[Instance]) -> None:
         side_effect=lambda _: np.array([[0.9, 0.1], [0.1, 0.9]])
     )
     pred = comp.sample_predict(
-        training_instances[0],
-        training_instances[0].training_data[0],
+        training_instances2[0],
+        training_instances2[0].training_data[0],
     )
     assert pred == ["c1", "c4"]
     ev = comp.sample_evaluate_old(
-        training_instances[0],
-        training_instances[0].training_data[0],
+        training_instances2[0],
+        training_instances2[0].training_data[0],
     )
     print(ev)
     assert ev == {

@@ -88,7 +88,7 @@ def test_sample_xy(sample: Sample) -> None:
         "Lower bound": [[1.0]],
         "Upper bound": [[2.0]],
     }
-    xy = ObjectiveValueComponent().sample_xy(sample)
+    xy = ObjectiveValueComponent().sample_xy(None, sample)
     assert xy is not None
     x_actual, y_actual = xy
     assert x_actual == x_expected

@@ -82,7 +82,7 @@ def test_xy(sample: Sample) -> None:
             [True, False],
         ]
     }
-    xy = PrimalSolutionComponent().sample_xy(sample)
+    xy = PrimalSolutionComponent().sample_xy(None, sample)
    assert xy is not None
    x_actual, y_actual = xy
    assert x_actual == x_expected

@@ -292,7 +292,7 @@ def test_sample_xy(sample: Sample) -> None:
         "type-a": [[False, True], [False, True], [True, False]],
         "type-b": [[False, True]],
     }
-    xy = StaticLazyConstraintsComponent().sample_xy(sample)
+    xy = StaticLazyConstraintsComponent().sample_xy(None, sample)
     assert xy is not None
     x_actual, y_actual = xy
     assert x_actual == x_expected
