Components: Switch from factory methods to prototype objects

master
Alinson S. Xavier 5 years ago
parent 59c734f2a1
commit bc8fe4dc98
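Summary of the change: component constructors no longer take zero-argument factory callables (lambdas) that build a fresh classifier, regressor, or threshold; they now take a configured prototype instance and call its `clone()` method whenever a fresh copy is needed. The sketch below illustrates the difference at the call site, based on the `PrimalSolutionComponent` example from the documentation changes in this commit; it is an illustration of the new calling convention, not additional code from the commit.

```python
from miplearn import PrimalSolutionComponent, ScikitLearnClassifier
from sklearn.neighbors import KNeighborsClassifier

# Before this commit, the classifier was supplied as a zero-argument factory:
#     PrimalSolutionComponent(
#         classifier=lambda: ScikitLearnClassifier(KNeighborsClassifier(n_neighbors=5)),
#     )
# After this commit, a configured prototype instance is passed directly, and the
# component calls clone() on it internally whenever a fresh copy is needed:
comp = PrimalSolutionComponent(
    classifier=ScikitLearnClassifier(KNeighborsClassifier(n_neighbors=5)),
)
```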

@@ -79,7 +79,7 @@ from miplearn import PrimalSolutionComponent, MinPrecisionThreshold
 PrimalSolutionComponent(
     mode="heuristic",
-    threshold=lambda: MinPrecisionThreshold([0.80, 0.95]),
+    threshold=MinPrecisionThreshold([0.80, 0.95]),
 )
 ```
@@ -159,14 +159,14 @@ dtype: float64
 By default, given a training set of instances, MIPLearn trains a fixed set of ML classifiers and regressors, then selects the best one based on cross-validation performance. Alternatively, the user may specify which ML model a component should use through the `classifier` or `regressor` constructor parameters. Scikit-learn classifiers and regressors are currently supported. A future version of the package will add compatibility with Keras models.
-The example below shows how to construct a `PrimalSolutionComponent` which internally uses scikit-learn's `KNeighborsClassifier`. Any other scikit-learn classifier or pipeline can be used. The classifier needs to be provided as a lambda function because the component may need to create multiple copies of it. It needs to be wrapped in `ScikitLearnClassifier` to ensure that all the proper data transformations are applied.
+The example below shows how to construct a `PrimalSolutionComponent` which internally uses scikit-learn's `KNeighborsClassifier`. Any other scikit-learn classifier or pipeline can be used. It needs to be wrapped in `ScikitLearnClassifier` to ensure that all the proper data transformations are applied.
 ```python
 from miplearn import PrimalSolutionComponent, ScikitLearnClassifier
 from sklearn.neighbors import KNeighborsClassifier
 comp = PrimalSolutionComponent(
-    classifier=lambda: ScikitLearnClassifier(
+    classifier=ScikitLearnClassifier(
         KNeighborsClassifier(n_neighbors=5),
     ),
 )
 ```
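Since the documentation above notes that any scikit-learn classifier or pipeline can be used, a pipeline prototype would be passed the same way. The following is a hedged sketch only: it assumes `ScikitLearnClassifier` accepts a `sklearn.pipeline.Pipeline` exactly like a bare estimator, which the text implies but this commit does not show.

```python
from miplearn import PrimalSolutionComponent, ScikitLearnClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler

# Assumption: ScikitLearnClassifier wraps a Pipeline the same way it wraps a
# bare estimator; the pipeline prototype is cloned per variable category.
comp = PrimalSolutionComponent(
    classifier=ScikitLearnClassifier(
        make_pipeline(StandardScaler(), KNeighborsClassifier(n_neighbors=5)),
    ),
)
```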

@@ -4,7 +4,6 @@
 import logging
 import sys
-from copy import deepcopy
 from typing import Any, Dict
 import numpy as np
@@ -29,6 +28,7 @@ class UserCutsComponent(Component):
         classifier: Classifier = CountingClassifier(),
         threshold: float = 0.05,
     ):
+        assert isinstance(classifier, Classifier)
         self.threshold: float = threshold
         self.classifier_prototype: Classifier = classifier
         self.classifiers: Dict[Any, Classifier] = {}
@@ -63,7 +63,7 @@ class UserCutsComponent(Component):
                 continue
             for v in instance.found_violated_user_cuts:
                 if v not in self.classifiers:
-                    self.classifiers[v] = deepcopy(self.classifier_prototype)
+                    self.classifiers[v] = self.classifier_prototype.clone()
                     violation_to_instance_idx[v] = []
                 violation_to_instance_idx[v] += [idx]
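The `clone()` calls above replace `deepcopy` of the prototype. A minimal sketch of the contract the components now rely on is shown below; this is an illustration only, and the actual `Classifier.clone()` in MIPLearn may be implemented differently (for example, by rebuilding the wrapped model instead of deep-copying it).

```python
from copy import deepcopy

class PrototypeClassifier:
    """Illustration of the prototype contract assumed by the components above:
    clone() must return a fresh, unfitted copy with the same configuration."""

    def clone(self) -> "PrototypeClassifier":
        # A deepcopy-based default is one possible implementation; a wrapper
        # around scikit-learn could instead delegate to sklearn.base.clone().
        return deepcopy(self)
```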

@@ -4,7 +4,6 @@
 import logging
 import sys
-from copy import deepcopy
 from typing import Any, Dict
 import numpy as np
@@ -29,6 +28,7 @@ class DynamicLazyConstraintsComponent(Component):
         classifier: Classifier = CountingClassifier(),
         threshold: float = 0.05,
     ):
+        assert isinstance(classifier, Classifier)
         self.threshold: float = threshold
         self.classifier_prototype: Classifier = classifier
         self.classifiers: Dict[Any, Classifier] = {}
@@ -75,7 +75,7 @@ class DynamicLazyConstraintsComponent(Component):
             if isinstance(v, list):
                 v = tuple(v)
             if v not in self.classifiers:
-                self.classifiers[v] = deepcopy(self.classifier_prototype)
+                self.classifiers[v] = self.classifier_prototype.clone()
                 violation_to_instance_idx[v] = []
             violation_to_instance_idx[v] += [idx]

@@ -4,12 +4,12 @@
 import logging
 import sys
-from copy import deepcopy
-from typing import Any, Dict, Tuple, Optional
+from typing import Dict, Tuple, Optional
 import numpy as np
 from tqdm.auto import tqdm
+from miplearn import Classifier
 from miplearn.classifiers.counting import CountingClassifier
 from miplearn.components.component import Component
 from miplearn.types import TrainingSample, Features
@@ -32,6 +32,7 @@ class StaticLazyConstraintsComponent(Component):
         large_gap=1e-2,
         violation_tolerance=-0.5,
     ):
+        assert isinstance(classifier, Classifier)
         self.threshold = threshold
         self.classifier_prototype = classifier
         self.classifiers = {}
@@ -120,7 +121,7 @@ class StaticLazyConstraintsComponent(Component):
             x.keys(), desc="Fit (lazy)", disable=not sys.stdout.isatty()
         ):
             if category not in self.classifiers:
-                self.classifiers[category] = deepcopy(self.classifier_prototype)
+                self.classifiers[category] = self.classifier_prototype.clone()
             self.classifiers[category].fit(x[category], y[category])
     def predict(self, instance):

@@ -3,7 +3,7 @@
 # Released under the modified BSD license. See COPYING.md for more details.
 import logging
-from typing import List, Dict, Union, Callable, Optional, Any, TYPE_CHECKING, Tuple
+from typing import List, Dict, Union, Optional, Any, TYPE_CHECKING, Tuple
 import numpy as np
 from sklearn.linear_model import LinearRegression
@@ -16,10 +16,11 @@ from sklearn.metrics import (
 )
 from miplearn.classifiers import Regressor
+from miplearn.classifiers.sklearn import ScikitLearnRegressor
 from miplearn.components.component import Component
 from miplearn.extractors import InstanceIterator
 from miplearn.instance import Instance
-from miplearn.types import MIPSolveStats, TrainingSample, LearningSolveStats, Features
+from miplearn.types import TrainingSample, LearningSolveStats, Features
 if TYPE_CHECKING:
     from miplearn.solvers.learning import LearningSolver
@@ -34,13 +35,15 @@ class ObjectiveValueComponent(Component):
     def __init__(
         self,
-        lb_regressor: Callable[[], Regressor] = LinearRegression,
-        ub_regressor: Callable[[], Regressor] = LinearRegression,
+        lb_regressor: Regressor = ScikitLearnRegressor(LinearRegression()),
+        ub_regressor: Regressor = ScikitLearnRegressor(LinearRegression()),
     ) -> None:
+        assert isinstance(lb_regressor, Regressor)
+        assert isinstance(ub_regressor, Regressor)
         self.ub_regressor: Optional[Regressor] = None
         self.lb_regressor: Optional[Regressor] = None
-        self.lb_regressor_factory = lb_regressor
-        self.ub_regressor_factory = ub_regressor
+        self.lb_regressor_prototype = lb_regressor
+        self.ub_regressor_prototype = ub_regressor
         self._predicted_ub: Optional[float] = None
         self._predicted_lb: Optional[float] = None
@@ -77,8 +80,8 @@ class ObjectiveValueComponent(Component):
         stats["Objective: predicted LB"] = self._predicted_lb
     def fit(self, training_instances: Union[List[str], List[Instance]]) -> None:
-        self.lb_regressor = self.lb_regressor_factory()
-        self.ub_regressor = self.ub_regressor_factory()
+        self.lb_regressor = self.lb_regressor_prototype.clone()
+        self.ub_regressor = self.ub_regressor_prototype.clone()
         logger.debug("Extracting features...")
         x_train = self.x(training_instances)
         y_train = self.y(training_instances)
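For the new `ScikitLearnRegressor(LinearRegression())` defaults, cloning maps naturally onto `sklearn.base.clone`, which returns an unfitted estimator with the same hyperparameters. The sketch below is an assumption about how such a wrapper could behave, not the actual MIPLearn implementation; the wrapper class name is hypothetical.

```python
from sklearn.base import clone as sk_clone
from sklearn.linear_model import LinearRegression

class SklearnRegressorWrapper:  # hypothetical stand-in for ScikitLearnRegressor
    def __init__(self, inner):
        self.inner = inner

    def clone(self) -> "SklearnRegressorWrapper":
        # sklearn.base.clone returns an unfitted estimator with identical
        # hyperparameters, which is exactly what a prototype needs to provide.
        return SklearnRegressorWrapper(sk_clone(self.inner))

lb_prototype = SklearnRegressorWrapper(LinearRegression())
lb_copy = lb_prototype.clone()  # independent, unfitted copy
```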

@@ -50,18 +50,18 @@ class PrimalSolutionComponent(Component):
     def __init__(
         self,
-        classifier: Callable[[], Classifier] = lambda: AdaptiveClassifier(),
+        classifier: Classifier = AdaptiveClassifier(),
         mode: str = "exact",
-        threshold: Callable[[], Threshold] = lambda: MinPrecisionThreshold(
-            [0.98, 0.98]
-        ),
+        threshold: Threshold = MinPrecisionThreshold([0.98, 0.98]),
     ) -> None:
+        assert isinstance(classifier, Classifier)
+        assert isinstance(threshold, Threshold)
         assert mode in ["exact", "heuristic"]
         self.mode = mode
         self.classifiers: Dict[Hashable, Classifier] = {}
         self.thresholds: Dict[Hashable, Threshold] = {}
-        self.threshold_factory = threshold
-        self.classifier_factory = classifier
+        self.threshold_prototype = threshold
+        self.classifier_prototype = classifier
         self.stats: Dict[str, float] = {}
         self._n_free = 0
         self._n_zero = 0
@@ -114,8 +114,8 @@ class PrimalSolutionComponent(Component):
         y: Dict[str, np.ndarray],
     ) -> None:
         for category in x.keys():
-            clf = self.classifier_factory()
-            thr = self.threshold_factory()
+            clf = self.classifier_prototype.clone()
+            thr = self.threshold_prototype.clone()
             clf.fit(x[category], y[category])
             thr.fit(clf, x[category], y[category])
             self.classifiers[category] = clf
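One consequence of using instances as default argument values (e.g., `AdaptiveClassifier()` and `MinPrecisionThreshold([0.98, 0.98])` above) is that every component constructed without arguments shares the same default objects; the prototype pattern keeps this safe because components only ever fit clones. The small illustration below assumes the constructor stores the prototype directly, as shown in the diff above.

```python
from miplearn import PrimalSolutionComponent

# Python evaluates default arguments once, so both components share the same
# default prototype objects...
a = PrimalSolutionComponent()
b = PrimalSolutionComponent()
assert a.classifier_prototype is b.classifier_prototype

# ...but fit_xy() only trains per-category clones stored on each component,
# so training `a` leaves `b` (and the shared prototype) untouched.
```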

@@ -22,6 +22,7 @@ def test_lazy_fit():
     instances[0].found_violated_lazy_constraints = ["a", "b"]
     instances[1].found_violated_lazy_constraints = ["b", "c"]
     classifier = Mock(spec=Classifier)
+    classifier.clone = lambda: Mock(spec=Classifier)
     component = DynamicLazyConstraintsComponent(classifier=classifier)
     component.fit(instances)

@@ -38,11 +38,13 @@ def test_x_y_predict() -> None:
     # Construct mock regressors
     lb_regressor = Mock(spec=Regressor)
     lb_regressor.predict = Mock(return_value=np.array([[5.0], [6.0]]))
+    lb_regressor.clone = lambda: lb_regressor
     ub_regressor = Mock(spec=Regressor)
     ub_regressor.predict = Mock(return_value=np.array([[3.0], [3.0]]))
+    ub_regressor.clone = lambda: ub_regressor
     comp = ObjectiveValueComponent(
-        lb_regressor=lambda: lb_regressor,
-        ub_regressor=lambda: ub_regressor,
+        lb_regressor=lb_regressor,
+        ub_regressor=ub_regressor,
     )
     # Should build x correctly
@@ -77,9 +79,10 @@ def test_obj_evaluate():
     instances, models = get_test_pyomo_instances()
     reg = Mock(spec=Regressor)
     reg.predict = Mock(return_value=np.array([[1000.0], [1000.0]]))
+    reg.clone = lambda: reg
     comp = ObjectiveValueComponent(
-        lb_regressor=lambda: reg,
-        ub_regressor=lambda: reg,
+        lb_regressor=reg,
+        ub_regressor=reg,
     )
     comp.fit(instances)
     ev = comp.evaluate(instances)

@@ -189,10 +189,11 @@ def test_predict() -> None:
 def test_fit_xy():
-    comp = PrimalSolutionComponent(
-        classifier=lambda: Mock(spec=Classifier),
-        threshold=lambda: Mock(spec=Threshold),
-    )
+    clf = Mock(spec=Classifier)
+    clf.clone = lambda: Mock(spec=Classifier)
+    thr = Mock(spec=Threshold)
+    thr.clone = lambda: Mock(spec=Threshold)
+    comp = PrimalSolutionComponent(classifier=clf, threshold=thr)
     x = {
         "type-a": np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]),
         "type-b": np.array([[7.0, 8.0, 9.0]]),
