Make sample_* methods accept Instance

master
Alinson S. Xavier 5 years ago
parent bb91c83187
commit c6aee4f90d
No known key found for this signature in database
GPG Key ID: DCA0DAD4D2F58624

@ -108,14 +108,13 @@ class Component:
@staticmethod @staticmethod
def sample_xy( def sample_xy(
features: Features, instance: Instance,
sample: TrainingSample, sample: TrainingSample,
) -> Tuple[Dict, Dict]: ) -> Tuple[Dict, Dict]:
""" """
Given a set of features and a training sample, returns a pair of x and y Returns a pair of x and y dictionaries containing, respectively, the matrices
dictionaries containing, respectively, the matrices of ML features and the of ML features and the labels for the sample. If the training sample does not
labels for the sample. If the training sample does not include label include label information, returns (x, {}).
information, returns (x, {}).
""" """
pass pass
@ -128,7 +127,7 @@ class Component:
for instance in instances: for instance in instances:
assert isinstance(instance, Instance) assert isinstance(instance, Instance)
for sample in instance.training_data: for sample in instance.training_data:
xy = self.sample_xy(instance.features, sample) xy = self.sample_xy(instance, sample)
if xy is None: if xy is None:
continue continue
x_sample, y_sample = xy x_sample, y_sample = xy
@ -203,12 +202,12 @@ class Component:
ev = [] ev = []
for instance in instances: for instance in instances:
for sample in instance.training_data: for sample in instance.training_data:
ev += [self.sample_evaluate(instance.features, sample)] ev += [self.sample_evaluate(instance, sample)]
return ev return ev
def sample_evaluate( def sample_evaluate(
self, self,
features: Features, instance: Instance,
sample: TrainingSample, sample: TrainingSample,
) -> Dict[Hashable, Dict[str, float]]: ) -> Dict[Hashable, Dict[str, float]]:
return {} return {}

@ -4,7 +4,7 @@
import logging import logging
import sys import sys
from typing import Any, Dict, List, TYPE_CHECKING, Set, Hashable from typing import Any, Dict, List, TYPE_CHECKING, Hashable
import numpy as np import numpy as np
from tqdm.auto import tqdm from tqdm.auto import tqdm
@ -14,12 +14,11 @@ from miplearn.classifiers.counting import CountingClassifier
from miplearn.components import classifier_evaluation_dict from miplearn.components import classifier_evaluation_dict
from miplearn.components.component import Component from miplearn.components.component import Component
from miplearn.extractors import InstanceFeaturesExtractor from miplearn.extractors import InstanceFeaturesExtractor
from miplearn.features import TrainingSample
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
if TYPE_CHECKING: if TYPE_CHECKING:
from miplearn.solvers.learning import LearningSolver, Instance from miplearn.solvers.learning import Instance
class DynamicLazyConstraintsComponent(Component): class DynamicLazyConstraintsComponent(Component):

@ -66,7 +66,7 @@ class StaticLazyConstraintsComponent(Component):
if not features.instance.lazy_constraint_count == 0: if not features.instance.lazy_constraint_count == 0:
logger.info("Instance does not have static lazy constraints. Skipping.") logger.info("Instance does not have static lazy constraints. Skipping.")
logger.info("Predicting required lazy constraints...") logger.info("Predicting required lazy constraints...")
self.enforced_cids = set(self.sample_predict(features, training_data)) self.enforced_cids = set(self.sample_predict(instance, training_data))
logger.info("Moving lazy constraints to the pool...") logger.info("Moving lazy constraints to the pool...")
self.pool = {} self.pool = {}
for (cid, cdict) in features.constraints.items(): for (cid, cdict) in features.constraints.items():
@ -144,14 +144,14 @@ class StaticLazyConstraintsComponent(Component):
def sample_predict( def sample_predict(
self, self,
features: Features, instance: "Instance",
sample: TrainingSample, sample: TrainingSample,
) -> List[str]: ) -> List[str]:
assert features.constraints is not None assert instance.features.constraints is not None
x, y = self.sample_xy(features, sample) x, y = self.sample_xy(instance, sample)
category_to_cids: Dict[Hashable, List[str]] = {} category_to_cids: Dict[Hashable, List[str]] = {}
for (cid, cfeatures) in features.constraints.items(): for (cid, cfeatures) in instance.features.constraints.items():
if cfeatures.category is None: if cfeatures.category is None:
continue continue
category = cfeatures.category category = cfeatures.category
@ -173,13 +173,13 @@ class StaticLazyConstraintsComponent(Component):
@staticmethod @staticmethod
def sample_xy( def sample_xy(
features: Features, instance: "Instance",
sample: TrainingSample, sample: TrainingSample,
) -> Tuple[Dict[Hashable, List[List[float]]], Dict[Hashable, List[List[float]]]]: ) -> Tuple[Dict[Hashable, List[List[float]]], Dict[Hashable, List[List[float]]]]:
assert features.constraints is not None assert instance.features.constraints is not None
x: Dict = {} x: Dict = {}
y: Dict = {} y: Dict = {}
for (cid, cfeatures) in features.constraints.items(): for (cid, cfeatures) in instance.features.constraints.items():
if not cfeatures.lazy: if not cfeatures.lazy:
continue continue
category = cfeatures.category category = cfeatures.category

@ -44,7 +44,7 @@ class ObjectiveValueComponent(Component):
training_data: TrainingSample, training_data: TrainingSample,
) -> None: ) -> None:
logger.info("Predicting optimal value...") logger.info("Predicting optimal value...")
pred = self.sample_predict(features, training_data) pred = self.sample_predict(instance, training_data)
for (c, v) in pred.items(): for (c, v) in pred.items():
logger.info(f"Predicted {c.lower()}: %.6e" % v) logger.info(f"Predicted {c.lower()}: %.6e" % v)
stats[f"Objective: Predicted {c.lower()}"] = v # type: ignore stats[f"Objective: Predicted {c.lower()}"] = v # type: ignore
@ -61,11 +61,11 @@ class ObjectiveValueComponent(Component):
def sample_predict( def sample_predict(
self, self,
features: Features, instance: Instance,
sample: TrainingSample, sample: TrainingSample,
) -> Dict[str, float]: ) -> Dict[str, float]:
pred: Dict[str, float] = {} pred: Dict[str, float] = {}
x, _ = self.sample_xy(features, sample) x, _ = self.sample_xy(instance, sample)
for c in ["Upper bound", "Lower bound"]: for c in ["Upper bound", "Lower bound"]:
if c in self.regressors is not None: if c in self.regressors is not None:
pred[c] = self.regressors[c].predict(np.array(x[c]))[0, 0] pred[c] = self.regressors[c].predict(np.array(x[c]))[0, 0]
@ -75,14 +75,15 @@ class ObjectiveValueComponent(Component):
@staticmethod @staticmethod
def sample_xy( def sample_xy(
features: Features, instance: Instance,
sample: TrainingSample, sample: TrainingSample,
) -> Tuple[Dict[Hashable, List[List[float]]], Dict[Hashable, List[List[float]]]]: ) -> Tuple[Dict[Hashable, List[List[float]]], Dict[Hashable, List[List[float]]]]:
assert features.instance is not None ifeatures = instance.features.instance
assert features.instance.user_features is not None assert ifeatures is not None
assert ifeatures.user_features is not None
x: Dict[Hashable, List[List[float]]] = {} x: Dict[Hashable, List[List[float]]] = {}
y: Dict[Hashable, List[List[float]]] = {} y: Dict[Hashable, List[List[float]]] = {}
f = list(features.instance.user_features) f = list(ifeatures.user_features)
if sample.lp_value is not None: if sample.lp_value is not None:
f += [sample.lp_value] f += [sample.lp_value]
x["Upper bound"] = [f] x["Upper bound"] = [f]
@ -95,7 +96,7 @@ class ObjectiveValueComponent(Component):
def sample_evaluate( def sample_evaluate(
self, self,
features: Features, instance: Instance,
sample: TrainingSample, sample: TrainingSample,
) -> Dict[Hashable, Dict[str, float]]: ) -> Dict[Hashable, Dict[str, float]]:
def compare(y_pred: float, y_actual: float) -> Dict[str, float]: def compare(y_pred: float, y_actual: float) -> Dict[str, float]:
@ -108,7 +109,7 @@ class ObjectiveValueComponent(Component):
} }
result: Dict[Hashable, Dict[str, float]] = {} result: Dict[Hashable, Dict[str, float]] = {}
pred = self.sample_predict(features, sample) pred = self.sample_predict(instance, sample)
if sample.upper_bound is not None: if sample.upper_bound is not None:
result["Upper bound"] = compare(pred["Upper bound"], sample.upper_bound) result["Upper bound"] = compare(pred["Upper bound"], sample.upper_bound)
if sample.lower_bound is not None: if sample.lower_bound is not None:

@ -73,7 +73,7 @@ class PrimalSolutionComponent(Component):
# Predict solution and provide it to the solver # Predict solution and provide it to the solver
logger.info("Predicting MIP solution...") logger.info("Predicting MIP solution...")
solution = self.sample_predict(features, training_data) solution = self.sample_predict(instance, training_data)
assert solver.internal_solver is not None assert solver.internal_solver is not None
if self.mode == "heuristic": if self.mode == "heuristic":
solver.internal_solver.fix(solution) solver.internal_solver.fix(solution)
@ -101,20 +101,20 @@ class PrimalSolutionComponent(Component):
def sample_predict( def sample_predict(
self, self,
features: Features, instance: Instance,
sample: TrainingSample, sample: TrainingSample,
) -> Solution: ) -> Solution:
assert features.variables is not None assert instance.features.variables is not None
# Initialize empty solution # Initialize empty solution
solution: Solution = {} solution: Solution = {}
for (var_name, var_dict) in features.variables.items(): for (var_name, var_dict) in instance.features.variables.items():
solution[var_name] = {} solution[var_name] = {}
for idx in var_dict.keys(): for idx in var_dict.keys():
solution[var_name][idx] = None solution[var_name][idx] = None
# Compute y_pred # Compute y_pred
x, _ = self.sample_xy(features, sample) x, _ = self.sample_xy(instance, sample)
y_pred = {} y_pred = {}
for category in x.keys(): for category in x.keys():
assert category in self.classifiers, ( assert category in self.classifiers, (
@ -133,7 +133,7 @@ class PrimalSolutionComponent(Component):
# Convert y_pred into solution # Convert y_pred into solution
category_offset: Dict[Hashable, int] = {cat: 0 for cat in x.keys()} category_offset: Dict[Hashable, int] = {cat: 0 for cat in x.keys()}
for (var_name, var_dict) in features.variables.items(): for (var_name, var_dict) in instance.features.variables.items():
for (idx, var_features) in var_dict.items(): for (idx, var_features) in var_dict.items():
category = var_features.category category = var_features.category
offset = category_offset[category] offset = category_offset[category]
@ -147,16 +147,16 @@ class PrimalSolutionComponent(Component):
@staticmethod @staticmethod
def sample_xy( def sample_xy(
features: Features, instance: Instance,
sample: TrainingSample, sample: TrainingSample,
) -> Tuple[Dict[Hashable, List[List[float]]], Dict[Hashable, List[List[float]]]]: ) -> Tuple[Dict[Hashable, List[List[float]]], Dict[Hashable, List[List[float]]]]:
assert features.variables is not None assert instance.features.variables is not None
x: Dict = {} x: Dict = {}
y: Dict = {} y: Dict = {}
solution: Optional[Solution] = None solution: Optional[Solution] = None
if sample.solution is not None: if sample.solution is not None:
solution = sample.solution solution = sample.solution
for (var_name, var_dict) in features.variables.items(): for (var_name, var_dict) in instance.features.variables.items():
for (idx, var_features) in var_dict.items(): for (idx, var_features) in var_dict.items():
category = var_features.category category = var_features.category
if category is None: if category is None:
@ -186,12 +186,12 @@ class PrimalSolutionComponent(Component):
def sample_evaluate( def sample_evaluate(
self, self,
features: Features, instance: Instance,
sample: TrainingSample, sample: TrainingSample,
) -> Dict[Hashable, Dict[str, float]]: ) -> Dict[Hashable, Dict[str, float]]:
solution_actual = sample.solution solution_actual = sample.solution
assert solution_actual is not None assert solution_actual is not None
solution_pred = self.sample_predict(features, sample) solution_pred = self.sample_predict(instance, sample)
vars_all, vars_one, vars_zero = set(), set(), set() vars_all, vars_one, vars_zero = set(), set(), set()
pred_one_positive, pred_zero_positive = set(), set() pred_one_positive, pred_zero_positive = set(), set()
for (varname, var_dict) in solution_actual.items(): for (varname, var_dict) in solution_actual.items():

@ -23,6 +23,14 @@ from miplearn.features import (
) )
@pytest.fixture
def instance(features: Features) -> Instance:
instance = Mock(spec=Instance)
instance.features = features
instance.has_static_lazy_constraints = Mock(return_value=True)
return instance
@pytest.fixture @pytest.fixture
def sample() -> TrainingSample: def sample() -> TrainingSample:
return TrainingSample( return TrainingSample(
@ -67,7 +75,7 @@ def features() -> Features:
) )
def test_usage_with_solver(features: Features) -> None: def test_usage_with_solver(instance: Instance) -> None:
solver = Mock(spec=LearningSolver) solver = Mock(spec=LearningSolver)
solver.use_lazy_cb = False solver.use_lazy_cb = False
solver.gap_tolerance = 1e-4 solver.gap_tolerance = 1e-4
@ -76,9 +84,6 @@ def test_usage_with_solver(features: Features) -> None:
internal.extract_constraint = Mock(side_effect=lambda cid: "<%s>" % cid) internal.extract_constraint = Mock(side_effect=lambda cid: "<%s>" % cid)
internal.is_constraint_satisfied = Mock(return_value=False) internal.is_constraint_satisfied = Mock(return_value=False)
instance = Mock(spec=Instance)
instance.has_static_lazy_constraints = Mock(return_value=True)
component = StaticLazyConstraintsComponent(violation_tolerance=1.0) component = StaticLazyConstraintsComponent(violation_tolerance=1.0)
component.thresholds["type-a"] = MinProbabilityThreshold([0.5, 0.5]) component.thresholds["type-a"] = MinProbabilityThreshold([0.5, 0.5])
component.thresholds["type-b"] = MinProbabilityThreshold([0.5, 0.5]) component.thresholds["type-b"] = MinProbabilityThreshold([0.5, 0.5])
@ -112,7 +117,7 @@ def test_usage_with_solver(features: Features) -> None:
instance=instance, instance=instance,
model=None, model=None,
stats=stats, stats=stats,
features=features, features=instance.features,
training_data=sample, training_data=sample,
) )
@ -149,7 +154,7 @@ def test_usage_with_solver(features: Features) -> None:
instance=instance, instance=instance,
model=None, model=None,
stats=stats, stats=stats,
features=features, features=instance.features,
training_data=sample, training_data=sample,
) )
@ -164,7 +169,7 @@ def test_usage_with_solver(features: Features) -> None:
def test_sample_predict( def test_sample_predict(
features: Features, instance: Instance,
sample: TrainingSample, sample: TrainingSample,
) -> None: ) -> None:
comp = StaticLazyConstraintsComponent() comp = StaticLazyConstraintsComponent()
@ -184,7 +189,7 @@ def test_sample_predict(
[0.0, 1.0], # c4 [0.0, 1.0], # c4
] ]
) )
pred = comp.sample_predict(features, sample) pred = comp.sample_predict(instance, sample)
assert pred == ["c1", "c2", "c4"] assert pred == ["c1", "c2", "c4"]
@ -229,7 +234,7 @@ def test_fit_xy() -> None:
def test_sample_xy( def test_sample_xy(
features: Features, instance: Instance,
sample: TrainingSample, sample: TrainingSample,
) -> None: ) -> None:
x_expected = { x_expected = {
@ -240,7 +245,7 @@ def test_sample_xy(
"type-a": [[False, True], [False, True], [True, False]], "type-a": [[False, True], [False, True], [True, False]],
"type-b": [[False, True]], "type-b": [[False, True]],
} }
xy = StaticLazyConstraintsComponent.sample_xy(features, sample) xy = StaticLazyConstraintsComponent.sample_xy(instance, sample)
assert xy is not None assert xy is not None
x_actual, y_actual = xy x_actual, y_actual = xy
assert x_actual == x_expected assert x_actual == x_expected

@ -7,7 +7,7 @@ from unittest.mock import Mock
import pytest import pytest
from numpy.testing import assert_array_equal from numpy.testing import assert_array_equal
from miplearn import GurobiPyomoSolver, LearningSolver, Regressor from miplearn import GurobiPyomoSolver, LearningSolver, Regressor, Instance
from miplearn.components.objective import ObjectiveValueComponent from miplearn.components.objective import ObjectiveValueComponent
from miplearn.features import TrainingSample, InstanceFeatures, Features from miplearn.features import TrainingSample, InstanceFeatures, Features
from tests.fixtures.knapsack import get_knapsack_instance from tests.fixtures.knapsack import get_knapsack_instance
@ -15,6 +15,13 @@ from tests.fixtures.knapsack import get_knapsack_instance
import numpy as np import numpy as np
@pytest.fixture
def instance(features: Features) -> Instance:
instance = Mock(spec=Instance)
instance.features = features
return instance
@pytest.fixture @pytest.fixture
def features() -> Features: def features() -> Features:
return Features( return Features(
@ -50,7 +57,7 @@ def sample_without_ub() -> TrainingSample:
def test_sample_xy( def test_sample_xy(
features: Features, instance: Instance,
sample: TrainingSample, sample: TrainingSample,
) -> None: ) -> None:
x_expected = { x_expected = {
@ -61,7 +68,7 @@ def test_sample_xy(
"Lower bound": [[1.0]], "Lower bound": [[1.0]],
"Upper bound": [[2.0]], "Upper bound": [[2.0]],
} }
xy = ObjectiveValueComponent.sample_xy(features, sample) xy = ObjectiveValueComponent.sample_xy(instance, sample)
assert xy is not None assert xy is not None
x_actual, y_actual = xy x_actual, y_actual = xy
assert x_actual == x_expected assert x_actual == x_expected
@ -69,7 +76,7 @@ def test_sample_xy(
def test_sample_xy_without_lp( def test_sample_xy_without_lp(
features: Features, instance: Instance,
sample_without_lp: TrainingSample, sample_without_lp: TrainingSample,
) -> None: ) -> None:
x_expected = { x_expected = {
@ -80,7 +87,7 @@ def test_sample_xy_without_lp(
"Lower bound": [[1.0]], "Lower bound": [[1.0]],
"Upper bound": [[2.0]], "Upper bound": [[2.0]],
} }
xy = ObjectiveValueComponent.sample_xy(features, sample_without_lp) xy = ObjectiveValueComponent.sample_xy(instance, sample_without_lp)
assert xy is not None assert xy is not None
x_actual, y_actual = xy x_actual, y_actual = xy
assert x_actual == x_expected assert x_actual == x_expected
@ -88,7 +95,7 @@ def test_sample_xy_without_lp(
def test_sample_xy_without_ub( def test_sample_xy_without_ub(
features: Features, instance: Instance,
sample_without_ub: TrainingSample, sample_without_ub: TrainingSample,
) -> None: ) -> None:
x_expected = { x_expected = {
@ -96,7 +103,7 @@ def test_sample_xy_without_ub(
"Upper bound": [[1.0, 2.0, 3.0]], "Upper bound": [[1.0, 2.0, 3.0]],
} }
y_expected = {"Lower bound": [[1.0]]} y_expected = {"Lower bound": [[1.0]]}
xy = ObjectiveValueComponent.sample_xy(features, sample_without_ub) xy = ObjectiveValueComponent.sample_xy(instance, sample_without_ub)
assert xy is not None assert xy is not None
x_actual, y_actual = xy x_actual, y_actual = xy
assert x_actual == x_expected assert x_actual == x_expected
@ -170,10 +177,10 @@ def test_fit_xy_without_ub() -> None:
def test_sample_predict( def test_sample_predict(
features: Features, instance: Instance,
sample: TrainingSample, sample: TrainingSample,
) -> None: ) -> None:
x, y = ObjectiveValueComponent.sample_xy(features, sample) x, y = ObjectiveValueComponent.sample_xy(instance, sample)
comp = ObjectiveValueComponent() comp = ObjectiveValueComponent()
comp.regressors["Lower bound"] = Mock(spec=Regressor) comp.regressors["Lower bound"] = Mock(spec=Regressor)
comp.regressors["Upper bound"] = Mock(spec=Regressor) comp.regressors["Upper bound"] = Mock(spec=Regressor)
@ -183,7 +190,7 @@ def test_sample_predict(
comp.regressors["Upper bound"].predict = Mock( # type: ignore comp.regressors["Upper bound"].predict = Mock( # type: ignore
side_effect=lambda _: np.array([[60.0]]) side_effect=lambda _: np.array([[60.0]])
) )
pred = comp.sample_predict(features, sample) pred = comp.sample_predict(instance, sample)
assert pred == { assert pred == {
"Lower bound": 50.0, "Lower bound": 50.0,
"Upper bound": 60.0, "Upper bound": 60.0,
@ -199,16 +206,16 @@ def test_sample_predict(
def test_sample_predict_without_ub( def test_sample_predict_without_ub(
features: Features, instance: Instance,
sample_without_ub: TrainingSample, sample_without_ub: TrainingSample,
) -> None: ) -> None:
x, y = ObjectiveValueComponent.sample_xy(features, sample_without_ub) x, y = ObjectiveValueComponent.sample_xy(instance, sample_without_ub)
comp = ObjectiveValueComponent() comp = ObjectiveValueComponent()
comp.regressors["Lower bound"] = Mock(spec=Regressor) comp.regressors["Lower bound"] = Mock(spec=Regressor)
comp.regressors["Lower bound"].predict = Mock( # type: ignore comp.regressors["Lower bound"].predict = Mock( # type: ignore
side_effect=lambda _: np.array([[50.0]]) side_effect=lambda _: np.array([[50.0]])
) )
pred = comp.sample_predict(features, sample_without_ub) pred = comp.sample_predict(instance, sample_without_ub)
assert pred == { assert pred == {
"Lower bound": 50.0, "Lower bound": 50.0,
} }
@ -218,13 +225,13 @@ def test_sample_predict_without_ub(
) )
def test_sample_evaluate(features: Features, sample: TrainingSample) -> None: def test_sample_evaluate(instance: Instance, sample: TrainingSample) -> None:
comp = ObjectiveValueComponent() comp = ObjectiveValueComponent()
comp.regressors["Lower bound"] = Mock(spec=Regressor) comp.regressors["Lower bound"] = Mock(spec=Regressor)
comp.regressors["Lower bound"].predict = lambda _: np.array([[1.05]]) # type: ignore comp.regressors["Lower bound"].predict = lambda _: np.array([[1.05]]) # type: ignore
comp.regressors["Upper bound"] = Mock(spec=Regressor) comp.regressors["Upper bound"] = Mock(spec=Regressor)
comp.regressors["Upper bound"].predict = lambda _: np.array([[2.50]]) # type: ignore comp.regressors["Upper bound"].predict = lambda _: np.array([[2.50]]) # type: ignore
ev = comp.sample_evaluate(features, sample) ev = comp.sample_evaluate(instance, sample)
assert ev == { assert ev == {
"Lower bound": { "Lower bound": {
"Actual value": 1.0, "Actual value": 1.0,

@ -8,7 +8,7 @@ import numpy as np
from numpy.testing import assert_array_equal from numpy.testing import assert_array_equal
from scipy.stats import randint from scipy.stats import randint
from miplearn import Classifier, LearningSolver from miplearn import Classifier, LearningSolver, Instance
from miplearn.classifiers.threshold import Threshold from miplearn.classifiers.threshold import Threshold
from miplearn.components import classifier_evaluation_dict from miplearn.components import classifier_evaluation_dict
from miplearn.components.primal import PrimalSolutionComponent from miplearn.components.primal import PrimalSolutionComponent
@ -38,6 +38,8 @@ def test_xy() -> None:
} }
} }
) )
instance = Mock(spec=Instance)
instance.features = features
sample = TrainingSample( sample = TrainingSample(
solution={ solution={
"x": { "x": {
@ -70,7 +72,7 @@ def test_xy() -> None:
[True, False], [True, False],
] ]
} }
xy = PrimalSolutionComponent.sample_xy(features, sample) xy = PrimalSolutionComponent.sample_xy(instance, sample)
assert xy is not None assert xy is not None
x_actual, y_actual = xy x_actual, y_actual = xy
assert x_actual == x_expected assert x_actual == x_expected
@ -99,6 +101,8 @@ def test_xy_without_lp_solution() -> None:
} }
} }
) )
instance = Mock(spec=Instance)
instance.features = features
sample = TrainingSample( sample = TrainingSample(
solution={ solution={
"x": { "x": {
@ -123,7 +127,7 @@ def test_xy_without_lp_solution() -> None:
[True, False], [True, False],
] ]
} }
xy = PrimalSolutionComponent.sample_xy(features, sample) xy = PrimalSolutionComponent.sample_xy(instance, sample)
assert xy is not None assert xy is not None
x_actual, y_actual = xy x_actual, y_actual = xy
assert x_actual == x_expected assert x_actual == x_expected
@ -161,6 +165,8 @@ def test_predict() -> None:
} }
} }
) )
instance = Mock(spec=Instance)
instance.features = features
sample = TrainingSample( sample = TrainingSample(
lp_solution={ lp_solution={
"x": { "x": {
@ -170,11 +176,11 @@ def test_predict() -> None:
} }
} }
) )
x, _ = PrimalSolutionComponent.sample_xy(features, sample) x, _ = PrimalSolutionComponent.sample_xy(instance, sample)
comp = PrimalSolutionComponent() comp = PrimalSolutionComponent()
comp.classifiers = {"default": clf} comp.classifiers = {"default": clf}
comp.thresholds = {"default": thr} comp.thresholds = {"default": thr}
solution_actual = comp.sample_predict(features, sample) solution_actual = comp.sample_predict(instance, sample)
clf.predict_proba.assert_called_once() clf.predict_proba.assert_called_once()
assert_array_equal(x["default"], clf.predict_proba.call_args[0][0]) assert_array_equal(x["default"], clf.predict_proba.call_args[0][0])
thr.predict.assert_called_once() thr.predict.assert_called_once()
@ -243,7 +249,7 @@ def test_evaluate() -> None:
4: 1.0, 4: 1.0,
} }
} }
features = Features( features: Features = Features(
variables={ variables={
"x": { "x": {
0: VariableFeatures(), 0: VariableFeatures(),
@ -254,7 +260,9 @@ def test_evaluate() -> None:
} }
} }
) )
sample = TrainingSample( instance = Mock(spec=Instance)
instance.features = features
sample: TrainingSample = TrainingSample(
solution={ solution={
"x": { "x": {
0: 1.0, 0: 1.0,
@ -265,7 +273,7 @@ def test_evaluate() -> None:
} }
} }
) )
ev = comp.sample_evaluate(features, sample) ev = comp.sample_evaluate(instance, sample)
assert ev == { assert ev == {
0: classifier_evaluation_dict(tp=1, fp=1, tn=3, fn=0), 0: classifier_evaluation_dict(tp=1, fp=1, tn=3, fn=0),
1: classifier_evaluation_dict(tp=2, fp=0, tn=1, fn=2), 1: classifier_evaluation_dict(tp=2, fp=0, tn=1, fn=2),

Loading…
Cancel
Save