Reformat source code with Black; add pre-commit hooks and CI checks

2020-12-05 10:59:33 -06:00
parent 3823931382
commit d99600f101
49 changed files with 1291 additions and 972 deletions
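Note (not part of the commit): the hunks below are pure Black reformatting. As a quick illustration of the rewrite being applied, the same transformation can be reproduced with Black's programmatic API — a minimal sketch, assuming the `black` package is installed; the commit itself would normally be produced with the `black` command-line tool.

```python
# Sketch: reproduce the kind of rewrite shown in the diff below using Black's
# format_str API (assumes `pip install black`).
import black

src = "expected = np.array([[67., 21.75, 1287.92], [70., 23.75, 1199.83]])\n"
formatted = black.format_str(src, mode=black.FileMode())
print(formatted)
# prints: expected = np.array([[67.0, 21.75, 1287.92], [70.0, 23.75, 1199.83]])
# Black normalizes float literals (67. -> 67.0) and prefers double quotes;
# longer literals, such as the dict and list arguments below, are exploded
# one element per line with a trailing comma.
```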

View File

@@ -28,9 +28,9 @@ def test_lazy_fit():
assert "c" in component.classifiers
# Should provide correct x_train to each classifier
expected_x_train_a = np.array([[67., 21.75, 1287.92], [70., 23.75, 1199.83]])
expected_x_train_b = np.array([[67., 21.75, 1287.92], [70., 23.75, 1199.83]])
expected_x_train_c = np.array([[67., 21.75, 1287.92], [70., 23.75, 1199.83]])
expected_x_train_a = np.array([[67.0, 21.75, 1287.92], [70.0, 23.75, 1199.83]])
expected_x_train_b = np.array([[67.0, 21.75, 1287.92], [70.0, 23.75, 1199.83]])
expected_x_train_c = np.array([[67.0, 21.75, 1287.92], [70.0, 23.75, 1199.83]])
actual_x_train_a = component.classifiers["a"].fit.call_args[0][0]
actual_x_train_b = component.classifiers["b"].fit.call_args[0][0]
actual_x_train_c = component.classifiers["c"].fit.call_args[0][0]
@@ -56,16 +56,15 @@ def test_lazy_before():
solver = LearningSolver()
solver.internal_solver = Mock(spec=InternalSolver)
component = DynamicLazyConstraintsComponent(threshold=0.10)
component.classifiers = {"a": Mock(spec=Classifier),
"b": Mock(spec=Classifier)}
component.classifiers = {"a": Mock(spec=Classifier), "b": Mock(spec=Classifier)}
component.classifiers["a"].predict_proba = Mock(return_value=[[0.95, 0.05]])
component.classifiers["b"].predict_proba = Mock(return_value=[[0.02, 0.80]])
component.before_solve(solver, instances[0], models[0])
# Should ask classifier likelihood of each constraint being violated
expected_x_test_a = np.array([[67., 21.75, 1287.92]])
expected_x_test_b = np.array([[67., 21.75, 1287.92]])
expected_x_test_a = np.array([[67.0, 21.75, 1287.92]])
expected_x_test_b = np.array([[67.0, 21.75, 1287.92]])
actual_x_test_a = component.classifiers["a"].predict_proba.call_args[0][0]
actual_x_test_b = component.classifiers["b"].predict_proba.call_args[0][0]
assert norm(expected_x_test_a - actual_x_test_a) < E
@@ -82,13 +81,15 @@ def test_lazy_before():
def test_lazy_evaluate():
instances, models = get_test_pyomo_instances()
component = DynamicLazyConstraintsComponent()
component.classifiers = {"a": Mock(spec=Classifier),
"b": Mock(spec=Classifier),
"c": Mock(spec=Classifier)}
component.classifiers = {
"a": Mock(spec=Classifier),
"b": Mock(spec=Classifier),
"c": Mock(spec=Classifier),
}
component.classifiers["a"].predict_proba = Mock(return_value=[[1.0, 0.0]])
component.classifiers["b"].predict_proba = Mock(return_value=[[0.0, 1.0]])
component.classifiers["c"].predict_proba = Mock(return_value=[[0.0, 1.0]])
instances[0].found_violated_lazy_constraints = ["a", "b", "c"]
instances[1].found_violated_lazy_constraints = ["b", "d"]
assert component.evaluate(instances) == {
@@ -96,7 +97,7 @@ def test_lazy_evaluate():
"Accuracy": 0.75,
"F1 score": 0.8,
"Precision": 1.0,
"Recall": 2/3.,
"Recall": 2 / 3.0,
"Predicted positive": 2,
"Predicted negative": 2,
"Condition positive": 3,
@@ -135,6 +136,5 @@ def test_lazy_evaluate():
"False positive (%)": 25.0,
"True negative (%)": 25.0,
"True positive (%)": 25.0,
}
},
}

View File

@@ -4,10 +4,12 @@
from unittest.mock import Mock, call
from miplearn import (StaticLazyConstraintsComponent,
LearningSolver,
Instance,
InternalSolver)
from miplearn import (
StaticLazyConstraintsComponent,
LearningSolver,
Instance,
InternalSolver,
)
from miplearn.classifiers import Classifier
@@ -23,39 +25,47 @@ def test_usage_with_solver():
instance = Mock(spec=Instance)
instance.has_static_lazy_constraints = Mock(return_value=True)
instance.is_constraint_lazy = Mock(side_effect=lambda cid: {
"c1": False,
"c2": True,
"c3": True,
"c4": True,
}[cid])
instance.get_constraint_features = Mock(side_effect=lambda cid: {
"c2": [1.0, 0.0],
"c3": [0.5, 0.5],
"c4": [1.0],
}[cid])
instance.get_constraint_category = Mock(side_effect=lambda cid: {
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
}[cid])
instance.is_constraint_lazy = Mock(
side_effect=lambda cid: {
"c1": False,
"c2": True,
"c3": True,
"c4": True,
}[cid]
)
instance.get_constraint_features = Mock(
side_effect=lambda cid: {
"c2": [1.0, 0.0],
"c3": [0.5, 0.5],
"c4": [1.0],
}[cid]
)
instance.get_constraint_category = Mock(
side_effect=lambda cid: {
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
}[cid]
)
component = StaticLazyConstraintsComponent(threshold=0.90,
use_two_phase_gap=False,
violation_tolerance=1.0)
component = StaticLazyConstraintsComponent(
threshold=0.90, use_two_phase_gap=False, violation_tolerance=1.0
)
component.classifiers = {
"type-a": Mock(spec=Classifier),
"type-b": Mock(spec=Classifier),
}
component.classifiers["type-a"].predict_proba = \
Mock(return_value=[
component.classifiers["type-a"].predict_proba = Mock(
return_value=[
[0.20, 0.80],
[0.05, 0.95],
])
component.classifiers["type-b"].predict_proba = \
Mock(return_value=[
]
)
component.classifiers["type-b"].predict_proba = Mock(
return_value=[
[0.02, 0.98],
])
]
)
# LearningSolver calls before_solve
component.before_solve(solver, instance, None)
@@ -67,37 +77,59 @@ def test_usage_with_solver():
internal.get_constraint_ids.assert_called_once()
# Should ask if each constraint in the model is lazy
instance.is_constraint_lazy.assert_has_calls([
call("c1"), call("c2"), call("c3"), call("c4"),
])
instance.is_constraint_lazy.assert_has_calls(
[
call("c1"),
call("c2"),
call("c3"),
call("c4"),
]
)
# For the lazy ones, should ask for features
instance.get_constraint_features.assert_has_calls([
call("c2"), call("c3"), call("c4"),
])
instance.get_constraint_features.assert_has_calls(
[
call("c2"),
call("c3"),
call("c4"),
]
)
# Should also ask for categories
assert instance.get_constraint_category.call_count == 3
instance.get_constraint_category.assert_has_calls([
call("c2"), call("c3"), call("c4"),
])
instance.get_constraint_category.assert_has_calls(
[
call("c2"),
call("c3"),
call("c4"),
]
)
# Should ask internal solver to remove constraints identified as lazy
assert internal.extract_constraint.call_count == 3
internal.extract_constraint.assert_has_calls([
call("c2"), call("c3"), call("c4"),
])
internal.extract_constraint.assert_has_calls(
[
call("c2"),
call("c3"),
call("c4"),
]
)
# Should ask ML to predict whether each lazy constraint should be enforced
component.classifiers["type-a"].predict_proba.assert_called_once_with([[1.0, 0.0], [0.5, 0.5]])
component.classifiers["type-a"].predict_proba.assert_called_once_with(
[[1.0, 0.0], [0.5, 0.5]]
)
component.classifiers["type-b"].predict_proba.assert_called_once_with([[1.0]])
# For the ones that should be enforced, should ask solver to re-add them
# to the formulation. The remaining ones should remain in the pool.
assert internal.add_constraint.call_count == 2
internal.add_constraint.assert_has_calls([
call("<c3>"), call("<c4>"),
])
internal.add_constraint.assert_has_calls(
[
call("<c3>"),
call("<c4>"),
]
)
internal.add_constraint.reset_mock()
# LearningSolver calls after_iteration (first time)
@@ -126,37 +158,45 @@ def test_usage_with_solver():
def test_fit():
instance_1 = Mock(spec=Instance)
instance_1.found_violated_lazy_constraints = ["c1", "c2", "c4", "c5"]
instance_1.get_constraint_category = Mock(side_effect=lambda cid: {
"c1": "type-a",
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
"c5": "type-b",
}[cid])
instance_1.get_constraint_features = Mock(side_effect=lambda cid: {
"c1": [1, 1],
"c2": [1, 2],
"c3": [1, 3],
"c4": [1, 4, 0],
"c5": [1, 5, 0],
}[cid])
instance_1.get_constraint_category = Mock(
side_effect=lambda cid: {
"c1": "type-a",
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
"c5": "type-b",
}[cid]
)
instance_1.get_constraint_features = Mock(
side_effect=lambda cid: {
"c1": [1, 1],
"c2": [1, 2],
"c3": [1, 3],
"c4": [1, 4, 0],
"c5": [1, 5, 0],
}[cid]
)
instance_2 = Mock(spec=Instance)
instance_2.found_violated_lazy_constraints = ["c2", "c3", "c4"]
instance_2.get_constraint_category = Mock(side_effect=lambda cid: {
"c1": "type-a",
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
"c5": "type-b",
}[cid])
instance_2.get_constraint_features = Mock(side_effect=lambda cid: {
"c1": [2, 1],
"c2": [2, 2],
"c3": [2, 3],
"c4": [2, 4, 0],
"c5": [2, 5, 0],
}[cid])
instance_2.get_constraint_category = Mock(
side_effect=lambda cid: {
"c1": "type-a",
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
"c5": "type-b",
}[cid]
)
instance_2.get_constraint_features = Mock(
side_effect=lambda cid: {
"c1": [2, 1],
"c2": [2, 2],
"c3": [2, 3],
"c4": [2, 4, 0],
"c5": [2, 5, 0],
}[cid]
)
instances = [instance_1, instance_2]
component = StaticLazyConstraintsComponent()
@@ -171,18 +211,22 @@ def test_fit():
}
expected_x = {
"type-a": [[1, 1], [1, 2], [1, 3], [2, 1], [2, 2], [2, 3]],
"type-b": [[1, 4, 0], [1, 5, 0], [2, 4, 0], [2, 5, 0]]
"type-b": [[1, 4, 0], [1, 5, 0], [2, 4, 0], [2, 5, 0]],
}
expected_y = {
"type-a": [[0, 1], [0, 1], [1, 0], [1, 0], [0, 1], [0, 1]],
"type-b": [[0, 1], [0, 1], [0, 1], [1, 0]]
"type-b": [[0, 1], [0, 1], [0, 1], [1, 0]],
}
assert component._collect_constraints(instances) == expected_constraints
assert component.x(instances) == expected_x
assert component.y(instances) == expected_y
component.fit(instances)
component.classifiers["type-a"].fit.assert_called_once_with(expected_x["type-a"],
expected_y["type-a"])
component.classifiers["type-b"].fit.assert_called_once_with(expected_x["type-b"],
expected_y["type-b"])
component.classifiers["type-a"].fit.assert_called_once_with(
expected_x["type-a"],
expected_y["type-a"],
)
component.classifiers["type-b"].fit.assert_called_once_with(
expected_x["type-b"],
expected_y["type-b"],
)

View File

@@ -16,8 +16,10 @@ def test_usage():
comp.fit(instances)
assert instances[0].lower_bound == 1183.0
assert instances[0].upper_bound == 1183.0
assert np.round(comp.predict(instances), 2).tolist() == [[1183.0, 1183.0],
[1070.0, 1070.0]]
assert np.round(comp.predict(instances), 2).tolist() == [
[1183.0, 1183.0],
[1070.0, 1070.0],
]
def test_obj_evaluate():
@@ -28,20 +30,20 @@ def test_obj_evaluate():
comp.fit(instances)
ev = comp.evaluate(instances)
assert ev == {
'Lower bound': {
'Explained variance': 0.0,
'Max error': 183.0,
'Mean absolute error': 126.5,
'Mean squared error': 19194.5,
'Median absolute error': 126.5,
'R2': -5.012843605607331,
"Lower bound": {
"Explained variance": 0.0,
"Max error": 183.0,
"Mean absolute error": 126.5,
"Mean squared error": 19194.5,
"Median absolute error": 126.5,
"R2": -5.012843605607331,
},
"Upper bound": {
"Explained variance": 0.0,
"Max error": 183.0,
"Mean absolute error": 126.5,
"Mean squared error": 19194.5,
"Median absolute error": 126.5,
"R2": -5.012843605607331,
},
'Upper bound': {
'Explained variance': 0.0,
'Max error': 183.0,
'Mean absolute error': 126.5,
'Mean squared error': 19194.5,
'Median absolute error': 126.5,
'R2': -5.012843605607331,
}
}

View File

@@ -25,71 +25,82 @@ def test_predict():
def test_evaluate():
instances, models = get_test_pyomo_instances()
clf_zero = Mock(spec=Classifier)
clf_zero.predict_proba = Mock(return_value=np.array([
[0., 1.], # x[0]
[0., 1.], # x[1]
[1., 0.], # x[2]
[1., 0.], # x[3]
]))
clf_zero.predict_proba = Mock(
return_value=np.array(
[
[0.0, 1.0], # x[0]
[0.0, 1.0], # x[1]
[1.0, 0.0], # x[2]
[1.0, 0.0], # x[3]
]
)
)
clf_one = Mock(spec=Classifier)
clf_one.predict_proba = Mock(return_value=np.array([
[1., 0.], # x[0] instances[0]
[1., 0.], # x[1] instances[0]
[0., 1.], # x[2] instances[0]
[1., 0.], # x[3] instances[0]
]))
comp = PrimalSolutionComponent(classifier=[clf_zero, clf_one],
threshold=0.50)
clf_one.predict_proba = Mock(
return_value=np.array(
[
[1.0, 0.0], # x[0] instances[0]
[1.0, 0.0], # x[1] instances[0]
[0.0, 1.0], # x[2] instances[0]
[1.0, 0.0], # x[3] instances[0]
]
)
)
comp = PrimalSolutionComponent(classifier=[clf_zero, clf_one], threshold=0.50)
comp.fit(instances[:1])
assert comp.predict(instances[0]) == {"x": {0: 0,
1: 0,
2: 1,
3: None}}
assert instances[0].solution == {"x": {0: 1,
1: 0,
2: 1,
3: 1}}
assert comp.predict(instances[0]) == {"x": {0: 0, 1: 0, 2: 1, 3: None}}
assert instances[0].solution == {"x": {0: 1, 1: 0, 2: 1, 3: 1}}
ev = comp.evaluate(instances[:1])
assert ev == {'Fix one': {0: {'Accuracy': 0.5,
'Condition negative': 1,
'Condition negative (%)': 25.0,
'Condition positive': 3,
'Condition positive (%)': 75.0,
'F1 score': 0.5,
'False negative': 2,
'False negative (%)': 50.0,
'False positive': 0,
'False positive (%)': 0.0,
'Precision': 1.0,
'Predicted negative': 3,
'Predicted negative (%)': 75.0,
'Predicted positive': 1,
'Predicted positive (%)': 25.0,
'Recall': 0.3333333333333333,
'True negative': 1,
'True negative (%)': 25.0,
'True positive': 1,
'True positive (%)': 25.0}},
'Fix zero': {0: {'Accuracy': 0.75,
'Condition negative': 3,
'Condition negative (%)': 75.0,
'Condition positive': 1,
'Condition positive (%)': 25.0,
'F1 score': 0.6666666666666666,
'False negative': 0,
'False negative (%)': 0.0,
'False positive': 1,
'False positive (%)': 25.0,
'Precision': 0.5,
'Predicted negative': 2,
'Predicted negative (%)': 50.0,
'Predicted positive': 2,
'Predicted positive (%)': 50.0,
'Recall': 1.0,
'True negative': 2,
'True negative (%)': 50.0,
'True positive': 1,
'True positive (%)': 25.0}}}
assert ev == {
"Fix one": {
0: {
"Accuracy": 0.5,
"Condition negative": 1,
"Condition negative (%)": 25.0,
"Condition positive": 3,
"Condition positive (%)": 75.0,
"F1 score": 0.5,
"False negative": 2,
"False negative (%)": 50.0,
"False positive": 0,
"False positive (%)": 0.0,
"Precision": 1.0,
"Predicted negative": 3,
"Predicted negative (%)": 75.0,
"Predicted positive": 1,
"Predicted positive (%)": 25.0,
"Recall": 0.3333333333333333,
"True negative": 1,
"True negative (%)": 25.0,
"True positive": 1,
"True positive (%)": 25.0,
}
},
"Fix zero": {
0: {
"Accuracy": 0.75,
"Condition negative": 3,
"Condition negative (%)": 75.0,
"Condition positive": 1,
"Condition positive (%)": 25.0,
"F1 score": 0.6666666666666666,
"False negative": 0,
"False negative (%)": 0.0,
"False positive": 1,
"False positive (%)": 25.0,
"Precision": 0.5,
"Predicted negative": 2,
"Predicted negative (%)": 50.0,
"Predicted positive": 2,
"Predicted positive (%)": 50.0,
"Recall": 1.0,
"True negative": 2,
"True negative (%)": 50.0,
"True positive": 1,
"True positive (%)": 25.0,
}
},
}
def test_primal_parallel_fit():

View File

@@ -4,10 +4,7 @@
from unittest.mock import Mock, call
from miplearn import (RelaxationComponent,
LearningSolver,
Instance,
InternalSolver)
from miplearn import RelaxationComponent, LearningSolver, Instance, InternalSolver
from miplearn.classifiers import Classifier
@@ -16,41 +13,49 @@ def _setup():
internal = solver.internal_solver = Mock(spec=InternalSolver)
internal.get_constraint_ids = Mock(return_value=["c1", "c2", "c3", "c4"])
internal.get_constraint_slacks = Mock(side_effect=lambda: {
"c1": 0.5,
"c2": 0.0,
"c3": 0.0,
"c4": 1.4,
})
internal.get_constraint_slacks = Mock(
side_effect=lambda: {
"c1": 0.5,
"c2": 0.0,
"c3": 0.0,
"c4": 1.4,
}
)
internal.extract_constraint = Mock(side_effect=lambda cid: "<%s>" % cid)
internal.is_constraint_satisfied = Mock(return_value=False)
instance = Mock(spec=Instance)
instance.get_constraint_features = Mock(side_effect=lambda cid: {
"c2": [1.0, 0.0],
"c3": [0.5, 0.5],
"c4": [1.0],
}[cid])
instance.get_constraint_category = Mock(side_effect=lambda cid: {
"c1": None,
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
}[cid])
instance.get_constraint_features = Mock(
side_effect=lambda cid: {
"c2": [1.0, 0.0],
"c3": [0.5, 0.5],
"c4": [1.0],
}[cid]
)
instance.get_constraint_category = Mock(
side_effect=lambda cid: {
"c1": None,
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
}[cid]
)
classifiers = {
"type-a": Mock(spec=Classifier),
"type-b": Mock(spec=Classifier),
}
classifiers["type-a"].predict_proba = \
Mock(return_value=[
classifiers["type-a"].predict_proba = Mock(
return_value=[
[0.20, 0.80],
[0.05, 0.95],
])
classifiers["type-b"].predict_proba = \
Mock(return_value=[
]
)
classifiers["type-b"].predict_proba = Mock(
return_value=[
[0.02, 0.98],
])
]
)
return solver, internal, instance, classifiers
@@ -72,25 +77,39 @@ def test_usage():
# Should query category and features for each constraint in the model
assert instance.get_constraint_category.call_count == 4
instance.get_constraint_category.assert_has_calls([
call("c1"), call("c2"), call("c3"), call("c4"),
])
instance.get_constraint_category.assert_has_calls(
[
call("c1"),
call("c2"),
call("c3"),
call("c4"),
]
)
# For constraint with non-null categories, should ask for features
assert instance.get_constraint_features.call_count == 3
instance.get_constraint_features.assert_has_calls([
call("c2"), call("c3"), call("c4"),
])
instance.get_constraint_features.assert_has_calls(
[
call("c2"),
call("c3"),
call("c4"),
]
)
# Should ask ML to predict whether constraint should be removed
component.classifiers["type-a"].predict_proba.assert_called_once_with([[1.0, 0.0], [0.5, 0.5]])
component.classifiers["type-a"].predict_proba.assert_called_once_with(
[[1.0, 0.0], [0.5, 0.5]]
)
component.classifiers["type-b"].predict_proba.assert_called_once_with([[1.0]])
# Should ask internal solver to remove constraints predicted as redundant
assert internal.extract_constraint.call_count == 2
internal.extract_constraint.assert_has_calls([
call("c3"), call("c4"),
])
internal.extract_constraint.assert_has_calls(
[
call("c3"),
call("c4"),
]
)
# LearningSolver calls after_solve
component.after_solve(solver, instance, None, None)
@@ -111,8 +130,7 @@ def test_usage():
def test_usage_with_check_dropped():
solver, internal, instance, classifiers = _setup()
component = RelaxationComponent(check_dropped=True,
violation_tolerance=1e-3)
component = RelaxationComponent(check_dropped=True, violation_tolerance=1e-3)
component.classifiers = classifiers
# LearningSolver call before_solve
@@ -120,9 +138,12 @@ def test_usage_with_check_dropped():
# Assert constraints are extracted
assert internal.extract_constraint.call_count == 2
internal.extract_constraint.assert_has_calls([
call("c3"), call("c4"),
])
internal.extract_constraint.assert_has_calls(
[
call("c3"),
call("c4"),
]
)
# LearningSolver calls iteration_cb (first time)
should_repeat = component.iteration_cb(solver, instance, None)
@@ -131,15 +152,15 @@ def test_usage_with_check_dropped():
assert should_repeat
# Should ask solver if removed constraints are satisfied (mock always returns false)
internal.is_constraint_satisfied.assert_has_calls([
call("<c3>", 1e-3),
call("<c4>", 1e-3),
])
internal.is_constraint_satisfied.assert_has_calls(
[
call("<c3>", 1e-3),
call("<c4>", 1e-3),
]
)
# Should add constraints back to LP relaxation
internal.add_constraint.assert_has_calls([
call("<c3>"), call("<c4>")
])
internal.add_constraint.assert_has_calls([call("<c3>"), call("<c4>")])
# LearningSolver calls iteration_cb (second time)
should_repeat = component.iteration_cb(solver, instance, None)
@@ -148,21 +169,22 @@ def test_usage_with_check_dropped():
def test_x_y_fit_predict_evaluate():
instances = [Mock(spec=Instance), Mock(spec=Instance)]
component = RelaxationComponent(slack_tolerance=0.05,
threshold=0.80)
component = RelaxationComponent(slack_tolerance=0.05, threshold=0.80)
component.classifiers = {
"type-a": Mock(spec=Classifier),
"type-b": Mock(spec=Classifier),
}
component.classifiers["type-a"].predict_proba = \
Mock(return_value=[
component.classifiers["type-a"].predict_proba = Mock(
return_value=[
[0.20, 0.80],
])
component.classifiers["type-b"].predict_proba = \
Mock(return_value=[
]
)
component.classifiers["type-b"].predict_proba = Mock(
return_value=[
[0.50, 0.50],
[0.05, 0.95],
])
]
)
# First mock instance
instances[0].slacks = {
@@ -171,17 +193,21 @@ def test_x_y_fit_predict_evaluate():
"c3": 0.00,
"c4": 30.0,
}
instances[0].get_constraint_category = Mock(side_effect=lambda cid: {
"c1": None,
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
}[cid])
instances[0].get_constraint_features = Mock(side_effect=lambda cid: {
"c2": [1.0, 0.0],
"c3": [0.5, 0.5],
"c4": [1.0],
}[cid])
instances[0].get_constraint_category = Mock(
side_effect=lambda cid: {
"c1": None,
"c2": "type-a",
"c3": "type-a",
"c4": "type-b",
}[cid]
)
instances[0].get_constraint_features = Mock(
side_effect=lambda cid: {
"c2": [1.0, 0.0],
"c3": [0.5, 0.5],
"c4": [1.0],
}[cid]
)
# Second mock instance
instances[1].slacks = {
@@ -190,26 +216,27 @@ def test_x_y_fit_predict_evaluate():
"c4": 0.00,
"c5": 0.00,
}
instances[1].get_constraint_category = Mock(side_effect=lambda cid: {
"c1": None,
"c3": "type-a",
"c4": "type-b",
"c5": "type-b",
}[cid])
instances[1].get_constraint_features = Mock(side_effect=lambda cid: {
"c3": [0.3, 0.4],
"c4": [0.7],
"c5": [0.8],
}[cid])
instances[1].get_constraint_category = Mock(
side_effect=lambda cid: {
"c1": None,
"c3": "type-a",
"c4": "type-b",
"c5": "type-b",
}[cid]
)
instances[1].get_constraint_features = Mock(
side_effect=lambda cid: {
"c3": [0.3, 0.4],
"c4": [0.7],
"c5": [0.8],
}[cid]
)
expected_x = {
"type-a": [[1.0, 0.0], [0.5, 0.5], [0.3, 0.4]],
"type-b": [[1.0], [0.7], [0.8]],
}
expected_y = {
"type-a": [[0], [0], [1]],
"type-b": [[1], [0], [0]]
}
expected_y = {"type-a": [[0], [0], [1]], "type-b": [[1], [0], [0]]}
# Should build X and Y matrices correctly
assert component.x(instances) == expected_x
@@ -217,13 +244,16 @@ def test_x_y_fit_predict_evaluate():
# Should pass along X and Y matrices to classifiers
component.fit(instances)
component.classifiers["type-a"].fit.assert_called_with(expected_x["type-a"], expected_y["type-a"])
component.classifiers["type-b"].fit.assert_called_with(expected_x["type-b"], expected_y["type-b"])
component.classifiers["type-a"].fit.assert_called_with(
expected_x["type-a"],
expected_y["type-a"],
)
component.classifiers["type-b"].fit.assert_called_with(
expected_x["type-b"],
expected_y["type-b"],
)
assert component.predict(expected_x) == {
"type-a": [[1]],
"type-b": [[0], [1]]
}
assert component.predict(expected_x) == {"type-a": [[1]], "type-b": [[0], [1]]}
ev = component.evaluate(instances[1])
assert ev["True positive"] == 1
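Note (not part of the commit): the pre-commit hooks and CI checks named in the commit title are not visible in this excerpt. As a hedged sketch of what such a gate typically looks like — the invocation style is an assumption, not taken from this commit — a CI step can run Black in check mode and fail the build when any file would be reformatted:

```python
# Sketch of a CI formatting gate (assumes Black is installed in the job's
# environment). `--check --diff` makes Black exit non-zero and print a diff
# when files would be changed, without modifying them.
import subprocess
import sys

result = subprocess.run(
    [sys.executable, "-m", "black", "--check", "--diff", "."],
    check=False,
)
sys.exit(result.returncode)
```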