Mirror of https://github.com/ANL-CEEESA/MIPLearn.git (synced 2025-12-06 01:18:52 -06:00)

Commit: Rename more methods to _old
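The hunks below all apply the same mechanical change: tests, fixtures, and calls that exercise the legacy API are renamed with an `_old` suffix while the newer interface keeps the original names. As a rough sketch of that pattern (class and method names invented here, not MIPLearn's actual API):

from typing import Dict, List, Tuple


class ExampleComponent:
    """Hypothetical component used only to illustrate the *_old renaming."""

    def xy_instances_old(self, instances: List[dict]) -> Tuple[Dict, Dict]:
        # Legacy behaviour, kept verbatim under the new *_old name so the
        # old tests (renamed to *_old as well) keep passing.
        x = {"s1": [[i["feature"]] for i in instances]}
        y = {"s1": [[i["label"]] for i in instances]}
        return x, y

    def xy_instances(self, instances: List[dict]) -> Tuple[Dict, Dict]:
        # New entry point under the original name; in this sketch it simply
        # delegates to the legacy implementation.
        return self.xy_instances_old(instances)


comp = ExampleComponent()
assert comp.xy_instances([{"feature": 1.0, "label": 0}]) == comp.xy_instances_old(
    [{"feature": 1.0, "label": 0}]
)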
@@ -5,11 +5,11 @@ from typing import Dict, Tuple
 from unittest.mock import Mock

 from miplearn.components.component import Component
-from miplearn.features import Features, TrainingSample
+from miplearn.features import Features
 from miplearn.instance.base import Instance


-def test_xy_instance() -> None:
+def test_xy_instance_old() -> None:
     def _sample_xy_old(features: Features, sample: str) -> Tuple[Dict, Dict]:
         x = {
             "s1": {

@@ -96,6 +96,6 @@ def test_xy_instance() -> None:
             [11],
         ],
     }
-    x_actual, y_actual = comp.xy_instances([instance_1, instance_2])
+    x_actual, y_actual = comp.xy_instances_old([instance_1, instance_2])
     assert x_actual == x_expected
     assert y_actual == y_expected

@@ -25,7 +25,7 @@ E = 0.1


 @pytest.fixture
-def training_instances2() -> List[Instance]:
+def training_instances_old() -> List[Instance]:
     instances = [cast(Instance, Mock(spec=Instance)) for _ in range(2)]
     instances[0].features = Features(
         instance=InstanceFeatures(

@@ -131,11 +131,11 @@ def test_sample_xy(training_instances: List[Instance]) -> None:
     assert_equals(y_actual, y_expected)


-def test_fit(training_instances2: List[Instance]) -> None:
+def test_fit_old(training_instances_old: List[Instance]) -> None:
     clf = Mock(spec=Classifier)
     clf.clone = Mock(side_effect=lambda: Mock(spec=Classifier))
     comp = DynamicLazyConstraintsComponent(classifier=clf)
-    comp.fit(training_instances2)
+    comp.fit_old(training_instances_old)
     assert clf.clone.call_count == 2

     assert "type-a" in comp.classifiers

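The `test_fit_old` hunk above relies on a common unittest.mock idiom: a classifier mock whose `clone()` returns a fresh mock on every call, so each constraint type gets its own classifier. A self-contained sketch of that idiom, with a stand-in `Classifier` class (not MIPLearn's own):

from unittest.mock import Mock


class Classifier:
    """Stand-in interface, used only as a spec for the mock."""

    def clone(self) -> "Classifier": ...
    def fit(self, x, y) -> None: ...


clf = Mock(spec=Classifier)
clf.clone = Mock(side_effect=lambda: Mock(spec=Classifier))

# One clone per constraint type, mirroring the call_count == 2 assertion.
classifiers = {t: clf.clone() for t in ["type-a", "type-b"]}
assert clf.clone.call_count == 2
assert classifiers["type-a"] is not classifiers["type-b"]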
@@ -197,7 +197,7 @@ def test_fit(training_instances2: List[Instance]) -> None:
     )


-def test_sample_predict_evaluate(training_instances2: List[Instance]) -> None:
+def test_sample_predict_evaluate_old(training_instances_old: List[Instance]) -> None:
     comp = DynamicLazyConstraintsComponent()
     comp.known_cids.extend(["c1", "c2", "c3", "c4"])
     comp.thresholds["type-a"] = MinProbabilityThreshold([0.5, 0.5])

@@ -211,13 +211,13 @@ def test_sample_predict_evaluate(training_instances2: List[Instance]) -> None:
         side_effect=lambda _: np.array([[0.9, 0.1], [0.1, 0.9]])
     )
     pred = comp.sample_predict(
-        training_instances2[0],
-        training_instances2[0].training_data[0],
+        training_instances_old[0],
+        training_instances_old[0].training_data[0],
     )
     assert pred == ["c1", "c4"]
     ev = comp.sample_evaluate_old(
-        training_instances2[0],
-        training_instances2[0].training_data[0],
+        training_instances_old[0],
+        training_instances_old[0].training_data[0],
    )
     print(ev)
     assert ev == {

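The prediction test above feeds `predict_proba` output through a minimum-probability threshold to decide which known constraint ids to return. A generic, standalone sketch of that selection logic (hypothetical helper, not MIPLearn's implementation):

import numpy as np


def select_constraints(cids, proba, min_probability=0.5):
    # proba[i] = [P(do not enforce), P(enforce)] for candidate cids[i];
    # keep a candidate when the "enforce" probability clears the threshold.
    return [cid for cid, row in zip(cids, proba) if row[1] >= min_probability]


proba = np.array([[0.9, 0.1], [0.1, 0.9]])
print(select_constraints(["c1", "c2"], proba))  # -> ['c2']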
@@ -18,14 +18,14 @@ from miplearn.solvers.pyomo.gurobi import GurobiPyomoSolver


 @pytest.fixture
-def instance(features: Features) -> Instance:
+def instance_old(features_old: Features) -> Instance:
     instance = Mock(spec=Instance)
-    instance.features = features
+    instance.features = features_old
     return instance


 @pytest.fixture
-def features() -> Features:
+def features_old() -> Features:
     return Features(
         instance=InstanceFeatures(
             user_features=[1.0, 2.0],

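The fixture rename above keeps the usual pytest dependency chain intact: `instance_old` receives the value produced by `features_old`, and tests receive `instance_old` by naming it as a parameter. A toy version of that chaining, with a plain dictionary standing in for MIPLearn's `Features`:

from unittest.mock import Mock

import pytest


@pytest.fixture
def features_old() -> dict:
    return {"user_features": [1.0, 2.0]}


@pytest.fixture
def instance_old(features_old: dict) -> Mock:
    # pytest resolves features_old first and injects its return value here.
    instance = Mock()
    instance.features = features_old
    return instance


def test_instance_old_carries_features(instance_old: Mock) -> None:
    assert instance_old.features["user_features"] == [1.0, 2.0]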
@@ -95,8 +95,8 @@ def test_sample_xy(sample: Sample) -> None:
     assert y_actual == y_expected


-def test_sample_xy_without_lp(
-    instance: Instance,
+def test_sample_xy_without_lp_old(
+    instance_old: Instance,
     sample_without_lp: TrainingSample,
 ) -> None:
     x_expected = {

@@ -107,15 +107,15 @@ def test_sample_xy_without_lp(
         "Lower bound": [[1.0]],
         "Upper bound": [[2.0]],
     }
-    xy = ObjectiveValueComponent().sample_xy_old(instance, sample_without_lp)
+    xy = ObjectiveValueComponent().sample_xy_old(instance_old, sample_without_lp)
     assert xy is not None
     x_actual, y_actual = xy
     assert x_actual == x_expected
     assert y_actual == y_expected


-def test_sample_xy_without_ub(
-    instance: Instance,
+def test_sample_xy_without_ub_old(
+    instance_old: Instance,
     sample_without_ub_old: TrainingSample,
 ) -> None:
     x_expected = {

@@ -123,7 +123,7 @@ def test_sample_xy_without_ub(
         "Upper bound": [[1.0, 2.0, 3.0]],
     }
     y_expected = {"Lower bound": [[1.0]]}
-    xy = ObjectiveValueComponent().sample_xy_old(instance, sample_without_ub_old)
+    xy = ObjectiveValueComponent().sample_xy_old(instance_old, sample_without_ub_old)
     assert xy is not None
     x_actual, y_actual = xy
     assert x_actual == x_expected

@@ -197,10 +197,10 @@ def test_fit_xy_without_ub() -> None:


 def test_sample_predict(
-    instance: Instance,
+    instance_old: Instance,
     sample_old: TrainingSample,
 ) -> None:
-    x, y = ObjectiveValueComponent().sample_xy_old(instance, sample_old)
+    x, y = ObjectiveValueComponent().sample_xy_old(instance_old, sample_old)
     comp = ObjectiveValueComponent()
     comp.regressors["Lower bound"] = Mock(spec=Regressor)
     comp.regressors["Upper bound"] = Mock(spec=Regressor)

@@ -210,7 +210,7 @@ def test_sample_predict(
     comp.regressors["Upper bound"].predict = Mock(  # type: ignore
         side_effect=lambda _: np.array([[60.0]])
     )
-    pred = comp.sample_predict(instance, sample_old)
+    pred = comp.sample_predict_old(instance_old, sample_old)
     assert pred == {
         "Lower bound": 50.0,
         "Upper bound": 60.0,

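For context on the assertion above: the mocked regressors return arrays of shape (1, 1) while the expected predictions are the scalars 50.0 and 60.0, suggesting the component unwraps a single-sample, single-output prediction. A standalone illustration (not MIPLearn's code):

import numpy as np

prediction = np.array([[50.0]])        # shape (1, 1): one sample, one output
assert prediction.shape == (1, 1)
lower_bound = float(prediction[0, 0])  # unwrap to the scalar the test expects
assert lower_bound == 50.0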
@@ -225,17 +225,17 @@ def test_sample_predict(
     )


-def test_sample_predict_without_ub(
-    instance: Instance,
+def test_sample_predict_without_ub_old(
+    instance_old: Instance,
     sample_without_ub_old: TrainingSample,
 ) -> None:
-    x, y = ObjectiveValueComponent().sample_xy_old(instance, sample_without_ub_old)
+    x, y = ObjectiveValueComponent().sample_xy_old(instance_old, sample_without_ub_old)
     comp = ObjectiveValueComponent()
     comp.regressors["Lower bound"] = Mock(spec=Regressor)
     comp.regressors["Lower bound"].predict = Mock(  # type: ignore
         side_effect=lambda _: np.array([[50.0]])
     )
-    pred = comp.sample_predict(instance, sample_without_ub_old)
+    pred = comp.sample_predict_old(instance_old, sample_without_ub_old)
     assert pred == {
         "Lower bound": 50.0,
     }

@@ -245,13 +245,16 @@ def test_sample_predict_without_ub(
     )


-def test_sample_evaluate(instance: Instance, sample_old: TrainingSample) -> None:
+def test_sample_evaluate_old(
+    instance_old: Instance,
+    sample_old: TrainingSample,
+) -> None:
     comp = ObjectiveValueComponent()
     comp.regressors["Lower bound"] = Mock(spec=Regressor)
     comp.regressors["Lower bound"].predict = lambda _: np.array([[1.05]])  # type: ignore
     comp.regressors["Upper bound"] = Mock(spec=Regressor)
     comp.regressors["Upper bound"].predict = lambda _: np.array([[2.50]])  # type: ignore
-    ev = comp.sample_evaluate_old(instance, sample_old)
+    ev = comp.sample_evaluate_old(instance_old, sample_old)
     assert ev == {
         "Lower bound": {
             "Actual value": 1.0,

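A small aside on the two stubbing styles visible in these objective tests: `sample_evaluate_old` assigns plain lambdas to `predict`, while the prediction tests use `Mock(side_effect=...)`. Both return canned arrays, but only the mock records how it was called:

import numpy as np
from unittest.mock import Mock

plain = lambda _: np.array([[1.05]])                  # no call bookkeeping
tracked = Mock(side_effect=lambda _: np.array([[1.05]]))

plain("features")
tracked("features")
tracked.assert_called_once_with("features")           # only possible on the Mock
assert np.array_equal(plain("x"), tracked("x"))       # both return the same array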
@@ -146,7 +146,7 @@ def test_xy_old() -> None:
     assert y_actual == y_expected


-def test_xy_without_lp_solution() -> None:
+def test_xy_without_lp_solution_old() -> None:
     features = Features(
         variables={
             "x[0]": Variable(

@@ -197,7 +197,7 @@ def test_xy_without_lp_solution() -> None:
     assert y_actual == y_expected


-def test_predict() -> None:
+def test_predict_old() -> None:
     clf = Mock(spec=Classifier)
     clf.predict_proba = Mock(
         return_value=np.array(

@@ -295,7 +295,7 @@ def test_usage() -> None:
     assert stats["mip_lower_bound"] == stats["mip_warm_start_value"]


-def test_evaluate() -> None:
+def test_evaluate_old() -> None:
     comp = PrimalSolutionComponent()
     comp.sample_predict = lambda _, __: {  # type: ignore
         "x[0]": 1.0,

@@ -71,7 +71,7 @@ def sample() -> Sample:


 @pytest.fixture
-def instance(features: Features) -> Instance:
+def instance_old(features: Features) -> Instance:
     instance = Mock(spec=Instance)
     instance.features = features
     instance.has_static_lazy_constraints = Mock(return_value=True)

@@ -79,7 +79,7 @@ def instance(features: Features) -> Instance:


 @pytest.fixture
-def sample2() -> TrainingSample:
+def sample_old() -> TrainingSample:
     return TrainingSample(
         lazy_enforced={"c1", "c2", "c4"},
     )

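The fixtures above build instances as `Mock(spec=Instance)` with selected methods stubbed out. A quick standalone illustration of what `spec` and `return_value` provide (toy `Instance` class, standing in for `miplearn.instance.base.Instance`):

from unittest.mock import Mock


class Instance:
    """Toy stand-in used only as the mock's spec."""

    features = None

    def has_static_lazy_constraints(self) -> bool:
        return False


instance = Mock(spec=Instance)
instance.has_static_lazy_constraints = Mock(return_value=True)

assert instance.has_static_lazy_constraints() is True
try:
    instance.not_a_real_method
except AttributeError:
    print("spec=Instance rejects attributes the real class does not define")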
@@ -122,9 +122,9 @@ def features() -> Features:
     )


-def test_usage_with_solver(instance: Instance) -> None:
-    assert instance.features is not None
-    assert instance.features.constraints is not None
+def test_usage_with_solver(instance_old: Instance) -> None:
+    assert instance_old.features is not None
+    assert instance_old.features.constraints is not None

     solver = Mock(spec=LearningSolver)
     solver.use_lazy_cb = False

@@ -157,17 +157,17 @@ def test_usage_with_solver(instance: Instance) -> None:
         )
     )

-    sample2: TrainingSample = TrainingSample()
+    sample_old: TrainingSample = TrainingSample()
     stats: LearningSolveStats = {}

     # LearningSolver calls before_solve_mip
     component.before_solve_mip_old(
         solver=solver,
-        instance=instance,
+        instance=instance_old,
         model=None,
         stats=stats,
-        features=instance.features,
-        training_data=sample2,
+        features=instance_old.features,
+        training_data=sample_old,
     )

     # Should ask ML to predict whether each lazy constraint should be enforced

@@ -179,19 +179,19 @@ def test_usage_with_solver(instance: Instance) -> None:
     internal.remove_constraint.assert_has_calls([call("c3")])

     # LearningSolver calls after_iteration (first time)
-    should_repeat = component.iteration_cb(solver, instance, None)
+    should_repeat = component.iteration_cb(solver, instance_old, None)
     assert should_repeat

     # Should ask internal solver to verify if constraints in the pool are
     # satisfied and add the ones that are not
-    c3 = instance.features.constraints["c3"]
+    c3 = instance_old.features.constraints["c3"]
     internal.is_constraint_satisfied.assert_called_once_with(c3, tol=1.0)
     internal.is_constraint_satisfied.reset_mock()
     internal.add_constraint.assert_called_once_with(c3, name="c3")
     internal.add_constraint.reset_mock()

     # LearningSolver calls after_iteration (second time)
-    should_repeat = component.iteration_cb(solver, instance, None)
+    should_repeat = component.iteration_cb(solver, instance_old, None)
     assert not should_repeat

     # The lazy constraint pool should be empty by now, so no calls should be made

@@ -201,15 +201,15 @@ def test_usage_with_solver(instance: Instance) -> None:
     # LearningSolver calls after_solve_mip
     component.after_solve_mip_old(
         solver=solver,
-        instance=instance,
+        instance=instance_old,
         model=None,
         stats=stats,
-        features=instance.features,
-        training_data=sample2,
+        features=instance_old.features,
+        training_data=sample_old,
     )

     # Should update training sample
-    assert sample2.lazy_enforced == {"c1", "c2", "c3", "c4"}
+    assert sample_old.lazy_enforced == {"c1", "c2", "c3", "c4"}

     # Should update stats
     assert stats["LazyStatic: Removed"] == 1

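Taken together, the `test_usage_with_solver` hunks walk the component through the solver lifecycle: one `before_solve_mip` call, repeated `iteration_cb` calls while lazy constraints remain in the pool, then one `after_solve_mip` call. A hedged sketch of that handshake with a hypothetical driver and simplified signatures (the `_old` names in the diff are the legacy versions of the first and last hooks):

from typing import Any


class ToyComponent:
    def __init__(self) -> None:
        self.pool = ["c3"]                 # lazy constraints left to check

    def before_solve_mip(self, **kwargs: Any) -> None:
        print("predict which lazy constraints to enforce up front")

    def iteration_cb(self, solver: Any, instance: Any, model: Any) -> bool:
        if self.pool:
            print("re-add violated constraint", self.pool.pop())
            return True                    # ask the solver to solve again
        return False                       # pool empty: stop iterating

    def after_solve_mip(self, **kwargs: Any) -> None:
        print("record which constraints ended up enforced")


def solve(component: ToyComponent) -> None:
    component.before_solve_mip()
    while component.iteration_cb(None, None, None):
        pass                               # the real solver would re-solve here
    component.after_solve_mip()


solve(ToyComponent())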
@@ -219,8 +219,8 @@ def test_usage_with_solver(instance: Instance) -> None:


 def test_sample_predict(
-    instance: Instance,
-    sample2: TrainingSample,
+    instance_old: Instance,
+    sample_old: TrainingSample,
 ) -> None:
     comp = StaticLazyConstraintsComponent()
     comp.thresholds["type-a"] = MinProbabilityThreshold([0.5, 0.5])

@@ -239,7 +239,7 @@ def test_sample_predict(
             [0.0, 1.0],  # c4
         ]
     )
-    pred = comp.sample_predict(instance, sample2)
+    pred = comp.sample_predict(instance_old, sample_old)
     assert pred == ["c1", "c2", "c4"]