Rewrite StaticLazy.sample_xy

master
Alinson S. Xavier 5 years ago
parent 2979bd157c
commit bccf0e9860
No known key found for this signature in database
GPG Key ID: DCA0DAD4D2F58624

@ -12,7 +12,7 @@ from miplearn.classifiers import Classifier
from miplearn.classifiers.counting import CountingClassifier from miplearn.classifiers.counting import CountingClassifier
from miplearn.classifiers.threshold import MinProbabilityThreshold, Threshold from miplearn.classifiers.threshold import MinProbabilityThreshold, Threshold
from miplearn.components.component import Component from miplearn.components.component import Component
from miplearn.features import TrainingSample, Features, Constraint from miplearn.features import TrainingSample, Features, Constraint, Sample
from miplearn.types import LearningSolveStats from miplearn.types import LearningSolveStats
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -199,6 +199,46 @@ class StaticLazyConstraintsComponent(Component):
y[category] += [[True, False]] y[category] += [[True, False]]
return x, y return x, y
@overrides
def sample_xy(
    self,
    sample: Sample,
) -> Tuple[Dict[Hashable, List[List[float]]], Dict[Hashable, List[List[float]]]]:
    """Build per-category training features ``x`` and labels ``y`` from a sample.

    Only constraints marked ``lazy`` and carrying a non-None ``category`` are
    included. Features are the instance features concatenated with each
    constraint's features; labels are ``[False, True]`` if the constraint was
    enforced during the MIP solve, ``[True, False]`` otherwise. Labels are
    produced only when ``sample.after_mip`` is present.

    Returns:
        Tuple ``(x, y)`` of dicts mapping category -> list of feature/label rows.
    """
    x: Dict = {}
    y: Dict = {}
    assert sample.after_load is not None
    assert sample.after_load.constraints is not None
    # Prefer post-LP features when available, falling back to post-load.
    # This choice (and the instance-feature vector) is loop-invariant, so
    # compute it once instead of on every constraint.
    sf = sample.after_lp if sample.after_lp is not None else sample.after_load
    assert sf.instance is not None
    assert sf.constraints is not None
    instance_features = list(sf.instance.to_list())
    # Labels exist only after the MIP has been solved.
    enforced = None
    if sample.after_mip is not None:
        assert sample.after_mip.extra is not None
        enforced = sample.after_mip.extra["lazy_enforced"]
    for (cid, constr) in sample.after_load.constraints.items():
        # Skip constraints that cannot be learned: non-lazy or uncategorized.
        if not constr.lazy:
            continue
        category = constr.category
        if category is None:
            continue
        if category not in x:
            x[category] = []
            y[category] = []
        # Features: instance features + this constraint's features.
        assert sf.constraints[cid] is not None
        x[category].append(instance_features + list(sf.constraints[cid].to_list()))
        # Labels (one-hot: [not enforced, enforced]).
        if enforced is not None:
            if cid in enforced:
                y[category].append([False, True])
            else:
                y[category].append([True, False])
    return x, y
@overrides @overrides
def fit_xy( def fit_xy(
self, self,

@ -130,6 +130,7 @@ class Features:
constraints: Optional[Dict[str, Constraint]] = None constraints: Optional[Dict[str, Constraint]] = None
lp_solve: Optional["LPSolveStats"] = None lp_solve: Optional["LPSolveStats"] = None
mip_solve: Optional["MIPSolveStats"] = None mip_solve: Optional["MIPSolveStats"] = None
extra: Optional[Dict] = None
@dataclass @dataclass

@ -16,6 +16,7 @@ from miplearn.features import (
InstanceFeatures, InstanceFeatures,
Features, Features,
Constraint, Constraint,
Sample,
) )
from miplearn.instance.base import Instance from miplearn.instance.base import Instance
from miplearn.solvers.internal import InternalSolver from miplearn.solvers.internal import InternalSolver
@ -25,6 +26,50 @@ from miplearn.types import (
) )
@pytest.fixture
def sample() -> Sample:
    """Return a Sample with five constraints: c1-c3 lazy type-a, c4 lazy type-b,
    and c5 non-lazy; MIP reports c1, c2, c4 as the enforced lazy constraints."""
    # Mocked feature vectors for the post-LP constraints (c5 left unmocked,
    # matching its non-lazy status).
    constraint_vectors = {
        "c1": [1.0, 1.0],
        "c2": [1.0, 2.0],
        "c3": [1.0, 3.0],
        "c4": [1.0, 4.0, 0.0],
    }
    after_load = Features(
        constraints={
            "c1": Constraint(category="type-a", lazy=True),
            "c2": Constraint(category="type-a", lazy=True),
            "c3": Constraint(category="type-a", lazy=True),
            "c4": Constraint(category="type-b", lazy=True),
            "c5": Constraint(category="type-b", lazy=False),
        }
    )
    after_lp = Features(
        instance=InstanceFeatures(),
        constraints={cid: Constraint() for cid in ("c1", "c2", "c3", "c4", "c5")},
    )
    assert after_lp.instance is not None
    assert after_lp.constraints is not None
    after_lp.instance.to_list = Mock(return_value=[5.0])  # type: ignore
    for cid, vec in constraint_vectors.items():
        after_lp.constraints[cid].to_list = Mock(return_value=vec)  # type: ignore
    return Sample(
        after_load=after_load,
        after_lp=after_lp,
        after_mip=Features(
            extra={
                "lazy_enforced": {"c1", "c2", "c4"},
            }
        ),
    )
@pytest.fixture @pytest.fixture
def instance(features: Features) -> Instance: def instance(features: Features) -> Instance:
instance = Mock(spec=Instance) instance = Mock(spec=Instance)
@ -34,7 +79,7 @@ def instance(features: Features) -> Instance:
@pytest.fixture @pytest.fixture
def sample() -> TrainingSample: def sample2() -> TrainingSample:
return TrainingSample( return TrainingSample(
lazy_enforced={"c1", "c2", "c4"}, lazy_enforced={"c1", "c2", "c4"},
) )
@ -112,7 +157,7 @@ def test_usage_with_solver(instance: Instance) -> None:
) )
) )
sample: TrainingSample = TrainingSample() sample2: TrainingSample = TrainingSample()
stats: LearningSolveStats = {} stats: LearningSolveStats = {}
# LearningSolver calls before_solve_mip # LearningSolver calls before_solve_mip
@ -122,7 +167,7 @@ def test_usage_with_solver(instance: Instance) -> None:
model=None, model=None,
stats=stats, stats=stats,
features=instance.features, features=instance.features,
training_data=sample, training_data=sample2,
) )
# Should ask ML to predict whether each lazy constraint should be enforced # Should ask ML to predict whether each lazy constraint should be enforced
@ -160,11 +205,11 @@ def test_usage_with_solver(instance: Instance) -> None:
model=None, model=None,
stats=stats, stats=stats,
features=instance.features, features=instance.features,
training_data=sample, training_data=sample2,
) )
# Should update training sample # Should update training sample
assert sample.lazy_enforced == {"c1", "c2", "c3", "c4"} assert sample2.lazy_enforced == {"c1", "c2", "c3", "c4"}
# Should update stats # Should update stats
assert stats["LazyStatic: Removed"] == 1 assert stats["LazyStatic: Removed"] == 1
@ -175,7 +220,7 @@ def test_usage_with_solver(instance: Instance) -> None:
def test_sample_predict( def test_sample_predict(
instance: Instance, instance: Instance,
sample: TrainingSample, sample2: TrainingSample,
) -> None: ) -> None:
comp = StaticLazyConstraintsComponent() comp = StaticLazyConstraintsComponent()
comp.thresholds["type-a"] = MinProbabilityThreshold([0.5, 0.5]) comp.thresholds["type-a"] = MinProbabilityThreshold([0.5, 0.5])
@ -194,7 +239,7 @@ def test_sample_predict(
[0.0, 1.0], # c4 [0.0, 1.0], # c4
] ]
) )
pred = comp.sample_predict(instance, sample) pred = comp.sample_predict(instance, sample2)
assert pred == ["c1", "c2", "c4"] assert pred == ["c1", "c2", "c4"]
@ -238,19 +283,16 @@ def test_fit_xy() -> None:
assert thr_b.fit.call_args[0][0] == clf_b # type: ignore assert thr_b.fit.call_args[0][0] == clf_b # type: ignore
def test_sample_xy( def test_sample_xy(sample: Sample) -> None:
instance: Instance,
sample: TrainingSample,
) -> None:
x_expected = { x_expected = {
"type-a": [[1.0, 1.0], [1.0, 2.0], [1.0, 3.0]], "type-a": [[5.0, 1.0, 1.0], [5.0, 1.0, 2.0], [5.0, 1.0, 3.0]],
"type-b": [[1.0, 4.0, 0.0]], "type-b": [[5.0, 1.0, 4.0, 0.0]],
} }
y_expected = { y_expected = {
"type-a": [[False, True], [False, True], [True, False]], "type-a": [[False, True], [False, True], [True, False]],
"type-b": [[False, True]], "type-b": [[False, True]],
} }
xy = StaticLazyConstraintsComponent().sample_xy_old(instance, sample) xy = StaticLazyConstraintsComponent().sample_xy(sample)
assert xy is not None assert xy is not None
x_actual, y_actual = xy x_actual, y_actual = xy
assert x_actual == x_expected assert x_actual == x_expected

Loading…
Cancel
Save