Update ObjectiveValueComponent

master
Alinson S. Xavier 5 years ago
parent a9dcdb8e4e
commit b5411b8950

--- a/miplearn/components/objective.py
+++ b/miplearn/components/objective.py
@@ -36,17 +36,16 @@ class ObjectiveValueComponent(Component):
         self.regressor_prototype = regressor
 
     @overrides
-    def before_solve_mip_old(
+    def before_solve_mip(
         self,
         solver: "LearningSolver",
         instance: Instance,
         model: Any,
         stats: LearningSolveStats,
-        features: Features,
-        training_data: TrainingSample,
+        sample: Sample,
     ) -> None:
         logger.info("Predicting optimal value...")
-        pred = self.sample_predict_old(instance, training_data)
+        pred = self.sample_predict(sample)
         for (c, v) in pred.items():
             logger.info(f"Predicted {c.lower()}: %.6e" % v)
             stats[f"Objective: Predicted {c.lower()}"] = v  # type: ignore
@@ -62,13 +61,9 @@ class ObjectiveValueComponent(Component):
             self.regressors[c] = self.regressor_prototype.clone()
             self.regressors[c].fit(x[c], y[c])
 
-    def sample_predict_old(
-        self,
-        instance: Instance,
-        sample: TrainingSample,
-    ) -> Dict[str, float]:
+    def sample_predict(self, sample: Sample) -> Dict[str, float]:
         pred: Dict[str, float] = {}
-        x, _ = self.sample_xy_old(instance, sample)
+        x, _ = self.sample_xy(None, sample)
         for c in ["Upper bound", "Lower bound"]:
             if c in self.regressors is not None:
                 pred[c] = self.regressors[c].predict(np.array(x[c]))[0, 0]
@@ -76,28 +71,6 @@ class ObjectiveValueComponent(Component):
                 logger.info(f"{c} regressor not fitted. Skipping.")
         return pred
 
-    @overrides
-    def sample_xy_old(
-        self,
-        instance: Instance,
-        sample: TrainingSample,
-    ) -> Tuple[Dict[Hashable, List[List[float]]], Dict[Hashable, List[List[float]]]]:
-        ifeatures = instance.features.instance
-        assert ifeatures is not None
-        assert ifeatures.user_features is not None
-        x: Dict[Hashable, List[List[float]]] = {}
-        y: Dict[Hashable, List[List[float]]] = {}
-        f = list(ifeatures.user_features)
-        if sample.lp_value is not None:
-            f += [sample.lp_value]
-        x["Upper bound"] = [f]
-        x["Lower bound"] = [f]
-        if sample.lower_bound is not None:
-            y["Lower bound"] = [[sample.lower_bound]]
-        if sample.upper_bound is not None:
-            y["Upper bound"] = [[sample.upper_bound]]
-        return x, y
-
     @overrides
     def sample_xy(
         self,
@@ -133,11 +106,14 @@ class ObjectiveValueComponent(Component):
         return x, y
 
     @overrides
-    def sample_evaluate_old(
+    def sample_evaluate(
         self,
         instance: Instance,
-        sample: TrainingSample,
+        sample: Sample,
     ) -> Dict[Hashable, Dict[str, float]]:
+        assert sample.after_mip is not None
+        assert sample.after_mip.mip_solve is not None
+
         def compare(y_pred: float, y_actual: float) -> Dict[str, float]:
             err = np.round(abs(y_pred - y_actual), 8)
             return {
@@ -148,16 +124,11 @@ class ObjectiveValueComponent(Component):
             }
 
         result: Dict[Hashable, Dict[str, float]] = {}
-        pred = self.sample_predict_old(instance, sample)
-        if sample.upper_bound is not None:
-            result["Upper bound"] = compare(pred["Upper bound"], sample.upper_bound)
-        if sample.lower_bound is not None:
-            result["Lower bound"] = compare(pred["Lower bound"], sample.lower_bound)
+        pred = self.sample_predict(sample)
+        actual_ub = sample.after_mip.mip_solve.mip_upper_bound
+        actual_lb = sample.after_mip.mip_solve.mip_lower_bound
+        if actual_ub is not None:
+            result["Upper bound"] = compare(pred["Upper bound"], actual_ub)
+        if actual_lb is not None:
+            result["Lower bound"] = compare(pred["Lower bound"], actual_lb)
         return result
-
-    @overrides
-    def fit(
-        self,
-        training_instances: List[Instance],
-    ) -> None:
-        return
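For context (an editorial aside, not part of this commit): a minimal sketch of how the migrated, Sample-based API of this component is exercised, mirroring the updated tests in the second file below. The sample object is assumed to be a miplearn.features.Sample collected from a previously solved training instance; the explicit np.array conversion simply matches the array inputs used by test_fit_xy.

    import numpy as np

    from miplearn.components.objective import ObjectiveValueComponent

    # `sample` is assumed to come from a previously solved training instance
    # (or from a fixture such as the `sample` fixture in the test file below).
    comp = ObjectiveValueComponent()

    # Features and labels are now extracted from the sample alone; the instance
    # argument of sample_xy is unused by this component, hence the None.
    x, y = comp.sample_xy(None, sample)

    # fit_xy expects arrays keyed by bound name ("Lower bound" / "Upper bound").
    comp.fit_xy(
        {k: np.array(v) for k, v in x.items()},
        {k: np.array(v) for k, v in y.items()},
    )

    # Predictions are returned as a dict keyed by the same bound names.
    pred = comp.sample_predict(sample)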

--- a/tests/components/test_objective.py
+++ b/tests/components/test_objective.py
@@ -10,38 +10,12 @@ from numpy.testing import assert_array_equal
 
 from miplearn.classifiers import Regressor
 from miplearn.components.objective import ObjectiveValueComponent
-from miplearn.features import TrainingSample, InstanceFeatures, Features, Sample
-from miplearn.instance.base import Instance
+from miplearn.features import InstanceFeatures, Features, Sample
 from miplearn.solvers.internal import MIPSolveStats, LPSolveStats
 from miplearn.solvers.learning import LearningSolver
 from miplearn.solvers.pyomo.gurobi import GurobiPyomoSolver
 
 
-@pytest.fixture
-def instance_old(features_old: Features) -> Instance:
-    instance = Mock(spec=Instance)
-    instance.features = features_old
-    return instance
-
-
-@pytest.fixture
-def features_old() -> Features:
-    return Features(
-        instance=InstanceFeatures(
-            user_features=[1.0, 2.0],
-        )
-    )
-
-
-@pytest.fixture
-def sample_old() -> TrainingSample:
-    return TrainingSample(
-        lower_bound=1.0,
-        upper_bound=2.0,
-        lp_value=3.0,
-    )
-
-
 @pytest.fixture
 def sample() -> Sample:
     sample = Sample(
@@ -63,22 +37,6 @@ def sample() -> Sample:
     return sample
 
 
-@pytest.fixture
-def sample_without_lp() -> TrainingSample:
-    return TrainingSample(
-        lower_bound=1.0,
-        upper_bound=2.0,
-    )
-
-
-@pytest.fixture
-def sample_without_ub_old() -> TrainingSample:
-    return TrainingSample(
-        lower_bound=1.0,
-        lp_value=3.0,
-    )
-
-
 def test_sample_xy(sample: Sample) -> None:
     x_expected = {
         "Lower bound": [[1.0, 2.0, 3.0]],
@@ -95,41 +53,6 @@ def test_sample_xy(sample: Sample) -> None:
     assert y_actual == y_expected
 
 
-def test_sample_xy_without_lp_old(
-    instance_old: Instance,
-    sample_without_lp: TrainingSample,
-) -> None:
-    x_expected = {
-        "Lower bound": [[1.0, 2.0]],
-        "Upper bound": [[1.0, 2.0]],
-    }
-    y_expected = {
-        "Lower bound": [[1.0]],
-        "Upper bound": [[2.0]],
-    }
-    xy = ObjectiveValueComponent().sample_xy_old(instance_old, sample_without_lp)
-    assert xy is not None
-    x_actual, y_actual = xy
-    assert x_actual == x_expected
-    assert y_actual == y_expected
-
-
-def test_sample_xy_without_ub_old(
-    instance_old: Instance,
-    sample_without_ub_old: TrainingSample,
-) -> None:
-    x_expected = {
-        "Lower bound": [[1.0, 2.0, 3.0]],
-        "Upper bound": [[1.0, 2.0, 3.0]],
-    }
-    y_expected = {"Lower bound": [[1.0]]}
-    xy = ObjectiveValueComponent().sample_xy_old(instance_old, sample_without_ub_old)
-    assert xy is not None
-    x_actual, y_actual = xy
-    assert x_actual == x_expected
-    assert y_actual == y_expected
-
-
 def test_fit_xy() -> None:
     x: Dict[Hashable, np.ndarray] = {
         "Lower bound": np.array([[0.0, 0.0], [1.0, 2.0]]),
@@ -168,39 +91,8 @@ def test_fit_xy() -> None:
     )
 
 
-def test_fit_xy_without_ub() -> None:
-    x: Dict[Hashable, np.ndarray] = {
-        "Lower bound": np.array([[0.0, 0.0], [1.0, 2.0]]),
-        "Upper bound": np.array([[0.0, 0.0], [1.0, 2.0]]),
-    }
-    y: Dict[Hashable, np.ndarray] = {
-        "Lower bound": np.array([[100.0]]),
-    }
-    reg = Mock(spec=Regressor)
-    reg.clone = Mock(side_effect=lambda: Mock(spec=Regressor))
-    comp = ObjectiveValueComponent(regressor=reg)
-    assert "Upper bound" not in comp.regressors
-    assert "Lower bound" not in comp.regressors
-    comp.fit_xy(x, y)
-    assert reg.clone.call_count == 1
-    assert "Upper bound" not in comp.regressors
-    assert "Lower bound" in comp.regressors
-    assert comp.regressors["Lower bound"].fit.call_count == 1  # type: ignore
-    assert_array_equal(
-        comp.regressors["Lower bound"].fit.call_args[0][0],  # type: ignore
-        x["Lower bound"],
-    )
-    assert_array_equal(
-        comp.regressors["Lower bound"].fit.call_args[0][1],  # type: ignore
-        y["Lower bound"],
-    )
-
-
-def test_sample_predict(
-    instance_old: Instance,
-    sample_old: TrainingSample,
-) -> None:
-    x, y = ObjectiveValueComponent().sample_xy_old(instance_old, sample_old)
+def test_sample_predict(sample: Sample) -> None:
+    x, y = ObjectiveValueComponent().sample_xy(None, sample)
     comp = ObjectiveValueComponent()
     comp.regressors["Lower bound"] = Mock(spec=Regressor)
     comp.regressors["Upper bound"] = Mock(spec=Regressor)
@@ -210,7 +102,7 @@ def test_sample_predict(
     comp.regressors["Upper bound"].predict = Mock(  # type: ignore
         side_effect=lambda _: np.array([[60.0]])
    )
-    pred = comp.sample_predict_old(instance_old, sample_old)
+    pred = comp.sample_predict(sample)
     assert pred == {
         "Lower bound": 50.0,
         "Upper bound": 60.0,
@@ -225,36 +117,13 @@ def test_sample_predict(
     )
 
 
-def test_sample_predict_without_ub_old(
-    instance_old: Instance,
-    sample_without_ub_old: TrainingSample,
-) -> None:
-    x, y = ObjectiveValueComponent().sample_xy_old(instance_old, sample_without_ub_old)
-    comp = ObjectiveValueComponent()
-    comp.regressors["Lower bound"] = Mock(spec=Regressor)
-    comp.regressors["Lower bound"].predict = Mock(  # type: ignore
-        side_effect=lambda _: np.array([[50.0]])
-    )
-    pred = comp.sample_predict_old(instance_old, sample_without_ub_old)
-    assert pred == {
-        "Lower bound": 50.0,
-    }
-    assert_array_equal(
-        comp.regressors["Lower bound"].predict.call_args[0][0],  # type: ignore
-        x["Lower bound"],
-    )
-
-
-def test_sample_evaluate_old(
-    instance_old: Instance,
-    sample_old: TrainingSample,
-) -> None:
+def test_sample_evaluate(sample: Sample) -> None:
     comp = ObjectiveValueComponent()
     comp.regressors["Lower bound"] = Mock(spec=Regressor)
     comp.regressors["Lower bound"].predict = lambda _: np.array([[1.05]])  # type: ignore
     comp.regressors["Upper bound"] = Mock(spec=Regressor)
     comp.regressors["Upper bound"].predict = lambda _: np.array([[2.50]])  # type: ignore
-    ev = comp.sample_evaluate_old(instance_old, sample_old)
+    ev = comp.sample_evaluate(None, sample)
     assert ev == {
         "Lower bound": {
             "Actual value": 1.0,
