mirror of https://github.com/ANL-CEEESA/MIPLearn.git (synced 2025-12-06 09:28:51 -06:00)
Objective: Refactoring
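Replace ObjectiveValueComponent's separate lb_regressor/ub_regressor attributes with a single regressors dictionary keyed by bound name, and generalize fit_xy, sample_predict, sample_xy and sample_evaluate into loops over ["Upper bound", "Lower bound"]. Rename the stats keys "Objective: Predicted LB"/"Objective: Predicted UB" to "Objective: Predicted lower bound"/"Objective: Predicted upper bound", drop the long commented-out evaluate method, and update the tests to match.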
@@ -171,11 +171,11 @@ def charts(args):
     if (sense == "min").any():
         primal_column = "Relative upper bound"
         obj_column = "Upper bound"
-        predicted_obj_column = "Objective: Predicted UB"
+        predicted_obj_column = "Objective: Predicted upper bound"
     else:
         primal_column = "Relative lower bound"
         obj_column = "Lower bound"
-        predicted_obj_column = "Objective: Predicted LB"
+        predicted_obj_column = "Objective: Predicted lower bound"

     palette = {"baseline": "#9b59b6", "ml-exact": "#3498db", "ml-heuristic": "#95a5a6"}
     fig, (ax1, ax2, ax3, ax4) = plt.subplots(
@@ -38,11 +38,8 @@ class ObjectiveValueComponent(Component):
         regressor: Regressor = ScikitLearnRegressor(LinearRegression()),
     ) -> None:
         assert isinstance(regressor, Regressor)
-        self.ub_regressor: Optional[Regressor] = None
-        self.lb_regressor: Optional[Regressor] = None
+        self.regressors: Dict[str, Regressor] = {}
         self.regressor_prototype = regressor
-        self._predicted_ub: Optional[float] = None
-        self._predicted_lb: Optional[float] = None

     def before_solve_mip(
         self,
@@ -55,63 +52,19 @@ class ObjectiveValueComponent(Component):
     ) -> None:
         logger.info("Predicting optimal value...")
         pred = self.sample_predict(features, training_data)
-        if "Upper bound" in pred:
-            ub = pred["Upper bound"]
-            logger.info("Predicted upper bound: %.6e" % ub)
-            stats["Objective: Predicted UB"] = ub
-        if "Lower bound" in pred:
-            lb = pred["Lower bound"]
-            logger.info("Predicted lower bound: %.6e" % lb)
-            stats["Objective: Predicted LB"] = lb
+        for (c, v) in pred.items():
+            logger.info(f"Predicted {c.lower()}: %.6e" % v)
+            stats[f"Objective: Predicted {c.lower()}"] = v  # type: ignore

     def fit_xy(
         self,
         x: Dict[str, np.ndarray],
         y: Dict[str, np.ndarray],
     ) -> None:
-        if "Lower bound" in y:
-            self.lb_regressor = self.regressor_prototype.clone()
-            self.lb_regressor.fit(x["Lower bound"], y["Lower bound"])
-        if "Upper bound" in y:
-            self.ub_regressor = self.regressor_prototype.clone()
-            self.ub_regressor.fit(x["Upper bound"], y["Upper bound"])
-
-    # def evaluate(
-    #     self,
-    #     instances: Union[List[str], List[Instance]],
-    # ) -> Dict[str, Dict[str, float]]:
-    #     y_pred = self.predict(instances)
-    #     y_true = np.array(
-    #         [
-    #             [
-    #                 inst.training_data[0]["Lower bound"],
-    #                 inst.training_data[0]["Upper bound"],
-    #             ]
-    #             for inst in InstanceIterator(instances)
-    #         ]
-    #     )
-    #     y_pred_lb = y_pred["Lower bound"]
-    #     y_pred_ub = y_pred["Upper bound"]
-    #     y_true_lb, y_true_ub = y_true[:, 1], y_true[:, 1]
-    #     ev = {
-    #         "Lower bound": {
-    #             "Mean squared error": mean_squared_error(y_true_lb, y_pred_lb),
-    #             "Explained variance": explained_variance_score(y_true_lb, y_pred_lb),
-    #             "Max error": max_error(y_true_lb, y_pred_lb),
-    #             "Mean absolute error": mean_absolute_error(y_true_lb, y_pred_lb),
-    #             "R2": r2_score(y_true_lb, y_pred_lb),
-    #             "Median absolute error": mean_absolute_error(y_true_lb, y_pred_lb),
-    #         },
-    #         "Upper bound": {
-    #             "Mean squared error": mean_squared_error(y_true_ub, y_pred_ub),
-    #             "Explained variance": explained_variance_score(y_true_ub, y_pred_ub),
-    #             "Max error": max_error(y_true_ub, y_pred_ub),
-    #             "Mean absolute error": mean_absolute_error(y_true_ub, y_pred_ub),
-    #             "R2": r2_score(y_true_ub, y_pred_ub),
-    #             "Median absolute error": mean_absolute_error(y_true_ub, y_pred_ub),
-    #         },
-    #     }
-    #     return ev
+        for c in ["Upper bound", "Lower bound"]:
+            if c in y:
+                self.regressors[c] = self.regressor_prototype.clone()
+                self.regressors[c].fit(x[c], y[c])

     def sample_predict(
         self,
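The prototype-and-clone pattern above gives each bound its own independently trained model while sharing one user-supplied configuration. A minimal standalone sketch of the same idea, using scikit-learn's clone in place of MIPLearn's Regressor.clone (stub data, not the project's actual API):

    import numpy as np
    from sklearn.base import clone
    from sklearn.linear_model import LinearRegression

    # Shared prototype; one fresh, independently fitted copy per target.
    prototype = LinearRegression()
    regressors = {}

    x = {c: np.array([[1.0, 2.0], [2.0, 3.0]]) for c in ["Upper bound", "Lower bound"]}
    y = {
        "Upper bound": np.array([[10.0], [20.0]]),
        "Lower bound": np.array([[8.0], [16.0]]),
    }

    for c in ["Upper bound", "Lower bound"]:
        if c in y:
            regressors[c] = clone(prototype)  # unfitted copy of the prototype
            regressors[c].fit(x[c], y[c])

Cloning per target matters because a single fitted estimator cannot hold separate coefficients for both bounds.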
@@ -120,16 +73,11 @@ class ObjectiveValueComponent(Component):
     ) -> Dict[str, float]:
         pred: Dict[str, float] = {}
         x, _ = self.sample_xy(features, sample)
-        if self.lb_regressor is not None:
-            lb_pred = self.lb_regressor.predict(np.array(x["Lower bound"]))
-            pred["Lower bound"] = lb_pred[0, 0]
-        else:
-            logger.info("Lower bound regressor not fitted. Skipping.")
-        if self.ub_regressor is not None:
-            ub_pred = self.ub_regressor.predict(np.array(x["Upper bound"]))
-            pred["Upper bound"] = ub_pred[0, 0]
-        else:
-            logger.info("Upper bound regressor not fitted. Skipping.")
+        for c in ["Upper bound", "Lower bound"]:
+            if c in self.regressors:
+                pred[c] = self.regressors[c].predict(np.array(x[c]))[0, 0]
+            else:
+                logger.info(f"{c} regressor not fitted. Skipping.")
         return pred

     @staticmethod
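In the rewritten loop, each regressor sees a single feature row, and predict is expected to return a 2-D array with one row per sample and one column per target, so the [0, 0] index extracts the lone scalar; the tests below encode the same assumption with np.array([[50.0]]). A tiny illustration with hypothetical values:

    import numpy as np

    # predict() on one sample is assumed to yield shape (1, 1);
    # [0, 0] recovers the scalar that is stored in pred[c].
    prediction = np.array([[60.0]])
    assert prediction[0, 0] == 60.0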
@@ -142,12 +90,10 @@ class ObjectiveValueComponent(Component):
         f = list(features["Instance"]["User features"])
         if "LP value" in sample and sample["LP value"] is not None:
             f += [sample["LP value"]]
-        x["Lower bound"] = [f]
-        x["Upper bound"] = [f]
-        if "Lower bound" in sample and sample["Lower bound"] is not None:
-            y["Lower bound"] = [[sample["Lower bound"]]]
-        if "Upper bound" in sample and sample["Upper bound"] is not None:
-            y["Upper bound"] = [[sample["Upper bound"]]]
+        for c in ["Upper bound", "Lower bound"]:
+            x[c] = [f]
+            if c in sample and sample[c] is not None:  # type: ignore
+                y[c] = [[sample[c]]]  # type: ignore
         return x, y

     def sample_evaluate(
@@ -166,8 +112,7 @@ class ObjectiveValueComponent(Component):

         result: Dict[Hashable, Dict[str, float]] = {}
         pred = self.sample_predict(features, sample)
-        if "Upper bound" in sample and sample["Upper bound"] is not None:
-            result["Upper bound"] = compare(pred["Upper bound"], sample["Upper bound"])
-        if "Lower bound" in sample and sample["Lower bound"] is not None:
-            result["Lower bound"] = compare(pred["Lower bound"], sample["Lower bound"])
+        for c in ["Upper bound", "Lower bound"]:
+            if c in sample and sample[c] is not None:  # type: ignore
+                result[c] = compare(pred[c], sample[c])  # type: ignore
         return result
@@ -59,8 +59,8 @@ LearningSolveStats = TypedDict(
         "MIP log": str,
         "Mode": str,
         "Nodes": Optional[int],
-        "Objective: Predicted LB": float,
-        "Objective: Predicted UB": float,
+        "Objective: Predicted lower bound": float,
+        "Objective: Predicted upper bound": float,
         "Primal: Free": int,
         "Primal: One": int,
         "Primal: Zero": int,
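Because these keys contain spaces and colons, LearningSolveStats has to use the functional TypedDict form shown above, and entries are read and written by subscripting. A minimal sketch in the same style, limited to the two renamed keys (total=False is an assumption here, since a solve may produce neither prediction):

    from typing import TypedDict

    # Keys with spaces/colons are only expressible in the functional form.
    MiniStats = TypedDict(
        "MiniStats",
        {
            "Objective: Predicted lower bound": float,
            "Objective: Predicted upper bound": float,
        },
        total=False,
    )

    stats: MiniStats = {}
    stats["Objective: Predicted lower bound"] = 2.61e03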
@@ -114,18 +114,30 @@ def test_fit_xy() -> None:
     reg = Mock(spec=Regressor)
     reg.clone = Mock(side_effect=lambda: Mock(spec=Regressor))
     comp = ObjectiveValueComponent(regressor=reg)
-    assert comp.ub_regressor is None
-    assert comp.lb_regressor is None
+    assert "Upper bound" not in comp.regressors
+    assert "Lower bound" not in comp.regressors
     comp.fit_xy(x, y)
     assert reg.clone.call_count == 2
-    assert comp.ub_regressor is not None
-    assert comp.lb_regressor is not None
-    assert comp.ub_regressor.fit.call_count == 1
-    assert comp.lb_regressor.fit.call_count == 1
-    assert_array_equal(comp.ub_regressor.fit.call_args[0][0], x["Upper bound"])
-    assert_array_equal(comp.lb_regressor.fit.call_args[0][0], x["Lower bound"])
-    assert_array_equal(comp.ub_regressor.fit.call_args[0][1], y["Upper bound"])
-    assert_array_equal(comp.lb_regressor.fit.call_args[0][1], y["Lower bound"])
+    assert "Upper bound" in comp.regressors
+    assert "Lower bound" in comp.regressors
+    assert comp.regressors["Upper bound"].fit.call_count == 1  # type: ignore
+    assert comp.regressors["Lower bound"].fit.call_count == 1  # type: ignore
+    assert_array_equal(
+        comp.regressors["Upper bound"].fit.call_args[0][0],  # type: ignore
+        x["Upper bound"],
+    )
+    assert_array_equal(
+        comp.regressors["Lower bound"].fit.call_args[0][0],  # type: ignore
+        x["Lower bound"],
+    )
+    assert_array_equal(
+        comp.regressors["Upper bound"].fit.call_args[0][1],  # type: ignore
+        y["Upper bound"],
+    )
+    assert_array_equal(
+        comp.regressors["Lower bound"].fit.call_args[0][1],  # type: ignore
+        y["Lower bound"],
+    )


 def test_fit_xy_without_ub() -> None:
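The tests spec their mocks against Regressor so that touching a nonexistent attribute fails loudly, and give clone a side_effect that returns a fresh spec'd mock on every call; that is what makes reg.clone.call_count and the per-bound fit.call_args assertions above possible. A condensed standalone sketch of the pattern (the Regressor stub is hypothetical, standing in for MIPLearn's class):

    from unittest.mock import Mock

    class Regressor:  # hypothetical stub of the real interface
        def clone(self): ...
        def fit(self, x, y): ...

    reg = Mock(spec=Regressor)
    reg.clone = Mock(side_effect=lambda: Mock(spec=Regressor))

    first, second = reg.clone(), reg.clone()
    assert reg.clone.call_count == 2
    assert first is not second                   # every clone is independent
    first.fit([[1.0]], [[2.0]])
    assert first.fit.call_args[0][0] == [[1.0]]  # positional args are recorded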
@@ -139,15 +151,21 @@ def test_fit_xy_without_ub() -> None:
     reg = Mock(spec=Regressor)
     reg.clone = Mock(side_effect=lambda: Mock(spec=Regressor))
     comp = ObjectiveValueComponent(regressor=reg)
-    assert comp.ub_regressor is None
-    assert comp.lb_regressor is None
+    assert "Upper bound" not in comp.regressors
+    assert "Lower bound" not in comp.regressors
     comp.fit_xy(x, y)
     assert reg.clone.call_count == 1
-    assert comp.ub_regressor is None
-    assert comp.lb_regressor is not None
-    assert comp.lb_regressor.fit.call_count == 1
-    assert_array_equal(comp.lb_regressor.fit.call_args[0][0], x["Lower bound"])
-    assert_array_equal(comp.lb_regressor.fit.call_args[0][1], y["Lower bound"])
+    assert "Upper bound" not in comp.regressors
+    assert "Lower bound" in comp.regressors
+    assert comp.regressors["Lower bound"].fit.call_count == 1  # type: ignore
+    assert_array_equal(
+        comp.regressors["Lower bound"].fit.call_args[0][0],  # type: ignore
+        x["Lower bound"],
+    )
+    assert_array_equal(
+        comp.regressors["Lower bound"].fit.call_args[0][1],  # type: ignore
+        y["Lower bound"],
+    )


 def test_sample_predict(
@@ -156,17 +174,27 @@ def test_sample_predict(
 ) -> None:
     x, y = ObjectiveValueComponent.sample_xy(features, sample)
     comp = ObjectiveValueComponent()
-    comp.lb_regressor = Mock(spec=Regressor)
-    comp.ub_regressor = Mock(spec=Regressor)
-    comp.lb_regressor.predict = Mock(side_effect=lambda _: np.array([[50.0]]))
-    comp.ub_regressor.predict = Mock(side_effect=lambda _: np.array([[60.0]]))
+    comp.regressors["Lower bound"] = Mock(spec=Regressor)
+    comp.regressors["Upper bound"] = Mock(spec=Regressor)
+    comp.regressors["Lower bound"].predict = Mock(  # type: ignore
+        side_effect=lambda _: np.array([[50.0]])
+    )
+    comp.regressors["Upper bound"].predict = Mock(  # type: ignore
+        side_effect=lambda _: np.array([[60.0]])
+    )
     pred = comp.sample_predict(features, sample)
     assert pred == {
         "Lower bound": 50.0,
         "Upper bound": 60.0,
     }
-    assert_array_equal(comp.ub_regressor.predict.call_args[0][0], x["Upper bound"])
-    assert_array_equal(comp.lb_regressor.predict.call_args[0][0], x["Lower bound"])
+    assert_array_equal(
+        comp.regressors["Upper bound"].predict.call_args[0][0],  # type: ignore
+        x["Upper bound"],
+    )
+    assert_array_equal(
+        comp.regressors["Lower bound"].predict.call_args[0][0],  # type: ignore
+        x["Lower bound"],
+    )


 def test_sample_predict_without_ub(
@@ -175,21 +203,26 @@ def test_sample_predict_without_ub(
 ) -> None:
     x, y = ObjectiveValueComponent.sample_xy(features, sample_without_ub)
     comp = ObjectiveValueComponent()
-    comp.lb_regressor = Mock(spec=Regressor)
-    comp.lb_regressor.predict = Mock(side_effect=lambda _: np.array([[50.0]]))
+    comp.regressors["Lower bound"] = Mock(spec=Regressor)
+    comp.regressors["Lower bound"].predict = Mock(  # type: ignore
+        side_effect=lambda _: np.array([[50.0]])
+    )
     pred = comp.sample_predict(features, sample_without_ub)
     assert pred == {
         "Lower bound": 50.0,
     }
-    assert_array_equal(comp.lb_regressor.predict.call_args[0][0], x["Lower bound"])
+    assert_array_equal(
+        comp.regressors["Lower bound"].predict.call_args[0][0],  # type: ignore
+        x["Lower bound"],
+    )


 def test_sample_evaluate(features: Features, sample: TrainingSample) -> None:
     comp = ObjectiveValueComponent()
-    comp.lb_regressor = Mock(spec=Regressor)
-    comp.lb_regressor.predict = lambda _: np.array([[1.05]])
-    comp.ub_regressor = Mock(spec=Regressor)
-    comp.ub_regressor.predict = lambda _: np.array([[2.50]])
+    comp.regressors["Lower bound"] = Mock(spec=Regressor)
+    comp.regressors["Lower bound"].predict = lambda _: np.array([[1.05]])  # type: ignore
+    comp.regressors["Upper bound"] = Mock(spec=Regressor)
+    comp.regressors["Upper bound"].predict = lambda _: np.array([[2.50]])  # type: ignore
     ev = comp.sample_evaluate(features, sample)
     assert ev == {
         "Lower bound": {
@@ -213,5 +246,5 @@ def test_usage() -> None:
     solver.solve(instance)
     solver.fit([instance])
     stats = solver.solve(instance)
-    assert stats["Lower bound"] == stats["Objective: Predicted LB"]
-    assert stats["Upper bound"] == stats["Objective: Predicted UB"]
+    assert stats["Lower bound"] == stats["Objective: Predicted lower bound"]
+    assert stats["Upper bound"] == stats["Objective: Predicted upper bound"]
@@ -133,7 +133,7 @@ def test_simulate_perfect():
         simulate_perfect=True,
     )
     stats = solver.solve(tmp.name)
-    assert stats["Lower bound"] == stats["Objective: Predicted LB"]
+    assert stats["Lower bound"] == stats["Objective: Predicted lower bound"]


 def test_gap():