mirror of https://github.com/ANL-CEEESA/MIPLearn.git

Objective: Refactoring
@@ -38,11 +38,8 @@ class ObjectiveValueComponent(Component):
         regressor: Regressor = ScikitLearnRegressor(LinearRegression()),
     ) -> None:
         assert isinstance(regressor, Regressor)
-        self.ub_regressor: Optional[Regressor] = None
-        self.lb_regressor: Optional[Regressor] = None
+        self.regressors: Dict[str, Regressor] = {}
         self.regressor_prototype = regressor
-        self._predicted_ub: Optional[float] = None
-        self._predicted_lb: Optional[float] = None
 
     def before_solve_mip(
         self,
@@ -55,63 +52,19 @@ class ObjectiveValueComponent(Component):
     ) -> None:
         logger.info("Predicting optimal value...")
         pred = self.sample_predict(features, training_data)
-        if "Upper bound" in pred:
-            ub = pred["Upper bound"]
-            logger.info("Predicted upper bound: %.6e" % ub)
-            stats["Objective: Predicted UB"] = ub
-        if "Lower bound" in pred:
-            lb = pred["Lower bound"]
-            logger.info("Predicted lower bound: %.6e" % lb)
-            stats["Objective: Predicted LB"] = lb
+        for (c, v) in pred.items():
+            logger.info(f"Predicted {c.lower()}: %.6e" % v)
+            stats[f"Objective: Predicted {c.lower()}"] = v  # type: ignore
 
     def fit_xy(
         self,
         x: Dict[str, np.ndarray],
         y: Dict[str, np.ndarray],
     ) -> None:
-        if "Lower bound" in y:
-            self.lb_regressor = self.regressor_prototype.clone()
-            self.lb_regressor.fit(x["Lower bound"], y["Lower bound"])
-        if "Upper bound" in y:
-            self.ub_regressor = self.regressor_prototype.clone()
-            self.ub_regressor.fit(x["Upper bound"], y["Upper bound"])
-
-    # def evaluate(
-    #     self,
-    #     instances: Union[List[str], List[Instance]],
-    # ) -> Dict[str, Dict[str, float]]:
-    #     y_pred = self.predict(instances)
-    #     y_true = np.array(
-    #         [
-    #             [
-    #                 inst.training_data[0]["Lower bound"],
-    #                 inst.training_data[0]["Upper bound"],
-    #             ]
-    #             for inst in InstanceIterator(instances)
-    #         ]
-    #     )
-    #     y_pred_lb = y_pred["Lower bound"]
-    #     y_pred_ub = y_pred["Upper bound"]
-    #     y_true_lb, y_true_ub = y_true[:, 1], y_true[:, 1]
-    #     ev = {
-    #         "Lower bound": {
-    #             "Mean squared error": mean_squared_error(y_true_lb, y_pred_lb),
-    #             "Explained variance": explained_variance_score(y_true_lb, y_pred_lb),
-    #             "Max error": max_error(y_true_lb, y_pred_lb),
-    #             "Mean absolute error": mean_absolute_error(y_true_lb, y_pred_lb),
-    #             "R2": r2_score(y_true_lb, y_pred_lb),
-    #             "Median absolute error": mean_absolute_error(y_true_lb, y_pred_lb),
-    #         },
-    #         "Upper bound": {
-    #             "Mean squared error": mean_squared_error(y_true_ub, y_pred_ub),
-    #             "Explained variance": explained_variance_score(y_true_ub, y_pred_ub),
-    #             "Max error": max_error(y_true_ub, y_pred_ub),
-    #             "Mean absolute error": mean_absolute_error(y_true_ub, y_pred_ub),
-    #             "R2": r2_score(y_true_ub, y_pred_ub),
-    #             "Median absolute error": mean_absolute_error(y_true_ub, y_pred_ub),
-    #         },
-    #     }
-    #     return ev
+        for c in ["Upper bound", "Lower bound"]:
+            if c in y:
+                self.regressors[c] = self.regressor_prototype.clone()
+                self.regressors[c].fit(x[c], y[c])
 
     def sample_predict(
         self,
@@ -120,16 +73,11 @@ class ObjectiveValueComponent(Component):
     ) -> Dict[str, float]:
         pred: Dict[str, float] = {}
         x, _ = self.sample_xy(features, sample)
-        if self.lb_regressor is not None:
-            lb_pred = self.lb_regressor.predict(np.array(x["Lower bound"]))
-            pred["Lower bound"] = lb_pred[0, 0]
-        else:
-            logger.info("Lower bound regressor not fitted. Skipping.")
-        if self.ub_regressor is not None:
-            ub_pred = self.ub_regressor.predict(np.array(x["Upper bound"]))
-            pred["Upper bound"] = ub_pred[0, 0]
-        else:
-            logger.info("Upper bound regressor not fitted. Skipping.")
+        for c in ["Upper bound", "Lower bound"]:
+            if c in self.regressors:
+                pred[c] = self.regressors[c].predict(np.array(x[c]))[0, 0]
+            else:
+                logger.info(f"{c} regressor not fitted. Skipping.")
         return pred
 
     @staticmethod
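The [0, 0] indexing in sample_predict assumes the regressor returns a 2-D array, which holds because sample_xy below supplies y[c] as [[sample[c]]] (one row, one column). A minimal shape sketch with plain scikit-learn, not MIPLearn's Regressor wrapper, and made-up toy data:

# Shape check (plain scikit-learn stand-in, toy numbers; illustrative only).
# fit_xy fits on y[c] = [[sample[c]]], i.e. an (n_samples, 1) target, so
# predict() on a single (1, n_features) row returns a (1, 1) array and
# predict(...)[0, 0] is the scalar prediction.
import numpy as np
from sklearn.linear_model import LinearRegression

x_train = np.array([[1.0, 2.0], [2.0, 4.0], [3.0, 6.0]])  # (n_samples, n_features)
y_train = np.array([[10.0], [20.0], [30.0]])              # (n_samples, 1)

reg = LinearRegression().fit(x_train, y_train)
pred = reg.predict(np.array([[4.0, 8.0]]))                # shape (1, 1)
print(pred.shape, pred[0, 0])                             # (1, 1) ~40.0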
@@ -142,12 +90,10 @@ class ObjectiveValueComponent(Component):
         f = list(features["Instance"]["User features"])
         if "LP value" in sample and sample["LP value"] is not None:
             f += [sample["LP value"]]
-        x["Lower bound"] = [f]
-        x["Upper bound"] = [f]
-        if "Lower bound" in sample and sample["Lower bound"] is not None:
-            y["Lower bound"] = [[sample["Lower bound"]]]
-        if "Upper bound" in sample and sample["Upper bound"] is not None:
-            y["Upper bound"] = [[sample["Upper bound"]]]
+        for c in ["Upper bound", "Lower bound"]:
+            x[c] = [f]
+            if c in sample and sample[c] is not None:  # type: ignore
+                y[c] = [[sample[c]]]  # type: ignore
         return x, y
 
     def sample_evaluate(
@@ -166,8 +112,7 @@ class ObjectiveValueComponent(Component):
 
         result: Dict[Hashable, Dict[str, float]] = {}
         pred = self.sample_predict(features, sample)
-        if "Upper bound" in sample and sample["Upper bound"] is not None:
-            result["Upper bound"] = compare(pred["Upper bound"], sample["Upper bound"])
-        if "Lower bound" in sample and sample["Lower bound"] is not None:
-            result["Lower bound"] = compare(pred["Lower bound"], sample["Lower bound"])
+        for c in ["Upper bound", "Lower bound"]:
+            if c in sample and sample[c] is not None:  # type: ignore
+                result[c] = compare(pred[c], sample[c])  # type: ignore
         return result
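Taken together, the hunks above collapse the duplicated upper-/lower-bound branches into a single regressors dict keyed by bound name, with one clone of the prototype regressor per bound. A self-contained sketch of that pattern, using sklearn.base.clone and toy data in place of MIPLearn's Regressor.clone() and real instance features (both substitutions are assumptions, for illustration only):

# Illustrative stand-alone version of the dict-of-regressors pattern.
from typing import Dict
import numpy as np
from sklearn.base import clone
from sklearn.linear_model import LinearRegression

prototype = LinearRegression()
regressors: Dict[str, LinearRegression] = {}

# Toy training data: same features for both bounds, different targets.
x = {c: np.array([[1.0, 5.0], [2.0, 7.0], [3.0, 9.0]])
     for c in ["Upper bound", "Lower bound"]}
y = {"Upper bound": np.array([[12.0], [15.0], [18.0]]),
     "Lower bound": np.array([[10.0], [13.0], [16.0]])}

# Fit: one clone of the prototype per bound that has labels available.
for c in ["Upper bound", "Lower bound"]:
    if c in y:
        regressors[c] = clone(prototype)
        regressors[c].fit(x[c], y[c])

# Predict: bounds without a fitted regressor are simply skipped.
pred: Dict[str, float] = {}
for c in ["Upper bound", "Lower bound"]:
    if c in regressors:
        pred[c] = regressors[c].predict(np.array([[4.0, 11.0]]))[0, 0]
print(pred)  # e.g. {'Upper bound': ~21.0, 'Lower bound': ~19.0}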
@@ -59,8 +59,8 @@ LearningSolveStats = TypedDict(
         "MIP log": str,
         "Mode": str,
         "Nodes": Optional[int],
-        "Objective: Predicted LB": float,
-        "Objective: Predicted UB": float,
+        "Objective: Predicted lower bound": float,
+        "Objective: Predicted upper bound": float,
         "Primal: Free": int,
         "Primal: One": int,
         "Primal: Zero": int,
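The renamed LearningSolveStats keys line up with the f-string written by the new loop in before_solve_mip; a quick check of the generated names (plain Python, nothing MIPLearn-specific):

for c in ["Upper bound", "Lower bound"]:
    print(f"Objective: Predicted {c.lower()}")
# Objective: Predicted upper bound
# Objective: Predicted lower bound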