Mirror of https://github.com/ANL-CEEESA/MIPLearn.git (synced 2025-12-06 01:18:52 -06:00)
Add training_data argument to after_solve
@@ -3,7 +3,10 @@
 # Released under the modified BSD license. See COPYING.md for more details.
 
 
-class Component:
+from abc import ABC, abstractmethod
+
+
+class Component(ABC):
     """
     A Component is an object which adds functionality to a LearningSolver.
 
@@ -15,8 +18,39 @@ class Component:
     def before_solve(self, solver, instance, model):
         return
 
-    def after_solve(self, solver, instance, model, results):
-        return
+    @abstractmethod
+    def after_solve(
+        self,
+        solver,
+        instance,
+        model,
+        stats,
+        training_data,
+    ):
+        """
+        Method called by LearningSolver after the problem is solved to optimality.
+
+        Parameters
+        ----------
+        solver: LearningSolver
+            The solver calling this method.
+        instance: Instance
+            The instance being solved.
+        model:
+            The concrete optimization model being solved.
+        stats: dict
+            A dictionary containing statistics about the solution process, such as
+            number of nodes explored and running time. Components are free to add their own
+            statistics here. For example, PrimalSolutionComponent adds statistics regarding
+            the number of predicted variables. All statistics in this dictionary are exported
+            to the benchmark CSV file.
+        training_data: dict
+            A dictionary containing data that may be useful for training machine learning
+            models and accelerating the solution process. Components are free to add their
+            own training data here. For example, PrimalSolutionComponent adds the current
+            primal solution. The data must be picklable.
+        """
+        pass
 
     def fit(self, training_instances):
         return
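For orientation, here is a minimal sketch (not part of this commit) of a component written against the new contract. The ElapsedTimeComponent class, the stand-in Component base class, and the stats/training_data keys below are invented for illustration; only the method signature follows the diff above.

import time
from abc import ABC, abstractmethod


# Stand-in base class mirroring the diff above, so this sketch runs without miplearn installed.
class Component(ABC):
    def before_solve(self, solver, instance, model):
        return

    @abstractmethod
    def after_solve(self, solver, instance, model, stats, training_data):
        pass

    def fit(self, training_instances):
        return


class ElapsedTimeComponent(Component):
    """Hypothetical component that records the wall-clock time of each solve."""

    def before_solve(self, solver, instance, model):
        self._start = time.time()

    def after_solve(self, solver, instance, model, stats, training_data):
        elapsed = time.time() - self._start
        # Entries added to stats are exported to the benchmark CSV file.
        stats["ElapsedTime: Wallclock (s)"] = elapsed
        # Entries added to training_data are stored with the instance; they must be picklable.
        training_data["elapsed_time"] = elapsed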
@@ -25,9 +25,16 @@ class CompositeComponent(Component):
         for child in self.children:
             child.before_solve(solver, instance, model)
 
-    def after_solve(self, solver, instance, model, results):
+    def after_solve(
+        self,
+        solver,
+        instance,
+        model,
+        stats,
+        training_data,
+    ):
         for child in self.children:
-            child.after_solve(solver, instance, model, results)
+            child.after_solve(solver, instance, model, stats, training_data)
 
     def fit(self, training_instances):
         for child in self.children:
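The hunk above keeps CompositeComponent as a plain broadcaster: both dictionaries are passed by reference to every child, so each child can add its own entries. Below is a self-contained sketch of that pattern, using a stand-in class and unittest.mock rather than the real miplearn objects.

from unittest.mock import Mock, call


# Stand-in with the same broadcast behaviour as the hunk above (not the real class).
class CompositeComponent:
    def __init__(self, children):
        self.children = children

    def after_solve(self, solver, instance, model, stats, training_data):
        # Each child receives the same dicts and may add its own entries to them.
        for child in self.children:
            child.after_solve(solver, instance, model, stats, training_data)


c1, c2 = Mock(), Mock()
cc = CompositeComponent([c1, c2])
cc.after_solve("solver", "instance", "model", {}, {})
c1.after_solve.assert_has_calls([call("solver", "instance", "model", {}, {})])
c2.after_solve.assert_has_calls([call("solver", "instance", "model", {}, {})])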
@@ -40,7 +40,14 @@ class UserCutsComponent(Component):
             cut = instance.build_user_cut(model, v)
             solver.internal_solver.add_constraint(cut)
 
-    def after_solve(self, solver, instance, model, results):
+    def after_solve(
+        self,
+        solver,
+        instance,
+        model,
+        results,
+        training_data,
+    ):
         pass
 
     def fit(self, training_instances):
@@ -52,7 +52,14 @@ class DynamicLazyConstraintsComponent(Component):
             solver.internal_solver.add_constraint(cut)
             return True
 
-    def after_solve(self, solver, instance, model, results):
+    def after_solve(
+        self,
+        solver,
+        instance,
+        model,
+        stats,
+        training_data,
+    ):
         pass
 
     def fit(self, training_instances):
@@ -49,7 +49,14 @@ class StaticLazyConstraintsComponent(Component):
         if instance.has_static_lazy_constraints():
             self._extract_and_predict_static(solver, instance)
 
-    def after_solve(self, solver, instance, model, results):
+    def after_solve(
+        self,
+        solver,
+        instance,
+        model,
+        stats,
+        training_data,
+    ):
         pass
 
     def iteration_cb(self, solver, instance, model):
@@ -36,13 +36,20 @@ class ObjectiveValueComponent(Component):
         instance.predicted_lb = lb
         logger.info("Predicted values: lb=%.2f, ub=%.2f" % (lb, ub))
 
-    def after_solve(self, solver, instance, model, results):
+    def after_solve(
+        self,
+        solver,
+        instance,
+        model,
+        stats,
+        training_data,
+    ):
         if self.ub_regressor is not None:
-            results["Predicted UB"] = instance.predicted_ub
-            results["Predicted LB"] = instance.predicted_lb
+            stats["Predicted UB"] = instance.predicted_ub
+            stats["Predicted LB"] = instance.predicted_lb
         else:
-            results["Predicted UB"] = None
-            results["Predicted LB"] = None
+            stats["Predicted UB"] = None
+            stats["Predicted LB"] = None
 
     def fit(self, training_instances):
         logger.debug("Extracting features...")
@@ -39,7 +39,14 @@ class PrimalSolutionComponent(Component):
         else:
             solver.internal_solver.set_warm_start(solution)
 
-    def after_solve(self, solver, instance, model, results):
+    def after_solve(
+        self,
+        solver,
+        instance,
+        model,
+        stats,
+        training_data,
+    ):
         pass
 
     def x(self, training_instances):
@@ -74,13 +74,21 @@ class ConvertTightIneqsIntoEqsStep(Component):
 
         logger.info(f"Converted {self.n_converted} inequalities")
 
-    def after_solve(self, solver, instance, model, results):
-        instance.slacks = solver.internal_solver.get_inequality_slacks()
-        results["ConvertTight: Kept"] = self.n_kept
-        results["ConvertTight: Converted"] = self.n_converted
-        results["ConvertTight: Restored"] = self.n_restored
-        results["ConvertTight: Inf iterations"] = self.n_infeasible_iterations
-        results["ConvertTight: Subopt iterations"] = self.n_suboptimal_iterations
+    def after_solve(
+        self,
+        solver,
+        instance,
+        model,
+        stats,
+        training_data,
+    ):
+        if "slacks" not in training_data.keys():
+            training_data["slacks"] = solver.internal_solver.get_inequality_slacks()
+        stats["ConvertTight: Kept"] = self.n_kept
+        stats["ConvertTight: Converted"] = self.n_converted
+        stats["ConvertTight: Restored"] = self.n_restored
+        stats["ConvertTight: Inf iterations"] = self.n_infeasible_iterations
+        stats["ConvertTight: Subopt iterations"] = self.n_suboptimal_iterations
 
     def fit(self, training_instances):
         logger.debug("Extracting x and y...")
@@ -108,7 +116,7 @@ class ConvertTightIneqsIntoEqsStep(Component):
             if constraint_ids is not None:
                 cids = constraint_ids
             else:
-                cids = instance.slacks.keys()
+                cids = instance.training_data[0]["slacks"].keys()
             for cid in cids:
                 category = instance.get_constraint_category(cid)
                 if category is None:
@@ -130,7 +138,7 @@ class ConvertTightIneqsIntoEqsStep(Component):
             desc="Extract (rlx:conv_ineqs:y)",
             disable=len(instances) < 5,
         ):
-            for (cid, slack) in instance.slacks.items():
+            for (cid, slack) in instance.training_data[0]["slacks"].items():
                 category = instance.get_constraint_category(cid)
                 if category is None:
                     continue
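The two hunks above switch the fit-time code from instance.slacks to instance.training_data[0]["slacks"]. The sketch below shows the intended round trip as inferred from these hunks and the tests later in this commit; the _FakeInstance class and the slack values are made up, and the assumption that LearningSolver appends one dictionary per solve to instance.training_data is not shown in this diff.

# Round-trip sketch (assumptions noted in the text above).
class _FakeInstance:
    def __init__(self):
        self.training_data = []  # assumed to be filled by LearningSolver, one dict per solve


instance = _FakeInstance()

# During a solve, a component's after_solve fills the per-solve dictionary:
training_data = {}
if "slacks" not in training_data:
    training_data["slacks"] = {"eq_capacity": 0.0, "c1": 1.5}  # made-up values
instance.training_data.append(training_data)

# At fit time, the code in the hunks above reads the first solve's data back:
for cid, slack in instance.training_data[0]["slacks"].items():
    print(cid, "tight" if slack == 0.0 else f"slack={slack}")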
@@ -76,12 +76,19 @@ class DropRedundantInequalitiesStep(Component):
                 self.total_kept += 1
         logger.info(f"Extracted {self.total_dropped} predicted constraints")
 
-    def after_solve(self, solver, instance, model, results):
+    def after_solve(
+        self,
+        solver,
+        instance,
+        model,
+        stats,
+        training_data,
+    ):
         instance.slacks = solver.internal_solver.get_inequality_slacks()
-        results["DropRedundant: Kept"] = self.total_kept
-        results["DropRedundant: Dropped"] = self.total_dropped
-        results["DropRedundant: Restored"] = self.total_restored
-        results["DropRedundant: Iterations"] = self.total_iterations
+        stats["DropRedundant: Kept"] = self.total_kept
+        stats["DropRedundant: Dropped"] = self.total_dropped
+        stats["DropRedundant: Restored"] = self.total_restored
+        stats["DropRedundant: Iterations"] = self.total_iterations
 
     def fit(self, training_instances):
         logger.debug("Extracting x and y...")
@@ -17,3 +17,13 @@ class RelaxIntegralityStep(Component):
     def before_solve(self, solver, instance, _):
         logger.info("Relaxing integrality...")
         solver.internal_solver.relax()
+
+    def after_solve(
+        self,
+        solver,
+        instance,
+        model,
+        stats,
+        training_data,
+    ):
+        return
@@ -25,8 +25,7 @@ def test_convert_tight_usage():
     original_upper_bound = instance.upper_bound
 
     # Should collect training data
-    assert hasattr(instance, "slacks")
-    assert instance.slacks["eq_capacity"] == 0.0
+    assert instance.training_data[0]["slacks"]["eq_capacity"] == 0.0
 
     # Fit and resolve
     solver.fit([instance])
@@ -53,21 +52,6 @@ class TestInstance(Instance):
         return m
 
 
-class TestInstanceMin(Instance):
-    def to_model(self):
-        import gurobipy as grb
-        from gurobipy import GRB
-
-        m = grb.Model("model")
-        x1 = m.addVar(name="x1")
-        x2 = m.addVar(name="x2")
-        m.setObjective(x1 + 2 * x2, grb.GRB.MAXIMIZE)
-        m.addConstr(x1 <= 2, name="c1")
-        m.addConstr(x2 <= 2, name="c2")
-        m.addConstr(x1 + x2 <= 3, name="c2")
-        return m
-
-
 def test_convert_tight_infeasibility():
     comp = ConvertTightIneqsIntoEqsStep()
     comp.classifiers = {
@@ -27,9 +27,9 @@ def test_composite():
     c2.before_solve.assert_has_calls([call(solver, instance, model)])
 
     # Should broadcast after_solve
-    cc.after_solve(solver, instance, model, {})
-    c1.after_solve.assert_has_calls([call(solver, instance, model, {})])
-    c2.after_solve.assert_has_calls([call(solver, instance, model, {})])
+    cc.after_solve(solver, instance, model, {}, {})
+    c1.after_solve.assert_has_calls([call(solver, instance, model, {}, {})])
+    c2.after_solve.assert_has_calls([call(solver, instance, model, {}, {})])
 
     # Should broadcast fit
     cc.fit([1, 2, 3])
@@ -115,7 +115,7 @@ def test_drop_redundant():
     )
 
     # LearningSolver calls after_solve
-    component.after_solve(solver, instance, None, {})
+    component.after_solve(solver, instance, None, {}, {})
 
     # Should query slack for all inequalities
     internal.get_inequality_slacks.assert_called_once()