Add training_data argument to after_solve

@@ -3,7 +3,10 @@
 # Released under the modified BSD license. See COPYING.md for more details.
 
 
-class Component:
+from abc import ABC, abstractmethod
+
+
+class Component(ABC):
     """
     A Component is an object which adds functionality to a LearningSolver.
 
@@ -15,8 +18,39 @@ class Component:
     def before_solve(self, solver, instance, model):
         return
 
-    def after_solve(self, solver, instance, model, results):
-        return
+    @abstractmethod
+    def after_solve(
+        self,
+        solver,
+        instance,
+        model,
+        stats,
+        training_data,
+    ):
+        """
+        Method called by LearningSolver after the problem is solved to optimality.
+
+        Parameters
+        ----------
+        solver: LearningSolver
+            The solver calling this method.
+        instance: Instance
+            The instance being solved.
+        model:
+            The concrete optimization model being solved.
+        stats: dict
+            A dictionary containing statistics about the solution process, such as
+            number of nodes explored and running time. Components are free to add their own
+            statistics here. For example, PrimalSolutionComponent adds statistics regarding
+            the number of predicted variables. All statistics in this dictionary are exported
+            to the benchmark CSV file.
+        training_data: dict
+            A dictionary containing data that may be useful for training machine learning
+            models and accelerating the solution process. Components are free to add their
+            own training data here. For example, PrimalSolutionComponent adds the current
+            primal solution. The data must be picklable.
+        """
+        pass
 
     def fit(self, training_instances):
         return
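
For illustration, a user-defined component against the new abstract signature could look like the sketch below. This is not part of the commit: the class name, statistic key, and training-data key are invented for the example, and the import path is assumed to match the package layout.

    # Illustrative sketch only; names and keys below are hypothetical.
    from miplearn.components.component import Component  # assumed import path

    class SolveCountComponent(Component):
        def before_solve(self, solver, instance, model):
            return

        def after_solve(self, solver, instance, model, stats, training_data):
            # Entries added to `stats` are exported to the benchmark CSV file.
            stats["SolveCount: Calls"] = stats.get("SolveCount: Calls", 0) + 1
            # Entries added to `training_data` are stored with the instance and
            # must be picklable, so they can be consumed later (e.g. by fit).
            training_data["solve_count_marker"] = True

        def fit(self, training_instances):
            return
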
@@ -25,9 +25,16 @@ class CompositeComponent(Component):
         for child in self.children:
             child.before_solve(solver, instance, model)
 
-    def after_solve(self, solver, instance, model, results):
+    def after_solve(
+        self,
+        solver,
+        instance,
+        model,
+        stats,
+        training_data,
+    ):
         for child in self.children:
-            child.after_solve(solver, instance, model, results)
+            child.after_solve(solver, instance, model, stats, training_data)
 
     def fit(self, training_instances):
         for child in self.children:
@@ -40,7 +40,14 @@ class UserCutsComponent(Component):
             cut = instance.build_user_cut(model, v)
             solver.internal_solver.add_constraint(cut)
 
-    def after_solve(self, solver, instance, model, results):
+    def after_solve(
+        self,
+        solver,
+        instance,
+        model,
+        results,
+        training_data,
+    ):
         pass
 
     def fit(self, training_instances):
@@ -52,7 +52,14 @@ class DynamicLazyConstraintsComponent(Component):
             solver.internal_solver.add_constraint(cut)
         return True
 
-    def after_solve(self, solver, instance, model, results):
+    def after_solve(
+        self,
+        solver,
+        instance,
+        model,
+        stats,
+        training_data,
+    ):
         pass
 
     def fit(self, training_instances):
@@ -49,7 +49,14 @@ class StaticLazyConstraintsComponent(Component):
         if instance.has_static_lazy_constraints():
             self._extract_and_predict_static(solver, instance)
 
-    def after_solve(self, solver, instance, model, results):
+    def after_solve(
+        self,
+        solver,
+        instance,
+        model,
+        stats,
+        training_data,
+    ):
         pass
 
     def iteration_cb(self, solver, instance, model):
@@ -36,13 +36,20 @@ class ObjectiveValueComponent(Component):
         instance.predicted_lb = lb
         logger.info("Predicted values: lb=%.2f, ub=%.2f" % (lb, ub))
 
-    def after_solve(self, solver, instance, model, results):
+    def after_solve(
+        self,
+        solver,
+        instance,
+        model,
+        stats,
+        training_data,
+    ):
         if self.ub_regressor is not None:
-            results["Predicted UB"] = instance.predicted_ub
-            results["Predicted LB"] = instance.predicted_lb
+            stats["Predicted UB"] = instance.predicted_ub
+            stats["Predicted LB"] = instance.predicted_lb
         else:
-            results["Predicted UB"] = None
-            results["Predicted LB"] = None
+            stats["Predicted UB"] = None
+            stats["Predicted LB"] = None
 
     def fit(self, training_instances):
         logger.debug("Extracting features...")
@@ -39,7 +39,14 @@ class PrimalSolutionComponent(Component):
         else:
             solver.internal_solver.set_warm_start(solution)
 
-    def after_solve(self, solver, instance, model, results):
+    def after_solve(
+        self,
+        solver,
+        instance,
+        model,
+        stats,
+        training_data,
+    ):
         pass
 
     def x(self, training_instances):
@@ -74,13 +74,21 @@ class ConvertTightIneqsIntoEqsStep(Component):
 
         logger.info(f"Converted {self.n_converted} inequalities")
 
-    def after_solve(self, solver, instance, model, results):
-        instance.slacks = solver.internal_solver.get_inequality_slacks()
-        results["ConvertTight: Kept"] = self.n_kept
-        results["ConvertTight: Converted"] = self.n_converted
-        results["ConvertTight: Restored"] = self.n_restored
-        results["ConvertTight: Inf iterations"] = self.n_infeasible_iterations
-        results["ConvertTight: Subopt iterations"] = self.n_suboptimal_iterations
+    def after_solve(
+        self,
+        solver,
+        instance,
+        model,
+        stats,
+        training_data,
+    ):
+        if "slacks" not in training_data.keys():
+            training_data["slacks"] = solver.internal_solver.get_inequality_slacks()
+        stats["ConvertTight: Kept"] = self.n_kept
+        stats["ConvertTight: Converted"] = self.n_converted
+        stats["ConvertTight: Restored"] = self.n_restored
+        stats["ConvertTight: Inf iterations"] = self.n_infeasible_iterations
+        stats["ConvertTight: Subopt iterations"] = self.n_suboptimal_iterations
 
     def fit(self, training_instances):
         logger.debug("Extracting x and y...")
@@ -108,7 +116,7 @@ class ConvertTightIneqsIntoEqsStep(Component):
         if constraint_ids is not None:
             cids = constraint_ids
         else:
-            cids = instance.slacks.keys()
+            cids = instance.training_data[0]["slacks"].keys()
         for cid in cids:
             category = instance.get_constraint_category(cid)
             if category is None:
@@ -130,7 +138,7 @@ class ConvertTightIneqsIntoEqsStep(Component):
             desc="Extract (rlx:conv_ineqs:y)",
             disable=len(instances) < 5,
         ):
-            for (cid, slack) in instance.slacks.items():
+            for (cid, slack) in instance.training_data[0]["slacks"].items():
                 category = instance.get_constraint_category(cid)
                 if category is None:
                     continue
@@ -76,12 +76,19 @@ class DropRedundantInequalitiesStep(Component):
                 self.total_kept += 1
         logger.info(f"Extracted {self.total_dropped} predicted constraints")
 
-    def after_solve(self, solver, instance, model, results):
+    def after_solve(
+        self,
+        solver,
+        instance,
+        model,
+        stats,
+        training_data,
+    ):
         instance.slacks = solver.internal_solver.get_inequality_slacks()
-        results["DropRedundant: Kept"] = self.total_kept
-        results["DropRedundant: Dropped"] = self.total_dropped
-        results["DropRedundant: Restored"] = self.total_restored
-        results["DropRedundant: Iterations"] = self.total_iterations
+        stats["DropRedundant: Kept"] = self.total_kept
+        stats["DropRedundant: Dropped"] = self.total_dropped
+        stats["DropRedundant: Restored"] = self.total_restored
+        stats["DropRedundant: Iterations"] = self.total_iterations
 
     def fit(self, training_instances):
         logger.debug("Extracting x and y...")
@@ -17,3 +17,13 @@ class RelaxIntegralityStep(Component):
     def before_solve(self, solver, instance, _):
         logger.info("Relaxing integrality...")
         solver.internal_solver.relax()
+
+    def after_solve(
+        self,
+        solver,
+        instance,
+        model,
+        stats,
+        training_data,
+    ):
+        return
@@ -25,8 +25,7 @@ def test_convert_tight_usage():
     original_upper_bound = instance.upper_bound
 
     # Should collect training data
-    assert hasattr(instance, "slacks")
-    assert instance.slacks["eq_capacity"] == 0.0
+    assert instance.training_data[0]["slacks"]["eq_capacity"] == 0.0
 
     # Fit and resolve
     solver.fit([instance])
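
The updated assertion reflects the new storage location for slack data. For downstream code that previously read `instance.slacks` directly, the equivalent access is sketched below; it assumes the instance has been solved at least once with a step that records slacks, such as ConvertTightIneqsIntoEqsStep.

    # Old pattern (attribute set directly on the instance):
    #     slacks = instance.slacks
    # New pattern (per-solve dictionary appended by LearningSolver.solve):
    slacks = instance.training_data[0]["slacks"]
    tight_cids = [cid for (cid, slack) in slacks.items() if slack == 0.0]
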
@@ -53,21 +52,6 @@ class TestInstance(Instance):
         return m
 
 
-class TestInstanceMin(Instance):
-    def to_model(self):
-        import gurobipy as grb
-        from gurobipy import GRB
-
-        m = grb.Model("model")
-        x1 = m.addVar(name="x1")
-        x2 = m.addVar(name="x2")
-        m.setObjective(x1 + 2 * x2, grb.GRB.MAXIMIZE)
-        m.addConstr(x1 <= 2, name="c1")
-        m.addConstr(x2 <= 2, name="c2")
-        m.addConstr(x1 + x2 <= 3, name="c2")
-        return m
-
-
 def test_convert_tight_infeasibility():
     comp = ConvertTightIneqsIntoEqsStep()
     comp.classifiers = {
@@ -27,9 +27,9 @@ def test_composite():
     c2.before_solve.assert_has_calls([call(solver, instance, model)])
 
     # Should broadcast after_solve
-    cc.after_solve(solver, instance, model, {})
-    c1.after_solve.assert_has_calls([call(solver, instance, model, {})])
-    c2.after_solve.assert_has_calls([call(solver, instance, model, {})])
+    cc.after_solve(solver, instance, model, {}, {})
+    c1.after_solve.assert_has_calls([call(solver, instance, model, {}, {})])
+    c2.after_solve.assert_has_calls([call(solver, instance, model, {}, {})])
 
     # Should broadcast fit
     cc.fit([1, 2, 3])
@@ -115,7 +115,7 @@ def test_drop_redundant():
     )
 
     # LearningSolver calls after_solve
-    component.after_solve(solver, instance, None, {})
+    component.after_solve(solver, instance, None, {}, {})
 
     # Should query slack for all inequalities
    internal.get_inequality_slacks.assert_called_once()
@@ -287,22 +287,27 @@ class LearningSolver:
             lazy_cb = lazy_cb_wrapper
 
         logger.info("Solving MILP...")
-        results = self.internal_solver.solve(
+        stats = self.internal_solver.solve(
             tee=tee,
             iteration_cb=iteration_cb,
             lazy_cb=lazy_cb,
         )
-        results["LP value"] = instance.lp_value
+        stats["LP value"] = instance.lp_value
 
         # Read MIP solution and bounds
-        instance.lower_bound = results["Lower bound"]
-        instance.upper_bound = results["Upper bound"]
-        instance.solver_log = results["Log"]
+        instance.lower_bound = stats["Lower bound"]
+        instance.upper_bound = stats["Upper bound"]
+        instance.solver_log = stats["Log"]
         instance.solution = self.internal_solver.get_solution()
 
         logger.debug("Calling after_solve callbacks...")
+        training_data = {}
         for component in self.components.values():
-            component.after_solve(self, instance, model, results)
+            component.after_solve(self, instance, model, stats, training_data)
+
+        if not hasattr(instance, "training_data"):
+            instance.training_data = []
+        instance.training_data += [training_data]
 
         if filename is not None and output is not None:
             output_filename = output
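
As a rough sketch of the resulting behaviour: every call to solve appends one training-data dictionary to the instance and returns the stats dictionary. The build_instance() helper below is hypothetical, the import path for LearningSolver is assumed, and the "Lower bound", "Upper bound" and "LP value" keys are the ones set in this hunk.

    # Sketch only; build_instance() is a hypothetical stand-in for whatever
    # constructs a problem instance, and the import path is assumed.
    from miplearn import LearningSolver

    solver = LearningSolver()
    instance = build_instance()

    stats = solver.solve(instance)      # solve() now returns the stats dictionary
    print(stats["Lower bound"], stats["Upper bound"], stats["LP value"])

    print(len(instance.training_data))  # 1 -- one entry per call to solve()
    solver.solve(instance)
    print(len(instance.training_data))  # 2
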
@@ -316,7 +321,7 @@ class LearningSolver:
         with gzip.GzipFile(output_filename, "wb") as file:
             pickle.dump(instance, file)
 
-        return results
+        return stats
 
     def parallel_solve(self, instances, n_jobs=4, label="Solve", output=[]):
         """
@@ -27,11 +27,11 @@ def test_benchmark():
     benchmark = BenchmarkRunner(test_solvers)
     benchmark.fit(train_instances)
     benchmark.parallel_solve(test_instances, n_jobs=2, n_trials=2)
-    assert benchmark.raw_results().values.shape == (12, 18)
+    assert benchmark.raw_results().values.shape == (12, 19)
 
     benchmark.save_results("/tmp/benchmark.csv")
     assert os.path.isfile("/tmp/benchmark.csv")
 
     benchmark = BenchmarkRunner(test_solvers)
     benchmark.load_results("/tmp/benchmark.csv")
-    assert benchmark.raw_results().values.shape == (12, 18)
+    assert benchmark.raw_results().values.shape == (12, 19)