Module miplearn.components.steps.tests.test_convert_tight
Expand source code
from unittest.mock import Mock
from miplearn.classifiers import Classifier
from miplearn.components.steps.convert_tight import ConvertTightIneqsIntoEqsStep
from miplearn.components.steps.relax_integrality import RelaxIntegralityStep
from miplearn.instance import Instance
from miplearn.problems.knapsack import GurobiKnapsackInstance
from miplearn.solvers.gurobi import GurobiSolver
from miplearn.solvers.learning import LearningSolver

def test_convert_tight_usage():
    instance = GurobiKnapsackInstance(
        weights=[3.0, 5.0, 10.0],
        prices=[1.0, 1.0, 1.0],
        capacity=16.0,
    )
    solver = LearningSolver(
        solver=GurobiSolver,
        components=[
            RelaxIntegralityStep(),
            ConvertTightIneqsIntoEqsStep(),
        ],
    )

    # Solve original problem
    stats = solver.solve(instance)
    original_upper_bound = stats["Upper bound"]

    # Should collect training data
    assert instance.training_data[0]["slacks"]["eq_capacity"] == 0.0

    # Fit and resolve
    solver.fit([instance])
    stats = solver.solve(instance)

    # Objective value should be the same
    assert stats["Upper bound"] == original_upper_bound
    assert stats["ConvertTight: Inf iterations"] == 0
    assert stats["ConvertTight: Subopt iterations"] == 0

class SampleInstance(Instance):
    def to_model(self):
        import gurobipy as grb

        m = grb.Model("model")
        x1 = m.addVar(name="x1")
        x2 = m.addVar(name="x2")
        m.setObjective(x1 + 2 * x2, grb.GRB.MAXIMIZE)
        # Constraint names must match the classifier keys used in the tests below.
        m.addConstr(x1 <= 2, name="c1")
        m.addConstr(x2 <= 2, name="c2")
        m.addConstr(x1 + x2 <= 3, name="c3")
        return m

def test_convert_tight_infeasibility():
    comp = ConvertTightIneqsIntoEqsStep()
    comp.classifiers = {
        "c1": Mock(spec=Classifier),
        "c2": Mock(spec=Classifier),
        "c3": Mock(spec=Classifier),
    }
    # Predict c1 and c2 as tight (convert to equalities) but not c3;
    # forcing x1 = 2 and x2 = 2 violates x1 + x2 <= 3, so the step must
    # repair the resulting infeasibility in one extra iteration.
    comp.classifiers["c1"].predict_proba = Mock(return_value=[[0, 1]])
    comp.classifiers["c2"].predict_proba = Mock(return_value=[[0, 1]])
    comp.classifiers["c3"].predict_proba = Mock(return_value=[[1, 0]])
    solver = LearningSolver(
        solver=GurobiSolver,
        components=[comp],
        solve_lp_first=False,
    )
    instance = SampleInstance()
    stats = solver.solve(instance)
    assert stats["Upper bound"] == 5.0
    assert stats["ConvertTight: Inf iterations"] == 1
    assert stats["ConvertTight: Subopt iterations"] == 0

def test_convert_tight_suboptimality():
    comp = ConvertTightIneqsIntoEqsStep(check_optimality=True)
    comp.classifiers = {
        "c1": Mock(spec=Classifier),
        "c2": Mock(spec=Classifier),
        "c3": Mock(spec=Classifier),
    }
    # Predict c1 and c3 as tight but not c2: fixing x1 = 2 and x1 + x2 = 3
    # yields the feasible but suboptimal point (2, 1) with objective 4, so
    # the optimality check must trigger one repair iteration.
    comp.classifiers["c1"].predict_proba = Mock(return_value=[[0, 1]])
    comp.classifiers["c2"].predict_proba = Mock(return_value=[[1, 0]])
    comp.classifiers["c3"].predict_proba = Mock(return_value=[[0, 1]])
    solver = LearningSolver(
        solver=GurobiSolver,
        components=[comp],
        solve_lp_first=False,
    )
    instance = SampleInstance()
    stats = solver.solve(instance)
    assert stats["Upper bound"] == 5.0
    assert stats["ConvertTight: Inf iterations"] == 0
    assert stats["ConvertTight: Subopt iterations"] == 1

def test_convert_tight_optimal():
    comp = ConvertTightIneqsIntoEqsStep()
    comp.classifiers = {
        "c1": Mock(spec=Classifier),
        "c2": Mock(spec=Classifier),
        "c3": Mock(spec=Classifier),
    }
    # Predict c2 and c3 as tight but not c1: fixing x2 = 2 and x1 + x2 = 3
    # gives the true optimum (1, 2) with objective 5, so no repair
    # iterations are needed.
    comp.classifiers["c1"].predict_proba = Mock(return_value=[[1, 0]])
    comp.classifiers["c2"].predict_proba = Mock(return_value=[[0, 1]])
    comp.classifiers["c3"].predict_proba = Mock(return_value=[[0, 1]])
    solver = LearningSolver(
        solver=GurobiSolver,
        components=[comp],
        solve_lp_first=False,
    )
    instance = SampleInstance()
    stats = solver.solve(instance)
    assert stats["Upper bound"] == 5.0
    assert stats["ConvertTight: Inf iterations"] == 0
    assert stats["ConvertTight: Subopt iterations"] == 0
Functions
def test_convert_tight_infeasibility()
Checks that the step recovers when the predicted conversions make the model infeasible: with c1 and c2 marked as tight and c3 not, the solve should still reach the optimum of 5.0 after one infeasibility-repair iteration.
def test_convert_tight_optimal()
Checks the case where the predicted conversions (c2 and c3 tight, c1 not) are consistent with the true optimum of 5.0, so no repair iterations are needed.
def test_convert_tight_suboptimality()
Checks that, with check_optimality=True, converting c1 and c3 but not c2 produces a feasible but suboptimal solution, which the step detects and repairs in one suboptimality iteration.
def test_convert_tight_usage()
End-to-end usage on a GurobiKnapsackInstance: solve the LP relaxation, collect constraint-slack training data, fit the step, and re-solve, expecting the same upper bound and no repair iterations.
Classes
class SampleInstance
Abstract class holding all the data necessary to generate a concrete model of the problem.
In the knapsack problem, for example, this class could hold the number of items, their weights and costs, as well as the size of the knapsack. Objects implementing this class are able to convert themselves into a concrete optimization model, which can be optimized by a solver, or into arrays of features, which can be provided as inputs to machine learning models.
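
The docstring above describes two roles for an Instance: building a concrete optimization model and exposing features for machine-learning components. SampleInstance only implements the first role and relies on the library defaults for the second. The sketch below is a hypothetical illustration of the second role; it assumes this miplearn version exposes overridable hooks named get_instance_features and get_constraint_category, which is an assumption about the installed release and may need adjusting.

import numpy as np

class FeaturizedSampleInstance(SampleInstance):
    # Hypothetical subclass; the method names below are assumptions about
    # this miplearn version's Instance API and may need adjusting.
    def get_instance_features(self):
        # Features describing the instance as a whole: here, the
        # right-hand sides of c1, c2 and c3.
        return np.array([2.0, 2.0, 3.0])

    def get_constraint_category(self, cid):
        # Group constraints so that each category gets its own classifier;
        # returning the constraint name mirrors how the tests above key
        # their classifiers by "c1", "c2", "c3".
        return cid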
Ancestors
- Instance
- abc.ABC
Inherited members