Mirror of https://github.com/ANL-CEEESA/MIPLearn.git (synced 2025-12-06 09:28:51 -06:00)
Make cuts component compatible with Pyomo+Gurobi
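For context, the sketch below shows how the new Pyomo entry point introduced by this commit is meant to be driven; it mirrors tests/fixtures/gen_stab.py and the updated test_usage_stab from the diff below. The glob pattern and the `extractor` placeholder are illustrative assumptions, not part of the commit.

    # Sketch only: exercising the Pyomo+Gurobi cuts path added by this commit.
    from glob import glob

    from sklearn.neighbors import KNeighborsClassifier

    from miplearn.collectors.basic import BasicCollector
    from miplearn.components.cuts.mem import MemorizingCutsComponent
    from miplearn.problems.stab import build_stab_model_pyomo
    from miplearn.solvers.learning import LearningSolver

    # Instances written by tests/fixtures/gen_stab.py (path is illustrative)
    data_filenames = sorted(glob("tests/fixtures/stab-pyo-n50-*.pkl.gz"))

    # Collect HDF5 training data, building each instance as a Pyomo model backed
    # by gurobi_persistent; Gurobi parameters are forwarded through `params`.
    BasicCollector().collect(
        data_filenames,
        lambda data: build_stab_model_pyomo(data, params={"seed": 42, "threads": 1}),
        progress=True,
        verbose=True,
    )

    # Train the cuts component on the collected data, then solve one instance
    # with the predicted clique cuts added ahead of time.
    extractor = ...  # placeholder: any FeaturesExtractor implementation
    comp = MemorizingCutsComponent(clf=KNeighborsClassifier(n_neighbors=1), extractor=extractor)
    solver = LearningSolver(components=[comp])
    solver.fit(data_filenames)
    stats = solver.optimize(data_filenames[0], build_stab_model_pyomo)
    assert stats["Cuts: AOT"] > 0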
@@ -8,7 +8,7 @@ import sys

 from io import StringIO
 from os.path import exists
-from typing import Callable, List
+from typing import Callable, List, Any

 from ..h5 import H5File
 from ..io import _RedirectOutput, gzip, _to_h5_filename
@@ -22,6 +22,7 @@ class BasicCollector:
         build_model: Callable,
         n_jobs: int = 1,
         progress: bool = False,
+        verbose: bool = False,
     ) -> None:
         def _collect(data_filename: str) -> None:
             h5_filename = _to_h5_filename(data_filename)
@@ -43,7 +44,9 @@ class BasicCollector:
                 return

             with H5File(h5_filename, "w") as h5:
-                streams = [StringIO()]
+                streams: List[Any] = [StringIO()]
+                if verbose:
+                    streams += [sys.stdout]
                 with _RedirectOutput(streams):
                     # Load and extract static features
                     model = build_model(data_filename)
@@ -1,21 +1,23 @@
 # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
 # Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
 # Released under the modified BSD license. See COPYING.md for more details.

 import logging
 from dataclasses import dataclass
-from typing import List, Union, Any, Hashable
+from typing import List, Union, Any, Hashable, Optional

 import gurobipy as gp
 import networkx as nx
 import numpy as np
+import pyomo.environ as pe
 from gurobipy import GRB, quicksum
+from miplearn.io import read_pkl_gz
+from miplearn.solvers.gurobi import GurobiModel
+from miplearn.solvers.pyomo import PyomoModel
 from networkx import Graph
 from scipy.stats import uniform, randint
 from scipy.stats.distributions import rv_frozen

-from miplearn.io import read_pkl_gz
-from miplearn.solvers.gurobi import GurobiModel

 logger = logging.getLogger(__name__)

@@ -82,12 +84,15 @@ class MaxWeightStableSetGenerator:
         return nx.generators.random_graphs.binomial_graph(self.n.rvs(), self.p.rvs())


-def build_stab_model(data: MaxWeightStableSetData) -> GurobiModel:
-    if isinstance(data, str):
-        data = read_pkl_gz(data)
-    assert isinstance(data, MaxWeightStableSetData)
+def build_stab_model_gurobipy(
+    data: Union[str, MaxWeightStableSetData],
+    params: Optional[dict[str, Any]] = None,
+) -> GurobiModel:
+    data = _stab_read(data)
     model = gp.Model()
+    if params is not None:
+        for (param_name, param_value) in params.items():
+            setattr(model.params, param_name, param_value)
     nodes = list(data.graph.nodes)

     # Variables and objective function
@@ -99,16 +104,8 @@ def build_stab_model(data: MaxWeightStableSetData) -> GurobiModel:
         model.addConstr(x[i1] + x[i2] <= 1)

     def cuts_separate(m: GurobiModel) -> List[Hashable]:
-        # Retrieve optimal fractional solution
         x_val = m.inner.cbGetNodeRel(x)
-        # Check that we selected at most one vertex for each
-        # clique in the graph (sum <= 1)
-        violations: List[Hashable] = []
-        for clique in nx.find_cliques(data.graph):
-            if sum(x_val[i] for i in clique) > 1.0001:
-                violations.append(tuple(sorted(clique)))
-        return violations
+        return _stab_separate(data, x_val)

     def cuts_enforce(m: GurobiModel, violations: List[Any]) -> None:
         logger.info(f"Adding {len(violations)} clique cuts...")
@@ -122,3 +119,65 @@ def build_stab_model(data: MaxWeightStableSetData) -> GurobiModel:
         cuts_separate=cuts_separate,
         cuts_enforce=cuts_enforce,
     )


+def build_stab_model_pyomo(
+    data: MaxWeightStableSetData,
+    solver: str = "gurobi_persistent",
+    params: Optional[dict[str, Any]] = None,
+) -> PyomoModel:
+    data = _stab_read(data)
+    model = pe.ConcreteModel()
+    nodes = pe.Set(initialize=list(data.graph.nodes))
+
+    # Variables and objective function
+    model.x = pe.Var(nodes, domain=pe.Boolean, name="x")
+    model.obj = pe.Objective(expr=sum([-data.weights[i] * model.x[i] for i in nodes]))
+
+    # Edge inequalities
+    model.edge_eqs = pe.ConstraintList()
+    for (i1, i2) in data.graph.edges:
+        model.edge_eqs.add(model.x[i1] + model.x[i2] <= 1)
+
+    # Clique inequalities
+    model.clique_eqs = pe.ConstraintList()
+
+    def cuts_separate(m: PyomoModel) -> List[Hashable]:
+        m.solver.cbGetNodeRel([model.x[i] for i in nodes])
+        x_val = [model.x[i].value for i in nodes]
+        return _stab_separate(data, x_val)
+
+    def cuts_enforce(m: PyomoModel, violations: List[Any]) -> None:
+        logger.info(f"Adding {len(violations)} clique cuts...")
+        for clique in violations:
+            m.add_constr(model.clique_eqs.add(sum(model.x[i] for i in clique) <= 1))
+
+    m = PyomoModel(
+        model,
+        solver,
+        cuts_separate=cuts_separate,
+        cuts_enforce=cuts_enforce,
+    )
+
+    if solver == "gurobi_persistent" and params is not None:
+        for (param_name, param_value) in params.items():
+            m.solver.set_gurobi_param(param_name, param_value)
+
+    return m
+
+
+def _stab_read(data: Union[str, MaxWeightStableSetData]) -> MaxWeightStableSetData:
+    if isinstance(data, str):
+        data = read_pkl_gz(data)
+    assert isinstance(data, MaxWeightStableSetData)
+    return data
+
+
+def _stab_separate(data: MaxWeightStableSetData, x_val: List[float]) -> List[Hashable]:
+    # Check that we selected at most one vertex for each
+    # clique in the graph (sum <= 1)
+    violations: List[Hashable] = []
+    for clique in nx.find_cliques(data.graph):
+        if sum(x_val[i] for i in clique) > 1.0001:
+            violations.append(tuple(sorted(clique)))
+    return violations
@@ -16,6 +16,8 @@ logger = logging.getLogger(__name__)


 def _gurobi_callback(model: AbstractModel, gp_model: gp.Model, where: int) -> None:
+    assert isinstance(gp_model, gp.Model)
+
     # Lazy constraints
     if model.lazy_separate is not None:
         assert model.lazy_enforce is not None
@@ -58,6 +60,16 @@ def _gurobi_add_constr(gp_model: gp.Model, where: str, constr: Any) -> None:
         gp_model.addConstr(constr)


+def _gurobi_set_required_params(model: AbstractModel, gp_model: gp.Model) -> None:
+    # Required parameters for lazy constraints
+    if model.lazy_enforce is not None:
+        gp_model.setParam("PreCrush", 1)
+        gp_model.setParam("LazyConstraints", 1)
+    # Required parameters for user cuts
+    if model.cuts_enforce is not None:
+        gp_model.setParam("PreCrush", 1)
+
+
 class GurobiModel(AbstractModel):
     _supports_basis_status = True
     _supports_sensitivity_analysis = True
@@ -188,14 +200,7 @@ class GurobiModel(AbstractModel):
         def callback(_: gp.Model, where: int) -> None:
             _gurobi_callback(self, self.inner, where)

-        # Required parameters for lazy constraints
-        if self.lazy_enforce is not None:
-            self.inner.setParam("PreCrush", 1)
-            self.inner.setParam("LazyConstraints", 1)
-
-        # Required parameters for user cuts
-        if self.cuts_enforce is not None:
-            self.inner.setParam("PreCrush", 1)
+        _gurobi_set_required_params(self, self.inner)

         if self.lazy_enforce is not None or self.cuts_enforce is not None:
             self.inner.optimize(callback)
@@ -14,7 +14,11 @@ from scipy.sparse import coo_matrix

 from miplearn.h5 import H5File
 from miplearn.solvers.abstract import AbstractModel
-from miplearn.solvers.gurobi import _gurobi_callback, _gurobi_add_constr
+from miplearn.solvers.gurobi import (
+    _gurobi_callback,
+    _gurobi_add_constr,
+    _gurobi_set_required_params,
+)


 class PyomoModel(AbstractModel):
@@ -24,18 +28,22 @@ class PyomoModel(AbstractModel):
         solver_name: str = "gurobi_persistent",
         lazy_separate: Optional[Callable] = None,
         lazy_enforce: Optional[Callable] = None,
+        cuts_separate: Optional[Callable] = None,
+        cuts_enforce: Optional[Callable] = None,
     ):
         super().__init__()
         self.inner = model
         self.solver_name = solver_name
+        self.lazy_separate = lazy_separate
+        self.lazy_enforce = lazy_enforce
+        self.cuts_separate = cuts_separate
+        self.cuts_enforce = cuts_enforce
         self.solver = pe.SolverFactory(solver_name)
         self.is_persistent = hasattr(self.solver, "set_instance")
         if self.is_persistent:
             self.solver.set_instance(model)
         self.results: Optional[Dict] = None
         self._is_warm_start_available = False
-        self.lazy_separate = lazy_separate
-        self.lazy_enforce = lazy_enforce
         if not hasattr(self.inner, "dual"):
             self.inner.dual = Suffix(direction=Suffix.IMPORT)
             self.inner.rc = Suffix(direction=Suffix.IMPORT)
@@ -116,6 +124,10 @@ class PyomoModel(AbstractModel):
         h5.put_scalar("mip_obj_value", obj_value)
         h5.put_scalar("mip_obj_bound", obj_bound)
         h5.put_scalar("mip_gap", self._gap(obj_value, obj_bound))
+        if self.lazy_ is not None:
+            h5.put_scalar("mip_lazy", repr(self.lazy_))
+        if self.cuts_ is not None:
+            h5.put_scalar("mip_cuts", repr(self.cuts_))

     def fix_variables(
         self,
@@ -131,16 +143,17 @@ class PyomoModel(AbstractModel):

     def optimize(self) -> None:
         self.lazy_ = []
-        if self.lazy_separate is not None:
+        self.cuts_ = []
+
+        if self.lazy_enforce is not None or self.cuts_enforce is not None:
             assert (
                 self.solver_name == "gurobi_persistent"
             ), "Callbacks are currently only supported on gurobi_persistent"
+            _gurobi_set_required_params(self, self.solver._solver_model)

             def callback(_: Any, __: Any, where: int) -> None:
-                _gurobi_callback(self, self.solver, where)
+                _gurobi_callback(self, self.solver._solver_model, where)

-            self.solver.set_gurobi_param("PreCrush", 1)
-            self.solver.set_gurobi_param("LazyConstraints", 1)
             self.solver.set_callback(callback)

         if self.is_persistent:
@@ -301,12 +314,12 @@ class PyomoModel(AbstractModel):
         for (i, constr) in enumerate(
             self.inner.component_objects(pyomo.core.Constraint)
         ):
-            if len(constr) > 0:
+            if len(constr) > 1:
                 for idx in constr:
                     names.append(constr[idx].name)
                     _parse_constraint(constr[idx], curr_row)
                     curr_row += 1
-            else:
+            elif len(constr) == 1:
                 names.append(constr.name)
                 _parse_constraint(constr, curr_row)
                 curr_row += 1
@@ -352,7 +365,8 @@ class PyomoModel(AbstractModel):
         for constr in self.inner.component_objects(pyomo.core.Constraint):
             for idx in constr:
                 c = constr[idx]
-                slacks.append(abs(self.inner.slack[c]))
+                if c in self.inner.slack:
+                    slacks.append(abs(self.inner.slack[c]))
         h5.put_array("mip_constr_slacks", np.array(slacks))

     def _parse_pyomo_expr(self, expr: Any) -> Tuple[Dict[str, float], float]:
@@ -5,62 +5,69 @@
 from typing import Any, List, Dict
 from unittest.mock import Mock

-from sklearn.dummy import DummyClassifier
-from sklearn.neighbors import KNeighborsClassifier

 from miplearn.components.cuts.mem import MemorizingCutsComponent
 from miplearn.extractors.abstract import FeaturesExtractor
-from miplearn.problems.stab import build_stab_model
+from miplearn.problems.stab import build_stab_model_gurobipy, build_stab_model_pyomo
 from miplearn.solvers.learning import LearningSolver
+from sklearn.dummy import DummyClassifier
+from sklearn.neighbors import KNeighborsClassifier
+from typing import Callable


-def test_mem_component(
-    stab_h5: List[str],
+def test_mem_component_gp(
+    stab_gp_h5: List[str],
+    stab_pyo_h5: List[str],
     default_extractor: FeaturesExtractor,
 ) -> None:
-    clf = Mock(wraps=DummyClassifier())
-    comp = MemorizingCutsComponent(clf=clf, extractor=default_extractor)
-    comp.fit(stab_h5)
+    for h5 in [stab_pyo_h5, stab_gp_h5]:
+        clf = Mock(wraps=DummyClassifier())
+        comp = MemorizingCutsComponent(clf=clf, extractor=default_extractor)
+        comp.fit(h5)

     # Should call fit method with correct arguments
     clf.fit.assert_called()
     x, y = clf.fit.call_args.args
     assert x.shape == (3, 50)
-    assert y.shape == (3, 388)
+    assert y.shape == (3, 415)
     y = y.tolist()
     assert y[0][:20] == [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
-    assert y[1][:20] == [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1]
-    assert y[2][:20] == [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1]
+    assert y[1][:20] == [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
+    assert y[2][:20] == [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1]

     # Should store violations
     assert comp.constrs_ is not None
     assert comp.n_features_ == 50
-    assert comp.n_targets_ == 388
-    assert len(comp.constrs_) == 388
+    assert comp.n_targets_ == 415
+    assert len(comp.constrs_) == 415

     # Call before-mip
     stats: Dict[str, Any] = {}
     model = Mock()
-    comp.before_mip(stab_h5[0], model, stats)
+    comp.before_mip(h5[0], model, stats)

     # Should call predict with correct args
     clf.predict.assert_called()
     (x_test,) = clf.predict.call_args.args
     assert x_test.shape == (1, 50)

     # Should set cuts_aot_
     assert model.cuts_aot_ is not None
-    assert len(model.cuts_aot_) == 243
+    assert len(model.cuts_aot_) == 285


 def test_usage_stab(
-    stab_h5: List[str],
+    stab_gp_h5: List[str],
+    stab_pyo_h5: List[str],
     default_extractor: FeaturesExtractor,
 ) -> None:
-    data_filenames = [f.replace(".h5", ".pkl.gz") for f in stab_h5]
-    clf = KNeighborsClassifier(n_neighbors=1)
-    comp = MemorizingCutsComponent(clf=clf, extractor=default_extractor)
-    solver = LearningSolver(components=[comp])
-    solver.fit(data_filenames)
-    stats = solver.optimize(data_filenames[0], build_stab_model)
-    assert stats["Cuts: AOT"] > 0
+    for (h5, build_model) in [
+        (stab_pyo_h5, build_stab_model_pyomo),
+        (stab_gp_h5, build_stab_model_gurobipy),
+    ]:
+        data_filenames = [f.replace(".h5", ".pkl.gz") for f in h5]
+        clf = KNeighborsClassifier(n_neighbors=1)
+        comp = MemorizingCutsComponent(clf=clf, extractor=default_extractor)
+        solver = LearningSolver(components=[comp])
+        solver.fit(data_filenames)
+        stats = solver.optimize(data_filenames[0], build_model)  # type: ignore
+        assert stats["Cuts: AOT"] > 0
@@ -52,8 +52,13 @@ def tsp_h5(request: Any) -> List[str]:


 @pytest.fixture()
-def stab_h5(request: Any) -> List[str]:
-    return _h5_fixture("stab*.h5", request)
+def stab_gp_h5(request: Any) -> List[str]:
+    return _h5_fixture("stab-gp*.h5", request)
+
+
+@pytest.fixture()
+def stab_pyo_h5(request: Any) -> List[str]:
+    return _h5_fixture("stab-pyo*.h5", request)


 @pytest.fixture()
tests/fixtures/gen_stab.py (vendored, 27 changes)
@@ -7,9 +7,11 @@ from miplearn.collectors.basic import BasicCollector
 from miplearn.io import write_pkl_gz
 from miplearn.problems.stab import (
     MaxWeightStableSetGenerator,
-    build_stab_model,
+    build_stab_model_gurobipy,
+    build_stab_model_pyomo,
 )


 np.random.seed(42)
 gen = MaxWeightStableSetGenerator(
     w=uniform(10.0, scale=1.0),
@@ -18,6 +20,25 @@ gen = MaxWeightStableSetGenerator(
     fix_graph=True,
 )
 data = gen.generate(3)
-data_filenames = write_pkl_gz(data, dirname(__file__), prefix="stab-n50-")
+
+params = {"seed": 42, "threads": 1}
+
+# Gurobipy
+data_filenames = write_pkl_gz(data, dirname(__file__), prefix="stab-gp-n50-")
 collector = BasicCollector()
-collector.collect(data_filenames, build_stab_model)
+collector.collect(
+    data_filenames,
+    lambda data: build_stab_model_gurobipy(data, params=params),
+    progress=True,
+    verbose=True,
+)
+
+# Pyomo
+data_filenames = write_pkl_gz(data, dirname(__file__), prefix="stab-pyo-n50-")
+collector = BasicCollector()
+collector.collect(
+    data_filenames,
+    lambda model: build_stab_model_pyomo(model, params=params),
+    progress=True,
+    verbose=True,
+)
BIN tests/fixtures/stab-gp-n50-00000.h5 (vendored, new file; binary file not shown)
BIN tests/fixtures/stab-gp-n50-00000.mps.gz (vendored, new file; binary file not shown)
BIN tests/fixtures/stab-gp-n50-00000.pkl.gz (vendored, new file; binary file not shown)
BIN tests/fixtures/stab-gp-n50-00001.h5 (vendored, new file; binary file not shown)
BIN tests/fixtures/stab-gp-n50-00001.mps.gz (vendored, new file; binary file not shown)
BIN tests/fixtures/stab-gp-n50-00001.pkl.gz (vendored, new file; binary file not shown)
BIN tests/fixtures/stab-gp-n50-00002.h5 (vendored, new file; binary file not shown)
BIN tests/fixtures/stab-gp-n50-00002.mps.gz (vendored, new file; binary file not shown)
BIN tests/fixtures/stab-gp-n50-00002.pkl.gz (vendored, new file; binary file not shown)
BIN tests/fixtures/stab-n50-00000.h5 (vendored; binary file not shown)
BIN tests/fixtures/stab-n50-00000.mps.gz (vendored; binary file not shown)
BIN tests/fixtures/stab-n50-00000.pkl.gz (vendored; binary file not shown)
BIN tests/fixtures/stab-n50-00001.h5 (vendored; binary file not shown)
BIN tests/fixtures/stab-n50-00001.mps.gz (vendored; binary file not shown)
BIN tests/fixtures/stab-n50-00001.pkl.gz (vendored; binary file not shown)
BIN tests/fixtures/stab-n50-00002.h5 (vendored; binary file not shown)
BIN tests/fixtures/stab-n50-00002.mps.gz (vendored; binary file not shown)
BIN tests/fixtures/stab-n50-00002.pkl.gz (vendored; binary file not shown)
BIN tests/fixtures/stab-pyo-n50-00000.h5 (vendored, new file; binary file not shown)
BIN tests/fixtures/stab-pyo-n50-00000.mps.gz (vendored, new file; binary file not shown)
BIN tests/fixtures/stab-pyo-n50-00000.pkl.gz (vendored, new file; binary file not shown)
BIN tests/fixtures/stab-pyo-n50-00001.h5 (vendored, new file; binary file not shown)
BIN tests/fixtures/stab-pyo-n50-00001.mps.gz (vendored, new file; binary file not shown)
BIN tests/fixtures/stab-pyo-n50-00001.pkl.gz (vendored, new file; binary file not shown)
BIN tests/fixtures/stab-pyo-n50-00002.h5 (vendored, new file; binary file not shown)
BIN tests/fixtures/stab-pyo-n50-00002.mps.gz (vendored, new file; binary file not shown)
BIN tests/fixtures/stab-pyo-n50-00002.pkl.gz (vendored, new file; binary file not shown)
@@ -9,7 +9,8 @@ import numpy as np
 from miplearn.h5 import H5File
 from miplearn.problems.stab import (
     MaxWeightStableSetData,
-    build_stab_model,
+    build_stab_model_gurobipy,
+    build_stab_model_pyomo,
 )
 from miplearn.solvers.abstract import AbstractModel

@@ -20,7 +21,8 @@ def test_stab() -> None:
         weights=np.array([1.0, 1.0, 1.0, 1.0, 1.0]),
     )
     for model in [
-        build_stab_model(data),
+        build_stab_model_gurobipy(data),
+        build_stab_model_pyomo(data),
     ]:
         assert isinstance(model, AbstractModel)
         with NamedTemporaryFile() as tempfile: