From 2a76dd42ecf51d634c7deb05e90a85461cfd55f1 Mon Sep 17 00:00:00 2001 From: "Alinson S. Xavier" Date: Tue, 25 Jan 2022 11:39:03 -0600 Subject: [PATCH] Allow user to attach arbitrary data to violations --- miplearn/components/dynamic_common.py | 47 ++++++++++++------- miplearn/components/dynamic_lazy.py | 41 +++++++++-------- miplearn/components/dynamic_user_cuts.py | 42 ++++++++--------- miplearn/instance/base.py | 52 ++++++++++++---------- miplearn/instance/file.py | 16 +++---- miplearn/instance/picklegz.py | 14 +++--- miplearn/problems/tsp.py | 12 ++--- miplearn/solvers/gurobi.py | 2 +- miplearn/solvers/tests/__init__.py | 2 +- tests/components/test_dynamic_lazy.py | 31 ++++++++++--- tests/components/test_dynamic_user_cuts.py | 25 ++++++----- tests/problems/test_tsp.py | 13 ++++-- 12 files changed, 169 insertions(+), 128 deletions(-) diff --git a/miplearn/components/dynamic_common.py b/miplearn/components/dynamic_common.py index dbfd5e7..11f341a 100644 --- a/miplearn/components/dynamic_common.py +++ b/miplearn/components/dynamic_common.py @@ -1,7 +1,7 @@ # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization # Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved. # Released under the modified BSD license. See COPYING.md for more details. - +import json import logging from typing import Dict, List, Tuple, Optional, Any, Set @@ -36,7 +36,7 @@ class DynamicConstraintsComponent(Component): self.classifier_prototype: Classifier = classifier self.classifiers: Dict[ConstraintCategory, Classifier] = {} self.thresholds: Dict[ConstraintCategory, Threshold] = {} - self.known_cids: List[ConstraintName] = [] + self.known_violations: Dict[ConstraintName, Any] = {} self.attr = attr def sample_xy_with_cids( @@ -48,18 +48,19 @@ class DynamicConstraintsComponent(Component): Dict[ConstraintCategory, List[List[bool]]], Dict[ConstraintCategory, List[ConstraintName]], ]: - if len(self.known_cids) == 0: + if len(self.known_violations) == 0: return {}, {}, {} assert instance is not None x: Dict[ConstraintCategory, List[List[float]]] = {} y: Dict[ConstraintCategory, List[List[bool]]] = {} cids: Dict[ConstraintCategory, List[ConstraintName]] = {} - known_cids = np.array(self.known_cids, dtype="S") + known_cids = np.array(sorted(list(self.known_violations.keys())), dtype="S") enforced_cids = None - enforced_cids_np = sample.get_array(self.attr) - if enforced_cids_np is not None: - enforced_cids = list(enforced_cids_np) + enforced_encoded = sample.get_scalar(self.attr) + if enforced_encoded is not None: + enforced = self.decode(enforced_encoded) + enforced_cids = list(enforced.keys()) # Get user-provided constraint features ( @@ -100,11 +101,10 @@ class DynamicConstraintsComponent(Component): @overrides def pre_fit(self, pre: List[Any]) -> None: assert pre is not None - known_cids: Set = set() - for cids in pre: - known_cids |= set(list(cids)) - self.known_cids.clear() - self.known_cids.extend(sorted(known_cids)) + self.known_violations.clear() + for violations in pre: + for (vname, vdata) in violations.items(): + self.known_violations[vname] = vdata def sample_predict( self, @@ -112,7 +112,7 @@ class DynamicConstraintsComponent(Component): sample: Sample, ) -> List[ConstraintName]: pred: List[ConstraintName] = [] - if len(self.known_cids) == 0: + if len(self.known_violations) == 0: logger.info("Classifiers not fitted. 
Skipping.") return pred x, _, cids = self.sample_xy_with_cids(instance, sample) @@ -131,7 +131,9 @@ class DynamicConstraintsComponent(Component): @overrides def pre_sample_xy(self, instance: Instance, sample: Sample) -> Any: - return sample.get_array(self.attr) + attr_encoded = sample.get_scalar(self.attr) + assert attr_encoded is not None + return self.decode(attr_encoded) @overrides def fit_xy( @@ -153,11 +155,13 @@ class DynamicConstraintsComponent(Component): instance: Instance, sample: Sample, ) -> Dict[str, float]: - actual = sample.get_array(self.attr) - assert actual is not None + attr_encoded = sample.get_scalar(self.attr) + assert attr_encoded is not None + actual_violations = DynamicConstraintsComponent.decode(attr_encoded) + actual = set(actual_violations.keys()) pred = set(self.sample_predict(instance, sample)) tp, tn, fp, fn = 0, 0, 0, 0 - for cid in self.known_cids: + for cid in self.known_violations.keys(): if cid in pred: if cid in actual: tp += 1 @@ -169,3 +173,12 @@ class DynamicConstraintsComponent(Component): else: tn += 1 return classifier_evaluation_dict(tp=tp, tn=tn, fp=fp, fn=fn) + + @staticmethod + def encode(violations: Dict[ConstraintName, Any]) -> str: + return json.dumps({k.decode(): v for (k, v) in violations.items()}) + + @staticmethod + def decode(violations_encoded: str) -> Dict[ConstraintName, Any]: + violations = json.loads(violations_encoded) + return {k.encode(): v for (k, v) in violations.items()} diff --git a/miplearn/components/dynamic_lazy.py b/miplearn/components/dynamic_lazy.py index 7756e64..e13360c 100644 --- a/miplearn/components/dynamic_lazy.py +++ b/miplearn/components/dynamic_lazy.py @@ -1,10 +1,8 @@ # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization # Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved. # Released under the modified BSD license. See COPYING.md for more details. - import logging -import pdb -from typing import Dict, List, TYPE_CHECKING, Tuple, Any, Optional, Set +from typing import Dict, List, TYPE_CHECKING, Tuple, Any, Optional import numpy as np from overrides import overrides @@ -37,23 +35,23 @@ class DynamicLazyConstraintsComponent(Component): self.dynamic: DynamicConstraintsComponent = DynamicConstraintsComponent( classifier=classifier, threshold=threshold, - attr="mip_constr_lazy_enforced", + attr="mip_constr_lazy", ) self.classifiers = self.dynamic.classifiers self.thresholds = self.dynamic.thresholds - self.known_cids = self.dynamic.known_cids - self.lazy_enforced: Set[ConstraintName] = set() + self.known_violations = self.dynamic.known_violations + self.lazy_enforced: Dict[ConstraintName, Any] = {} @staticmethod def enforce( - cids: List[ConstraintName], + violations: Dict[ConstraintName, Any], instance: Instance, model: Any, solver: "LearningSolver", ) -> None: assert solver.internal_solver is not None - for cid in cids: - instance.enforce_lazy_constraint(solver.internal_solver, model, cid) + for (vname, vdata) in violations.items(): + instance.enforce_lazy_constraint(solver.internal_solver, model, vdata) @overrides def before_solve_mip( @@ -66,9 +64,10 @@ class DynamicLazyConstraintsComponent(Component): ) -> None: self.lazy_enforced.clear() logger.info("Predicting violated (dynamic) lazy constraints...") - cids = self.dynamic.sample_predict(instance, sample) - logger.info("Enforcing %d lazy constraints..." 
% len(cids)) - self.enforce(cids, instance, model, solver) + vnames = self.dynamic.sample_predict(instance, sample) + violations = {c: self.dynamic.known_violations[c] for c in vnames} + logger.info("Enforcing %d lazy constraints..." % len(vnames)) + self.enforce(violations, instance, model, solver) @overrides def after_solve_mip( @@ -79,10 +78,7 @@ class DynamicLazyConstraintsComponent(Component): stats: LearningSolveStats, sample: Sample, ) -> None: - sample.put_array( - "mip_constr_lazy_enforced", - np.array(list(self.lazy_enforced), dtype="S"), - ) + sample.put_scalar("mip_constr_lazy", self.dynamic.encode(self.lazy_enforced)) @overrides def iteration_cb( @@ -93,14 +89,17 @@ class DynamicLazyConstraintsComponent(Component): ) -> bool: assert solver.internal_solver is not None logger.debug("Finding violated lazy constraints...") - cids = instance.find_violated_lazy_constraints(solver.internal_solver, model) - if len(cids) == 0: + violations = instance.find_violated_lazy_constraints( + solver.internal_solver, model + ) + if len(violations) == 0: logger.debug("No violations found") return False else: - self.lazy_enforced |= set(cids) - logger.debug(" %d violations found" % len(cids)) - self.enforce(cids, instance, model, solver) + for v in violations: + self.lazy_enforced[v] = violations[v] + logger.debug(" %d violations found" % len(violations)) + self.enforce(violations, instance, model, solver) return True # Delegate ML methods to self.dynamic diff --git a/miplearn/components/dynamic_user_cuts.py b/miplearn/components/dynamic_user_cuts.py index b48d7e7..d939ff5 100644 --- a/miplearn/components/dynamic_user_cuts.py +++ b/miplearn/components/dynamic_user_cuts.py @@ -1,9 +1,8 @@ # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization # Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved. # Released under the modified BSD license. See COPYING.md for more details. - import logging -from typing import Any, TYPE_CHECKING, Set, Tuple, Dict, List, Optional +from typing import Any, TYPE_CHECKING, Tuple, Dict, List import numpy as np from overrides import overrides @@ -32,9 +31,9 @@ class UserCutsComponent(Component): self.dynamic = DynamicConstraintsComponent( classifier=classifier, threshold=threshold, - attr="mip_user_cuts_enforced", + attr="mip_user_cuts", ) - self.enforced: Set[ConstraintName] = set() + self.enforced: Dict[ConstraintName, Any] = {} self.n_added_in_callback = 0 @overrides @@ -50,11 +49,12 @@ class UserCutsComponent(Component): self.enforced.clear() self.n_added_in_callback = 0 logger.info("Predicting violated user cuts...") - cids = self.dynamic.sample_predict(instance, sample) - logger.info("Enforcing %d user cuts ahead-of-time..." % len(cids)) - for cid in cids: - instance.enforce_user_cut(solver.internal_solver, model, cid) - stats["UserCuts: Added ahead-of-time"] = len(cids) + vnames = self.dynamic.sample_predict(instance, sample) + logger.info("Enforcing %d user cuts ahead-of-time..." 
% len(vnames)) + for vname in vnames: + vdata = self.dynamic.known_violations[vname] + instance.enforce_user_cut(solver.internal_solver, model, vdata) + stats["UserCuts: Added ahead-of-time"] = len(vnames) @overrides def user_cut_cb( @@ -65,18 +65,17 @@ class UserCutsComponent(Component): ) -> None: assert solver.internal_solver is not None logger.debug("Finding violated user cuts...") - cids = instance.find_violated_user_cuts(model) - logger.debug(f"Found {len(cids)} violated user cuts") + violations = instance.find_violated_user_cuts(model) + logger.debug(f"Found {len(violations)} violated user cuts") logger.debug("Building violated user cuts...") - for cid in cids: - if cid in self.enforced: + for (vname, vdata) in violations.items(): + if vname in self.enforced: continue - assert isinstance(cid, ConstraintName) - instance.enforce_user_cut(solver.internal_solver, model, cid) - self.enforced.add(cid) + instance.enforce_user_cut(solver.internal_solver, model, vdata) + self.enforced[vname] = vdata self.n_added_in_callback += 1 - if len(cids) > 0: - logger.debug(f"Added {len(cids)} violated user cuts") + if len(violations) > 0: + logger.debug(f"Added {len(violations)} violated user cuts") @overrides def after_solve_mip( @@ -87,10 +86,7 @@ class UserCutsComponent(Component): stats: LearningSolveStats, sample: Sample, ) -> None: - sample.put_array( - "mip_user_cuts_enforced", - np.array(list(self.enforced), dtype="S"), - ) + sample.put_scalar("mip_user_cuts", self.dynamic.encode(self.enforced)) stats["UserCuts: Added in callback"] = self.n_added_in_callback if self.n_added_in_callback > 0: logger.info(f"{self.n_added_in_callback} user cuts added in callback") @@ -133,5 +129,5 @@ class UserCutsComponent(Component): self, instance: "Instance", sample: Sample, - ) -> Dict[ConstraintCategory, Dict[str, float]]: + ) -> Dict[ConstraintCategory, Dict[ConstraintName, float]]: return self.dynamic.sample_evaluate(instance, sample) diff --git a/miplearn/instance/base.py b/miplearn/instance/base.py index 01f75e4..8ddcba1 100644 --- a/miplearn/instance/base.py +++ b/miplearn/instance/base.py @@ -9,7 +9,7 @@ from typing import Any, List, TYPE_CHECKING, Dict import numpy as np from miplearn.features.sample import Sample, MemorySample -from miplearn.types import ConstraintName, ConstraintCategory +from miplearn.types import ConstraintName logger = logging.getLogger(__name__) @@ -114,7 +114,7 @@ class Instance(ABC): self, solver: "InternalSolver", model: Any, - ) -> List[ConstraintName]: + ) -> Dict[ConstraintName, Any]: """ Returns lazy constraint violations found for the current solution. @@ -124,40 +124,46 @@ class Instance(ABC): resolve the problem. The process repeats until no further lazy constraint violations are found. - Each "violation" is simply a string which allows the instance to identify - unambiguously which lazy constraint should be generated. In the Traveling - Salesman Problem, for example, a subtour violation could be a string - containing the cities in the subtour. + Violations should be returned in a dictionary mapping the name of the violation + to some user-specified data that allows the instance to unambiguously generate + the lazy constraints at a later time. In the Traveling Salesman Problem, for + example, this function could return a dictionary identifying violated subtour + inequalities. More concretely, it could return: + { + "s1": [1, 2, 3], + "s2": [4, 5, 6, 7], + } + where "s1" and "s2" are the names of the subtours, and [1,2,3] and [4,5,6,7] + are the cities in each subtour. 
The names of the violations should be kept + stable across instances. In our example, "s1" should always correspond to + [1,2,3] across all instances. The user-provided data should be picklable. The current solution can be queried with `solver.get_solution()`. If the solver is configured to use lazy callbacks, this solution may be non-integer. For a concrete example, see TravelingSalesmanInstance. """ - return [] + return {} def enforce_lazy_constraint( self, solver: "InternalSolver", model: Any, - violation: ConstraintName, + violation_data: Any, ) -> None: """ Adds constraints to the model to ensure that the given violation is fixed. This method is typically called immediately after - find_violated_lazy_constraints. The violation object provided to this method - is exactly the same object returned earlier by - find_violated_lazy_constraints. After some training, LearningSolver may - decide to proactively build some lazy constraints at the beginning of the - optimization process, before a solution is even available. In this case, - enforce_lazy_constraints will be called without a corresponding call to - find_violated_lazy_constraints. - - Note that this method can be called either before the optimization starts or - from within a callback. To ensure that constraints are added correctly in - either case, it is recommended to use `solver.add_constraint`, instead of - modifying the `model` object directly. + `find_violated_lazy_constraints`. The argument `violation_data` is the + user-provided data, previously returned by `find_violated_lazy_constraints`. + In the Traveling Salesman Problem, for example, it could be a list of cities + in the subtour. + + After some training, LearningSolver may decide to proactively build some lazy + constraints at the beginning of the optimization process, before a solution + is even available. In this case, `enforce_lazy_constraints` will be called + without a corresponding call to `find_violated_lazy_constraints`. For a concrete example, see TravelingSalesmanInstance. """ @@ -166,14 +172,14 @@ class Instance(ABC): def has_user_cuts(self) -> bool: return False - def find_violated_user_cuts(self, model: Any) -> List[ConstraintName]: - return [] + def find_violated_user_cuts(self, model: Any) -> Dict[ConstraintName, Any]: + return {} def enforce_user_cut( self, solver: "InternalSolver", model: Any, - violation: ConstraintName, + violation_data: Any, ) -> Any: return None diff --git a/miplearn/instance/file.py b/miplearn/instance/file.py index 46e7609..c1d4a78 100644 --- a/miplearn/instance/file.py +++ b/miplearn/instance/file.py @@ -3,15 +3,15 @@ # Released under the modified BSD license. See COPYING.md for more details. 
import gc import os -from typing import Any, Optional, List, Dict, TYPE_CHECKING import pickle +from typing import Any, Optional, List, Dict, TYPE_CHECKING import numpy as np from overrides import overrides from miplearn.features.sample import Hdf5Sample, Sample from miplearn.instance.base import Instance -from miplearn.types import ConstraintName, ConstraintCategory +from miplearn.types import ConstraintName if TYPE_CHECKING: from miplearn.solvers.learning import InternalSolver @@ -71,7 +71,7 @@ class FileInstance(Instance): self, solver: "InternalSolver", model: Any, - ) -> List[ConstraintName]: + ) -> Dict[ConstraintName, Any]: assert self.instance is not None return self.instance.find_violated_lazy_constraints(solver, model) @@ -80,13 +80,13 @@ class FileInstance(Instance): self, solver: "InternalSolver", model: Any, - violation: ConstraintName, + violation_data: Any, ) -> None: assert self.instance is not None - self.instance.enforce_lazy_constraint(solver, model, violation) + self.instance.enforce_lazy_constraint(solver, model, violation_data) @overrides - def find_violated_user_cuts(self, model: Any) -> List[ConstraintName]: + def find_violated_user_cuts(self, model: Any) -> Dict[ConstraintName, Any]: assert self.instance is not None return self.instance.find_violated_user_cuts(model) @@ -95,10 +95,10 @@ class FileInstance(Instance): self, solver: "InternalSolver", model: Any, - violation: ConstraintName, + violation_data: Any, ) -> None: assert self.instance is not None - self.instance.enforce_user_cut(solver, model, violation) + self.instance.enforce_user_cut(solver, model, violation_data) # Input & Output # ------------------------------------------------------------------------- diff --git a/miplearn/instance/picklegz.py b/miplearn/instance/picklegz.py index 41cf9b2..bdceae7 100644 --- a/miplearn/instance/picklegz.py +++ b/miplearn/instance/picklegz.py @@ -13,7 +13,7 @@ from overrides import overrides from miplearn.features.sample import Sample from miplearn.instance.base import Instance -from miplearn.types import ConstraintName, ConstraintCategory +from miplearn.types import ConstraintName if TYPE_CHECKING: from miplearn.solvers.learning import InternalSolver @@ -83,7 +83,7 @@ class PickleGzInstance(Instance): self, solver: "InternalSolver", model: Any, - ) -> List[ConstraintName]: + ) -> Dict[ConstraintName, Any]: assert self.instance is not None return self.instance.find_violated_lazy_constraints(solver, model) @@ -92,13 +92,13 @@ class PickleGzInstance(Instance): self, solver: "InternalSolver", model: Any, - violation: ConstraintName, + violation_data: Any, ) -> None: assert self.instance is not None - self.instance.enforce_lazy_constraint(solver, model, violation) + self.instance.enforce_lazy_constraint(solver, model, violation_data) @overrides - def find_violated_user_cuts(self, model: Any) -> List[ConstraintName]: + def find_violated_user_cuts(self, model: Any) -> Dict[ConstraintName, Any]: assert self.instance is not None return self.instance.find_violated_user_cuts(model) @@ -107,10 +107,10 @@ class PickleGzInstance(Instance): self, solver: "InternalSolver", model: Any, - violation: ConstraintName, + violation_name: Any, ) -> None: assert self.instance is not None - self.instance.enforce_user_cut(solver, model, violation) + self.instance.enforce_user_cut(solver, model, violation_name) @overrides def load(self) -> None: diff --git a/miplearn/problems/tsp.py b/miplearn/problems/tsp.py index b277e3a..4261fea 100644 --- a/miplearn/problems/tsp.py +++ 
b/miplearn/problems/tsp.py @@ -1,7 +1,7 @@ # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization # Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved. # Released under the modified BSD license. See COPYING.md for more details. -from typing import List, Tuple, FrozenSet, Any, Optional, Dict +from typing import List, Tuple, Any, Optional, Dict import networkx as nx import numpy as np @@ -86,14 +86,15 @@ class TravelingSalesmanInstance(Instance): self, solver: InternalSolver, model: Any, - ) -> List[ConstraintName]: + ) -> Dict[ConstraintName, List]: selected_edges = [e for e in self.edges if model.x[e].value > 0.5] graph = nx.Graph() graph.add_edges_from(selected_edges) - violations = [] + violations = {} for c in list(nx.connected_components(graph)): if len(c) < self.n_cities: - violations.append(",".join(map(str, c)).encode()) + cname = ("st[" + ",".join(map(str, c)) + "]").encode() + violations[cname] = list(c) return violations @overrides @@ -101,10 +102,9 @@ class TravelingSalesmanInstance(Instance): self, solver: InternalSolver, model: Any, - violation: ConstraintName, + component: List, ) -> None: assert isinstance(solver, BasePyomoSolver) - component = [int(v) for v in violation.decode().split(",")] cut_edges = [ e for e in self.edges diff --git a/miplearn/solvers/gurobi.py b/miplearn/solvers/gurobi.py index 8311961..7e2132f 100644 --- a/miplearn/solvers/gurobi.py +++ b/miplearn/solvers/gurobi.py @@ -710,7 +710,7 @@ class GurobiTestInstanceKnapsack(PyomoTestInstanceKnapsack): self, solver: InternalSolver, model: Any, - violation: str, + violation_data: Any, ) -> None: x0 = model.getVarByName("x[0]") model.cbLazy(x0 <= 0) diff --git a/miplearn/solvers/tests/__init__.py b/miplearn/solvers/tests/__init__.py index 3bc74d3..34a8416 100644 --- a/miplearn/solvers/tests/__init__.py +++ b/miplearn/solvers/tests/__init__.py @@ -247,7 +247,7 @@ def run_lazy_cb_tests(solver: InternalSolver) -> None: assert relsol is not None assert relsol[b"x[0]"] is not None if relsol[b"x[0]"] > 0: - instance.enforce_lazy_constraint(cb_solver, cb_model, b"cut") + instance.enforce_lazy_constraint(cb_solver, cb_model, None) solver.set_instance(instance, model) solver.solve(lazy_cb=lazy_cb) diff --git a/tests/components/test_dynamic_lazy.py b/tests/components/test_dynamic_lazy.py index 4fbdc0b..5d1faa8 100644 --- a/tests/components/test_dynamic_lazy.py +++ b/tests/components/test_dynamic_lazy.py @@ -10,6 +10,7 @@ import pytest from miplearn.classifiers import Classifier from miplearn.classifiers.threshold import MinProbabilityThreshold from miplearn.components import classifier_evaluation_dict +from miplearn.components.dynamic_common import DynamicConstraintsComponent from miplearn.components.dynamic_lazy import DynamicLazyConstraintsComponent from miplearn.features.sample import MemorySample from miplearn.instance.base import Instance @@ -24,13 +25,23 @@ def training_instances() -> List[Instance]: samples_0 = [ MemorySample( { - "mip_constr_lazy_enforced": np.array(["c1", "c2"], dtype="S"), + "mip_constr_lazy": DynamicConstraintsComponent.encode( + { + b"c1": 0, + b"c2": 0, + } + ), "static_instance_features": np.array([5.0]), }, ), MemorySample( { - "mip_constr_lazy_enforced": np.array(["c2", "c3"], dtype="S"), + "mip_constr_lazy": DynamicConstraintsComponent.encode( + { + b"c2": 0, + b"c3": 0, + } + ), "static_instance_features": np.array([5.0]), }, ), @@ -55,7 +66,12 @@ def training_instances() -> List[Instance]: samples_1 = [ MemorySample( { - 
"mip_constr_lazy_enforced": np.array(["c3", "c4"], dtype="S"), + "mip_constr_lazy": DynamicConstraintsComponent.encode( + { + b"c3": 0, + b"c4": 0, + } + ), "static_instance_features": np.array([8.0]), }, ) @@ -83,8 +99,8 @@ def test_sample_xy(training_instances: List[Instance]) -> None: comp = DynamicLazyConstraintsComponent() comp.pre_fit( [ - np.array(["c1", "c3", "c4"], dtype="S"), - np.array(["c1", "c2", "c4"], dtype="S"), + {b"c1": 0, b"c3": 0, b"c4": 0}, + {b"c1": 0, b"c2": 0, b"c4": 0}, ] ) x_expected = { @@ -105,7 +121,10 @@ def test_sample_xy(training_instances: List[Instance]) -> None: def test_sample_predict_evaluate(training_instances: List[Instance]) -> None: comp = DynamicLazyConstraintsComponent() - comp.known_cids.extend([b"c1", b"c2", b"c3", b"c4"]) + comp.known_violations[b"c1"] = 0 + comp.known_violations[b"c2"] = 0 + comp.known_violations[b"c3"] = 0 + comp.known_violations[b"c4"] = 0 comp.thresholds[b"type-a"] = MinProbabilityThreshold([0.5, 0.5]) comp.thresholds[b"type-b"] = MinProbabilityThreshold([0.5, 0.5]) comp.classifiers[b"type-a"] = Mock(spec=Classifier) diff --git a/tests/components/test_dynamic_user_cuts.py b/tests/components/test_dynamic_user_cuts.py index 57bbbd8..040d48e 100644 --- a/tests/components/test_dynamic_user_cuts.py +++ b/tests/components/test_dynamic_user_cuts.py @@ -1,9 +1,9 @@ # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization # Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved. # Released under the modified BSD license. See COPYING.md for more details. - +import json import logging -from typing import Any, FrozenSet, List +from typing import Any, List, Dict import gurobipy as gp import networkx as nx @@ -12,12 +12,11 @@ from gurobipy import GRB from networkx import Graph from overrides import overrides -from miplearn.solvers.learning import InternalSolver from miplearn.components.dynamic_user_cuts import UserCutsComponent from miplearn.instance.base import Instance from miplearn.solvers.gurobi import GurobiSolver from miplearn.solvers.learning import LearningSolver -from miplearn.types import ConstraintName, ConstraintCategory +from miplearn.types import ConstraintName logger = logging.getLogger(__name__) @@ -41,13 +40,14 @@ class GurobiStableSetProblem(Instance): return True @overrides - def find_violated_user_cuts(self, model: Any) -> List[ConstraintName]: + def find_violated_user_cuts(self, model: Any) -> Dict[ConstraintName, Any]: assert isinstance(model, gp.Model) vals = model.cbGetNodeRel(model.getVars()) - violations = [] + violations = {} for clique in nx.find_cliques(self.graph): if sum(vals[i] for i in clique) > 1: - violations.append(",".join([str(i) for i in clique]).encode()) + vname = (",".join([str(i) for i in clique])).encode() + violations[vname] = list(clique) return violations @overrides @@ -55,9 +55,8 @@ class GurobiStableSetProblem(Instance): self, solver: GurobiSolver, model: Any, - cid: ConstraintName, + clique: List[int], ) -> Any: - clique = [int(i) for i in cid.decode().split(",")] x = model.getVars() constr = gp.quicksum([x[i] for i in clique]) <= 1 if solver.cb_where: @@ -86,9 +85,11 @@ def test_usage( ) -> None: stats_before = solver.solve(stab_instance) sample = stab_instance.get_samples()[0] - user_cuts_enforced = sample.get_array("mip_user_cuts_enforced") - assert user_cuts_enforced is not None - assert len(user_cuts_enforced) > 0 + user_cuts_encoded = sample.get_scalar("mip_user_cuts") + assert user_cuts_encoded is not None + user_cuts = 
json.loads(user_cuts_encoded) + assert user_cuts is not None + assert len(user_cuts) > 0 assert stats_before["UserCuts: Added ahead-of-time"] == 0 assert stats_before["UserCuts: Added in callback"] > 0 diff --git a/tests/problems/test_tsp.py b/tests/problems/test_tsp.py index 8572635..f0216ee 100644 --- a/tests/problems/test_tsp.py +++ b/tests/problems/test_tsp.py @@ -1,6 +1,7 @@ # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization # Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved. # Released under the modified BSD license. See COPYING.md for more details. +import json import numpy as np from numpy.linalg import norm @@ -66,9 +67,15 @@ def test_subtour() -> None: samples = instance.get_samples() assert len(samples) == 1 sample = samples[0] - lazy_enforced = sample.get_array("mip_constr_lazy_enforced") - assert lazy_enforced is not None - assert len(lazy_enforced) > 0 + + lazy_encoded = sample.get_scalar("mip_constr_lazy") + assert lazy_encoded is not None + lazy = json.loads(lazy_encoded) + assert lazy == { + "st[0,1,4]": [0, 1, 4], + "st[2,3,5]": [2, 3, 5], + } + assert_equals( sample.get_array("mip_var_values"), [
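
Usage sketch (not part of the diff above): the snippet below illustrates, assuming only the encode/decode helpers and the dictionary-based violation contract introduced in this patch, how user code is expected to shape its violations. The subtour names and data mirror the expectation in tests/problems/test_tsp.py; everything else is hypothetical.

    # Minimal sketch of the new violation-dictionary contract.
    from miplearn.components.dynamic_common import DynamicConstraintsComponent

    # find_violated_lazy_constraints / find_violated_user_cuts now return a
    # dictionary mapping a stable bytes name to arbitrary JSON-serializable data
    # that is later passed back to enforce_lazy_constraint / enforce_user_cut.
    violations = {
        b"st[0,1,4]": [0, 1, 4],  # e.g. cities in a violated subtour
        b"st[2,3,5]": [2, 3, 5],
    }

    # Samples store the violations as a JSON string under "mip_constr_lazy" or
    # "mip_user_cuts"; encode/decode round-trip the dictionary (keys are bytes).
    encoded = DynamicConstraintsComponent.encode(violations)
    decoded = DynamicConstraintsComponent.decode(encoded)
    assert decoded == violations

    # At solve time, the components predict violation *names* and look up the
    # attached data before calling instance.enforce_lazy_constraint(solver, model, data).
    predicted_names = [b"st[0,1,4]"]
    to_enforce = {name: decoded[name] for name in predicted_names}

Because the helpers serialize with json.dumps, violation data should be JSON-friendly (lists, numbers, strings, dicts); keeping names stable across instances, as the base-class docstring requires, is what allows the classifiers to learn which violations to enforce ahead of time.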