diff --git a/miplearn/__init__.py b/miplearn/__init__.py
index 5964a12..632597b 100644
--- a/miplearn/__init__.py
+++ b/miplearn/__init__.py
@@ -14,9 +14,9 @@ from .classifiers.sklearn import (
 from .classifiers.adaptive import AdaptiveClassifier
 from .classifiers.threshold import MinPrecisionThreshold
 from .components.component import Component
-from .components.cuts import UserCutsComponent
-from .components.lazy_dynamic import DynamicLazyConstraintsComponent
-from .components.lazy_static import StaticLazyConstraintsComponent
+from .components.dynamic_lazy import DynamicLazyConstraintsComponent
+from .components.dynamic_user_cuts import UserCutsComponent
+from .components.static_lazy import StaticLazyConstraintsComponent
 from .components.objective import ObjectiveValueComponent
 from .components.primal import PrimalSolutionComponent
 from .components.steps.convert_tight import ConvertTightIneqsIntoEqsStep
diff --git a/miplearn/components/cuts.py b/miplearn/components/cuts.py
deleted file mode 100644
index 5b163c3..0000000
--- a/miplearn/components/cuts.py
+++ /dev/null
@@ -1,107 +0,0 @@
-# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
-# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
-# Released under the modified BSD license. See COPYING.md for more details.
-
-import logging
-import sys
-from typing import Any, Dict
-
-import numpy as np
-from tqdm.auto import tqdm
-
-from miplearn.classifiers import Classifier
-from miplearn.classifiers.counting import CountingClassifier
-from miplearn.components import classifier_evaluation_dict
-from miplearn.components.component import Component
-from miplearn.extractors import InstanceFeaturesExtractor
-
-logger = logging.getLogger(__name__)
-
-
-class UserCutsComponent(Component):
-    """
-    A component that predicts which user cuts to enforce.
-    """
-
-    def __init__(
-        self,
-        classifier: Classifier = CountingClassifier(),
-        threshold: float = 0.05,
-    ):
-        assert isinstance(classifier, Classifier)
-        self.threshold: float = threshold
-        self.classifier_prototype: Classifier = classifier
-        self.classifiers: Dict[Any, Classifier] = {}
-
-    def before_solve_mip(
-        self,
-        solver,
-        instance,
-        model,
-        stats,
-        features,
-        training_data,
-    ):
-        instance.found_violated_user_cuts = []
-        logger.info("Predicting violated user cuts...")
-        violations = self.predict(instance)
-        logger.info("Enforcing %d user cuts..." % len(violations))
-        for v in violations:
-            cut = instance.build_user_cut(model, v)
-            solver.internal_solver.add_constraint(cut)
-
-    def fit(self, training_instances):
-        logger.debug("Fitting...")
-        features = InstanceFeaturesExtractor().extract(training_instances)
-
-        self.classifiers = {}
-        violation_to_instance_idx = {}
-        for (idx, instance) in enumerate(training_instances):
-            if not hasattr(instance, "found_violated_user_cuts"):
-                continue
-            for v in instance.found_violated_user_cuts:
-                if v not in self.classifiers:
-                    self.classifiers[v] = self.classifier_prototype.clone()
-                    violation_to_instance_idx[v] = []
-                violation_to_instance_idx[v] += [idx]
-
-        for (v, classifier) in tqdm(
-            self.classifiers.items(),
-            desc="Fit (user cuts)",
-            disable=not sys.stdout.isatty(),
-        ):
-            logger.debug("Training: %s" % (str(v)))
-            label = np.zeros(len(training_instances))
-            label[violation_to_instance_idx[v]] = 1.0
-            classifier.fit(features, label)
-
-    def predict(self, instance):
-        violations = []
-        features = InstanceFeaturesExtractor().extract([instance])
-        for (v, classifier) in self.classifiers.items():
-            proba = classifier.predict_proba(features)
-            if proba[0][1] > self.threshold:
-                violations += [v]
-        return violations
-
-    def evaluate(self, instances):
-        results = {}
-        all_violations = set()
-        for instance in instances:
-            all_violations |= set(instance.found_violated_user_cuts)
-        for idx in tqdm(
-            range(len(instances)),
-            desc="Evaluate (lazy)",
-            disable=not sys.stdout.isatty(),
-        ):
-            instance = instances[idx]
-            condition_positive = set(instance.found_violated_user_cuts)
-            condition_negative = all_violations - condition_positive
-            pred_positive = set(self.predict(instance)) & all_violations
-            pred_negative = all_violations - pred_positive
-            tp = len(pred_positive & condition_positive)
-            tn = len(pred_negative & condition_negative)
-            fp = len(pred_positive & condition_negative)
-            fn = len(pred_negative & condition_positive)
-            results[idx] = classifier_evaluation_dict(tp, tn, fp, fn)
-        return results
diff --git a/miplearn/components/lazy_dynamic.py b/miplearn/components/dynamic_common.py
similarity index 68%
rename from miplearn/components/lazy_dynamic.py
rename to miplearn/components/dynamic_common.py
index 0de3c47..4d5f1e0 100644
--- a/miplearn/components/lazy_dynamic.py
+++ b/miplearn/components/dynamic_common.py
@@ -1,34 +1,27 @@
-# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
-# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
-# Released under the modified BSD license. See COPYING.md for more details.
-
-import logging
-from typing import Dict, List, TYPE_CHECKING, Hashable, Tuple
+from typing import Dict, Hashable, List, Tuple, TYPE_CHECKING
 
 import numpy as np
 
 from miplearn.classifiers import Classifier
-from miplearn.classifiers.counting import CountingClassifier
-from miplearn.classifiers.threshold import MinProbabilityThreshold, Threshold
+from miplearn.classifiers.threshold import Threshold
 from miplearn.components import classifier_evaluation_dict
 from miplearn.components.component import Component
 from miplearn.features import TrainingSample
 
-logger = logging.getLogger(__name__)
-
 if TYPE_CHECKING:
     from miplearn.solvers.learning import Instance
 
 
-class DynamicLazyConstraintsComponent(Component):
+class DynamicConstraintsComponent(Component):
     """
-    A component that predicts which lazy constraints to enforce.
+    Base component used by both DynamicLazyConstraintsComponent and UserCutsComponent.
     """
 
     def __init__(
         self,
-        classifier: Classifier = CountingClassifier(),
-        threshold: Threshold = MinProbabilityThreshold([0, 0.05]),
+        attr: str,
+        classifier: Classifier,
+        threshold: Threshold,
     ):
         assert isinstance(classifier, Classifier)
         self.threshold_prototype: Threshold = threshold
@@ -36,39 +29,7 @@ class DynamicLazyConstraintsComponent(Component):
         self.classifiers: Dict[Hashable, Classifier] = {}
         self.thresholds: Dict[Hashable, Threshold] = {}
         self.known_cids: List[str] = []
-
-    @staticmethod
-    def enforce(cids, instance, model, solver):
-        for cid in cids:
-            cobj = instance.build_lazy_constraint(model, cid)
-            solver.internal_solver.add_constraint(cobj)
-
-    def before_solve_mip(
-        self,
-        solver,
-        instance,
-        model,
-        stats,
-        features,
-        training_data,
-    ):
-        training_data.lazy_enforced = set()
-        logger.info("Predicting violated lazy constraints...")
-        cids = self.sample_predict(instance, training_data)
-        logger.info("Enforcing %d lazy constraints..." % len(cids))
-        self.enforce(cids, instance, model, solver)
-
-    def iteration_cb(self, solver, instance, model):
-        logger.debug("Finding violated lazy constraints...")
-        cids = instance.find_violated_lazy_constraints(model)
-        if len(cids) == 0:
-            logger.debug("No violations found")
-            return False
-        else:
-            instance.training_data[-1].lazy_enforced |= set(cids)
-            logger.debug(" %d violations found" % len(cids))
-            self.enforce(cids, instance, model, solver)
-            return True
+        self.attr = attr
 
     def sample_xy_with_cids(
         self,
@@ -101,8 +62,8 @@ class DynamicLazyConstraintsComponent(Component):
                 f += cfeatures
                 x[category] += [f]
                 cids[category] += [cid]
-                if sample.lazy_enforced is not None:
-                    if cid in sample.lazy_enforced:
+                if getattr(sample, self.attr) is not None:
+                    if cid in getattr(sample, self.attr):
                         y[category] += [[False, True]]
                     else:
                         y[category] += [[True, False]]
@@ -137,13 +98,14 @@ class DynamicLazyConstraintsComponent(Component):
         return pred
 
     def fit(self, training_instances: List["Instance"]) -> None:
-        self.known_cids.clear()
+        collected_cids = set()
         for instance in training_instances:
             for sample in instance.training_data:
-                if sample.lazy_enforced is None:
+                if getattr(sample, self.attr) is None:
                     continue
-                self.known_cids += list(sample.lazy_enforced)
-        self.known_cids = sorted(set(self.known_cids))
+                collected_cids |= getattr(sample, self.attr)
+        self.known_cids.clear()
+        self.known_cids.extend(sorted(collected_cids))
         super().fit(training_instances)
 
     def fit_xy(
         self,
@@ -164,7 +126,7 @@
         instance: "Instance",
         sample: TrainingSample,
     ) -> Dict[Hashable, Dict[str, float]]:
-        assert sample.lazy_enforced is not None
+        assert getattr(sample, self.attr) is not None
         pred = set(self.sample_predict(instance, sample))
         tp: Dict[Hashable, int] = {}
         tn: Dict[Hashable, int] = {}
@@ -180,12 +142,12 @@
                 fp[category] = 0
                 fn[category] = 0
             if cid in pred:
-                if cid in sample.lazy_enforced:
+                if cid in getattr(sample, self.attr):
                     tp[category] += 1
                 else:
                     fp[category] += 1
             else:
-                if cid in sample.lazy_enforced:
+                if cid in getattr(sample, self.attr):
                     fn[category] += 1
                 else:
                     tn[category] += 1
diff --git a/miplearn/components/dynamic_lazy.py b/miplearn/components/dynamic_lazy.py
new file mode 100644
index 0000000..2a5d6f9
--- /dev/null
+++ b/miplearn/components/dynamic_lazy.py
@@ -0,0 +1,106 @@
+# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
+# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
+# Released under the modified BSD license. See COPYING.md for more details.
+
+import logging
+from typing import Dict, List, TYPE_CHECKING, Hashable, Tuple
+
+import numpy as np
+
+from miplearn.classifiers import Classifier
+from miplearn.classifiers.counting import CountingClassifier
+from miplearn.classifiers.threshold import MinProbabilityThreshold, Threshold
+from miplearn.components.component import Component
+from miplearn.components.dynamic_common import DynamicConstraintsComponent
+from miplearn.features import TrainingSample
+
+logger = logging.getLogger(__name__)
+
+if TYPE_CHECKING:
+    from miplearn.solvers.learning import Instance
+
+
+class DynamicLazyConstraintsComponent(Component):
+    """
+    A component that predicts which lazy constraints to enforce.
+    """
+
+    def __init__(
+        self,
+        classifier: Classifier = CountingClassifier(),
+        threshold: Threshold = MinProbabilityThreshold([0, 0.05]),
+    ):
+        self.dynamic: DynamicConstraintsComponent = DynamicConstraintsComponent(
+            classifier=classifier,
+            threshold=threshold,
+            attr="lazy_enforced",
+        )
+        self.classifiers = self.dynamic.classifiers
+        self.thresholds = self.dynamic.thresholds
+        self.known_cids = self.dynamic.known_cids
+
+    @staticmethod
+    def enforce(cids, instance, model, solver):
+        for cid in cids:
+            cobj = instance.build_lazy_constraint(model, cid)
+            solver.internal_solver.add_constraint(cobj)
+
+    def before_solve_mip(
+        self,
+        solver,
+        instance,
+        model,
+        stats,
+        features,
+        training_data,
+    ):
+        training_data.lazy_enforced = set()
+        logger.info("Predicting violated lazy constraints...")
+        cids = self.dynamic.sample_predict(instance, training_data)
+        logger.info("Enforcing %d lazy constraints..." % len(cids))
+        self.enforce(cids, instance, model, solver)
+
+    def iteration_cb(self, solver, instance, model):
+        logger.debug("Finding violated lazy constraints...")
+        cids = instance.find_violated_lazy_constraints(model)
+        if len(cids) == 0:
+            logger.debug("No violations found")
+            return False
+        else:
+            instance.training_data[-1].lazy_enforced |= set(cids)
+            logger.debug(" %d violations found" % len(cids))
+            self.enforce(cids, instance, model, solver)
+            return True
+
+    # Delegate ML methods to self.dynamic
+    # -------------------------------------------------------------------
+    def sample_xy(
+        self,
+        instance: "Instance",
+        sample: TrainingSample,
+    ) -> Tuple[Dict, Dict]:
+        return self.dynamic.sample_xy(instance, sample)
+
+    def sample_predict(
+        self,
+        instance: "Instance",
+        sample: TrainingSample,
+    ) -> List[str]:
+        return self.dynamic.sample_predict(instance, sample)
+
+    def fit(self, training_instances: List["Instance"]) -> None:
+        self.dynamic.fit(training_instances)
+
+    def fit_xy(
+        self,
+        x: Dict[Hashable, np.ndarray],
+        y: Dict[Hashable, np.ndarray],
+    ) -> None:
+        self.dynamic.fit_xy(x, y)
+
+    def sample_evaluate(
+        self,
+        instance: "Instance",
+        sample: TrainingSample,
+    ) -> Dict[Hashable, Dict[str, float]]:
+        return self.dynamic.sample_evaluate(instance, sample)
diff --git a/miplearn/components/dynamic_user_cuts.py b/miplearn/components/dynamic_user_cuts.py
new file mode 100644
index 0000000..c792a01
--- /dev/null
+++ b/miplearn/components/dynamic_user_cuts.py
@@ -0,0 +1,125 @@
+# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
+# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
+# Released under the modified BSD license. See COPYING.md for more details.
+
+import logging
+from typing import Any, TYPE_CHECKING, Hashable, Set, Tuple, Dict, List
+
+import numpy as np
+
+from miplearn.classifiers import Classifier
+from miplearn.classifiers.counting import CountingClassifier
+from miplearn.classifiers.threshold import Threshold, MinProbabilityThreshold
+from miplearn.components.component import Component
+from miplearn.components.dynamic_common import DynamicConstraintsComponent
+from miplearn.features import Features, TrainingSample
+from miplearn.types import LearningSolveStats
+
+logger = logging.getLogger(__name__)
+
+if TYPE_CHECKING:
+    from miplearn.solvers.learning import LearningSolver, Instance
+
+
+class UserCutsComponent(Component):
+    def __init__(
+        self,
+        classifier: Classifier = CountingClassifier(),
+        threshold: Threshold = MinProbabilityThreshold([0.50, 0.50]),
+    ) -> None:
+        self.dynamic = DynamicConstraintsComponent(
+            classifier=classifier,
+            threshold=threshold,
+            attr="user_cuts_enforced",
+        )
+        self.enforced: Set[Hashable] = set()
+        self.n_added_in_callback = 0
+
+    def before_solve_mip(
+        self,
+        solver: "LearningSolver",
+        instance: "Instance",
+        model: Any,
+        stats: LearningSolveStats,
+        features: Features,
+        training_data: TrainingSample,
+    ) -> None:
+        assert solver.internal_solver is not None
+        self.enforced.clear()
+        self.n_added_in_callback = 0
+        logger.info("Predicting violated user cuts...")
+        cids = self.dynamic.sample_predict(instance, training_data)
+        logger.info("Enforcing %d user cuts ahead-of-time..." % len(cids))
+        for cid in cids:
+            cobj = instance.build_user_cut(model, cid)
+            solver.internal_solver.add_constraint(cobj)
+        stats["UserCuts: Added ahead-of-time"] = len(cids)
+
+    def user_cut_cb(
+        self,
+        solver: "LearningSolver",
+        instance: "Instance",
+        model: Any,
+    ) -> None:
+        assert solver.internal_solver is not None
+        logger.debug("Finding violated user cuts...")
+        cids = instance.find_violated_user_cuts(model)
+        logger.debug(f"Found {len(cids)} violated user cuts")
+        logger.debug("Building violated user cuts...")
+        for cid in cids:
+            if cid in self.enforced:
+                continue
+            assert isinstance(cid, Hashable)
+            cobj = instance.build_user_cut(model, cid)
+            assert cobj is not None
+            solver.internal_solver.add_cut(cobj)
+            self.enforced.add(cid)
+            self.n_added_in_callback += 1
+        if len(cids) > 0:
+            logger.debug(f"Added {len(cids)} violated user cuts")
+
+    def after_solve_mip(
+        self,
+        solver: "LearningSolver",
+        instance: "Instance",
+        model: Any,
+        stats: LearningSolveStats,
+        features: Features,
+        training_data: TrainingSample,
+    ) -> None:
+        training_data.user_cuts_enforced = set(self.enforced)
+        stats["UserCuts: Added in callback"] = self.n_added_in_callback
+        logger.info(f"{self.n_added_in_callback} user cuts added in callback")
+
+    # Delegate ML methods to self.dynamic
+    # -------------------------------------------------------------------
+    def sample_xy(
+        self,
+        instance: "Instance",
+        sample: TrainingSample,
+    ) -> Tuple[Dict, Dict]:
+        return self.dynamic.sample_xy(instance, sample)
+
+    def sample_predict(
+        self,
+        instance: "Instance",
+        sample: TrainingSample,
+    ) -> List[str]:
+        return self.dynamic.sample_predict(instance, sample)
+
+    def fit(self, training_instances: List["Instance"]) -> None:
+        self.dynamic.fit(training_instances)
+
+    def fit_xy(
+        self,
+        x: Dict[Hashable, np.ndarray],
+        y: Dict[Hashable, np.ndarray],
+    ) -> None:
+        self.dynamic.fit_xy(x, y)
+
+    def sample_evaluate(
+        self,
+        instance: "Instance",
+        sample: TrainingSample,
+    ) -> Dict[Hashable, Dict[str, float]]:
+        return self.dynamic.sample_evaluate(instance, sample)
diff --git a/miplearn/components/lazy_static.py b/miplearn/components/static_lazy.py
similarity index 99%
rename from miplearn/components/lazy_static.py
rename to miplearn/components/static_lazy.py
index c0cf04f..3d688a2 100644
--- a/miplearn/components/lazy_static.py
+++ b/miplearn/components/static_lazy.py
@@ -7,16 +7,15 @@ from typing import Dict, Tuple, List, Hashable, Any, TYPE_CHECKING, Set
 
 import numpy as np
 
-from miplearn import Classifier
+from miplearn.classifiers import Classifier
 from miplearn.classifiers.counting import CountingClassifier
 from miplearn.classifiers.threshold import MinProbabilityThreshold, Threshold
 from miplearn.components.component import Component
-from miplearn.types import LearningSolveStats
 from miplearn.features import TrainingSample, Features
+from miplearn.types import LearningSolveStats
 
 logger = logging.getLogger(__name__)
 
-
 if TYPE_CHECKING:
     from miplearn.solvers.learning import LearningSolver, Instance
 
diff --git a/miplearn/components/steps/drop_redundant.py b/miplearn/components/steps/drop_redundant.py
index ab435d7..e67e17c 100644
--- a/miplearn/components/steps/drop_redundant.py
+++ b/miplearn/components/steps/drop_redundant.py
@@ -12,7 +12,7 @@ from tqdm import tqdm
 from miplearn.classifiers.counting import CountingClassifier
 from miplearn.components import classifier_evaluation_dict
 from miplearn.components.component import Component
-from miplearn.components.lazy_static import LazyConstraint
+from miplearn.components.static_lazy import LazyConstraint
 
 logger = logging.getLogger(__name__)
 
diff --git a/miplearn/components/user_cuts.py b/miplearn/components/user_cuts.py
deleted file mode 100644
index 3790e3d..0000000
--- a/miplearn/components/user_cuts.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
-# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
-# Released under the modified BSD license. See COPYING.md for more details.
-
-from typing import Any, TYPE_CHECKING, Hashable, Set
-
-from miplearn import Component, Instance
-
-import logging
-
-from miplearn.features import Features, TrainingSample
-from miplearn.types import LearningSolveStats
-
-logger = logging.getLogger(__name__)
-
-if TYPE_CHECKING:
-    from miplearn.solvers.learning import LearningSolver
-
-
-class UserCutsComponentNG(Component):
-    def __init__(self) -> None:
-        self.enforced: Set[Hashable] = set()
-
-    def before_solve_mip(
-        self,
-        solver: "LearningSolver",
-        instance: Instance,
-        model: Any,
-        stats: LearningSolveStats,
-        features: Features,
-        training_data: TrainingSample,
-    ) -> None:
-        self.enforced.clear()
-
-    def after_solve_mip(
-        self,
-        solver: "LearningSolver",
-        instance: Instance,
-        model: Any,
-        stats: LearningSolveStats,
-        features: Features,
-        training_data: TrainingSample,
-    ) -> None:
-        training_data.user_cuts_enforced = set(self.enforced)
-
-    def user_cut_cb(
-        self,
-        solver: "LearningSolver",
-        instance: Instance,
-        model: Any,
-    ) -> None:
-        assert solver.internal_solver is not None
-        logger.debug("Finding violated user cuts...")
-        cids = instance.find_violated_user_cuts(model)
-        logger.debug(f"Found {len(cids)} violated user cuts")
-        logger.debug("Building violated user cuts...")
-        for cid in cids:
-            assert isinstance(cid, Hashable)
-            cobj = instance.build_user_cut(model, cid)
-            assert cobj is not None
-            solver.internal_solver.add_cut(cobj)
-            self.enforced.add(cid)
-        if len(cids) > 0:
-            logger.info(f"Added {len(cids)} violated user cuts")
diff --git a/miplearn/features.py b/miplearn/features.py
index f78ddb3..274b47e 100644
--- a/miplearn/features.py
+++ b/miplearn/features.py
@@ -10,7 +10,8 @@ from typing import TYPE_CHECKING, Dict, Optional, Set, List, Hashable
 from miplearn.types import VarIndex, Solution
 
 if TYPE_CHECKING:
-    from miplearn import InternalSolver, Instance
+    from miplearn.solvers.internal import InternalSolver
+    from miplearn.instance import Instance
 
 
 @dataclass
diff --git a/miplearn/solvers/learning.py b/miplearn/solvers/learning.py
index a005abc..80b8f7f 100644
--- a/miplearn/solvers/learning.py
+++ b/miplearn/solvers/learning.py
@@ -9,8 +9,8 @@ from typing import Optional, List, Any, cast, Callable, Dict
 from p_tqdm import p_map
 
 from miplearn.components.component import Component
-from miplearn.components.cuts import UserCutsComponent
-from miplearn.components.lazy_dynamic import DynamicLazyConstraintsComponent
+from miplearn.components.dynamic_lazy import DynamicLazyConstraintsComponent
+from miplearn.components.dynamic_user_cuts import UserCutsComponent
 from miplearn.components.objective import ObjectiveValueComponent
 from miplearn.components.primal import PrimalSolutionComponent
 from miplearn.features import FeaturesExtractor, TrainingSample
diff --git a/miplearn/types.py b/miplearn/types.py
index 688e520..6bbb789 100644
--- a/miplearn/types.py
+++ b/miplearn/types.py
@@ -59,6 +59,8 @@ LearningSolveStats = TypedDict(
         "LazyStatic: Kept": int,
         "LazyStatic: Restored": int,
         "LazyStatic: Iterations": int,
+        "UserCuts: Added ahead-of-time": int,
+        "UserCuts: Added in callback": int,
     },
     total=False,
 )
diff --git a/tests/classifiers/test_adaptive.py b/tests/classifiers/test_adaptive.py
index f362507..1074ba7 100644
--- a/tests/classifiers/test_adaptive.py
+++ b/tests/classifiers/test_adaptive.py
@@ -5,8 +5,8 @@
 from numpy.linalg import norm
 from sklearn.svm import SVC
 
-from miplearn import AdaptiveClassifier, ScikitLearnClassifier
-from miplearn.classifiers.adaptive import CandidateClassifierSpecs
+from miplearn.classifiers.adaptive import CandidateClassifierSpecs, AdaptiveClassifier
+from miplearn.classifiers.sklearn import ScikitLearnClassifier
 from tests.classifiers import _build_circle_training_data
 
diff --git a/tests/components/steps/test_drop_redundant.py b/tests/components/steps/test_drop_redundant.py
index 2efe548..aaec305 100644
--- a/tests/components/steps/test_drop_redundant.py
+++ b/tests/components/steps/test_drop_redundant.py
@@ -6,10 +6,11 @@ from unittest.mock import Mock, call
 
 import numpy as np
 
-from miplearn import RelaxIntegralityStep, GurobiSolver
 from miplearn.classifiers import Classifier
 from miplearn.components.steps.drop_redundant import DropRedundantInequalitiesStep
+from miplearn.components.steps.relax_integrality import RelaxIntegralityStep
 from miplearn.instance import Instance
+from miplearn.solvers.gurobi import GurobiSolver
 from miplearn.solvers.internal import InternalSolver
 from miplearn.solvers.learning import LearningSolver
 from miplearn.features import TrainingSample, Features
diff --git a/tests/components/test_component.py b/tests/components/test_component.py
index 2d6bcf9..01eb931 100644
--- a/tests/components/test_component.py
+++ b/tests/components/test_component.py
@@ -3,7 +3,8 @@
 # Released under the modified BSD license. See COPYING.md for more details.
 from unittest.mock import Mock
 
-from miplearn import Component, Instance
+from miplearn.components.component import Component
+from miplearn.instance import Instance
 
 
 def test_xy_instance():
diff --git a/tests/components/test_lazy_dynamic.py b/tests/components/test_dynamic_lazy.py
similarity index 97%
rename from tests/components/test_lazy_dynamic.py
rename to tests/components/test_dynamic_lazy.py
index cb2f82c..7144a5f 100644
--- a/tests/components/test_lazy_dynamic.py
+++ b/tests/components/test_dynamic_lazy.py
@@ -8,16 +8,16 @@ import numpy as np
 import pytest
 from numpy.testing import assert_array_equal
 
-from miplearn import Instance
 from miplearn.classifiers import Classifier
 from miplearn.classifiers.threshold import MinProbabilityThreshold
 from miplearn.components import classifier_evaluation_dict
-from miplearn.components.lazy_dynamic import DynamicLazyConstraintsComponent
+from miplearn.components.dynamic_lazy import DynamicLazyConstraintsComponent
 from miplearn.features import (
     TrainingSample,
     Features,
     InstanceFeatures,
 )
+from miplearn.instance import Instance
 
 
 E = 0.1
@@ -144,7 +144,7 @@ def test_fit(training_instances: List[Instance]) -> None:
 
 def test_sample_predict_evaluate(training_instances: List[Instance]) -> None:
     comp = DynamicLazyConstraintsComponent()
-    comp.known_cids = ["c1", "c2", "c3", "c4"]
+    comp.known_cids.extend(["c1", "c2", "c3", "c4"])
     comp.thresholds["type-a"] = MinProbabilityThreshold([0.5, 0.5])
     comp.thresholds["type-b"] = MinProbabilityThreshold([0.5, 0.5])
     comp.classifiers["type-a"] = Mock(spec=Classifier)
diff --git a/tests/components/test_user_cuts.py b/tests/components/test_dynamic_user_cuts.py
similarity index 66%
rename from tests/components/test_user_cuts.py
rename to tests/components/test_dynamic_user_cuts.py
index cbd6084..0f26cc9 100644
--- a/tests/components/test_user_cuts.py
+++ b/tests/components/test_dynamic_user_cuts.py
@@ -11,8 +11,10 @@ import pytest
 from gurobipy import GRB
 from networkx import Graph
 
-from miplearn import Instance, LearningSolver, GurobiSolver
-from miplearn.components.user_cuts import UserCutsComponentNG
+from miplearn.components.dynamic_user_cuts import UserCutsComponent
+from miplearn.instance import Instance
+from miplearn.solvers.gurobi import GurobiSolver
+from miplearn.solvers.learning import LearningSolver
 
 logger = logging.getLogger(__name__)
 
@@ -20,12 +22,11 @@ logger = logging.getLogger(__name__)
 class GurobiStableSetProblem(Instance):
     def __init__(self, graph: Graph) -> None:
         super().__init__()
-        self.graph = graph
-        self.nodes = list(self.graph.nodes)
+        self.graph: Graph = graph
 
     def to_model(self) -> Any:
         model = gp.Model()
-        x = [model.addVar(vtype=GRB.BINARY) for _ in range(len(self.nodes))]
+        x = [model.addVar(vtype=GRB.BINARY) for _ in range(len(self.graph.nodes))]
         model.setObjective(gp.quicksum(x), GRB.MAXIMIZE)
         for e in list(self.graph.edges):
             model.addConstr(x[e[0]] + x[e[1]] <= 1)
@@ -39,16 +40,14 @@ class GurobiStableSetProblem(Instance):
         vals = model.cbGetNodeRel(model.getVars())
         violations = []
         for clique in nx.find_cliques(self.graph):
-            lhs = sum(vals[i] for i in clique)
-            if lhs > 1:
+            if sum(vals[i] for i in clique) > 1:
                 violations += [frozenset(clique)]
         return violations
 
-    def build_user_cut(self, model: Any, violation: Hashable) -> Any:
-        assert isinstance(violation, FrozenSet)
+    def build_user_cut(self, model: Any, cid: Hashable) -> Any:
+        assert isinstance(cid, FrozenSet)
         x = model.getVars()
-        cut = gp.quicksum([x[i] for i in violation]) <= 1
-        return cut
+        return gp.quicksum([x[i] for i in cid]) <= 1
 
 
 @pytest.fixture
@@ -62,7 +61,7 @@ def solver() -> LearningSolver:
     return LearningSolver(
         solver=lambda: GurobiSolver(),
        components=[
-            UserCutsComponentNG(),
+            UserCutsComponent(),
        ],
    )
 
@@ -71,7 +70,17 @@ def test_usage(
     stab_instance: Instance,
     solver: LearningSolver,
 ) -> None:
-    solver.solve(stab_instance)
+    stats_before = solver.solve(stab_instance)
     sample = stab_instance.training_data[0]
     assert sample.user_cuts_enforced is not None
     assert len(sample.user_cuts_enforced) > 0
+    print(stats_before)
+    assert stats_before["UserCuts: Added ahead-of-time"] == 0
+    assert stats_before["UserCuts: Added in callback"] > 0
+
+    solver.fit([stab_instance])
+    stats_after = solver.solve(stab_instance)
+    assert (
+        stats_after["UserCuts: Added ahead-of-time"]
+        == stats_before["UserCuts: Added in callback"]
+    )
diff --git a/tests/components/test_objective.py b/tests/components/test_objective.py
index 24ee154..e3dbcd3 100644
--- a/tests/components/test_objective.py
+++ b/tests/components/test_objective.py
@@ -7,9 +7,12 @@ from unittest.mock import Mock
 import pytest
 from numpy.testing import assert_array_equal
 
-from miplearn import GurobiPyomoSolver, LearningSolver, Regressor, Instance
+from miplearn.classifiers import Regressor
 from miplearn.components.objective import ObjectiveValueComponent
 from miplearn.features import TrainingSample, InstanceFeatures, Features
+from miplearn.instance import Instance
+from miplearn.solvers.learning import LearningSolver
+from miplearn.solvers.pyomo.gurobi import GurobiPyomoSolver
 from tests.fixtures.knapsack import get_knapsack_instance
 import numpy as np
 
diff --git a/tests/components/test_primal.py b/tests/components/test_primal.py
index 1279135..7ab4766 100644
--- a/tests/components/test_primal.py
+++ b/tests/components/test_primal.py
@@ -8,12 +8,14 @@ import numpy as np
 from numpy.testing import assert_array_equal
 from scipy.stats import randint
 
-from miplearn import Classifier, LearningSolver, Instance
+from miplearn.classifiers import Classifier
 from miplearn.classifiers.threshold import Threshold
 from miplearn.components import classifier_evaluation_dict
 from miplearn.components.primal import PrimalSolutionComponent
+from miplearn.instance import Instance
 from miplearn.problems.tsp import TravelingSalesmanGenerator
 from miplearn.features import TrainingSample, VariableFeatures, Features
+from miplearn.solvers.learning import LearningSolver
 
 
 def test_xy() -> None:
diff --git a/tests/components/test_lazy_static.py b/tests/components/test_static_lazy.py
similarity index 97%
rename from tests/components/test_lazy_static.py
rename to tests/components/test_static_lazy.py
index 4d62b5c..bf1f442 100644
--- a/tests/components/test_lazy_static.py
+++ b/tests/components/test_static_lazy.py
@@ -8,10 +8,12 @@ import numpy as np
 import pytest
 from numpy.testing import assert_array_equal
 
-from miplearn import LearningSolver, InternalSolver, Instance
 from miplearn.classifiers import Classifier
 from miplearn.classifiers.threshold import Threshold, MinProbabilityThreshold
-from miplearn.components.lazy_static import StaticLazyConstraintsComponent
+from miplearn.components.static_lazy import StaticLazyConstraintsComponent
+from miplearn.instance import Instance
+from miplearn.solvers.internal import InternalSolver
+from miplearn.solvers.learning import LearningSolver
 from miplearn.types import (
     LearningSolveStats,
 )
diff --git a/tests/fixtures/knapsack.py b/tests/fixtures/knapsack.py
index 14e25ce..8d649ff 100644
--- a/tests/fixtures/knapsack.py
+++ b/tests/fixtures/knapsack.py
@@ -1,10 +1,12 @@
 # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
 # Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
 # Released under the modified BSD license. See COPYING.md for more details.
-
-from miplearn import BasePyomoSolver, GurobiSolver, InternalSolver, Instance
+from miplearn.instance import Instance
 from miplearn.problems.knapsack import KnapsackInstance, GurobiKnapsackInstance
+from miplearn.solvers.gurobi import GurobiSolver
+from miplearn.solvers.internal import InternalSolver
 from miplearn.solvers.learning import LearningSolver
+from miplearn.solvers.pyomo.base import BasePyomoSolver
 from tests.solvers import _is_subclass_or_instance
 
diff --git a/tests/fixtures/redundant.py b/tests/fixtures/redundant.py
index a9e9ae6..3dd5107 100644
--- a/tests/fixtures/redundant.py
+++ b/tests/fixtures/redundant.py
@@ -3,9 +3,11 @@
 # Released under the modified BSD license. See COPYING.md for more details.
 from typing import Any
 
-from miplearn import Instance, BasePyomoSolver, GurobiSolver
 import pyomo.environ as pe
 
+from miplearn.instance import Instance
+from miplearn.solvers.gurobi import GurobiSolver
+from miplearn.solvers.pyomo.base import BasePyomoSolver
 from tests.solvers import _is_subclass_or_instance
 
diff --git a/tests/problems/test_tsp.py b/tests/problems/test_tsp.py
index 908f560..88bed41 100644
--- a/tests/problems/test_tsp.py
+++ b/tests/problems/test_tsp.py
@@ -67,7 +67,6 @@ def test_subtour():
     solver = LearningSolver()
     solver.solve(instance)
     assert len(instance.training_data[0].lazy_enforced) > 0
-    assert hasattr(instance, "found_violated_user_cuts")
     x = instance.training_data[0].solution["x"]
     assert x[0, 1] == 1.0
     assert x[0, 4] == 1.0
diff --git a/tests/test_benchmark.py b/tests/test_benchmark.py
index 0baca38..b33d695 100644
--- a/tests/test_benchmark.py
+++ b/tests/test_benchmark.py
@@ -30,7 +30,7 @@ def test_benchmark():
     benchmark = BenchmarkRunner(test_solvers)
     benchmark.fit(train_instances)
     benchmark.parallel_solve(test_instances, n_jobs=n_jobs, n_trials=2)
-    assert benchmark.results.values.shape == (12, 18)
+    assert benchmark.results.values.shape == (12, 20)
     benchmark.write_csv("/tmp/benchmark.csv")
     assert os.path.isfile("/tmp/benchmark.csv")
 
diff --git a/tests/test_features.py b/tests/test_features.py
index b4e964f..43dbae6 100644
--- a/tests/test_features.py
+++ b/tests/test_features.py
@@ -2,13 +2,13 @@
 # Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
 # Released under the modified BSD license. See COPYING.md for more details.
 
-from miplearn import GurobiSolver
 from miplearn.features import (
     FeaturesExtractor,
     InstanceFeatures,
     VariableFeatures,
     ConstraintFeatures,
 )
+from miplearn.solvers.gurobi import GurobiSolver
 from tests.fixtures.knapsack import get_knapsack_instance
 
diff --git a/tests/test_instance.py b/tests/test_instance.py
index 0ee3b9c..c76b0ad 100644
--- a/tests/test_instance.py
+++ b/tests/test_instance.py
@@ -3,8 +3,8 @@
 # Released under the modified BSD license. See COPYING.md for more details.
 import tempfile
 
-from miplearn import GurobiSolver
 from miplearn.instance import write_pickle_gz, PickleGzInstance
+from miplearn.solvers.gurobi import GurobiSolver
 from tests.fixtures.knapsack import get_knapsack_instance