parent
9f2d7439dc
commit
9e7eed1dbd
@ -1,107 +0,0 @@
|
|||||||
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
|
|
||||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
|
||||||
# Released under the modified BSD license. See COPYING.md for more details.
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import sys
|
|
||||||
from typing import Any, Dict
|
|
||||||
|
|
||||||
import numpy as np
|
|
||||||
from tqdm.auto import tqdm
|
|
||||||
|
|
||||||
from miplearn.classifiers import Classifier
|
|
||||||
from miplearn.classifiers.counting import CountingClassifier
|
|
||||||
from miplearn.components import classifier_evaluation_dict
|
|
||||||
from miplearn.components.component import Component
|
|
||||||
from miplearn.extractors import InstanceFeaturesExtractor
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class UserCutsComponent(Component):
    """
    A component that predicts which user cuts to enforce.

    One binary classifier is trained per distinct user cut observed during
    training; at solve time, cuts whose predicted violation probability
    exceeds ``threshold`` are added to the model ahead of the MIP solve.
    """

    def __init__(
        self,
        classifier: Classifier = CountingClassifier(),
        threshold: float = 0.05,
    ):
        assert isinstance(classifier, Classifier)
        self.threshold: float = threshold
        # Prototype classifier; cloned once per distinct user cut in fit().
        self.classifier_prototype: Classifier = classifier
        self.classifiers: Dict[Any, Classifier] = {}

    def before_solve_mip(
        self,
        solver,
        instance,
        model,
        stats,
        features,
        training_data,
    ):
        """Predict likely-violated user cuts and add them to the model."""
        instance.found_violated_user_cuts = []
        logger.info("Predicting violated user cuts...")
        violations = self.predict(instance)
        logger.info("Enforcing %d user cuts..." % len(violations))
        for v in violations:
            cut = instance.build_user_cut(model, v)
            solver.internal_solver.add_constraint(cut)

    def fit(self, training_instances):
        """Fit one classifier per user cut seen in the training instances."""
        logger.debug("Fitting...")
        features = InstanceFeaturesExtractor().extract(training_instances)

        self.classifiers = {}
        # Maps each user cut to the indices of the instances where it was
        # found violated; used to build the binary labels below.
        violation_to_instance_idx = {}
        for (idx, instance) in enumerate(training_instances):
            if not hasattr(instance, "found_violated_user_cuts"):
                # Instance was never solved with this component attached.
                continue
            for v in instance.found_violated_user_cuts:
                if v not in self.classifiers:
                    self.classifiers[v] = self.classifier_prototype.clone()
                    violation_to_instance_idx[v] = []
                violation_to_instance_idx[v].append(idx)

        for (v, classifier) in tqdm(
            self.classifiers.items(),
            desc="Fit (user cuts)",
            disable=not sys.stdout.isatty(),
        ):
            logger.debug("Training: %s" % (str(v)))
            # Label is 1.0 exactly for the instances where this cut was
            # found violated, 0.0 elsewhere.
            label = np.zeros(len(training_instances))
            label[violation_to_instance_idx[v]] = 1.0
            classifier.fit(features, label)

    def predict(self, instance):
        """Return the list of user cuts predicted to be violated."""
        violations = []
        features = InstanceFeaturesExtractor().extract([instance])
        for (v, classifier) in self.classifiers.items():
            proba = classifier.predict_proba(features)
            # proba[0][1] is the predicted probability of the positive
            # (violated) class for this single instance.
            if proba[0][1] > self.threshold:
                violations.append(v)
        return violations

    def evaluate(self, instances):
        """Return per-instance classifier metrics (tp/tn/fp/fn-derived)."""
        results = {}
        all_violations = set()
        for instance in instances:
            all_violations |= set(instance.found_violated_user_cuts)
        for idx in tqdm(
            range(len(instances)),
            # Bug fix: label previously said "Evaluate (lazy)" — copy-pasted
            # from the lazy-constraints component.
            desc="Evaluate (user cuts)",
            disable=not sys.stdout.isatty(),
        ):
            instance = instances[idx]
            condition_positive = set(instance.found_violated_user_cuts)
            condition_negative = all_violations - condition_positive
            # Restrict predictions to cuts seen during training, so the
            # confusion matrix is computed over a common universe.
            pred_positive = set(self.predict(instance)) & all_violations
            pred_negative = all_violations - pred_positive
            tp = len(pred_positive & condition_positive)
            tn = len(pred_negative & condition_negative)
            fp = len(pred_positive & condition_negative)
            fn = len(pred_negative & condition_positive)
            results[idx] = classifier_evaluation_dict(tp, tn, fp, fn)
        return results
|
|
@ -0,0 +1,106 @@
|
|||||||
|
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
|
||||||
|
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||||
|
# Released under the modified BSD license. See COPYING.md for more details.
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from typing import Dict, List, TYPE_CHECKING, Hashable, Tuple
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
|
||||||
|
from miplearn.classifiers import Classifier
|
||||||
|
from miplearn.classifiers.counting import CountingClassifier
|
||||||
|
from miplearn.classifiers.threshold import MinProbabilityThreshold, Threshold
|
||||||
|
from miplearn.components.component import Component
|
||||||
|
from miplearn.components.dynamic_common import DynamicConstraintsComponent
|
||||||
|
from miplearn.features import TrainingSample
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from miplearn.solvers.learning import Instance
|
||||||
|
|
||||||
|
|
||||||
|
class DynamicLazyConstraintsComponent(Component):
    """
    A component that predicts which lazy constraints to enforce.

    All machine-learning logic is delegated to a shared
    DynamicConstraintsComponent configured to store enforced constraint
    ids under the ``lazy_enforced`` attribute of the training sample.
    """

    def __init__(
        self,
        classifier: Classifier = CountingClassifier(),
        threshold: Threshold = MinProbabilityThreshold([0, 0.05]),
    ):
        self.dynamic: DynamicConstraintsComponent = DynamicConstraintsComponent(
            classifier=classifier,
            threshold=threshold,
            attr="lazy_enforced",
        )
        # Aliases into self.dynamic: these bind the same objects, so they
        # stay in sync with the delegate automatically.
        self.classifiers = self.dynamic.classifiers
        self.thresholds = self.dynamic.thresholds
        self.known_cids = self.dynamic.known_cids

    @staticmethod
    def enforce(cids, instance, model, solver):
        # Build each lazy constraint from its id and add it to the
        # internal solver as a regular constraint.
        for cid in cids:
            cobj = instance.build_lazy_constraint(model, cid)
            solver.internal_solver.add_constraint(cobj)

    def before_solve_mip(
        self,
        solver,
        instance,
        model,
        stats,
        features,
        training_data,
    ):
        # Reset the record of enforced constraints for this solve, then
        # enforce the ML-predicted violations ahead of time.
        training_data.lazy_enforced = set()
        logger.info("Predicting violated lazy constraints...")
        cids = self.dynamic.sample_predict(instance, training_data)
        logger.info("Enforcing %d lazy constraints..." % len(cids))
        self.enforce(cids, instance, model, solver)

    def iteration_cb(self, solver, instance, model):
        # Called between solver iterations. Returns True if new violations
        # were found and enforced (solver should re-solve), False otherwise.
        logger.debug("Finding violated lazy constraints...")
        cids = instance.find_violated_lazy_constraints(model)
        if len(cids) == 0:
            logger.debug("No violations found")
            return False
        else:
            # Record the newly found violations in the current (latest)
            # training sample before enforcing them.
            instance.training_data[-1].lazy_enforced |= set(cids)
            logger.debug(" %d violations found" % len(cids))
            self.enforce(cids, instance, model, solver)
            return True

    # Delegate ML methods to self.dynamic
    # -------------------------------------------------------------------
    def sample_xy(
        self,
        instance: "Instance",
        sample: TrainingSample,
    ) -> Tuple[Dict, Dict]:
        """Build (x, y) training matrices for a single sample."""
        return self.dynamic.sample_xy(instance, sample)

    def sample_predict(
        self,
        instance: "Instance",
        sample: TrainingSample,
    ) -> List[str]:
        """Predict which constraint ids should be enforced for a sample."""
        return self.dynamic.sample_predict(instance, sample)

    def fit(self, training_instances: List["Instance"]) -> None:
        """Fit the underlying classifiers on the given instances."""
        self.dynamic.fit(training_instances)

    def fit_xy(
        self,
        x: Dict[Hashable, np.ndarray],
        y: Dict[Hashable, np.ndarray],
    ) -> None:
        """Fit the underlying classifiers on pre-built (x, y) matrices."""
        self.dynamic.fit_xy(x, y)

    def sample_evaluate(
        self,
        instance: "Instance",
        sample: TrainingSample,
    ) -> Dict[Hashable, Dict[str, float]]:
        """Evaluate prediction quality on a single training sample."""
        return self.dynamic.sample_evaluate(instance, sample)
|
@ -0,0 +1,125 @@
|
|||||||
|
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
|
||||||
|
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
||||||
|
# Released under the modified BSD license. See COPYING.md for more details.
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from typing import Any, TYPE_CHECKING, Hashable, Set, Tuple, Dict, List
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
|
||||||
|
from miplearn.classifiers import Classifier
|
||||||
|
from miplearn.classifiers.counting import CountingClassifier
|
||||||
|
from miplearn.classifiers.threshold import Threshold, MinProbabilityThreshold
|
||||||
|
from miplearn.components.component import Component
|
||||||
|
from miplearn.components.dynamic_common import DynamicConstraintsComponent
|
||||||
|
from miplearn.features import Features, TrainingSample
|
||||||
|
from miplearn.types import LearningSolveStats
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from miplearn.solvers.learning import LearningSolver, Instance
|
||||||
|
|
||||||
|
|
||||||
|
class UserCutsComponent(Component):
    """
    A component that predicts which user cuts to enforce.

    ML logic is delegated to a shared DynamicConstraintsComponent storing
    enforced cut ids under ``user_cuts_enforced``. Predicted cuts are added
    ahead-of-time in `before_solve_mip`; additional violated cuts found
    during the solve are added through `user_cut_cb`.
    """

    def __init__(
        self,
        classifier: Classifier = CountingClassifier(),
        threshold: Threshold = MinProbabilityThreshold([0.50, 0.50]),
    ) -> None:
        self.dynamic = DynamicConstraintsComponent(
            classifier=classifier,
            threshold=threshold,
            attr="user_cuts_enforced",
        )
        # Cut ids added via the callback during the current solve.
        self.enforced: Set[Hashable] = set()
        self.n_added_in_callback = 0

    def before_solve_mip(
        self,
        solver: "LearningSolver",
        instance: "Instance",
        model: Any,
        stats: LearningSolveStats,
        features: Features,
        training_data: TrainingSample,
    ) -> None:
        """Reset per-solve state and add ML-predicted user cuts up front."""
        assert solver.internal_solver is not None
        self.enforced.clear()
        self.n_added_in_callback = 0
        logger.info("Predicting violated user cuts...")
        cids = self.dynamic.sample_predict(instance, training_data)
        logger.info("Enforcing %d user cuts ahead-of-time..." % len(cids))
        for cid in cids:
            cobj = instance.build_user_cut(model, cid)
            # Ahead-of-time cuts are added as ordinary constraints.
            solver.internal_solver.add_constraint(cobj)
        stats["UserCuts: Added ahead-of-time"] = len(cids)

    def user_cut_cb(
        self,
        solver: "LearningSolver",
        instance: "Instance",
        model: Any,
    ) -> None:
        """Callback: find violated user cuts and add the ones not yet enforced."""
        assert solver.internal_solver is not None
        logger.debug("Finding violated user cuts...")
        cids = instance.find_violated_user_cuts(model)
        logger.debug(f"Found {len(cids)} violated user cuts")
        logger.debug("Building violated user cuts...")
        n_added = 0
        for cid in cids:
            if cid in self.enforced:
                # Already added in a previous callback invocation; skip.
                continue
            assert isinstance(cid, Hashable)
            cobj = instance.build_user_cut(model, cid)
            assert cobj is not None
            solver.internal_solver.add_cut(cobj)
            self.enforced.add(cid)
            self.n_added_in_callback += 1
            n_added += 1
        if n_added > 0:
            # Bug fix: previously logged len(cids), which over-reported the
            # count by including cuts skipped because they were already
            # enforced; log only what was actually added this invocation.
            logger.debug(f"Added {n_added} violated user cuts")

    def after_solve_mip(
        self,
        solver: "LearningSolver",
        instance: "Instance",
        model: Any,
        stats: LearningSolveStats,
        features: Features,
        training_data: TrainingSample,
    ) -> None:
        """Record enforced cuts in the training sample and solve stats."""
        # Copy, so later mutations of self.enforced don't leak into the sample.
        training_data.user_cuts_enforced = set(self.enforced)
        stats["UserCuts: Added in callback"] = self.n_added_in_callback
        logger.info(f"{self.n_added_in_callback} user cuts added in callback")

    # Delegate ML methods to self.dynamic
    # -------------------------------------------------------------------
    def sample_xy(
        self,
        instance: "Instance",
        sample: TrainingSample,
    ) -> Tuple[Dict, Dict]:
        """Build (x, y) training matrices for a single sample."""
        return self.dynamic.sample_xy(instance, sample)

    def sample_predict(
        self,
        instance: "Instance",
        sample: TrainingSample,
    ) -> List[str]:
        """Predict which cut ids should be enforced for a sample."""
        return self.dynamic.sample_predict(instance, sample)

    def fit(self, training_instances: List["Instance"]) -> None:
        """Fit the underlying classifiers on the given instances."""
        self.dynamic.fit(training_instances)

    def fit_xy(
        self,
        x: Dict[Hashable, np.ndarray],
        y: Dict[Hashable, np.ndarray],
    ) -> None:
        """Fit the underlying classifiers on pre-built (x, y) matrices."""
        self.dynamic.fit_xy(x, y)

    def sample_evaluate(
        self,
        instance: "Instance",
        sample: TrainingSample,
    ) -> Dict[Hashable, Dict[str, float]]:
        """Evaluate prediction quality on a single training sample."""
        return self.dynamic.sample_evaluate(instance, sample)
|
@ -1,64 +0,0 @@
|
|||||||
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
|
|
||||||
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
|
|
||||||
# Released under the modified BSD license. See COPYING.md for more details.
|
|
||||||
|
|
||||||
from typing import Any, TYPE_CHECKING, Hashable, Set
|
|
||||||
|
|
||||||
from miplearn import Component, Instance
|
|
||||||
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from miplearn.features import Features, TrainingSample
|
|
||||||
from miplearn.types import LearningSolveStats
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from miplearn.solvers.learning import LearningSolver
|
|
||||||
|
|
||||||
|
|
||||||
class UserCutsComponentNG(Component):
    """
    Minimal user-cuts component: performs no ML prediction, only collects
    the user cuts found violated during the solve and records them in the
    training sample.
    """

    def __init__(self) -> None:
        # Cut ids enforced so far in the current solve.
        self.enforced: Set[Hashable] = set()

    def before_solve_mip(
        self,
        solver: "LearningSolver",
        instance: Instance,
        model: Any,
        stats: LearningSolveStats,
        features: Features,
        training_data: TrainingSample,
    ) -> None:
        # Reset per-solve state; no cuts are added ahead-of-time.
        self.enforced.clear()

    def after_solve_mip(
        self,
        solver: "LearningSolver",
        instance: Instance,
        model: Any,
        stats: LearningSolveStats,
        features: Features,
        training_data: TrainingSample,
    ) -> None:
        # Copy, so later mutations of self.enforced don't leak into the sample.
        training_data.user_cuts_enforced = set(self.enforced)

    def user_cut_cb(
        self,
        solver: "LearningSolver",
        instance: Instance,
        model: Any,
    ) -> None:
        """Callback: find, build and add all violated user cuts."""
        assert solver.internal_solver is not None
        logger.debug("Finding violated user cuts...")
        cids = instance.find_violated_user_cuts(model)
        logger.debug(f"Found {len(cids)} violated user cuts")
        logger.debug("Building violated user cuts...")
        for cid in cids:
            assert isinstance(cid, Hashable)
            cobj = instance.build_user_cut(model, cid)
            assert cobj is not None
            solver.internal_solver.add_cut(cobj)
            self.enforced.add(cid)
        if len(cids) > 0:
            logger.info(f"Added {len(cids)} violated user cuts")
|
|
Loading…
Reference in new issue