Mirror of https://github.com/ANL-CEEESA/MIPLearn.git (synced 2025-12-06 01:18:52 -06:00)
Commit: MIPLearn v0.3
@@ -1,47 +1,3 @@
 # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
-# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
+# Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
 # Released under the modified BSD license. See COPYING.md for more details.
-from typing import Dict
-
-
-def classifier_evaluation_dict(
-    tp: int,
-    tn: int,
-    fp: int,
-    fn: int,
-) -> Dict[str, float]:
-    p = tp + fn
-    n = fp + tn
-    d: Dict = {
-        "Predicted positive": fp + tp,
-        "Predicted negative": fn + tn,
-        "Condition positive": p,
-        "Condition negative": n,
-        "True positive": tp,
-        "True negative": tn,
-        "False positive": fp,
-        "False negative": fn,
-        "Accuracy": (tp + tn) / (p + n),
-        "F1 score": (2 * tp) / (2 * tp + fp + fn),
-    }
-
-    if p > 0:
-        d["Recall"] = tp / p
-    else:
-        d["Recall"] = 1.0
-
-    if tp + fp > 0:
-        d["Precision"] = tp / (tp + fp)
-    else:
-        d["Precision"] = 1.0
-
-    t = (p + n) / 100.0
-    d["Predicted positive (%)"] = d["Predicted positive"] / t
-    d["Predicted negative (%)"] = d["Predicted negative"] / t
-    d["Condition positive (%)"] = d["Condition positive"] / t
-    d["Condition negative (%)"] = d["Condition negative"] / t
-    d["True positive (%)"] = d["True positive"] / t
-    d["True negative (%)"] = d["True negative"] / t
-    d["False positive (%)"] = d["False positive"] / t
-    d["False negative (%)"] = d["False negative"] / t
-    return d
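As a quick illustration of the metrics computed by the removed function (hypothetical counts, not part of the diff): with tp=8, tn=80, fp=2 and fn=10, we get p=18 and n=82, so:

    metrics = classifier_evaluation_dict(tp=8, tn=80, fp=2, fn=10)
    metrics["Precision"]  # 8 / (8 + 2) = 0.80
    metrics["Recall"]     # 8 / (8 + 10) = 0.44...
    metrics["Accuracy"]   # (8 + 80) / (18 + 82) = 0.88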
@@ -1,269 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

from typing import Any, List, TYPE_CHECKING, Tuple, Dict, Optional

import numpy as np
from tqdm.auto import tqdm
from p_tqdm import p_umap

from miplearn.features.sample import Sample
from miplearn.instance.base import Instance
from miplearn.types import LearningSolveStats, Category

if TYPE_CHECKING:
    from miplearn.solvers.learning import LearningSolver


# noinspection PyMethodMayBeStatic
class Component:
    """
    A Component is an object which adds functionality to a LearningSolver.

    For better code maintainability, LearningSolver simply delegates most of its
    functionality to Components. Each Component is responsible for exactly one ML
    strategy.
    """

    def after_solve_lp(
        self,
        solver: "LearningSolver",
        instance: Instance,
        model: Any,
        stats: LearningSolveStats,
        sample: Sample,
    ) -> None:
        """
        Method called by LearningSolver after the root LP relaxation is solved.
        See before_solve_lp for a description of the parameters.
        """
        return

    def after_solve_mip(
        self,
        solver: "LearningSolver",
        instance: Instance,
        model: Any,
        stats: LearningSolveStats,
        sample: Sample,
    ) -> None:
        """
        Method called by LearningSolver after the MIP is solved.
        See before_solve_lp for a description of the parameters.
        """
        return

    def before_solve_lp(
        self,
        solver: "LearningSolver",
        instance: Instance,
        model: Any,
        stats: LearningSolveStats,
        sample: Sample,
    ) -> None:
        """
        Method called by LearningSolver before the root LP relaxation is solved.

        Parameters
        ----------
        solver: LearningSolver
            The solver calling this method.
        instance: Instance
            The instance being solved.
        model
            The concrete optimization model being solved.
        stats: LearningSolveStats
            A dictionary containing statistics about the solution process, such as
            number of nodes explored and running time. Components are free to add
            their own statistics here. For example, PrimalSolutionComponent adds
            statistics regarding the number of predicted variables. All statistics in
            this dictionary are exported to the benchmark CSV file.
        sample: miplearn.features.Sample
            An object containing data that may be useful for training machine
            learning models and accelerating the solution process. Components are
            free to add their own training data here.
        """
        return

    def before_solve_mip(
        self,
        solver: "LearningSolver",
        instance: Instance,
        model: Any,
        stats: LearningSolveStats,
        sample: Sample,
    ) -> None:
        """
        Method called by LearningSolver before the MIP is solved.
        See before_solve_lp for a description of the parameters.
        """
        return

    def fit_xy(
        self,
        x: Dict[Category, np.ndarray],
        y: Dict[Category, np.ndarray],
    ) -> None:
        """
        Given two dictionaries x and y, mapping the name of the category to matrices
        of features and targets, this function does two things. First, for each
        category, it creates a clone of the prototype regressor/classifier. Second,
        it passes (x[category], y[category]) to the clone's fit method.
        """
        return

    def iteration_cb(
        self,
        solver: "LearningSolver",
        instance: Instance,
        model: Any,
    ) -> bool:
        """
        Method called by LearningSolver at the end of each iteration.

        After solving the MIP, LearningSolver calls `iteration_cb` of each component,
        giving them a chance to modify the problem and re-solve it before the solution
        process ends. For example, the lazy constraint component uses `iteration_cb`
        to check that all lazy constraints are satisfied.

        If `iteration_cb` returns False for all components, the solution process
        ends. If it returns True for any component, the MIP is solved again.

        Parameters
        ----------
        solver: LearningSolver
            The solver calling this method.
        instance: Instance
            The instance being solved.
        model: Any
            The concrete optimization model being solved.
        """
        return False

    def lazy_cb(
        self,
        solver: "LearningSolver",
        instance: Instance,
        model: Any,
    ) -> None:
        return

    def sample_evaluate(
        self,
        instance: Optional[Instance],
        sample: Sample,
    ) -> Dict[str, Dict[str, float]]:
        return {}

    def sample_xy(
        self,
        instance: Optional[Instance],
        sample: Sample,
    ) -> Tuple[Dict, Dict]:
        """
        Returns a pair of x and y dictionaries containing, respectively, the matrices
        of ML features and the labels for the sample. If the training sample does not
        include label information, returns (x, {}).
        """
        pass

    def pre_fit(self, pre: List[Any]) -> None:
        pass

    def user_cut_cb(
        self,
        solver: "LearningSolver",
        instance: Instance,
        model: Any,
    ) -> None:
        return

    def pre_sample_xy(self, instance: Instance, sample: Sample) -> Any:
        pass

    @staticmethod
    def fit_multiple(
        components: List["Component"],
        instances: List[Instance],
        n_jobs: int = 1,
        progress: bool = False,
    ) -> None:

        # Part I: Pre-fit
        def _pre_sample_xy(instance: Instance) -> Dict:
            pre_instance: Dict = {}
            for (cidx, comp) in enumerate(components):
                pre_instance[cidx] = []
            instance.load()
            for sample in instance.get_samples():
                for (cidx, comp) in enumerate(components):
                    pre_instance[cidx].append(comp.pre_sample_xy(instance, sample))
            instance.free()
            return pre_instance

        if n_jobs == 1:
            pre = [_pre_sample_xy(instance) for instance in instances]
        else:
            pre = p_umap(
                _pre_sample_xy,
                instances,
                num_cpus=n_jobs,
                desc="pre-sample-xy",
                disable=not progress,
            )
        pre_combined: Dict = {}
        for (cidx, comp) in enumerate(components):
            pre_combined[cidx] = []
            for p in pre:
                pre_combined[cidx].extend(p[cidx])
        for (cidx, comp) in enumerate(components):
            comp.pre_fit(pre_combined[cidx])

        # Part II: Fit
        def _sample_xy(instance: Instance) -> Tuple[Dict, Dict]:
            x_instance: Dict = {}
            y_instance: Dict = {}
            for (cidx, comp) in enumerate(components):
                x_instance[cidx] = {}
                y_instance[cidx] = {}
            instance.load()
            for sample in instance.get_samples():
                for (cidx, comp) in enumerate(components):
                    x = x_instance[cidx]
                    y = y_instance[cidx]
                    x_sample, y_sample = comp.sample_xy(instance, sample)
                    for cat in x_sample.keys():
                        if cat not in x:
                            x[cat] = []
                            y[cat] = []
                        x[cat] += x_sample[cat]
                        y[cat] += y_sample[cat]
            instance.free()
            return x_instance, y_instance

        if n_jobs == 1:
            xy = [_sample_xy(instance) for instance in instances]
        else:
            xy = p_umap(_sample_xy, instances, desc="sample-xy", disable=not progress)

        for (cidx, comp) in enumerate(
            tqdm(
                components,
                desc="fit",
                disable=not progress,
            )
        ):
            x_comp: Dict = {}
            y_comp: Dict = {}
            for (x, y) in xy:
                for cat in x[cidx].keys():
                    if cat not in x_comp:
                        x_comp[cat] = []
                        y_comp[cat] = []
                    x_comp[cat].extend(x[cidx][cat])
                    y_comp[cat].extend(y[cidx][cat])
            for cat in x_comp.keys():
                x_comp[cat] = np.array(x_comp[cat], dtype=np.float32)
                y_comp[cat] = np.array(y_comp[cat])
            comp.fit_xy(x_comp, y_comp)
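To make the hook structure concrete, here is a minimal hypothetical subclass (class name and stats key invented for illustration); a component only needs to override the hooks it cares about:

    class GapLoggerComponent(Component):
        """Hypothetical component that records the final MIP gap."""

        def after_solve_mip(self, solver, instance, model, stats, sample):
            lb = sample.get_scalar("mip_lower_bound")
            ub = sample.get_scalar("mip_upper_bound")
            if lb is not None and ub is not None and ub != 0:
                stats["GapLogger: Relative gap"] = abs(ub - lb) / abs(ub)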
@@ -1,184 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
import json
import logging
from typing import Dict, List, Tuple, Optional, Any, Set

import numpy as np
from overrides import overrides

from miplearn.features.extractor import FeaturesExtractor
from miplearn.classifiers import Classifier
from miplearn.classifiers.threshold import Threshold
from miplearn.components import classifier_evaluation_dict
from miplearn.components.component import Component
from miplearn.features.sample import Sample
from miplearn.instance.base import Instance
from miplearn.types import ConstraintCategory, ConstraintName

logger = logging.getLogger(__name__)


class DynamicConstraintsComponent(Component):
    """
    Base component used by both DynamicLazyConstraintsComponent and UserCutsComponent.
    """

    def __init__(
        self,
        attr: str,
        classifier: Classifier,
        threshold: Threshold,
    ):
        assert isinstance(classifier, Classifier)
        self.threshold_prototype: Threshold = threshold
        self.classifier_prototype: Classifier = classifier
        self.classifiers: Dict[ConstraintCategory, Classifier] = {}
        self.thresholds: Dict[ConstraintCategory, Threshold] = {}
        self.known_violations: Dict[ConstraintName, Any] = {}
        self.attr = attr

    def sample_xy_with_cids(
        self,
        instance: Optional[Instance],
        sample: Sample,
    ) -> Tuple[
        Dict[ConstraintCategory, List[List[float]]],
        Dict[ConstraintCategory, List[List[bool]]],
        Dict[ConstraintCategory, List[ConstraintName]],
    ]:
        if len(self.known_violations) == 0:
            return {}, {}, {}
        assert instance is not None
        x: Dict[ConstraintCategory, List[List[float]]] = {}
        y: Dict[ConstraintCategory, List[List[bool]]] = {}
        cids: Dict[ConstraintCategory, List[ConstraintName]] = {}
        known_cids = np.array(sorted(list(self.known_violations.keys())), dtype="S")

        enforced_cids = None
        enforced_encoded = sample.get_scalar(self.attr)
        if enforced_encoded is not None:
            enforced = self.decode(enforced_encoded)
            enforced_cids = list(enforced.keys())

        # Get user-provided constraint features
        (
            constr_features,
            constr_categories,
            constr_lazy,
        ) = FeaturesExtractor._extract_user_features_constrs(instance, known_cids)

        # Augment with instance features
        instance_features = sample.get_array("static_instance_features")
        assert instance_features is not None
        constr_features = np.hstack(
            [
                instance_features.reshape(1, -1).repeat(len(known_cids), axis=0),
                constr_features,
            ]
        )

        categories = np.unique(constr_categories)
        for c in categories:
            x[c] = constr_features[constr_categories == c].tolist()
            cids[c] = known_cids[constr_categories == c].tolist()
            if enforced_cids is not None:
                tmp = np.isin(cids[c], enforced_cids).reshape(-1, 1)
                y[c] = np.hstack([~tmp, tmp]).tolist()  # type: ignore

        return x, y, cids

    @overrides
    def sample_xy(
        self,
        instance: Optional[Instance],
        sample: Sample,
    ) -> Tuple[Dict, Dict]:
        x, y, _ = self.sample_xy_with_cids(instance, sample)
        return x, y

    @overrides
    def pre_fit(self, pre: List[Any]) -> None:
        assert pre is not None
        self.known_violations.clear()
        for violations in pre:
            for (vname, vdata) in violations.items():
                self.known_violations[vname] = vdata

    def sample_predict(
        self,
        instance: Instance,
        sample: Sample,
    ) -> List[ConstraintName]:
        pred: List[ConstraintName] = []
        if len(self.known_violations) == 0:
            logger.info("Classifiers not fitted. Skipping.")
            return pred
        x, _, cids = self.sample_xy_with_cids(instance, sample)
        for category in x.keys():
            assert category in self.classifiers
            assert category in self.thresholds
            clf = self.classifiers[category]
            thr = self.thresholds[category]
            nx = np.array(x[category])
            proba = clf.predict_proba(nx)
            t = thr.predict(nx)
            for i in range(proba.shape[0]):
                if proba[i][1] > t[1]:
                    pred += [cids[category][i]]
        return pred

    @overrides
    def pre_sample_xy(self, instance: Instance, sample: Sample) -> Any:
        attr_encoded = sample.get_scalar(self.attr)
        assert attr_encoded is not None
        return self.decode(attr_encoded)

    @overrides
    def fit_xy(
        self,
        x: Dict[ConstraintCategory, np.ndarray],
        y: Dict[ConstraintCategory, np.ndarray],
    ) -> None:
        for category in x.keys():
            self.classifiers[category] = self.classifier_prototype.clone()
            self.thresholds[category] = self.threshold_prototype.clone()
            npx = np.array(x[category])
            npy = np.array(y[category])
            self.classifiers[category].fit(npx, npy)
            self.thresholds[category].fit(self.classifiers[category], npx, npy)

    @overrides
    def sample_evaluate(
        self,
        instance: Instance,
        sample: Sample,
    ) -> Dict[str, float]:
        attr_encoded = sample.get_scalar(self.attr)
        assert attr_encoded is not None
        actual_violations = DynamicConstraintsComponent.decode(attr_encoded)
        actual = set(actual_violations.keys())
        pred = set(self.sample_predict(instance, sample))
        tp, tn, fp, fn = 0, 0, 0, 0
        for cid in self.known_violations.keys():
            if cid in pred:
                if cid in actual:
                    tp += 1
                else:
                    fp += 1
            else:
                if cid in actual:
                    fn += 1
                else:
                    tn += 1
        return classifier_evaluation_dict(tp=tp, tn=tn, fp=fp, fn=fn)

    @staticmethod
    def encode(violations: Dict[ConstraintName, Any]) -> str:
        return json.dumps({k.decode(): v for (k, v) in violations.items()})

    @staticmethod
    def decode(violations_encoded: str) -> Dict[ConstraintName, Any]:
        violations = json.loads(violations_encoded)
        return {k.encode(): v for (k, v) in violations.items()}
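The encode/decode pair above round-trips violation data through JSON, converting byte-string constraint names to str and back. A quick illustration (hypothetical values):

    violations = {b"cut[1,2]": [1, 2]}
    encoded = DynamicConstraintsComponent.encode(violations)
    # encoded == '{"cut[1,2]": [1, 2]}'
    assert DynamicConstraintsComponent.decode(encoded) == violations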
@@ -1,223 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
import json
import logging
from typing import Dict, List, TYPE_CHECKING, Tuple, Any, Optional

import numpy as np
from overrides import overrides
from tqdm.auto import tqdm

from miplearn.classifiers import Classifier
from miplearn.classifiers.counting import CountingClassifier
from miplearn.classifiers.threshold import MinProbabilityThreshold, Threshold
from miplearn.components.component import Component
from miplearn.components.dynamic_common import DynamicConstraintsComponent
from miplearn.features.sample import Sample, Hdf5Sample
from miplearn.instance.base import Instance
from miplearn.types import LearningSolveStats, ConstraintName, ConstraintCategory
from p_tqdm import p_map

logger = logging.getLogger(__name__)

if TYPE_CHECKING:
    from miplearn.solvers.learning import LearningSolver


class DynamicLazyConstraintsComponent(Component):
    """
    A component that predicts which lazy constraints to enforce.
    """

    def __init__(
        self,
        classifier: Classifier = CountingClassifier(),
        threshold: Threshold = MinProbabilityThreshold([0, 0.05]),
    ):
        self.dynamic: DynamicConstraintsComponent = DynamicConstraintsComponent(
            classifier=classifier,
            threshold=threshold,
            attr="mip_constr_lazy",
        )
        self.classifiers = self.dynamic.classifiers
        self.thresholds = self.dynamic.thresholds
        self.known_violations = self.dynamic.known_violations
        self.lazy_enforced: Dict[ConstraintName, Any] = {}
        self.n_iterations: int = 0

    @staticmethod
    def enforce(
        violations: Dict[ConstraintName, Any],
        instance: Instance,
        model: Any,
        solver: "LearningSolver",
    ) -> None:
        assert solver.internal_solver is not None
        for (vname, vdata) in violations.items():
            instance.enforce_lazy_constraint(solver.internal_solver, model, vdata)

    @overrides
    def before_solve_mip(
        self,
        solver: "LearningSolver",
        instance: Instance,
        model: Any,
        stats: LearningSolveStats,
        sample: Sample,
    ) -> None:
        self.lazy_enforced.clear()
        logger.info("Predicting violated (dynamic) lazy constraints...")
        vnames = self.dynamic.sample_predict(instance, sample)
        violations = {c: self.dynamic.known_violations[c] for c in vnames}
        logger.info("Enforcing %d lazy constraints..." % len(vnames))
        self.enforce(violations, instance, model, solver)
        self.n_iterations = 0

    @overrides
    def after_solve_mip(
        self,
        solver: "LearningSolver",
        instance: Instance,
        model: Any,
        stats: LearningSolveStats,
        sample: Sample,
    ) -> None:
        sample.put_scalar("mip_constr_lazy", self.dynamic.encode(self.lazy_enforced))
        stats["LazyDynamic: Added in callback"] = len(self.lazy_enforced)
        stats["LazyDynamic: Iterations"] = self.n_iterations

    @overrides
    def iteration_cb(
        self,
        solver: "LearningSolver",
        instance: Instance,
        model: Any,
    ) -> bool:
        assert solver.internal_solver is not None
        logger.debug("Finding violated lazy constraints...")
        violations = instance.find_violated_lazy_constraints(
            solver.internal_solver,
            model,
        )
        if len(violations) == 0:
            logger.debug("No violations found")
            return False
        else:
            self.n_iterations += 1
            for v in violations:
                self.lazy_enforced[v] = violations[v]
            logger.debug("    %d violations found" % len(violations))
            self.enforce(violations, instance, model, solver)
            return True

    # Delegate ML methods to self.dynamic
    # -------------------------------------------------------------------
    @overrides
    def sample_xy(
        self,
        instance: Optional[Instance],
        sample: Sample,
    ) -> Tuple[Dict, Dict]:
        return self.dynamic.sample_xy(instance, sample)

    @overrides
    def pre_fit(self, pre: List[Any]) -> None:
        self.dynamic.pre_fit(pre)

    def sample_predict(
        self,
        instance: Instance,
        sample: Sample,
    ) -> List[ConstraintName]:
        return self.dynamic.sample_predict(instance, sample)

    @overrides
    def pre_sample_xy(self, instance: Instance, sample: Sample) -> Any:
        return self.dynamic.pre_sample_xy(instance, sample)

    @overrides
    def fit_xy(
        self,
        x: Dict[ConstraintCategory, np.ndarray],
        y: Dict[ConstraintCategory, np.ndarray],
    ) -> None:
        self.dynamic.fit_xy(x, y)

    @overrides
    def sample_evaluate(
        self,
        instance: Instance,
        sample: Sample,
    ) -> Dict[ConstraintCategory, Dict[str, float]]:
        return self.dynamic.sample_evaluate(instance, sample)

    # ------------------------------------------------------------------------------
    # NEW API
    # ------------------------------------------------------------------------------
    @staticmethod
    def extract(filenames, progress=True, known_cids=None):
        enforced_cids, features = [], []
        freeze_known_cids = True
        if known_cids is None:
            known_cids = set()
            freeze_known_cids = False
        for filename in tqdm(
            filenames,
            desc="extract (1/2)",
            disable=not progress,
        ):
            with Hdf5Sample(filename, mode="r") as sample:
                features.append(sample.get_array("lp_var_values"))
                cids = frozenset(
                    DynamicConstraintsComponent.decode(
                        sample.get_scalar("mip_constr_lazy")
                    ).keys()
                )
                enforced_cids.append(cids)
                if not freeze_known_cids:
                    known_cids.update(cids)

        x, y, cat, cdata = [], [], [], {}
        for (j, cid) in enumerate(known_cids):
            cdata[cid] = json.loads(cid.decode())
            for i in range(len(features)):
                cat.append(cid)
                x.append(features[i])
                if cid in enforced_cids[i]:
                    y.append([0, 1])
                else:
                    y.append([1, 0])
        x = np.vstack(x)
        y = np.vstack(y)
        cat = np.array(cat)
        x_dict, y_dict = DynamicLazyConstraintsComponent._split(
            x,
            y,
            cat,
            progress=progress,
        )
        return x_dict, y_dict, cdata

    @staticmethod
    def _split(x, y, cat, progress=False):
        # Sort data by categories
        pi = np.argsort(cat, kind="stable")
        x = x[pi]
        y = y[pi]
        cat = cat[pi]

        # Split categories
        x_dict = {}
        y_dict = {}
        start = 0
        for end in tqdm(
            range(len(cat) + 1),
            desc="extract (2/2)",
            disable=not progress,
        ):
            if (end >= len(cat)) or (cat[start] != cat[end]):
                x_dict[cat[start]] = x[start:end, :]
                y_dict[cat[start]] = y[start:end, :]
                start = end
        return x_dict, y_dict
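_split groups rows by category after a stable sort. A small check of the expected behavior (hypothetical arrays):

    import numpy as np
    x = np.array([[1.0], [2.0], [3.0]])
    y = np.array([[0, 1], [1, 0], [0, 1]])
    cat = np.array([b"b", b"a", b"b"])
    x_dict, y_dict = DynamicLazyConstraintsComponent._split(x, y, cat)
    # x_dict[b"a"] == [[2.0]];  x_dict[b"b"] == [[1.0], [3.0]]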
@@ -1,133 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
import logging
from typing import Any, TYPE_CHECKING, Tuple, Dict, List

import numpy as np
from overrides import overrides

from miplearn.classifiers import Classifier
from miplearn.classifiers.counting import CountingClassifier
from miplearn.classifiers.threshold import Threshold, MinProbabilityThreshold
from miplearn.components.component import Component
from miplearn.components.dynamic_common import DynamicConstraintsComponent
from miplearn.features.sample import Sample
from miplearn.instance.base import Instance
from miplearn.types import LearningSolveStats, ConstraintName, ConstraintCategory

logger = logging.getLogger(__name__)

if TYPE_CHECKING:
    from miplearn.solvers.learning import LearningSolver


class UserCutsComponent(Component):
    def __init__(
        self,
        classifier: Classifier = CountingClassifier(),
        threshold: Threshold = MinProbabilityThreshold([0.50, 0.50]),
    ) -> None:
        self.dynamic = DynamicConstraintsComponent(
            classifier=classifier,
            threshold=threshold,
            attr="mip_user_cuts",
        )
        self.enforced: Dict[ConstraintName, Any] = {}
        self.n_added_in_callback = 0

    @overrides
    def before_solve_mip(
        self,
        solver: "LearningSolver",
        instance: "Instance",
        model: Any,
        stats: LearningSolveStats,
        sample: Sample,
    ) -> None:
        assert solver.internal_solver is not None
        self.enforced.clear()
        self.n_added_in_callback = 0
        logger.info("Predicting violated user cuts...")
        vnames = self.dynamic.sample_predict(instance, sample)
        logger.info("Enforcing %d user cuts ahead-of-time..." % len(vnames))
        for vname in vnames:
            vdata = self.dynamic.known_violations[vname]
            instance.enforce_user_cut(solver.internal_solver, model, vdata)
        stats["UserCuts: Added ahead-of-time"] = len(vnames)

    @overrides
    def user_cut_cb(
        self,
        solver: "LearningSolver",
        instance: "Instance",
        model: Any,
    ) -> None:
        assert solver.internal_solver is not None
        logger.debug("Finding violated user cuts...")
        violations = instance.find_violated_user_cuts(model)
        logger.debug(f"Found {len(violations)} violated user cuts")
        logger.debug("Building violated user cuts...")
        for (vname, vdata) in violations.items():
            if vname in self.enforced:
                continue
            instance.enforce_user_cut(solver.internal_solver, model, vdata)
            self.enforced[vname] = vdata
            self.n_added_in_callback += 1
        if len(violations) > 0:
            logger.debug(f"Added {len(violations)} violated user cuts")

    @overrides
    def after_solve_mip(
        self,
        solver: "LearningSolver",
        instance: "Instance",
        model: Any,
        stats: LearningSolveStats,
        sample: Sample,
    ) -> None:
        sample.put_scalar("mip_user_cuts", self.dynamic.encode(self.enforced))
        stats["UserCuts: Added in callback"] = self.n_added_in_callback
        if self.n_added_in_callback > 0:
            logger.info(f"{self.n_added_in_callback} user cuts added in callback")

    # Delegate ML methods to self.dynamic
    # -------------------------------------------------------------------
    @overrides
    def sample_xy(
        self,
        instance: "Instance",
        sample: Sample,
    ) -> Tuple[Dict, Dict]:
        return self.dynamic.sample_xy(instance, sample)

    @overrides
    def pre_fit(self, pre: List[Any]) -> None:
        self.dynamic.pre_fit(pre)

    def sample_predict(
        self,
        instance: "Instance",
        sample: Sample,
    ) -> List[ConstraintName]:
        return self.dynamic.sample_predict(instance, sample)

    @overrides
    def pre_sample_xy(self, instance: Instance, sample: Sample) -> Any:
        return self.dynamic.pre_sample_xy(instance, sample)

    @overrides
    def fit_xy(
        self,
        x: Dict[ConstraintCategory, np.ndarray],
        y: Dict[ConstraintCategory, np.ndarray],
    ) -> None:
        self.dynamic.fit_xy(x, y)

    @overrides
    def sample_evaluate(
        self,
        instance: "Instance",
        sample: Sample,
    ) -> Dict[ConstraintCategory, Dict[ConstraintName, float]]:
        return self.dynamic.sample_evaluate(instance, sample)
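Both dynamic components plug into the solver in the same way. A hypothetical usage sketch (assuming LearningSolver accepts a components list, as the delegation pattern above suggests):

    solver = LearningSolver(
        components=[
            DynamicLazyConstraintsComponent(),
            UserCutsComponent(),
        ]
    )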
miplearn/components/lazy.py (new file, 43 lines)
@@ -0,0 +1,43 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

import json
from typing import Any, Dict, List

import gurobipy as gp

from ..h5 import H5File


class ExpertLazyComponent:
    def __init__(self) -> None:
        pass

    def fit(self, train_h5: List[str]) -> None:
        pass

    def before_mip(self, test_h5: str, model: gp.Model, stats: Dict[str, Any]) -> None:
        with H5File(test_h5, "r") as h5:
            constr_names = h5.get_array("static_constr_names")
            constr_lazy = h5.get_array("mip_constr_lazy")
            constr_violations = h5.get_scalar("mip_constr_violations")

            assert constr_names is not None
            assert constr_violations is not None

            # Static lazy constraints
            n_static_lazy = 0
            if constr_lazy is not None:
                for (constr_idx, constr_name) in enumerate(constr_names):
                    if constr_lazy[constr_idx]:
                        constr = model.getConstrByName(constr_name.decode())
                        constr.lazy = 3
                        n_static_lazy += 1
            stats.update({"Static lazy constraints": n_static_lazy})

            # Dynamic lazy constraints
            if hasattr(model, "_fix_violations"):
                violations = json.loads(constr_violations)
                model._fix_violations(model, violations, "aot")
                stats.update({"Dynamic lazy constraints": len(violations)})
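For reference, the assignment constr.lazy = 3 sets Gurobi's Lazy constraint attribute (gurobipy attribute access is case-insensitive): values 1-3 mark the constraint as lazy, with 3 the most aggressive setting. An equivalent, more explicit spelling:

    constr.setAttr("Lazy", 3)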
@@ -1,126 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

import logging
from typing import List, Dict, Any, TYPE_CHECKING, Tuple, Optional, cast

import numpy as np
from overrides import overrides
from sklearn.linear_model import LinearRegression

from miplearn.classifiers import Regressor
from miplearn.classifiers.sklearn import ScikitLearnRegressor
from miplearn.components.component import Component
from miplearn.features.sample import Sample
from miplearn.instance.base import Instance
from miplearn.types import LearningSolveStats

if TYPE_CHECKING:
    from miplearn.solvers.learning import LearningSolver

logger = logging.getLogger(__name__)


class ObjectiveValueComponent(Component):
    """
    A Component which predicts the optimal objective value of the problem.
    """

    def __init__(
        self,
        regressor: Regressor = ScikitLearnRegressor(LinearRegression()),
    ) -> None:
        assert isinstance(regressor, Regressor)
        self.regressors: Dict[str, Regressor] = {}
        self.regressor_prototype = regressor

    @overrides
    def before_solve_mip(
        self,
        solver: "LearningSolver",
        instance: Instance,
        model: Any,
        stats: LearningSolveStats,
        sample: Sample,
    ) -> None:
        logger.info("Predicting optimal value...")
        pred = self.sample_predict(sample)
        for (c, v) in pred.items():
            logger.info(f"Predicted {c.lower()}: %.6e" % v)
            stats[f"Objective: Predicted {c.lower()}"] = v  # type: ignore

    @overrides
    def fit_xy(
        self,
        x: Dict[str, np.ndarray],
        y: Dict[str, np.ndarray],
    ) -> None:
        for c in ["Upper bound", "Lower bound"]:
            if c in y:
                self.regressors[c] = self.regressor_prototype.clone()
                self.regressors[c].fit(x[c], y[c])

    def sample_predict(self, sample: Sample) -> Dict[str, float]:
        pred: Dict[str, float] = {}
        x, _ = self.sample_xy(None, sample)
        for c in ["Upper bound", "Lower bound"]:
            if c in self.regressors:
                pred[c] = self.regressors[c].predict(np.array(x[c]))[0, 0]
            else:
                logger.info(f"{c} regressor not fitted. Skipping.")
        return pred

    @overrides
    def sample_xy(
        self,
        _: Optional[Instance],
        sample: Sample,
    ) -> Tuple[Dict[str, List[List[float]]], Dict[str, List[List[float]]]]:
        lp_instance_features_np = sample.get_array("lp_instance_features")
        if lp_instance_features_np is None:
            lp_instance_features_np = sample.get_array("static_instance_features")
        assert lp_instance_features_np is not None
        lp_instance_features = cast(List[float], lp_instance_features_np.tolist())

        # Features
        x: Dict[str, List[List[float]]] = {
            "Upper bound": [lp_instance_features],
            "Lower bound": [lp_instance_features],
        }

        # Labels
        y: Dict[str, List[List[float]]] = {}
        mip_lower_bound = sample.get_scalar("mip_lower_bound")
        mip_upper_bound = sample.get_scalar("mip_upper_bound")
        if mip_lower_bound is not None:
            y["Lower bound"] = [[mip_lower_bound]]
        if mip_upper_bound is not None:
            y["Upper bound"] = [[mip_upper_bound]]

        return x, y

    @overrides
    def sample_evaluate(
        self,
        instance: Instance,
        sample: Sample,
    ) -> Dict[str, Dict[str, float]]:
        def compare(y_pred: float, y_actual: float) -> Dict[str, float]:
            err = np.round(abs(y_pred - y_actual), 8)
            return {
                "Actual value": y_actual,
                "Predicted value": y_pred,
                "Absolute error": err,
                "Relative error": err / y_actual,
            }

        result: Dict[str, Dict[str, float]] = {}
        pred = self.sample_predict(sample)
        actual_ub = sample.get_scalar("mip_upper_bound")
        actual_lb = sample.get_scalar("mip_lower_bound")
        if actual_ub is not None:
            result["Upper bound"] = compare(pred["Upper bound"], actual_ub)
        if actual_lb is not None:
            result["Lower bound"] = compare(pred["Lower bound"], actual_lb)
        return result
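A numerical illustration of compare() above, with hypothetical bounds:

    y_pred, y_actual = 98.5, 100.0
    err = abs(y_pred - y_actual)  # 1.5
    rel = err / y_actual          # 0.015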
@@ -1,341 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

import logging
from typing import Dict, List, Any, TYPE_CHECKING, Tuple, Optional

import numpy as np
from overrides import overrides

from miplearn.classifiers import Classifier
from miplearn.classifiers.adaptive import AdaptiveClassifier
from miplearn.classifiers.threshold import MinPrecisionThreshold, Threshold
from miplearn.components import classifier_evaluation_dict
from miplearn.components.component import Component
from miplearn.features.sample import Sample
from miplearn.instance.base import Instance
from miplearn.types import (
    LearningSolveStats,
    Category,
    Solution,
)
from miplearn.features.sample import Hdf5Sample
from p_tqdm import p_map
from tqdm.auto import tqdm

logger = logging.getLogger(__name__)

if TYPE_CHECKING:
    from miplearn.solvers.learning import LearningSolver


class PrimalSolutionComponent(Component):
    """
    A component that predicts the optimal primal values for the binary decision
    variables.

    In exact mode, predicted primal solutions are provided to the solver as MIP
    starts. In heuristic mode, this component fixes the decision variables to their
    predicted values.
    """

    def __init__(
        self,
        classifier: Classifier = AdaptiveClassifier(),
        mode: str = "exact",
        threshold: Threshold = MinPrecisionThreshold([0.99, 0.99]),
    ) -> None:
        assert isinstance(classifier, Classifier)
        assert isinstance(threshold, Threshold)
        assert mode in ["exact", "heuristic"]
        self.mode = mode
        self.classifiers: Dict[Category, Classifier] = {}
        self.thresholds: Dict[Category, Threshold] = {}
        self.threshold_prototype = threshold
        self.classifier_prototype = classifier

    @overrides
    def before_solve_mip(
        self,
        solver: "LearningSolver",
        instance: Instance,
        model: Any,
        stats: LearningSolveStats,
        sample: Sample,
    ) -> None:
        logger.info("Predicting primal solution...")

        # Do nothing if models are not trained
        if len(self.classifiers) == 0:
            logger.info("Classifiers not fitted. Skipping.")
            return

        # Predict solution and provide it to the solver
        solution = self.sample_predict(sample)
        assert solver.internal_solver is not None
        if self.mode == "heuristic":
            solver.internal_solver.fix(solution)
        else:
            solver.internal_solver.set_warm_start(solution)

        # Update statistics
        stats["Primal: Free"] = 0
        stats["Primal: Zero"] = 0
        stats["Primal: One"] = 0
        for (var_name, value) in solution.items():
            if value is None:
                stats["Primal: Free"] += 1
            else:
                if value < 0.5:
                    stats["Primal: Zero"] += 1
                else:
                    stats["Primal: One"] += 1
        logger.info(
            f"Predicted: free: {stats['Primal: Free']}, "
            f"zero: {stats['Primal: Zero']}, "
            f"one: {stats['Primal: One']}"
        )

    def sample_predict(self, sample: Sample) -> Solution:
        var_names = sample.get_array("static_var_names")
        var_categories = sample.get_array("static_var_categories")
        var_types = sample.get_array("static_var_types")
        assert var_names is not None
        assert var_categories is not None
        assert var_types is not None

        # Compute y_pred
        x, _ = self.sample_xy(None, sample)
        y_pred = {}
        for category in x.keys():
            assert category in self.classifiers, (
                f"Classifier for category {category} has not been trained. "
                f"Please call component.fit before component.predict."
            )
            xc = np.array(x[category])
            proba = self.classifiers[category].predict_proba(xc)
            thr = self.thresholds[category].predict(xc)
            y_pred[category] = np.vstack(
                [
                    proba[:, 0] >= thr[0],
                    proba[:, 1] >= thr[1],
                ]
            ).T

        # Convert y_pred into solution
        solution: Solution = {v: None for v in var_names}
        category_offset: Dict[Category, int] = {cat: 0 for cat in x.keys()}
        for (i, var_name) in enumerate(var_names):
            if var_types[i] != b"B":
                continue
            category = var_categories[i]
            if category not in category_offset:
                continue
            offset = category_offset[category]
            category_offset[category] += 1
            if y_pred[category][offset, 0]:
                solution[var_name] = 0.0
            if y_pred[category][offset, 1]:
                solution[var_name] = 1.0

        return solution

    @overrides
    def sample_xy(
        self,
        _: Optional[Instance],
        sample: Sample,
    ) -> Tuple[Dict[Category, List[List[float]]], Dict[Category, List[List[float]]]]:
        x: Dict = {}
        y: Dict = {}
        instance_features = sample.get_array("static_instance_features")
        mip_var_values = sample.get_array("mip_var_values")
        lp_var_values = sample.get_array("lp_var_values")
        var_features = sample.get_array("lp_var_features")
        var_names = sample.get_array("static_var_names")
        var_types = sample.get_array("static_var_types")
        var_categories = sample.get_array("static_var_categories")
        if var_features is None:
            var_features = sample.get_array("static_var_features")
        assert instance_features is not None
        assert var_features is not None
        assert var_names is not None
        assert var_types is not None
        assert var_categories is not None

        for (i, var_name) in enumerate(var_names):
            # Skip non-binary variables
            if var_types[i] != b"B":
                continue

            # Initialize categories
            category = var_categories[i]
            if len(category) == 0:
                continue
            if category not in x.keys():
                x[category] = []
                y[category] = []

            # Features
            features = list(instance_features)
            features.extend(var_features[i])
            if lp_var_values is not None:
                features.extend(lp_var_values)
            x[category].append(features)

            # Labels
            if mip_var_values is not None:
                opt_value = mip_var_values[i]
                assert opt_value is not None
                y[category].append([opt_value < 0.5, opt_value >= 0.5])
        return x, y

    @overrides
    def sample_evaluate(
        self,
        _: Optional[Instance],
        sample: Sample,
    ) -> Dict[str, Dict[str, float]]:
        mip_var_values = sample.get_array("mip_var_values")
        var_names = sample.get_array("static_var_names")
        assert mip_var_values is not None
        assert var_names is not None

        solution_actual = {
            var_name: mip_var_values[i] for (i, var_name) in enumerate(var_names)
        }
        solution_pred = self.sample_predict(sample)
        vars_all, vars_one, vars_zero = set(), set(), set()
        pred_one_positive, pred_zero_positive = set(), set()
        for (var_name, value_actual) in solution_actual.items():
            vars_all.add(var_name)
            if value_actual > 0.5:
                vars_one.add(var_name)
            else:
                vars_zero.add(var_name)
            value_pred = solution_pred[var_name]
            if value_pred is not None:
                if value_pred > 0.5:
                    pred_one_positive.add(var_name)
                else:
                    pred_zero_positive.add(var_name)
        pred_one_negative = vars_all - pred_one_positive
        pred_zero_negative = vars_all - pred_zero_positive
        return {
            "0": classifier_evaluation_dict(
                tp=len(pred_zero_positive & vars_zero),
                tn=len(pred_zero_negative & vars_one),
                fp=len(pred_zero_positive & vars_one),
                fn=len(pred_zero_negative & vars_zero),
            ),
            "1": classifier_evaluation_dict(
                tp=len(pred_one_positive & vars_one),
                tn=len(pred_one_negative & vars_zero),
                fp=len(pred_one_positive & vars_zero),
                fn=len(pred_one_negative & vars_one),
            ),
        }

    @overrides
    def fit_xy(
        self,
        x: Dict[Category, np.ndarray],
        y: Dict[Category, np.ndarray],
        progress: bool = False,
    ) -> None:
        for category in tqdm(x.keys(), desc="fit", disable=not progress):
            clf = self.classifier_prototype.clone()
            thr = self.threshold_prototype.clone()
            clf.fit(x[category], y[category])
            thr.fit(clf, x[category], y[category])
            self.classifiers[category] = clf
            self.thresholds[category] = thr

    # ------------------------------------------------------------------------------
    # NEW API
    # ------------------------------------------------------------------------------

    def fit(
        self,
        x: Dict[Category, np.ndarray],
        y: Dict[Category, np.ndarray],
        progress: bool = False,
    ) -> None:
        for category in tqdm(x.keys(), desc="fit", disable=not progress):
            clf = self.classifier_prototype.clone()
            thr = self.threshold_prototype.clone()
            clf.fit(x[category], y[category])
            thr.fit(clf, x[category], y[category])
            self.classifiers[category] = clf
            self.thresholds[category] = thr

    def predict(self, x):
        y_pred = {}
        for category in x.keys():
            assert category in self.classifiers, (
                f"Classifier for category {category} has not been trained. "
                f"Please call component.fit before component.predict."
            )
            xc = np.array(x[category])
            proba = self.classifiers[category].predict_proba(xc)
            thr = self.thresholds[category].predict(xc)
            y_pred[category] = np.vstack(
                [
                    proba[:, 0] >= thr[0],
                    proba[:, 1] >= thr[1],
                ]
            ).T
        return y_pred

    @staticmethod
    def extract(
        filenames: List[str],
        progress: bool = False,
    ):
        x, y, cat = [], [], []

        # Read data
        for filename in tqdm(
            filenames,
            desc="extract (1/2)",
            disable=not progress,
        ):
            with Hdf5Sample(filename, mode="r") as sample:
                mip_var_values = sample.get_array("mip_var_values")
                var_features = sample.get_array("lp_var_features")
                var_types = sample.get_array("static_var_types")
                var_categories = sample.get_array("static_var_categories")
                assert var_features is not None
                assert var_types is not None
                assert var_categories is not None
                x.append(var_features)
                y.append([mip_var_values < 0.5, mip_var_values > 0.5])
                cat.extend(var_categories)

        # Convert to numpy arrays
        x = np.vstack(x)
        y = np.hstack(y).T
        cat = np.array(cat)

        # Sort data by categories
        pi = np.argsort(cat, kind="stable")
        x = x[pi]
        y = y[pi]
        cat = cat[pi]

        # Split categories
        x_dict = {}
        y_dict = {}
        start = 0
        for end in tqdm(
            range(len(cat) + 1),
            desc="extract (2/2)",
            disable=not progress,
        ):
            if (end >= len(cat)) or (cat[start] != cat[end]):
                x_dict[cat[start]] = x[start:end, :]
                y_dict[cat[start]] = y[start:end, :]
                start = end

        return x_dict, y_dict
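The threshold logic in sample_predict/predict only fixes a variable when its class probability clears the per-category threshold; otherwise the variable stays free. A small numerical sketch (hypothetical values):

    import numpy as np
    proba = np.array([[0.97, 0.03], [0.40, 0.60]])  # [P(zero), P(one)] per variable
    thr = [0.95, 0.95]
    y_pred = np.vstack([proba[:, 0] >= thr[0], proba[:, 1] >= thr[1]]).T
    # y_pred == [[True, False],    -> first variable fixed at 0.0
    #            [False, False]]   -> second variable left free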
miplearn/components/primal/__init__.py (new file, 29 lines)
@@ -0,0 +1,29 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
from typing import Tuple

import numpy as np

from miplearn.h5 import H5File


def _extract_bin_var_names_values(
    h5: H5File,
) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
    bin_var_names, bin_var_indices = _extract_bin_var_names(h5)
    var_values = h5.get_array("mip_var_values")
    assert var_values is not None
    bin_var_values = var_values[bin_var_indices].astype(int)
    return bin_var_names, bin_var_values, bin_var_indices


def _extract_bin_var_names(h5: H5File) -> Tuple[np.ndarray, np.ndarray]:
    var_types = h5.get_array("static_var_types")
    var_names = h5.get_array("static_var_names")
    assert var_types is not None
    assert var_names is not None
    bin_var_indices = np.where(var_types == b"B")[0]
    bin_var_names = var_names[bin_var_indices]
    assert len(bin_var_names.shape) == 1
    return bin_var_names, bin_var_indices
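These helpers filter a stored solution down to its binary variables. Illustration with hypothetical arrays in place of actual H5 contents:

    import numpy as np
    var_types = np.array([b"B", b"C", b"B"])
    var_names = np.array([b"x[0]", b"y", b"x[1]"])
    bin_var_indices = np.where(var_types == b"B")[0]  # array([0, 2])
    bin_var_names = var_names[bin_var_indices]        # array([b"x[0]", b"x[1]"])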
miplearn/components/primal/actions.py (new file, 93 lines)
@@ -0,0 +1,93 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
import logging
from abc import ABC, abstractmethod
from typing import Optional, Dict

import numpy as np

from miplearn.solvers.abstract import AbstractModel

logger = logging.getLogger()


class PrimalComponentAction(ABC):
    @abstractmethod
    def perform(
        self,
        model: AbstractModel,
        var_names: np.ndarray,
        var_values: np.ndarray,
        stats: Optional[Dict],
    ) -> None:
        pass


class SetWarmStart(PrimalComponentAction):
    def perform(
        self,
        model: AbstractModel,
        var_names: np.ndarray,
        var_values: np.ndarray,
        stats: Optional[Dict],
    ) -> None:
        logger.info("Setting warm starts...")
        model.set_warm_starts(var_names, var_values, stats)


class FixVariables(PrimalComponentAction):
    def perform(
        self,
        model: AbstractModel,
        var_names: np.ndarray,
        var_values: np.ndarray,
        stats: Optional[Dict],
    ) -> None:
        logger.info("Fixing variables...")
        assert len(var_values.shape) == 2
        assert var_values.shape[0] == 1
        var_values = var_values.reshape(-1)
        model.fix_variables(var_names, var_values, stats)
        if stats is not None:
            stats["Heuristic"] = True


class EnforceProximity(PrimalComponentAction):
    def __init__(self, tol: float) -> None:
        self.tol = tol

    def perform(
        self,
        model: AbstractModel,
        var_names: np.ndarray,
        var_values: np.ndarray,
        stats: Optional[Dict],
    ) -> None:
        assert len(var_values.shape) == 2
        assert var_values.shape[0] == 1
        var_values = var_values.reshape(-1)

        constr_lhs = []
        constr_vars = []
        constr_rhs = 0.0
        for (i, var_name) in enumerate(var_names):
            if np.isnan(var_values[i]):
                continue
            constr_lhs.append(1.0 if var_values[i] < 0.5 else -1.0)
            constr_rhs -= var_values[i]
            constr_vars.append(var_name)

        constr_rhs += len(constr_vars) * self.tol
        logger.info(
            f"Adding proximity constraint (tol={self.tol}, nz={len(constr_vars)})..."
        )

        model.add_constrs(
            np.array(constr_vars),
            np.array([constr_lhs]),
            np.array(["<"], dtype="S"),
            np.array([constr_rhs]),
        )
        if stats is not None:
            stats["Heuristic"] = True
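The coefficients built by EnforceProximity encode a Hamming-distance ball around the predicted point y*: +1 for variables predicted at zero and -1 for variables predicted at one, which makes the added row equivalent to

    sum_{i: y*_i = 0} x_i + sum_{i: y*_i = 1} (1 - x_i) <= tol * n,

where n is the number of variables with a non-NaN prediction. In other words, the MIP may flip at most a tol fraction of the predicted binary values.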
miplearn/components/primal/expert.py (new file, 32 lines)
@@ -0,0 +1,32 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

import logging
from typing import Any, Dict, List

from . import _extract_bin_var_names_values
from .actions import PrimalComponentAction
from ...solvers.abstract import AbstractModel
from ...h5 import H5File

logger = logging.getLogger(__name__)


class ExpertPrimalComponent:
    """
    Component that predicts warm starts by peeking at the optimal solution.
    """

    def __init__(self, action: PrimalComponentAction):
        self.action = action

    def fit(self, train_h5: List[str]) -> None:
        pass

    def before_mip(
        self, test_h5: str, model: AbstractModel, stats: Dict[str, Any]
    ) -> None:
        with H5File(test_h5, "r") as h5:
            names, values, _ = _extract_bin_var_names_values(h5)
            self.action.perform(model, names, values.reshape(1, -1), stats)
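ExpertPrimalComponent is mainly useful as an upper bound on what a learned primal component could achieve, since it reads the optimal solution straight from the H5 file. A hypothetical usage sketch, combined with an action from actions.py (the filename is illustrative):

    component = ExpertPrimalComponent(action=SetWarmStart())
    component.fit(train_h5=[])  # no-op: nothing to learn
    component.before_mip("instance.h5", model, stats={})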
miplearn/components/primal/indep.py (new file, 129 lines)
@@ -0,0 +1,129 @@
|
||||
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
|
||||
# Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
|
||||
# Released under the modified BSD license. See COPYING.md for more details.
|
||||
import logging
|
||||
from typing import Any, Dict, List, Callable, Optional
|
||||
|
||||
import numpy as np
|
||||
import sklearn
|
||||
|
||||
from miplearn.components.primal import (
|
||||
_extract_bin_var_names_values,
|
||||
_extract_bin_var_names,
|
||||
)
|
||||
from miplearn.components.primal.actions import PrimalComponentAction
|
||||
from miplearn.extractors.abstract import FeaturesExtractor
|
||||
from miplearn.solvers.abstract import AbstractModel
|
||||
from miplearn.h5 import H5File
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class IndependentVarsPrimalComponent:
|
||||
    def __init__(
        self,
        base_clf: Any,
        extractor: FeaturesExtractor,
        action: PrimalComponentAction,
        clone_fn: Callable[[Any], Any] = sklearn.clone,
    ):
        self.base_clf = base_clf
        self.extractor = extractor
        self.clf_: Dict[bytes, Any] = {}
        self.bin_var_names_: Optional[np.ndarray] = None
        self.n_features_: Optional[int] = None
        self.clone_fn = clone_fn
        self.action = action

    def fit(self, train_h5: List[str]) -> None:
        logger.info("Reading training data...")
        self.bin_var_names_ = None
        n_bin_vars: Optional[int] = None
        n_vars: Optional[int] = None
        x, y = [], []
        for h5_filename in train_h5:
            with H5File(h5_filename, "r") as h5:
                # Get number of variables
                var_types = h5.get_array("static_var_types")
                assert var_types is not None
                n_vars = len(var_types)

                # Extract features
                (
                    bin_var_names,
                    bin_var_values,
                    bin_var_indices,
                ) = _extract_bin_var_names_values(h5)

                # Store/check variable names
                if self.bin_var_names_ is None:
                    self.bin_var_names_ = bin_var_names
                    n_bin_vars = len(self.bin_var_names_)
                else:
                    assert np.all(bin_var_names == self.bin_var_names_)

                # Build x and y vectors
                x_sample = self.extractor.get_var_features(h5)
                assert len(x_sample.shape) == 2
                assert x_sample.shape[0] == n_vars
                x_sample = x_sample[bin_var_indices]
                if self.n_features_ is None:
                    self.n_features_ = x_sample.shape[1]
                else:
                    assert x_sample.shape[1] == self.n_features_
                x.append(x_sample)
                y.append(bin_var_values)

        assert n_bin_vars is not None
        assert self.bin_var_names_ is not None

        logger.info("Constructing matrices...")
        x_np = np.vstack(x)
        y_np = np.hstack(y)
        n_samples = len(train_h5) * n_bin_vars
        assert x_np.shape == (n_samples, self.n_features_)
        assert y_np.shape == (n_samples,)
        logger.info(
            f"Dataset has {n_bin_vars} binary variables, "
            f"{len(train_h5):,d} samples per variable, "
            f"{self.n_features_:,d} features, 1 target and 2 classes"
        )

        logger.info(f"Training {n_bin_vars} classifiers...")
        self.clf_ = {}
        for (var_idx, var_name) in enumerate(self.bin_var_names_):
            self.clf_[var_name] = self.clone_fn(self.base_clf)
            self.clf_[var_name].fit(
                x_np[var_idx::n_bin_vars, :], y_np[var_idx::n_bin_vars]
            )

        logger.info("Done fitting.")

    def before_mip(
        self, test_h5: str, model: AbstractModel, stats: Dict[str, Any]
    ) -> None:
        assert self.bin_var_names_ is not None
        assert self.n_features_ is not None

        # Read features
        with H5File(test_h5, "r") as h5:
            x_sample = self.extractor.get_var_features(h5)
            bin_var_names, bin_var_indices = _extract_bin_var_names(h5)
            assert np.all(bin_var_names == self.bin_var_names_)
            x_sample = x_sample[bin_var_indices]

        assert x_sample.shape == (len(self.bin_var_names_), self.n_features_)

        # Predict optimal solution
        logger.info("Predicting warm starts...")
        y_pred = []
        for (var_idx, var_name) in enumerate(self.bin_var_names_):
            x_var = x_sample[var_idx, :].reshape(1, -1)
            y_var = self.clf_[var_name].predict(x_var)
            assert y_var.shape == (1,)
            y_pred.append(y_var[0])

        # Construct warm starts, based on prediction
        y_pred_np = np.array(y_pred).reshape(1, -1)
        assert y_pred_np.shape == (1, len(self.bin_var_names_))
        self.action.perform(model, self.bin_var_names_, y_pred_np, stats)
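A minimal usage sketch for the component above, for illustration only: it assumes this file lives at miplearn/components/primal/independent.py, and that the H5FieldsExtractor and SetWarmStart helpers accept the arguments shown; the classifier choice is arbitrary.

from sklearn.neighbors import KNeighborsClassifier

from miplearn.components.primal.actions import SetWarmStart
from miplearn.components.primal.independent import IndependentVarsPrimalComponent
from miplearn.extractors.fields import H5FieldsExtractor

# One classifier is cloned (via sklearn.clone) and fitted per binary variable,
# using the extractor's per-variable feature matrix as input.
comp = IndependentVarsPrimalComponent(
    base_clf=KNeighborsClassifier(n_neighbors=5),
    extractor=H5FieldsExtractor(var_fields=["lp_var_values"]),
    action=SetWarmStart(),
)
comp.fit(["data/00001.h5", "data/00002.h5"])  # then comp.before_mip(...) at solve time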
miplearn/components/primal/joint.py
@@ -0,0 +1,88 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
import logging
from typing import List, Dict, Any, Optional

import numpy as np

from miplearn.components.primal import _extract_bin_var_names_values
from miplearn.components.primal.actions import PrimalComponentAction
from miplearn.extractors.abstract import FeaturesExtractor
from miplearn.solvers.abstract import AbstractModel
from miplearn.h5 import H5File

logger = logging.getLogger(__name__)


class JointVarsPrimalComponent:
    def __init__(
        self, clf: Any, extractor: FeaturesExtractor, action: PrimalComponentAction
    ):
        self.clf = clf
        self.extractor = extractor
        self.bin_var_names_: Optional[np.ndarray] = None
        self.action = action

    def fit(self, train_h5: List[str]) -> None:
        logger.info("Reading training data...")
        self.bin_var_names_ = None
        x, y, n_samples, n_features = [], [], len(train_h5), None
        for h5_filename in train_h5:
            with H5File(h5_filename, "r") as h5:
                bin_var_names, bin_var_values, _ = _extract_bin_var_names_values(h5)

                # Store/check variable names
                if self.bin_var_names_ is None:
                    self.bin_var_names_ = bin_var_names
                else:
                    assert np.all(bin_var_names == self.bin_var_names_)

                # Build x and y vectors
                x_sample = self.extractor.get_instance_features(h5)
                assert len(x_sample.shape) == 1
                if n_features is None:
                    n_features = len(x_sample)
                else:
                    assert len(x_sample) == n_features
                x.append(x_sample)
                y.append(bin_var_values)
        assert self.bin_var_names_ is not None

        logger.info("Constructing matrices...")
        x_np = np.vstack(x)
        y_np = np.array(y)
        assert len(x_np.shape) == 2
        assert x_np.shape[0] == n_samples
        assert x_np.shape[1] == n_features
        assert y_np.shape == (n_samples, len(self.bin_var_names_))
        logger.info(
            f"Dataset has {n_samples:,d} samples, "
            f"{n_features:,d} features and {y_np.shape[1]:,d} targets"
        )

        logger.info("Training classifier...")
        self.clf.fit(x_np, y_np)

        logger.info("Done fitting.")

    def before_mip(
        self, test_h5: str, model: AbstractModel, stats: Dict[str, Any]
    ) -> None:
        assert self.bin_var_names_ is not None

        # Read features
        with H5File(test_h5, "r") as h5:
            x_sample = self.extractor.get_instance_features(h5)
            assert len(x_sample.shape) == 1
            x_sample = x_sample.reshape(1, -1)

        # Predict optimal solution
        logger.info("Predicting warm starts...")
        y_pred = self.clf.predict(x_sample)
        assert len(y_pred.shape) == 2
        assert y_pred.shape[0] == 1
        assert y_pred.shape[1] == len(self.bin_var_names_)

        # Construct warm starts, based on prediction
        self.action.perform(model, self.bin_var_names_, y_pred, stats)
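The joint variant can be exercised the same way; here a single multi-output classifier maps instance-level features to the full vector of binary variables (the module path and extractor fields below are again assumptions, shown only as a sketch):

from sklearn.neighbors import KNeighborsClassifier

from miplearn.components.primal.actions import SetWarmStart
from miplearn.components.primal.joint import JointVarsPrimalComponent
from miplearn.extractors.fields import H5FieldsExtractor

# KNeighborsClassifier handles multi-label targets natively, so one model
# predicts all binary variables at once, capturing their correlations.
comp = JointVarsPrimalComponent(
    clf=KNeighborsClassifier(n_neighbors=5),
    extractor=H5FieldsExtractor(instance_fields=["static_var_obj_coeffs"]),
    action=SetWarmStart(),
)
comp.fit(["data/00001.h5", "data/00002.h5"])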
miplearn/components/primal/mem.py
@@ -0,0 +1,167 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

import logging
from abc import ABC, abstractmethod
from typing import List, Dict, Any, Optional, Tuple

import numpy as np

from . import _extract_bin_var_names_values
from .actions import PrimalComponentAction
from ...extractors.abstract import FeaturesExtractor
from ...solvers.abstract import AbstractModel
from ...h5 import H5File

logger = logging.getLogger(__name__)

class SolutionConstructor(ABC):
    @abstractmethod
    def construct(self, y_proba: np.ndarray, solutions: np.ndarray) -> np.ndarray:
        pass


class MemorizingPrimalComponent:
    """
    Component that memorizes all solutions seen during training, then fits a
    single classifier to predict which of the memorized solutions should be
    provided to the solver. Optionally combines multiple memorized solutions
    into a single, partial one.
    """

    def __init__(
        self,
        clf: Any,
        extractor: FeaturesExtractor,
        constructor: SolutionConstructor,
        action: PrimalComponentAction,
    ) -> None:
        assert clf is not None
        self.clf = clf
        self.extractor = extractor
        self.constructor = constructor
        self.solutions_: Optional[np.ndarray] = None
        self.bin_var_names_: Optional[np.ndarray] = None
        self.action = action

    def fit(self, train_h5: List[str]) -> None:
        logger.info("Reading training data...")
        n_samples = len(train_h5)
        solutions_ = []
        self.bin_var_names_ = None
        x, y, n_features = [], [], None
        solution_to_idx: Dict[Tuple, int] = {}
        for h5_filename in train_h5:
            with H5File(h5_filename, "r") as h5:
                bin_var_names, bin_var_values, _ = _extract_bin_var_names_values(h5)

                # Store/check variable names
                if self.bin_var_names_ is None:
                    self.bin_var_names_ = bin_var_names
                else:
                    assert np.all(bin_var_names == self.bin_var_names_)

                # Store solution
                sol = tuple(np.where(bin_var_values)[0])
                if sol not in solution_to_idx:
                    solutions_.append(bin_var_values)
                    solution_to_idx[sol] = len(solution_to_idx)
                y.append(solution_to_idx[sol])

                # Extract features
                x_sample = self.extractor.get_instance_features(h5)
                assert len(x_sample.shape) == 1
                if n_features is None:
                    n_features = len(x_sample)
                else:
                    assert len(x_sample) == n_features
                x.append(x_sample)

        logger.info("Constructing matrices...")
        x_np = np.vstack(x)
        y_np = np.array(y)
        assert len(x_np.shape) == 2
        assert x_np.shape[0] == n_samples
        assert x_np.shape[1] == n_features
        assert y_np.shape == (n_samples,)
        self.solutions_ = np.array(solutions_)
        n_classes = len(solution_to_idx)
        logger.info(
            f"Dataset has {n_samples:,d} samples, "
            f"{n_features:,d} features and {n_classes:,d} classes"
        )

        logger.info("Training classifier...")
        self.clf.fit(x_np, y_np)

        logger.info("Done fitting.")

    def before_mip(
        self, test_h5: str, model: AbstractModel, stats: Dict[str, Any]
    ) -> None:
        assert self.solutions_ is not None
        assert self.bin_var_names_ is not None

        # Read features
        with H5File(test_h5, "r") as h5:
            x_sample = self.extractor.get_instance_features(h5)
            assert len(x_sample.shape) == 1
            x_sample = x_sample.reshape(1, -1)

        # Predict optimal solution
        logger.info("Predicting primal solution...")
        y_proba = self.clf.predict_proba(x_sample)
        assert len(y_proba.shape) == 2
        assert y_proba.shape[0] == 1
        assert y_proba.shape[1] == len(self.solutions_)

        # Construct warm starts, based on prediction
        starts = self.constructor.construct(y_proba[0, :], self.solutions_)
        self.action.perform(model, self.bin_var_names_, starts, stats)


class SelectTopSolutions(SolutionConstructor):
    """
    Warm start construction strategy that selects and returns the top k solutions.
    """

    def __init__(self, k: int) -> None:
        self.k = k

    def construct(self, y_proba: np.ndarray, solutions: np.ndarray) -> np.ndarray:
        # Check arguments
        assert len(y_proba.shape) == 1
        assert len(solutions.shape) == 2
        assert len(y_proba) == solutions.shape[0]

        # Select top k solutions
        ind = np.argsort(-y_proba, kind="stable")
        selected = ind[: min(self.k, len(ind))]
        return solutions[selected, :]


class MergeTopSolutions(SolutionConstructor):
    """
    Warm start construction strategy that first selects the top k solutions,
    then merges them into a single solution.

    To merge the solutions, the strategy first computes the mean optimal value
    of each decision variable, then: (i) sets the variable to zero if the mean
    is below thresholds[0]; (ii) sets the variable to one if the mean is above
    thresholds[1]; (iii) leaves the variable free otherwise.
    """

    def __init__(self, k: int, thresholds: List[float]):
        assert len(thresholds) == 2
        self.k = k
        self.thresholds = thresholds

    def construct(self, y_proba: np.ndarray, solutions: np.ndarray) -> np.ndarray:
        filtered = SelectTopSolutions(self.k).construct(y_proba, solutions)
        mean = filtered.mean(axis=0)
        start = np.full((1, solutions.shape[1]), float("nan"))
        start[0, mean <= self.thresholds[0]] = 0
        start[0, mean >= self.thresholds[1]] = 1
        return start
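A worked example of the merging rule in MergeTopSolutions, with made-up numbers: for k=3 and thresholds=[0.25, 0.75], the top three solutions (1,0,1), (1,0,0) and (1,1,0) have per-variable means (1.0, 0.33, 0.33), so the first variable is fixed to one and the other two are left free.

import numpy as np

# Three memorized solutions, ordered here by predicted probability.
solutions = np.array([[1, 0, 1], [1, 0, 0], [1, 1, 0]], dtype=float)
y_proba = np.array([0.5, 0.3, 0.2])
start = MergeTopSolutions(k=3, thresholds=[0.25, 0.75]).construct(y_proba, solutions)
# start == [[1.0, nan, nan]] -> x0 fixed to 1; x1, x2 left to the solver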
miplearn/components/priority.py
@@ -0,0 +1,31 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

from math import log
from typing import List, Dict, Any
import numpy as np

import gurobipy as gp

from ..h5 import H5File


class ExpertBranchPriorityComponent:
    def __init__(self) -> None:
        pass

    def fit(self, train_h5: List[str]) -> None:
        pass

    def before_mip(self, test_h5: str, model: gp.Model, _: Dict[str, Any]) -> None:
        with H5File(test_h5, "r") as h5:
            var_names = h5.get_array("static_var_names")
            var_priority = h5.get_array("bb_var_priority")
            assert var_priority is not None
            assert var_names is not None

            for (var_idx, var_name) in enumerate(var_names):
                if np.isfinite(var_priority[var_idx]):
                    var = model.getVarByName(var_name.decode())
                    var.branchPriority = int(log(1 + var_priority[var_idx]))
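The int(log(1 + p)) transform above compresses raw priorities, which can span orders of magnitude, into a handful of integer levels; a quick check with illustrative values:

from math import log

for p in [0, 9, 99, 999]:
    print(p, "->", int(log(1 + p)))  # 0 -> 0, 9 -> 2, 99 -> 4, 999 -> 6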
@@ -1,252 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

import logging
from typing import Dict, Tuple, List, Any, TYPE_CHECKING, Set, Optional

import numpy as np
from overrides import overrides

from miplearn.classifiers import Classifier
from miplearn.classifiers.counting import CountingClassifier
from miplearn.classifiers.threshold import MinProbabilityThreshold, Threshold
from miplearn.components.component import Component
from miplearn.features.sample import Sample
from miplearn.solvers.internal import Constraints
from miplearn.instance.base import Instance
from miplearn.types import LearningSolveStats, ConstraintName, ConstraintCategory

logger = logging.getLogger(__name__)

if TYPE_CHECKING:
    from miplearn.solvers.learning import LearningSolver


class LazyConstraint:
    def __init__(self, cid: ConstraintName, obj: Any) -> None:
        self.cid = cid
        self.obj = obj


class StaticLazyConstraintsComponent(Component):
    """
    Component that decides which of the constraints tagged as lazy should
    be kept in the formulation, and which should be removed.
    """

    def __init__(
        self,
        classifier: Classifier = CountingClassifier(),
        threshold: Threshold = MinProbabilityThreshold([0.50, 0.50]),
        violation_tolerance: float = -0.5,
    ) -> None:
        assert isinstance(classifier, Classifier)
        self.classifier_prototype: Classifier = classifier
        self.threshold_prototype: Threshold = threshold
        self.classifiers: Dict[ConstraintCategory, Classifier] = {}
        self.thresholds: Dict[ConstraintCategory, Threshold] = {}
        self.pool: Constraints = Constraints()
        self.violation_tolerance: float = violation_tolerance
        self.enforced_cids: Set[ConstraintName] = set()
        self.n_restored: int = 0
        self.n_iterations: int = 0

    @overrides
    def after_solve_mip(
        self,
        solver: "LearningSolver",
        instance: "Instance",
        model: Any,
        stats: LearningSolveStats,
        sample: Sample,
    ) -> None:
        sample.put_array(
            "mip_constr_lazy_enforced",
            np.array(list(self.enforced_cids), dtype="S"),
        )
        stats["LazyStatic: Restored"] = self.n_restored
        stats["LazyStatic: Iterations"] = self.n_iterations

    @overrides
    def before_solve_mip(
        self,
        solver: "LearningSolver",
        instance: "Instance",
        model: Any,
        stats: LearningSolveStats,
        sample: Sample,
    ) -> None:
        assert solver.internal_solver is not None
        static_lazy_count = sample.get_scalar("static_constr_lazy_count")
        assert static_lazy_count is not None

        logger.info("Predicting violated (static) lazy constraints...")
        if static_lazy_count == 0:
            logger.info("Instance does not have static lazy constraints. Skipping.")
            return
        self.enforced_cids = set(self.sample_predict(sample))
        logger.info("Moving lazy constraints to the pool...")
        constraints = Constraints.from_sample(sample)
        assert constraints.lazy is not None
        assert constraints.names is not None
        selected = [
            (constraints.lazy[i] and constraints.names[i] not in self.enforced_cids)
            for i in range(len(constraints.lazy))
        ]
        n_removed = sum(selected)
        n_kept = sum(constraints.lazy) - n_removed
        self.pool = constraints[selected]
        assert self.pool.names is not None
        solver.internal_solver.remove_constraints(self.pool.names)
        logger.info(f"{n_kept} lazy constraints kept; {n_removed} moved to the pool")
        stats["LazyStatic: Removed"] = n_removed
        stats["LazyStatic: Kept"] = n_kept
        stats["LazyStatic: Restored"] = 0
        self.n_restored = 0
        self.n_iterations = 0

    @overrides
    def fit_xy(
        self,
        x: Dict[ConstraintCategory, np.ndarray],
        y: Dict[ConstraintCategory, np.ndarray],
    ) -> None:
        for c in y.keys():
            assert c in x
            self.classifiers[c] = self.classifier_prototype.clone()
            self.thresholds[c] = self.threshold_prototype.clone()
            self.classifiers[c].fit(x[c], y[c])
            self.thresholds[c].fit(self.classifiers[c], x[c], y[c])

    @overrides
    def iteration_cb(
        self,
        solver: "LearningSolver",
        instance: "Instance",
        model: Any,
    ) -> bool:
        if solver.use_lazy_cb:
            return False
        else:
            return self._check_and_add(solver)

    @overrides
    def lazy_cb(
        self,
        solver: "LearningSolver",
        instance: "Instance",
        model: Any,
    ) -> None:
        self._check_and_add(solver)

    def sample_predict(self, sample: Sample) -> List[ConstraintName]:
        x, y, cids = self._sample_xy_with_cids(sample)
        enforced_cids: List[ConstraintName] = []
        for category in x.keys():
            if category not in self.classifiers:
                continue
            npx = np.array(x[category])
            proba = self.classifiers[category].predict_proba(npx)
            thr = self.thresholds[category].predict(npx)
            pred = list(proba[:, 1] > thr[1])
            for (i, is_selected) in enumerate(pred):
                if is_selected:
                    enforced_cids += [cids[category][i]]
        return enforced_cids

    @overrides
    def sample_xy(
        self,
        _: Optional[Instance],
        sample: Sample,
    ) -> Tuple[
        Dict[ConstraintCategory, List[List[float]]],
        Dict[ConstraintCategory, List[List[float]]],
    ]:
        x, y, __ = self._sample_xy_with_cids(sample)
        return x, y

    def _check_and_add(self, solver: "LearningSolver") -> bool:
        assert solver.internal_solver is not None
        assert self.pool.names is not None
        if len(self.pool.names) == 0:
            logger.info("Lazy constraint pool is empty. Skipping violation check.")
            return False
        self.n_iterations += 1
        logger.info("Finding violated lazy constraints...")
        is_satisfied = solver.internal_solver.are_constraints_satisfied(
            self.pool,
            tol=self.violation_tolerance,
        )
        is_violated = [not i for i in is_satisfied]
        violated_constraints = self.pool[is_violated]
        satisfied_constraints = self.pool[is_satisfied]
        self.pool = satisfied_constraints
        assert violated_constraints.names is not None
        assert satisfied_constraints.names is not None
        n_violated = len(violated_constraints.names)
        n_satisfied = len(satisfied_constraints.names)
logger.info(f"Found {n_violated} violated lazy constraints found")
        if n_violated > 0:
            logger.info(
                f"Enforcing {n_violated} lazy constraints; "
                f"{n_satisfied} left in the pool..."
            )
            solver.internal_solver.add_constraints(violated_constraints)
            for (i, name) in enumerate(violated_constraints.names):
                self.enforced_cids.add(name)
                self.n_restored += 1
            return True
        else:
            return False

    def _sample_xy_with_cids(
        self, sample: Sample
    ) -> Tuple[
        Dict[ConstraintCategory, List[List[float]]],
        Dict[ConstraintCategory, List[List[float]]],
        Dict[ConstraintCategory, List[ConstraintName]],
    ]:
        x: Dict[ConstraintCategory, List[List[float]]] = {}
        y: Dict[ConstraintCategory, List[List[float]]] = {}
        cids: Dict[ConstraintCategory, List[ConstraintName]] = {}
        instance_features = sample.get_array("static_instance_features")
        constr_features = sample.get_array("lp_constr_features")
        constr_names = sample.get_array("static_constr_names")
        constr_categories = sample.get_array("static_constr_categories")
        constr_lazy = sample.get_array("static_constr_lazy")
        lazy_enforced = sample.get_array("mip_constr_lazy_enforced")
        if constr_features is None:
            constr_features = sample.get_array("static_constr_features")

        assert instance_features is not None
        assert constr_features is not None
        assert constr_names is not None
        assert constr_categories is not None
        assert constr_lazy is not None

        for (cidx, cname) in enumerate(constr_names):
            # Skip non-lazy or uncategorized constraints; initialize buckets
            if not constr_lazy[cidx]:
                continue
            category = constr_categories[cidx]
            if len(category) == 0:
                continue
            if category not in x:
                x[category] = []
                y[category] = []
                cids[category] = []

            # Features
            features = list(instance_features)
            features.extend(constr_features[cidx])
            x[category].append(features)
            cids[category].append(cname)

            # Labels
            if lazy_enforced is not None:
                if cname in lazy_enforced:
                    y[category] += [[False, True]]
                else:
                    y[category] += [[True, False]]
        return x, y, cids
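For reference, the selection rule in sample_predict keeps a constraint only when the predicted probability of the "enforced" class clears its fitted threshold; a small numeric sketch with illustrative values:

import numpy as np

proba = np.array([[0.9, 0.1], [0.2, 0.8], [0.6, 0.4]])  # columns: [drop, enforce]
thr = [0.5, 0.5]  # per-class thresholds, as fitted by MinProbabilityThreshold
keep = proba[:, 1] > thr[1]
# keep == [False, True, False] -> only the second constraint stays in the model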