Use np.ndarray for constraint methods in Instance
@@ -8,12 +8,14 @@ from typing import Dict, List, Tuple, Optional, Any, Set
import numpy as np
from overrides import overrides

from miplearn.features.extractor import FeaturesExtractor
from miplearn.classifiers import Classifier
from miplearn.classifiers.threshold import Threshold
from miplearn.components import classifier_evaluation_dict
from miplearn.components.component import Component
from miplearn.features.sample import Sample
from miplearn.instance.base import Instance
from miplearn.types import ConstraintCategory, ConstraintName

logger = logging.getLogger(__name__)

@@ -32,9 +34,9 @@ class DynamicConstraintsComponent(Component):
assert isinstance(classifier, Classifier)
self.threshold_prototype: Threshold = threshold
self.classifier_prototype: Classifier = classifier
self.classifiers: Dict[str, Classifier] = {}
self.thresholds: Dict[str, Threshold] = {}
self.known_cids: List[str] = []
self.classifiers: Dict[ConstraintCategory, Classifier] = {}
self.thresholds: Dict[ConstraintCategory, Threshold] = {}
self.known_cids: List[ConstraintName] = []
self.attr = attr

def sample_xy_with_cids(
@@ -42,51 +44,45 @@ class DynamicConstraintsComponent(Component):
instance: Optional[Instance],
sample: Sample,
) -> Tuple[
Dict[str, List[List[float]]],
Dict[str, List[List[bool]]],
Dict[str, List[str]],
Dict[ConstraintCategory, List[List[float]]],
Dict[ConstraintCategory, List[List[bool]]],
Dict[ConstraintCategory, List[ConstraintName]],
]:
if len(self.known_cids) == 0:
return {}, {}, {}
assert instance is not None
x: Dict[str, List[List[float]]] = {}
y: Dict[str, List[List[bool]]] = {}
cids: Dict[str, List[str]] = {}
constr_categories_dict = instance.get_constraint_categories()
constr_features_dict = instance.get_constraint_features()
x: Dict[ConstraintCategory, List[List[float]]] = {}
y: Dict[ConstraintCategory, List[List[bool]]] = {}
cids: Dict[ConstraintCategory, List[ConstraintName]] = {}
known_cids = np.array(self.known_cids, dtype="S")

# Get user-provided constraint features
(
constr_features,
constr_categories,
constr_lazy,
) = FeaturesExtractor._extract_user_features_constrs(instance, known_cids)

# Augment with instance features
instance_features = sample.get_array("static_instance_features")
assert instance_features is not None
for cid in self.known_cids:
# Initialize categories
if cid in constr_categories_dict:
category = constr_categories_dict[cid]
else:
category = cid
if category is None:
continue
if category not in x:
x[category] = []
y[category] = []
cids[category] = []
constr_features = np.hstack(
[
instance_features.reshape(1, -1).repeat(len(known_cids), axis=0),
constr_features,
]
)
assert len(known_cids) == constr_features.shape[0]

# Features
features: List[float] = []
features.extend(instance_features)
if cid in constr_features_dict:
features.extend(constr_features_dict[cid])
for ci in features:
assert isinstance(ci, float), (
f"Constraint features must be a list of floats. "
f"Found {ci.__class__.__name__} instead."
)
x[category].append(features)
cids[category].append(cid)

# Labels
enforced_cids = sample.get_set(self.attr)
categories = np.unique(constr_categories)
for c in categories:
x[c] = constr_features[constr_categories == c].tolist()
cids[c] = known_cids[constr_categories == c].tolist()
enforced_cids = np.array(list(sample.get_set(self.attr)), dtype="S")
if enforced_cids is not None:
if cid in enforced_cids:
y[category] += [[False, True]]
else:
y[category] += [[True, False]]
tmp = np.isin(cids[c], enforced_cids).reshape(-1, 1)
y[c] = np.hstack([~tmp, tmp]).tolist() # type: ignore

return x, y, cids
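
The vectorized block above replaces the per-constraint Python loop. A minimal standalone sketch of the same grouping, using NumPy only and hypothetical data (names c1..c3, categories catA/catB are made up, not values from the library):

import numpy as np

known_cids = np.array([b"c1", b"c2", b"c3"], dtype="S")
constr_categories = np.array([b"catA", b"catA", b"catB"], dtype="S")
constr_features = np.array([[1.0], [2.0], [3.0]])
enforced_cids = np.array([b"c2"], dtype="S")

x, y, cids = {}, {}, {}
for c in np.unique(constr_categories):
    mask = constr_categories == c
    x[c] = constr_features[mask].tolist()    # features of this category
    cids[c] = known_cids[mask].tolist()      # constraint names of this category
    tmp = np.isin(cids[c], enforced_cids).reshape(-1, 1)
    y[c] = np.hstack([~tmp, tmp]).tolist()   # [not-enforced, enforced] labels
# y[b"catA"] == [[True, False], [False, True]] since only c2 was enforced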

@overrides
@@ -111,8 +107,8 @@ class DynamicConstraintsComponent(Component):
self,
instance: Instance,
sample: Sample,
) -> List[str]:
pred: List[str] = []
) -> List[ConstraintName]:
pred: List[ConstraintName] = []
if len(self.known_cids) == 0:
logger.info("Classifiers not fitted. Skipping.")
return pred
@@ -137,8 +133,8 @@ class DynamicConstraintsComponent(Component):
@overrides
def fit_xy(
self,
x: Dict[str, np.ndarray],
y: Dict[str, np.ndarray],
x: Dict[ConstraintCategory, np.ndarray],
y: Dict[ConstraintCategory, np.ndarray],
) -> None:
for category in x.keys():
self.classifiers[category] = self.classifier_prototype.clone()
@@ -153,40 +149,20 @@ class DynamicConstraintsComponent(Component):
self,
instance: Instance,
sample: Sample,
) -> Dict[str, Dict[str, float]]:
) -> Dict[str, float]:
actual = sample.get_set(self.attr)
assert actual is not None
pred = set(self.sample_predict(instance, sample))
tp: Dict[str, int] = {}
tn: Dict[str, int] = {}
fp: Dict[str, int] = {}
fn: Dict[str, int] = {}
constr_categories_dict = instance.get_constraint_categories()
tp, tn, fp, fn = 0, 0, 0, 0
for cid in self.known_cids:
if cid not in constr_categories_dict:
continue
category = constr_categories_dict[cid]
if category not in tp.keys():
tp[category] = 0
tn[category] = 0
fp[category] = 0
fn[category] = 0
if cid in pred:
if cid in actual:
tp[category] += 1
tp += 1
else:
fp[category] += 1
fp += 1
else:
if cid in actual:
fn[category] += 1
fn += 1
else:
tn[category] += 1
return {
category: classifier_evaluation_dict(
tp=tp[category],
tn=tn[category],
fp=fp[category],
fn=fn[category],
)
for category in tp.keys()
}
tn += 1
return classifier_evaluation_dict(tp=tp, tn=tn, fp=fp, fn=fn)
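
With the per-category dictionaries gone, the evaluation reduces to four plain integer counters. A hedged illustration with hypothetical prediction/label sets (pred, actual, known_cids are stand-ins, not values produced by the library):

pred = {b"c1", b"c3"}                      # constraints the classifiers would enforce
actual = {b"c1", b"c2"}                    # constraints actually enforced in the sample
known_cids = [b"c1", b"c2", b"c3", b"c4"]
tp = sum(cid in pred and cid in actual for cid in known_cids)          # 1 (c1)
fp = sum(cid in pred and cid not in actual for cid in known_cids)      # 1 (c3)
fn = sum(cid not in pred and cid in actual for cid in known_cids)      # 1 (c2)
tn = sum(cid not in pred and cid not in actual for cid in known_cids)  # 1 (c4)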

@@ -15,7 +15,7 @@ from miplearn.components.component import Component
from miplearn.components.dynamic_common import DynamicConstraintsComponent
from miplearn.features.sample import Sample
from miplearn.instance.base import Instance
from miplearn.types import LearningSolveStats
from miplearn.types import LearningSolveStats, ConstraintName, ConstraintCategory

logger = logging.getLogger(__name__)

@@ -41,11 +41,11 @@ class DynamicLazyConstraintsComponent(Component):
self.classifiers = self.dynamic.classifiers
self.thresholds = self.dynamic.thresholds
self.known_cids = self.dynamic.known_cids
self.lazy_enforced: Set[str] = set()
self.lazy_enforced: Set[ConstraintName] = set()

@staticmethod
def enforce(
cids: List[str],
cids: List[ConstraintName],
instance: Instance,
model: Any,
solver: "LearningSolver",
@@ -117,7 +117,7 @@ class DynamicLazyConstraintsComponent(Component):
self,
instance: Instance,
sample: Sample,
) -> List[str]:
) -> List[ConstraintName]:
return self.dynamic.sample_predict(instance, sample)

@overrides
@@ -127,8 +127,8 @@ class DynamicLazyConstraintsComponent(Component):
@overrides
def fit_xy(
self,
x: Dict[str, np.ndarray],
y: Dict[str, np.ndarray],
x: Dict[ConstraintCategory, np.ndarray],
y: Dict[ConstraintCategory, np.ndarray],
) -> None:
self.dynamic.fit_xy(x, y)

@@ -137,5 +137,5 @@ class DynamicLazyConstraintsComponent(Component):
self,
instance: Instance,
sample: Sample,
) -> Dict[str, Dict[str, float]]:
) -> Dict[ConstraintCategory, Dict[str, float]]:
return self.dynamic.sample_evaluate(instance, sample)

@@ -15,7 +15,7 @@ from miplearn.components.component import Component
from miplearn.components.dynamic_common import DynamicConstraintsComponent
from miplearn.features.sample import Sample
from miplearn.instance.base import Instance
from miplearn.types import LearningSolveStats
from miplearn.types import LearningSolveStats, ConstraintName, ConstraintCategory

logger = logging.getLogger(__name__)

@@ -34,7 +34,7 @@ class UserCutsComponent(Component):
threshold=threshold,
attr="mip_user_cuts_enforced",
)
self.enforced: Set[str] = set()
self.enforced: Set[ConstraintName] = set()
self.n_added_in_callback = 0

@overrides
@@ -71,7 +71,7 @@ class UserCutsComponent(Component):
for cid in cids:
if cid in self.enforced:
continue
assert isinstance(cid, str)
assert isinstance(cid, ConstraintName)
instance.enforce_user_cut(solver.internal_solver, model, cid)
self.enforced.add(cid)
self.n_added_in_callback += 1
@@ -110,7 +110,7 @@ class UserCutsComponent(Component):
self,
instance: "Instance",
sample: Sample,
) -> List[str]:
) -> List[ConstraintName]:
return self.dynamic.sample_predict(instance, sample)

@overrides
@@ -120,8 +120,8 @@ class UserCutsComponent(Component):
@overrides
def fit_xy(
self,
x: Dict[str, np.ndarray],
y: Dict[str, np.ndarray],
x: Dict[ConstraintCategory, np.ndarray],
y: Dict[ConstraintCategory, np.ndarray],
) -> None:
self.dynamic.fit_xy(x, y)

@@ -130,5 +130,5 @@ class UserCutsComponent(Component):
self,
instance: "Instance",
sample: Sample,
) -> Dict[str, Dict[str, float]]:
) -> Dict[ConstraintCategory, Dict[str, float]]:
return self.dynamic.sample_evaluate(instance, sample)

@@ -15,7 +15,7 @@ from miplearn.components.component import Component
from miplearn.features.sample import Sample
from miplearn.solvers.internal import Constraints
from miplearn.instance.base import Instance
from miplearn.types import LearningSolveStats
from miplearn.types import LearningSolveStats, ConstraintName, ConstraintCategory

logger = logging.getLogger(__name__)

@@ -24,7 +24,7 @@ if TYPE_CHECKING:


class LazyConstraint:
def __init__(self, cid: str, obj: Any) -> None:
def __init__(self, cid: ConstraintName, obj: Any) -> None:
self.cid = cid
self.obj = obj

@@ -44,11 +44,11 @@ class StaticLazyConstraintsComponent(Component):
assert isinstance(classifier, Classifier)
self.classifier_prototype: Classifier = classifier
self.threshold_prototype: Threshold = threshold
self.classifiers: Dict[str, Classifier] = {}
self.thresholds: Dict[str, Threshold] = {}
self.classifiers: Dict[ConstraintCategory, Classifier] = {}
self.thresholds: Dict[ConstraintCategory, Threshold] = {}
self.pool: Constraints = Constraints()
self.violation_tolerance: float = violation_tolerance
self.enforced_cids: Set[str] = set()
self.enforced_cids: Set[ConstraintName] = set()
self.n_restored: int = 0
self.n_iterations: int = 0

@@ -105,8 +105,8 @@ class StaticLazyConstraintsComponent(Component):
@overrides
def fit_xy(
self,
x: Dict[str, np.ndarray],
y: Dict[str, np.ndarray],
x: Dict[ConstraintCategory, np.ndarray],
y: Dict[ConstraintCategory, np.ndarray],
) -> None:
for c in y.keys():
assert c in x
@@ -136,9 +136,9 @@ class StaticLazyConstraintsComponent(Component):
) -> None:
self._check_and_add(solver)

def sample_predict(self, sample: Sample) -> List[str]:
def sample_predict(self, sample: Sample) -> List[ConstraintName]:
x, y, cids = self._sample_xy_with_cids(sample)
enforced_cids: List[str] = []
enforced_cids: List[ConstraintName] = []
for category in x.keys():
if category not in self.classifiers:
continue
@@ -156,7 +156,10 @@ class StaticLazyConstraintsComponent(Component):
self,
_: Optional[Instance],
sample: Sample,
) -> Tuple[Dict[str, List[List[float]]], Dict[str, List[List[float]]]]:
) -> Tuple[
Dict[ConstraintCategory, List[List[float]]],
Dict[ConstraintCategory, List[List[float]]],
]:
x, y, __ = self._sample_xy_with_cids(sample)
return x, y

@@ -197,13 +200,13 @@ class StaticLazyConstraintsComponent(Component):
def _sample_xy_with_cids(
self, sample: Sample
) -> Tuple[
Dict[str, List[List[float]]],
Dict[str, List[List[float]]],
Dict[str, List[str]],
Dict[ConstraintCategory, List[List[float]]],
Dict[ConstraintCategory, List[List[float]]],
Dict[ConstraintCategory, List[ConstraintName]],
]:
x: Dict[str, List[List[float]]] = {}
y: Dict[str, List[List[float]]] = {}
cids: Dict[str, List[str]] = {}
x: Dict[ConstraintCategory, List[List[float]]] = {}
y: Dict[ConstraintCategory, List[List[float]]] = {}
cids: Dict[ConstraintCategory, List[ConstraintName]] = {}
instance_features = sample.get_vector("static_instance_features")
constr_features = sample.get_vector_list("lp_constr_features")
constr_names = sample.get_array("static_constr_names")

@@ -2,10 +2,8 @@
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

import collections
import numbers
from math import log, isfinite
from typing import TYPE_CHECKING, Dict, Optional, List, Any, Tuple, KeysView, cast
from typing import TYPE_CHECKING, List, Tuple

import numpy as np

@@ -34,6 +32,7 @@ class FeaturesExtractor:
) -> None:
variables = solver.get_variables(with_static=True)
constraints = solver.get_constraints(with_static=True, with_lhs=self.with_lhs)
assert constraints.names is not None
sample.put_array("static_var_lower_bounds", variables.lower_bounds)
sample.put_array("static_var_names", variables.names)
sample.put_array("static_var_obj_coeffs", variables.obj_coeffs)
@@ -43,15 +42,30 @@ class FeaturesExtractor:
# sample.put("static_constr_lhs", constraints.lhs)
sample.put_array("static_constr_rhs", constraints.rhs)
sample.put_array("static_constr_senses", constraints.senses)
vars_features_user, var_categories = self._extract_user_features_vars(
instance, sample
)
sample.put_array("static_var_categories", var_categories)
self._extract_user_features_constrs(instance, sample)
self._extract_user_features_instance(instance, sample)
alw17 = self._extract_var_features_AlvLouWeh2017(sample)

# Build static_var_features
# Instance features
self._extract_user_features_instance(instance, sample)

# Constraint features
(
constr_features,
constr_categories,
constr_lazy,
) = FeaturesExtractor._extract_user_features_constrs(
instance,
constraints.names,
)
sample.put_array("static_constr_features", constr_features)
sample.put_array("static_constr_categories", constr_categories)
sample.put_array("static_constr_lazy", constr_lazy)
sample.put_scalar("static_constr_lazy_count", int(constr_lazy.sum()))

# Variable features
(
vars_features_user,
var_categories,
) = self._extract_user_features_vars(instance, sample)
sample.put_array("static_var_categories", var_categories)
assert variables.lower_bounds is not None
assert variables.obj_coeffs is not None
assert variables.upper_bounds is not None
@@ -60,7 +74,7 @@ class FeaturesExtractor:
np.hstack(
[
vars_features_user,
alw17,
self._extract_var_features_AlvLouWeh2017(sample),
variables.lower_bounds.reshape(-1, 1),
variables.obj_coeffs.reshape(-1, 1),
variables.upper_bounds.reshape(-1, 1),
@@ -92,13 +106,12 @@ class FeaturesExtractor:
sample.put_array("lp_constr_sa_rhs_down", constraints.sa_rhs_down)
sample.put_array("lp_constr_sa_rhs_up", constraints.sa_rhs_up)
sample.put_array("lp_constr_slacks", constraints.slacks)
alw17 = self._extract_var_features_AlvLouWeh2017(sample)

# Build lp_var_features
# Variable features
lp_var_features_list = []
for f in [
sample.get_array("static_var_features"),
alw17,
self._extract_var_features_AlvLouWeh2017(sample),
]:
if f is not None:
lp_var_features_list.append(f)
@@ -116,18 +129,20 @@ class FeaturesExtractor:
lp_var_features_list.append(f.reshape(-1, 1))
sample.put_array("lp_var_features", np.hstack(lp_var_features_list))

sample.put_vector_list(
"lp_constr_features",
self._combine(
[
sample.get_vector_list("static_constr_features"),
sample.get_array("lp_constr_dual_values"),
sample.get_array("lp_constr_sa_rhs_down"),
sample.get_array("lp_constr_sa_rhs_up"),
sample.get_array("lp_constr_slacks"),
],
),
)
# Constraint features
lp_constr_features_list = []
for f in [sample.get_array("static_constr_features")]:
if f is not None:
lp_constr_features_list.append(f)
for f in [
sample.get_array("lp_constr_dual_values"),
sample.get_array("lp_constr_sa_rhs_down"),
sample.get_array("lp_constr_sa_rhs_up"),
sample.get_array("lp_constr_slacks"),
]:
if f is not None:
lp_constr_features_list.append(f.reshape(-1, 1))
sample.put_array("lp_constr_features", np.hstack(lp_constr_features_list))

# Build lp_instance_features
static_instance_features = sample.get_array("static_instance_features")
@@ -155,6 +170,7 @@ class FeaturesExtractor:
sample.put_array("mip_var_values", variables.values)
sample.put_array("mip_constr_slacks", constraints.slacks)

# noinspection DuplicatedCode
def _extract_user_features_vars(
self,
instance: "Instance",
@@ -180,7 +196,7 @@ class FeaturesExtractor:
)
assert var_features.dtype.kind in ["f"], (
f"Variable features must be floating point numbers. "
f"Found dtype: {var_features.dtype} instead."
f"Found {var_features.dtype} instead."
)

# Query variable categories
@@ -195,7 +211,7 @@ class FeaturesExtractor:
)
assert len(var_categories) == len(var_names), (
f"Variable categories must have exactly {len(var_names)} elements. "
f"Found {var_features.shape[0]} elements instead."
f"Found {var_categories.shape[0]} elements instead."
)
assert var_categories.dtype.kind == "S", (
f"Variable categories must be a numpy array with dtype='S'. "
@@ -203,58 +219,71 @@ class FeaturesExtractor:
)
return var_features, var_categories

# noinspection DuplicatedCode
@classmethod
def _extract_user_features_constrs(
self,
cls,
instance: "Instance",
sample: Sample,
) -> None:
has_static_lazy = instance.has_static_lazy_constraints()
user_features: List[Optional[List[float]]] = []
categories: List[Optional[bytes]] = []
lazy: List[bool] = []
constr_categories_dict = instance.get_constraint_categories()
constr_features_dict = instance.get_constraint_features()
constr_names = sample.get_array("static_constr_names")
assert constr_names is not None
constr_names: np.ndarray,
) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
# Query constraint features
constr_features = instance.get_constraint_features(constr_names)
assert isinstance(constr_features, np.ndarray), (
f"get_constraint_features must return a numpy array. "
f"Found {constr_features.__class__} instead."
)
assert len(constr_features.shape) == 2, (
f"get_constraint_features must return a 2-dimensional array. "
f"Found array with shape {constr_features.shape} instead."
)
assert constr_features.shape[0] == len(constr_names), (
f"get_constraint_features must return an array with {len(constr_names)} "
f"rows. Found {constr_features.shape[0]} rows instead."
)
assert constr_features.dtype.kind in ["f"], (
f"get_constraint_features must return floating point numbers. "
f"Found {constr_features.dtype} instead."
)

for (cidx, cname) in enumerate(constr_names):
category: Optional[str] = cname
if cname in constr_categories_dict:
category = constr_categories_dict[cname]
if category is None:
user_features.append(None)
categories.append(None)
continue
assert isinstance(category, bytes), (
f"Constraint category must be bytes. "
f"Found {type(category).__name__} instead for cname={cname}.",
)
categories.append(category)
cf: Optional[List[float]] = None
if cname in constr_features_dict:
cf = constr_features_dict[cname]
if isinstance(cf, np.ndarray):
cf = cf.tolist()
assert isinstance(cf, list), (
f"Constraint features must be a list. "
f"Found {type(cf).__name__} instead for cname={cname}."
)
for f in cf:
assert isinstance(f, numbers.Real), (
f"Constraint features must be a list of numbers. "
f"Found {type(f).__name__} instead for cname={cname}."
)
cf = list(cf)
user_features.append(cf)
if has_static_lazy:
lazy.append(instance.is_constraint_lazy(cname))
else:
lazy.append(False)
sample.put_vector_list("static_constr_features", user_features)
sample.put_array("static_constr_categories", np.array(categories, dtype="S"))
constr_lazy = np.array(lazy, dtype=bool)
sample.put_array("static_constr_lazy", constr_lazy)
sample.put_scalar("static_constr_lazy_count", int(constr_lazy.sum()))
# Query constraint categories
constr_categories = instance.get_constraint_categories(constr_names)
assert isinstance(constr_categories, np.ndarray), (
f"get_constraint_categories must return a numpy array. "
f"Found {constr_categories.__class__} instead."
)
assert len(constr_categories.shape) == 1, (
f"get_constraint_categories must return a vector. "
f"Found array with shape {constr_categories.shape} instead."
)
assert len(constr_categories) == len(constr_names), (
f"get_constraint_categories must return a vector with {len(constr_names)} "
f"elements. Found {constr_categories.shape[0]} elements instead."
)
assert constr_categories.dtype.kind == "S", (
f"get_constraint_categories must return a numpy array with dtype='S'. "
f"Found {constr_categories.dtype} instead."
)

# Query constraint lazy attribute
constr_lazy = instance.are_constraints_lazy(constr_names)
assert isinstance(constr_lazy, np.ndarray), (
f"are_constraints_lazy must return a numpy array. "
f"Found {constr_lazy.__class__} instead."
)
assert len(constr_lazy.shape) == 1, (
f"are_constraints_lazy must return a vector. "
f"Found array with shape {constr_lazy.shape} instead."
)
assert constr_lazy.shape[0] == len(constr_names), (
f"are_constraints_lazy must return a vector with {len(constr_names)} "
f"elements. Found {constr_lazy.shape[0]} elements instead."
)
assert constr_lazy.dtype.kind == "b", (
f"are_constraints_lazy must return a boolean array. "
f"Found {constr_lazy.dtype} instead."
)

return constr_features, constr_categories, constr_lazy
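
For reference, a minimal sketch of return values that would satisfy the checks above, assuming a hypothetical instance with three constraints (the names and feature values are made up):

import numpy as np

constr_names = np.array([b"eq_flow[1]", b"eq_flow[2]", b"eq_capacity"], dtype="S")
constr_features = np.array([[1.0, 0.5], [2.0, 0.5], [3.0, 1.0]])      # float, shape (3, 2)
constr_categories = np.array([b"eq_flow", b"eq_flow", b"eq_capacity"], dtype="S")
constr_lazy = np.array([False, False, True], dtype=bool)

assert constr_features.shape[0] == len(constr_names)
assert constr_categories.dtype.kind == "S"
assert constr_lazy.dtype.kind == "b"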

def _extract_user_features_instance(
self,
@@ -272,7 +301,7 @@ class FeaturesExtractor:
)
assert features.dtype.kind in [
"f"
], f"Instance features have unsupported dtype: {features.dtype}"
], f"Instance features have unsupported {features.dtype}"
sample.put_array("static_instance_features", features)

# Alvarez, A. M., Louveaux, Q., & Wehenkel, L. (2017). A machine learning-based
@@ -352,29 +381,3 @@ class FeaturesExtractor:

features.append(f)
return np.array(features, dtype=float)

def _combine(
self,
items: List,
) -> List[List[float]]:
combined: List[List[float]] = []
for series in items:
if series is None:
continue
if len(combined) == 0:
for i in range(len(series)):
combined.append([])
for (i, s) in enumerate(series):
if s is None:
continue
elif isinstance(s, list):
combined[i].extend([_clip(sj) for sj in s])
else:
combined[i].append(_clip(s))
return combined


def _clip(vi: float) -> float:
if not isfinite(vi):
return max(min(vi, 1e20), -1e20)
return vi

@@ -103,7 +103,7 @@ class Sample(ABC):
def _assert_is_scalar(self, value: Any) -> None:
if value is None:
return
if isinstance(value, (str, bool, int, float, np.bytes_)):
if isinstance(value, (str, bool, int, float, bytes, np.bytes_)):
return
assert False, f"scalar expected; found instead: {value} ({value.__class__})"

@@ -9,6 +9,7 @@ from typing import Any, List, TYPE_CHECKING, Dict
import numpy as np

from miplearn.features.sample import Sample, MemorySample
from miplearn.types import ConstraintName, ConstraintCategory

logger = logging.getLogger(__name__)

@@ -97,26 +98,23 @@ class Instance(ABC):
"""
return names

def get_constraint_features(self) -> Dict[str, List[float]]:
return {}
def get_constraint_features(self, names: np.ndarray) -> np.ndarray:
return np.zeros((len(names), 1))

def get_constraint_categories(self) -> Dict[str, str]:
return {}

def has_static_lazy_constraints(self) -> bool:
return False
def get_constraint_categories(self, names: np.ndarray) -> np.ndarray:
return names

def has_dynamic_lazy_constraints(self) -> bool:
return False

def is_constraint_lazy(self, cid: str) -> bool:
return False
def are_constraints_lazy(self, names: np.ndarray) -> np.ndarray:
return np.zeros(len(names), dtype=bool)

def find_violated_lazy_constraints(
self,
solver: "InternalSolver",
model: Any,
) -> List[str]:
) -> List[ConstraintName]:
"""
Returns lazy constraint violations found for the current solution.

@@ -142,7 +140,7 @@ class Instance(ABC):
self,
solver: "InternalSolver",
model: Any,
violation: str,
violation: ConstraintName,
) -> None:
"""
Adds constraints to the model to ensure that the given violation is fixed.
@@ -168,14 +166,14 @@ class Instance(ABC):
def has_user_cuts(self) -> bool:
return False

def find_violated_user_cuts(self, model: Any) -> List[str]:
def find_violated_user_cuts(self, model: Any) -> List[ConstraintName]:
return []

def enforce_user_cut(
self,
solver: "InternalSolver",
model: Any,
violation: str,
violation: ConstraintName,
) -> Any:
return None
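
A sketch of how a user-defined instance might override the new vectorized hooks shown above: each method receives the full array of constraint names and returns one aligned NumPy array. The class, the name-prefix grouping, and the lazy_ naming convention are illustrative assumptions, not part of the library; the remaining Instance members are omitted.

import numpy as np
from miplearn.instance.base import Instance

class MyInstance(Instance):
    # other abstract members omitted for brevity

    def get_constraint_features(self, names: np.ndarray) -> np.ndarray:
        # one float row per constraint; a single dummy feature in this sketch
        return np.array([[float(len(n))] for n in names])

    def get_constraint_categories(self, names: np.ndarray) -> np.ndarray:
        # group constraints by name prefix, returned as a dtype='S' vector
        return np.array([n.split(b"[")[0] for n in names], dtype="S")

    def are_constraints_lazy(self, names: np.ndarray) -> np.ndarray:
        return np.array([n.startswith(b"lazy_") for n in names], dtype=bool)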

@@ -11,6 +11,7 @@ from overrides import overrides

from miplearn.features.sample import Hdf5Sample, Sample
from miplearn.instance.base import Instance
from miplearn.types import ConstraintName, ConstraintCategory

if TYPE_CHECKING:
from miplearn.solvers.learning import InternalSolver
@@ -46,19 +47,14 @@ class FileInstance(Instance):
return self.instance.get_variable_categories(names)

@overrides
def get_constraint_features(self) -> Dict[str, List[float]]:
def get_constraint_features(self, names: np.ndarray) -> np.ndarray:
assert self.instance is not None
return self.instance.get_constraint_features()
return self.instance.get_constraint_features(names)

@overrides
def get_constraint_categories(self) -> Dict[str, str]:
def get_constraint_categories(self, names: np.ndarray) -> np.ndarray:
assert self.instance is not None
return self.instance.get_constraint_categories()

@overrides
def has_static_lazy_constraints(self) -> bool:
assert self.instance is not None
return self.instance.has_static_lazy_constraints()
return self.instance.get_constraint_categories(names)

@overrides
def has_dynamic_lazy_constraints(self) -> bool:
@@ -66,16 +62,16 @@ class FileInstance(Instance):
return self.instance.has_dynamic_lazy_constraints()

@overrides
def is_constraint_lazy(self, cid: str) -> bool:
def are_constraints_lazy(self, names: np.ndarray) -> np.ndarray:
assert self.instance is not None
return self.instance.is_constraint_lazy(cid)
return self.instance.are_constraints_lazy(names)

@overrides
def find_violated_lazy_constraints(
self,
solver: "InternalSolver",
model: Any,
) -> List[str]:
) -> List[ConstraintName]:
assert self.instance is not None
return self.instance.find_violated_lazy_constraints(solver, model)

@@ -84,13 +80,13 @@ class FileInstance(Instance):
self,
solver: "InternalSolver",
model: Any,
violation: str,
violation: ConstraintName,
) -> None:
assert self.instance is not None
self.instance.enforce_lazy_constraint(solver, model, violation)

@overrides
def find_violated_user_cuts(self, model: Any) -> List[str]:
def find_violated_user_cuts(self, model: Any) -> List[ConstraintName]:
assert self.instance is not None
return self.instance.find_violated_user_cuts(model)

@@ -99,7 +95,7 @@ class FileInstance(Instance):
self,
solver: "InternalSolver",
model: Any,
violation: str,
violation: ConstraintName,
) -> None:
assert self.instance is not None
self.instance.enforce_user_cut(solver, model, violation)

@@ -13,6 +13,7 @@ from overrides import overrides

from miplearn.features.sample import Sample
from miplearn.instance.base import Instance
from miplearn.types import ConstraintName, ConstraintCategory

if TYPE_CHECKING:
from miplearn.solvers.learning import InternalSolver
@@ -58,19 +59,14 @@ class PickleGzInstance(Instance):
return self.instance.get_variable_categories(names)

@overrides
def get_constraint_features(self) -> Dict[str, List[float]]:
def get_constraint_features(self, names: np.ndarray) -> np.ndarray:
assert self.instance is not None
return self.instance.get_constraint_features()
return self.instance.get_constraint_features(names)

@overrides
def get_constraint_categories(self) -> Dict[str, str]:
def get_constraint_categories(self, names: np.ndarray) -> np.ndarray:
assert self.instance is not None
return self.instance.get_constraint_categories()

@overrides
def has_static_lazy_constraints(self) -> bool:
assert self.instance is not None
return self.instance.has_static_lazy_constraints()
return self.instance.get_constraint_categories(names)

@overrides
def has_dynamic_lazy_constraints(self) -> bool:
@@ -78,16 +74,16 @@ class PickleGzInstance(Instance):
return self.instance.has_dynamic_lazy_constraints()

@overrides
def is_constraint_lazy(self, cid: str) -> bool:
def are_constraints_lazy(self, names: np.ndarray) -> np.ndarray:
assert self.instance is not None
return self.instance.is_constraint_lazy(cid)
return self.instance.are_constraints_lazy(names)

@overrides
def find_violated_lazy_constraints(
self,
solver: "InternalSolver",
model: Any,
) -> List[str]:
) -> List[ConstraintName]:
assert self.instance is not None
return self.instance.find_violated_lazy_constraints(solver, model)

@@ -96,13 +92,13 @@ class PickleGzInstance(Instance):
self,
solver: "InternalSolver",
model: Any,
violation: str,
violation: ConstraintName,
) -> None:
assert self.instance is not None
self.instance.enforce_lazy_constraint(solver, model, violation)

@overrides
def find_violated_user_cuts(self, model: Any) -> List[str]:
def find_violated_user_cuts(self, model: Any) -> List[ConstraintName]:
assert self.instance is not None
return self.instance.find_violated_user_cuts(model)

@@ -111,7 +107,7 @@ class PickleGzInstance(Instance):
self,
solver: "InternalSolver",
model: Any,
violation: str,
violation: ConstraintName,
) -> None:
assert self.instance is not None
self.instance.enforce_user_cut(solver, model, violation)

@@ -14,6 +14,7 @@ from scipy.stats.distributions import rv_frozen
from miplearn.instance.base import Instance
from miplearn.solvers.learning import InternalSolver
from miplearn.solvers.pyomo.base import BasePyomoSolver
from miplearn.types import ConstraintName


class ChallengeA:
@@ -85,14 +86,14 @@ class TravelingSalesmanInstance(Instance):
self,
solver: InternalSolver,
model: Any,
) -> List[str]:
) -> List[ConstraintName]:
selected_edges = [e for e in self.edges if model.x[e].value > 0.5]
graph = nx.Graph()
graph.add_edges_from(selected_edges)
violations = []
for c in list(nx.connected_components(graph)):
if len(c) < self.n_cities:
violations.append(",".join(map(str, c)))
violations.append(",".join(map(str, c)).encode())
return violations

@overrides
@@ -100,10 +101,10 @@ class TravelingSalesmanInstance(Instance):
self,
solver: InternalSolver,
model: Any,
violation: str,
violation: ConstraintName,
) -> None:
assert isinstance(solver, BasePyomoSolver)
component = [int(v) for v in violation.split(",")]
component = [int(v) for v in violation.decode().split(",")]
cut_edges = [
e
for e in self.edges
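
With ConstraintName now an alias for bytes, the subtour violations round-trip through encode/decode as shown above. A tiny illustration (the city indices are made up):

cities = [0, 2, 5]
violation = ",".join(map(str, cities)).encode()              # b"0,2,5", as reported by find_violated_lazy_constraints
component = [int(v) for v in violation.decode().split(",")]  # [0, 2, 5], as recovered by enforce_lazy_constraint
assert component == cities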

@@ -260,7 +260,7 @@ def run_lazy_cb_tests(solver: InternalSolver) -> None:
assert relsol is not None
assert relsol[b"x[0]"] is not None
if relsol[b"x[0]"] > 0:
instance.enforce_lazy_constraint(cb_solver, cb_model, "cut")
instance.enforce_lazy_constraint(cb_solver, cb_model, b"cut")

solver.set_instance(instance, model)
solver.solve(lazy_cb=lazy_cb)

@@ -11,6 +11,8 @@ if TYPE_CHECKING:
from miplearn.solvers.learning import InternalSolver

Category = bytes
ConstraintName = bytes
ConstraintCategory = bytes
IterationCallback = Callable[[], bool]
LazyCallback = Callable[[Any, Any], None]
SolverParams = Dict[str, Any]