Mirror of https://github.com/ANL-CEEESA/MIPLearn.git (synced 2025-12-06 09:28:51 -06:00)
Make xy_sample receive features, not instances
@@ -3,11 +3,11 @@
 # Released under the modified BSD license. See COPYING.md for more details.
 
 import numpy as np
-from typing import Any, List, Union, TYPE_CHECKING, Tuple, Dict
+from typing import Any, List, Union, TYPE_CHECKING, Tuple, Dict, Optional
 
 from miplearn.extractors import InstanceIterator
 from miplearn.instance import Instance
-from miplearn.types import LearningSolveStats, TrainingSample
+from miplearn.types import LearningSolveStats, TrainingSample, Features
 
 if TYPE_CHECKING:
     from miplearn.solvers.learning import LearningSolver
@@ -133,14 +133,16 @@ class Component:
 
     @staticmethod
     def xy_sample(
-        instance: Any,
-        training_sample: TrainingSample,
-    ) -> Tuple[Dict, Dict]:
+        features: Features,
+        sample: TrainingSample,
+    ) -> Optional[Tuple[Dict, Dict]]:
         """
-        Given a training sample, returns a pair of x and y dictionaries containing,
-        respectively, the matrices of ML features and the labels for the sample.
+        Given a set of features and a training sample, returns a pair of x and y
+        dictionaries containing, respectively, the matrices of ML features and the
+        labels for the sample. If the training sample does not include label
+        information, returns None.
         """
-        return {}, {}
+        return None
 
     def xy_instances(
         self,
@@ -149,8 +151,12 @@ class Component:
         x_combined: Dict = {}
         y_combined: Dict = {}
         for instance in InstanceIterator(instances):
+            assert isinstance(instance, Instance)
             for sample in instance.training_data:
-                x_sample, y_sample = self.xy_sample(instance, sample)
+                xy = self.xy_sample(instance.features, sample)
+                if xy is None:
+                    continue
+                x_sample, y_sample = xy
                 for cat in x_sample.keys():
                     if cat not in x_combined:
                         x_combined[cat] = []
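In short: xy_sample now reads everything it needs from a Features dictionary and returns None for samples that carry no labels, so components no longer touch the Instance object directly. A minimal sketch of the new contract follows; the subclass name and the feature/label keys chosen here are illustrative only, not part of the repository:

from typing import Dict, Optional, Tuple

from miplearn.components.component import Component
from miplearn.types import Features, TrainingSample


class MyComponent(Component):  # hypothetical subclass, for illustration only
    @staticmethod
    def xy_sample(
        features: Features,
        sample: TrainingSample,
    ) -> Optional[Tuple[Dict, Dict]]:
        # No label information in this sample: tell the caller to skip it.
        if "Lower bound" not in sample:
            return None
        # Build per-category feature/label matrices from `features` only.
        x = {"default": [features["Instance"]["User features"]]}
        y = {"default": [[sample["Lower bound"]]]}
        return x, y

Callers such as xy_instances pass instance.features and simply skip samples for which xy_sample returns None, as the hunk above shows.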
@@ -5,14 +5,14 @@
 import logging
 import sys
 from copy import deepcopy
-from typing import Any, Dict, Tuple
+from typing import Any, Dict, Tuple, Optional
 
 import numpy as np
 from tqdm.auto import tqdm
 
 from miplearn.classifiers.counting import CountingClassifier
 from miplearn.components.component import Component
-from miplearn.types import TrainingSample
+from miplearn.types import TrainingSample, Features
 
 logger = logging.getLogger(__name__)
 
@@ -207,15 +207,16 @@ class StaticLazyConstraintsComponent(Component):
 
     @staticmethod
     def xy_sample(
-        instance: Any,
+        features: Features,
         sample: TrainingSample,
-    ) -> Tuple[Dict, Dict]:
+    ) -> Optional[Tuple[Dict, Dict]]:
+        if "LazyStatic: Enforced" not in sample:
+            return None
         x: Dict = {}
         y: Dict = {}
-        if "LazyStatic: All" not in sample:
-            return x, y
-        for cid in sorted(sample["LazyStatic: All"]):
-            cfeatures = instance.features["Constraints"][cid]
+        for (cid, cfeatures) in features["Constraints"].items():
+            if not cfeatures["Lazy"]:
+                continue
             category = cfeatures["Category"]
             if category is None:
                 continue
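For context, a toy example of the inputs the rewritten loop consumes and the per-category matrices it is expected to produce. The constraint ids, categories and numbers are invented, and the [not enforced, enforced] label encoding is the one exercised by the test further below:

# Toy inputs (invented for illustration).
features = {
    "Constraints": {
        "c1": {"Category": "type-a", "User features": [1.0, 1.0], "Lazy": True},
        "c2": {"Category": "type-a", "User features": [1.0, 2.0], "Lazy": True},
        "c3": {"Category": "type-b", "User features": [2.0, 0.0], "Lazy": False},
    }
}
sample = {"LazyStatic: Enforced": {"c1"}}

# c3 is skipped because it is not lazy; user features are grouped by category,
# and each label row reads [not enforced, enforced].
x_expected = {"type-a": [[1.0, 1.0], [1.0, 2.0]]}
y_expected = {"type-a": [[False, True], [True, False]]}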
@@ -19,7 +19,7 @@ from miplearn.classifiers import Regressor
 from miplearn.components.component import Component
 from miplearn.extractors import InstanceIterator
 from miplearn.instance import Instance
-from miplearn.types import MIPSolveStats, TrainingSample, LearningSolveStats
+from miplearn.types import MIPSolveStats, TrainingSample, LearningSolveStats, Features
 
 if TYPE_CHECKING:
     from miplearn.solvers.learning import LearningSolver
@@ -164,18 +164,20 @@ class ObjectiveValueComponent(Component):
 
     @staticmethod
     def xy_sample(
-        instance: Any,
+        features: Features,
         sample: TrainingSample,
-    ) -> Tuple[Dict, Dict]:
-        x: Dict = {}
-        y: Dict = {}
+    ) -> Optional[Tuple[Dict, Dict]]:
         if "Lower bound" not in sample:
-            return x, y
-        features = instance.features["Instance"]["User features"]
+            return None
+        f = features["Instance"]["User features"]
         if "LP value" in sample and sample["LP value"] is not None:
-            features += [sample["LP value"]]
-        x["Lower bound"] = [features]
-        x["Upper bound"] = [features]
-        y["Lower bound"] = [[sample["Lower bound"]]]
-        y["Upper bound"] = [[sample["Upper bound"]]]
+            f += [sample["LP value"]]
+        x = {
+            "Lower bound": [f],
+            "Upper bound": [f],
+        }
+        y = {
+            "Lower bound": [[sample["Lower bound"]]],
+            "Upper bound": [[sample["Upper bound"]]],
+        }
         return x, y
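A small usage sketch of the behaviour introduced here, with invented numbers: a sample without a "Lower bound" carries no labels and now yields None, while the LP value, when present, is appended to the instance features.

from miplearn.components.objective import ObjectiveValueComponent
from miplearn.types import Features, TrainingSample

features: Features = {"Instance": {"User features": [1.0, 2.0]}}

# No labels in the sample: the component signals "skip me" instead of
# returning empty matrices.
assert ObjectiveValueComponent.xy_sample(features, {}) is None

# With labels, the LP value is appended to the user features.
sample: TrainingSample = {"Lower bound": 1.0, "Upper bound": 2.0, "LP value": 3.0}
xy = ObjectiveValueComponent.xy_sample(features, sample)
assert xy is not None
x, y = xy
assert x["Lower bound"] == [[1.0, 2.0, 3.0]]
assert y == {"Lower bound": [[1.0]], "Upper bound": [[2.0]]}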
@@ -211,15 +211,15 @@ class PrimalSolutionComponent(Component):
|
||||
|
||||
@staticmethod
|
||||
def xy_sample(
|
||||
instance: Any,
|
||||
features: Features,
|
||||
sample: TrainingSample,
|
||||
) -> Tuple[Dict, Dict]:
|
||||
) -> Optional[Tuple[Dict, Dict]]:
|
||||
if "Solution" not in sample:
|
||||
return {}, {}
|
||||
return None
|
||||
assert sample["Solution"] is not None
|
||||
return cast(
|
||||
Tuple[Dict, Dict],
|
||||
PrimalSolutionComponent._extract(instance.features, sample),
|
||||
PrimalSolutionComponent._extract(features, sample),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
@@ -227,7 +227,10 @@ class PrimalSolutionComponent(Component):
|
||||
features: Features,
|
||||
sample: TrainingSample,
|
||||
) -> Dict:
|
||||
return cast(Dict, PrimalSolutionComponent._extract(features, sample))
|
||||
return cast(
|
||||
Dict,
|
||||
PrimalSolutionComponent._extract(features, sample),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _extract(
|
||||
|
||||
@@ -58,6 +58,7 @@ class FeaturesExtractor:
         self,
         instance: "Instance",
     ) -> Dict[str, ConstraintFeatures]:
+        has_static_lazy = instance.has_static_lazy_constraints()
         constraints: Dict[str, ConstraintFeatures] = {}
         for cid in self.solver.get_constraint_ids():
             user_features = None
@@ -83,6 +84,10 @@ class FeaturesExtractor:
                 "Category": category,
                 "User features": user_features,
             }
+            if has_static_lazy:
+                constraints[cid]["Lazy"] = instance.is_constraint_lazy(cid)
+            else:
+                constraints[cid]["Lazy"] = False
         return constraints
 
     @staticmethod
@@ -101,13 +101,13 @@ class Instance(ABC):
     def get_constraint_category(self, cid: str) -> Optional[str]:
         return cid
 
-    def has_static_lazy_constraints(self):
+    def has_static_lazy_constraints(self) -> bool:
         return False
 
     def has_dynamic_lazy_constraints(self):
         return False
 
-    def is_constraint_lazy(self, cid):
+    def is_constraint_lazy(self, cid: str) -> bool:
         return False
 
     def find_violated_lazy_constraints(self, model):
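These hooks are what the feature extractor above consults when filling the new "Lazy" entry. A sketch of how a user instance might override them; the class name and the constraint-id convention are invented for illustration, and the remaining Instance methods (model construction, feature callbacks) are omitted:

from miplearn.instance import Instance


class MyKnapsackInstance(Instance):  # hypothetical, for illustration only
    # ... remaining required Instance methods omitted for brevity ...

    def has_static_lazy_constraints(self) -> bool:
        # Tell the extractor that some constraints of this model are lazy.
        return True

    def is_constraint_lazy(self, cid: str) -> bool:
        # Mark every constraint whose id starts with "eq_capacity" as lazy.
        return cid.startswith("eq_capacity")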
@@ -98,6 +98,7 @@ ConstraintFeatures = TypedDict(
         "Sense": str,
         "Category": Optional[Hashable],
         "User features": Optional[List[float]],
+        "Lazy": bool,
     },
     total=False,
 )
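Because the TypedDict is declared with total=False, every key, including the new "Lazy" flag, stays optional, which lets the extractor fill entries incrementally. A minimal sketch, assuming ConstraintFeatures is importable from miplearn.types like the other types touched by this commit, with invented values:

from miplearn.types import ConstraintFeatures

cf: ConstraintFeatures = {
    "Category": "eq_capacity",
    "User features": [0.0],
}
cf["Lazy"] = False  # the key added by this commit; omitted keys are allowed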
@@ -7,8 +7,7 @@ from miplearn import Component, Instance
 
 
 def test_xy_instance():
-    def _xy_sample(instance, sample):
-        print(sample)
+    def _xy_sample(features, sample):
         x = {
             "s1": {
                 "category_a": [
@@ -54,8 +53,10 @@ def test_xy_instance():
     comp = Component()
     instance_1 = Mock(spec=Instance)
     instance_1.training_data = ["s1", "s2"]
+    instance_1.features = {}
     instance_2 = Mock(spec=Instance)
     instance_2.training_data = ["s3"]
+    instance_2.features = {}
     comp.xy_sample = _xy_sample
     x_expected = {
         "category_a": [
@@ -9,7 +9,7 @@ from miplearn.components.lazy_static import StaticLazyConstraintsComponent
 from miplearn.instance import Instance
 from miplearn.solvers.internal import InternalSolver
 from miplearn.solvers.learning import LearningSolver
-from miplearn.types import TrainingSample
+from miplearn.types import TrainingSample, Features
 
 
 def test_usage_with_solver():
@@ -234,32 +234,35 @@ def test_fit():
 
 
 def test_xy_sample() -> None:
-    instance = Mock(spec=Instance)
     sample: TrainingSample = {
-        "LazyStatic: Enforced": {"c1", "c2", "c4", "c5"},
-        "LazyStatic: All": {"c1", "c2", "c3", "c4", "c5"},
+        "LazyStatic: Enforced": {"c1", "c2", "c4"},
     }
-    instance.features = {
+    features: Features = {
         "Constraints": {
             "c1": {
                 "Category": "type-a",
                 "User features": [1.0, 1.0],
+                "Lazy": True,
             },
             "c2": {
                 "Category": "type-a",
                 "User features": [1.0, 2.0],
+                "Lazy": True,
             },
             "c3": {
                 "Category": "type-a",
                 "User features": [1.0, 3.0],
+                "Lazy": True,
             },
             "c4": {
                 "Category": "type-b",
                 "User features": [1.0, 4.0, 0.0],
+                "Lazy": True,
             },
             "c5": {
                 "Category": "type-b",
                 "User features": [1.0, 5.0, 0.0],
+                "Lazy": False,
             },
         }
     }
@@ -271,7 +274,6 @@ def test_xy_sample() -> None:
         ],
         "type-b": [
             [1.0, 4.0, 0.0],
-            [1.0, 5.0, 0.0],
         ],
     }
     y_expected = {
@@ -282,9 +284,10 @@ def test_xy_sample() -> None:
         ],
         "type-b": [
             [False, True],
-            [False, True],
         ],
     }
-    x_actual, y_actual = StaticLazyConstraintsComponent.xy_sample(instance, sample)
+    xy = StaticLazyConstraintsComponent.xy_sample(features, sample)
+    assert xy is not None
+    x_actual, y_actual = xy
     assert x_actual == x_expected
     assert y_actual == y_expected
@@ -11,35 +11,10 @@ from numpy.testing import assert_array_equal
 from miplearn.instance import Instance
 from miplearn.classifiers import Regressor
 from miplearn.components.objective import ObjectiveValueComponent
-from miplearn.types import TrainingSample
+from miplearn.types import TrainingSample, Features
 from tests.fixtures.knapsack import get_test_pyomo_instances
 
 
-def test_xy_sample() -> None:
-    instance = cast(Instance, Mock(spec=Instance))
-    instance.features = {
-        "Instance": {
-            "User features": [1.0, 2.0],
-        }
-    }
-    sample: TrainingSample = {
-        "Lower bound": 1.0,
-        "Upper bound": 2.0,
-        "LP value": 3.0,
-    }
-    x_expected = {
-        "Lower bound": [[1.0, 2.0, 3.0]],
-        "Upper bound": [[1.0, 2.0, 3.0]],
-    }
-    y_expected = {
-        "Lower bound": [[1.0]],
-        "Upper bound": [[2.0]],
-    }
-    x_actual, y_actual = ObjectiveValueComponent.xy_sample(instance, sample)
-    assert x_actual == x_expected
-    assert y_actual == y_expected
-
-
 def test_x_y_predict() -> None:
     # Construct instance
     instance = cast(Instance, Mock(spec=Instance))
@@ -125,3 +100,54 @@ def test_obj_evaluate():
             "R2": -5.012843605607331,
         },
     }
+
+
+def test_xy_sample_with_lp() -> None:
+    features: Features = {
+        "Instance": {
+            "User features": [1.0, 2.0],
+        }
+    }
+    sample: TrainingSample = {
+        "Lower bound": 1.0,
+        "Upper bound": 2.0,
+        "LP value": 3.0,
+    }
+    x_expected = {
+        "Lower bound": [[1.0, 2.0, 3.0]],
+        "Upper bound": [[1.0, 2.0, 3.0]],
+    }
+    y_expected = {
+        "Lower bound": [[1.0]],
+        "Upper bound": [[2.0]],
+    }
+    xy = ObjectiveValueComponent.xy_sample(features, sample)
+    assert xy is not None
+    x_actual, y_actual = xy
+    assert x_actual == x_expected
+    assert y_actual == y_expected
+
+
+def test_xy_sample_without_lp() -> None:
+    features: Features = {
+        "Instance": {
+            "User features": [1.0, 2.0],
+        }
+    }
+    sample: TrainingSample = {
+        "Lower bound": 1.0,
+        "Upper bound": 2.0,
+    }
+    x_expected = {
+        "Lower bound": [[1.0, 2.0]],
+        "Upper bound": [[1.0, 2.0]],
+    }
+    y_expected = {
+        "Lower bound": [[1.0]],
+        "Upper bound": [[2.0]],
+    }
+    xy = ObjectiveValueComponent.xy_sample(features, sample)
+    assert xy is not None
+    x_actual, y_actual = xy
+    assert x_actual == x_expected
+    assert y_actual == y_expected
@@ -1,22 +1,22 @@
 # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
 # Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
 # Released under the modified BSD license. See COPYING.md for more details.
-from typing import cast, List
-from unittest.mock import Mock, call
+from typing import cast
+from unittest.mock import Mock
 
 import numpy as np
 from numpy.testing import assert_array_equal
 
 from miplearn import Classifier
-from miplearn.classifiers.threshold import Threshold, MinPrecisionThreshold
+from miplearn.classifiers.threshold import Threshold
 from miplearn.components.primal import PrimalSolutionComponent
 from miplearn.instance import Instance
-from miplearn.types import TrainingSample
+from miplearn.types import TrainingSample, Features
 
 
 def test_xy_sample_with_lp_solution() -> None:
-    instance = cast(Instance, Mock(spec=Instance))
-    instance.features = {
+    features: Features = {
         "Variables": {
             "x": {
                 0: {
@@ -56,34 +56,28 @@ def test_xy_sample_with_lp_solution() -> None:
         },
     }
     x_expected = {
-        "default": np.array(
-            [
+        "default": [
             [0.0, 0.0, 0.1],
             [1.0, 0.0, 0.1],
             [1.0, 1.0, 0.1],
         ]
-        )
     }
     y_expected = {
-        "default": np.array(
-            [
+        "default": [
             [True, False],
             [False, True],
             [True, False],
         ]
-        )
     }
-    x_actual, y_actual = PrimalSolutionComponent.xy_sample(instance, sample)
-    assert len(x_actual.keys()) == 1
-    assert len(y_actual.keys()) == 1
-    assert_array_equal(x_actual["default"], x_expected["default"])
-    assert_array_equal(y_actual["default"], y_expected["default"])
+    xy = PrimalSolutionComponent.xy_sample(features, sample)
+    assert xy is not None
+    x_actual, y_actual = xy
+    assert x_actual == x_expected
+    assert y_actual == y_expected
 
 
 def test_xy_sample_without_lp_solution() -> None:
-    comp = PrimalSolutionComponent()
-    instance = cast(Instance, Mock(spec=Instance))
-    instance.features = {
+    features: Features = {
         "Variables": {
             "x": {
                 0: {
@@ -115,28 +109,24 @@ def test_xy_sample_without_lp_solution() -> None:
         },
     }
     x_expected = {
-        "default": np.array(
-            [
+        "default": [
            [0.0, 0.0],
            [1.0, 0.0],
            [1.0, 1.0],
         ]
-        )
     }
     y_expected = {
-        "default": np.array(
-            [
+        "default": [
            [True, False],
            [False, True],
            [True, False],
         ]
-        )
     }
-    x_actual, y_actual = comp.xy_sample(instance, sample)
-    assert len(x_actual.keys()) == 1
-    assert len(y_actual.keys()) == 1
-    assert_array_equal(x_actual["default"], x_expected["default"])
-    assert_array_equal(y_actual["default"], y_expected["default"])
+    xy = PrimalSolutionComponent.xy_sample(features, sample)
+    assert xy is not None
+    x_actual, y_actual = xy
+    assert x_actual == x_expected
+    assert y_actual == y_expected
 
 
 def test_predict() -> None:
@@ -44,6 +44,7 @@ def test_knapsack() -> None:
             },
             "Sense": "<",
             "RHS": 67.0,
+            "Lazy": False,
             "Category": "eq_capacity",
             "User features": [0.0],
         }