Add Component.xy and PrimalSolutionComponent.xy

Branch: master
Author: Alinson S. Xavier, 5 years ago
Parent: 75d1eee424
Commit: 9266743940
@@ -3,7 +3,7 @@
 # Released under the modified BSD license. See COPYING.md for more details.
 
 from abc import ABC, abstractmethod
-from typing import Any, List, Union, TYPE_CHECKING
+from typing import Any, List, Union, TYPE_CHECKING, Tuple, Dict
 
 from miplearn.instance import Instance
 from miplearn.types import LearningSolveStats, TrainingSample
@@ -12,6 +12,7 @@ if TYPE_CHECKING:
     from miplearn.solvers.learning import LearningSolver
 
 
+# noinspection PyMethodMayBeStatic
 class Component(ABC):
     """
     A Component is an object which adds functionality to a LearningSolver.
@@ -135,6 +136,17 @@ class Component(ABC):
     ) -> None:
         return
 
+    def xy(
+        self,
+        instance: Any,
+        training_sample: TrainingSample,
+    ) -> Tuple[Dict, Dict]:
+        """
+        Given a training sample, returns a pair of x and y dictionaries containing,
+        respectively, the matrices of ML features and the labels for the sample.
+        """
+        return {}, {}
+
     def iteration_cb(
         self,
         solver: "LearningSolver",
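The xy hook added above groups training data by variable category: x maps each category to a list of feature vectors, and y maps it to a list of label vectors, one row per decision variable. As a rough illustration (not part of this commit; the values are taken from the tests further down), the pair returned for a single training sample might look like this:

    x = {
        "default": [
            [0.0, 0.0, 0.1],  # variable features, optionally extended with its LP value
            [1.0, 0.0, 0.1],
            [1.0, 1.0, 0.1],
        ]
    }
    y = {
        "default": [
            [True, False],  # the variable takes value 0 in the optimal solution
            [False, True],  # the variable takes value 1
            [True, False],
        ]
    }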

@@ -3,7 +3,17 @@
 # Released under the modified BSD license. See COPYING.md for more details.
 
 import logging
-from typing import Union, Dict, Callable, List, Hashable, Optional, Any, TYPE_CHECKING
+from typing import (
+    Union,
+    Dict,
+    Callable,
+    List,
+    Hashable,
+    Optional,
+    Any,
+    TYPE_CHECKING,
+    Tuple,
+)
 
 import numpy as np
 from tqdm.auto import tqdm
@@ -286,3 +296,34 @@ class PrimalSolutionComponent(Component):
             f"Please set its category to None."
         )
         return [opt_value < 0.5, opt_value > 0.5]
+
+    def xy(
+        self,
+        instance: Any,
+        sample: TrainingSample,
+    ) -> Tuple[Dict, Dict]:
+        x: Dict = {}
+        y: Dict = {}
+        if "Solution" not in sample:
+            return x, y
+        assert sample["Solution"] is not None
+        for (var, var_dict) in sample["Solution"].items():
+            for (idx, opt_value) in var_dict.items():
+                assert opt_value is not None
+                assert 0.0 - 1e-5 <= opt_value <= 1.0 + 1e-5, (
+                    f"Variable {var} has non-binary value {opt_value} in the optimal "
+                    f"solution. Predicting values of non-binary variables is not "
+                    f"currently supported. Please set its category to None."
+                )
+                category = instance.get_variable_category(var, idx)
+                if category is None:
+                    continue
+                if category not in x.keys():
+                    x[category] = []
+                    y[category] = []
+                features: Any = instance.get_variable_features(var, idx)
+                if "LP solution" in sample and sample["LP solution"] is not None:
+                    features += [sample["LP solution"][var][idx]]
+                x[category] += [features]
+                y[category] += [[opt_value < 0.5, opt_value >= 0.5]]
+        return x, y
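A component's fit step can consume this per-category output by training one classifier per category. The sketch below is illustrative only; the classifier choice and the fit loop are assumptions, not code from this commit. It converts the label pairs produced by PrimalSolutionComponent.xy into a single binary target and fits one scikit-learn model per category.

    import numpy as np
    from sklearn.neighbors import KNeighborsClassifier  # illustrative classifier choice

    x, y = comp.xy(instance, sample)  # comp is a PrimalSolutionComponent
    classifiers = {}
    for category in x.keys():
        features = np.array(x[category])
        # y[category] holds [value < 0.5, value >= 0.5] pairs; keep the second column
        labels = np.array(y[category])[:, 1]
        clf = KNeighborsClassifier(n_neighbors=1)
        clf.fit(features, labels)
        classifiers[category] = clf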

@@ -11,6 +11,123 @@ from miplearn import Classifier
 from miplearn.classifiers.threshold import Threshold, MinPrecisionThreshold
 from miplearn.components.primal import PrimalSolutionComponent
 from miplearn.instance import Instance
+from miplearn.types import TrainingSample
+
+
+def test_xy_with_lp_solution() -> None:
+    comp = PrimalSolutionComponent()
+    instance = cast(Instance, Mock(spec=Instance))
+    instance.get_variable_category = Mock(  # type: ignore
+        side_effect=lambda var_name, index: {
+            0: "default",
+            1: None,
+            2: "default",
+            3: "default",
+        }[index]
+    )
+    instance.get_variable_features = Mock(  # type: ignore
+        side_effect=lambda var, index: {
+            0: [0.0, 0.0],
+            1: [0.0, 1.0],
+            2: [1.0, 0.0],
+            3: [1.0, 1.0],
+        }[index]
+    )
+    sample: TrainingSample = {
+        "Solution": {
+            "x": {
+                0: 0.0,
+                1: 1.0,
+                2: 1.0,
+                3: 0.0,
+            }
+        },
+        "LP solution": {
+            "x": {
+                0: 0.1,
+                1: 0.1,
+                2: 0.1,
+                3: 0.1,
+            }
+        },
+    }
+    x_expected = {
+        "default": np.array(
+            [
+                [0.0, 0.0, 0.1],
+                [1.0, 0.0, 0.1],
+                [1.0, 1.0, 0.1],
+            ]
+        )
+    }
+    y_expected = {
+        "default": np.array(
+            [
+                [True, False],
+                [False, True],
+                [True, False],
+            ]
+        )
+    }
+    x_actual, y_actual = comp.xy(instance, sample)
+    assert len(x_actual.keys()) == 1
+    assert len(y_actual.keys()) == 1
+    assert_array_equal(x_actual["default"], x_expected["default"])
+    assert_array_equal(y_actual["default"], y_expected["default"])
+
+
+def test_xy_without_lp_solution() -> None:
+    comp = PrimalSolutionComponent()
+    instance = cast(Instance, Mock(spec=Instance))
+    instance.get_variable_category = Mock(  # type: ignore
+        side_effect=lambda var_name, index: {
+            0: "default",
+            1: None,
+            2: "default",
+            3: "default",
+        }[index]
+    )
+    instance.get_variable_features = Mock(  # type: ignore
+        side_effect=lambda var, index: {
+            0: [0.0, 0.0],
+            1: [0.0, 1.0],
+            2: [1.0, 0.0],
+            3: [1.0, 1.0],
+        }[index]
+    )
+    sample: TrainingSample = {
+        "Solution": {
+            "x": {
+                0: 0.0,
+                1: 1.0,
+                2: 1.0,
+                3: 0.0,
+            }
+        },
+    }
+    x_expected = {
+        "default": np.array(
+            [
+                [0.0, 0.0],
+                [1.0, 0.0],
+                [1.0, 1.0],
+            ]
+        )
+    }
+    y_expected = {
+        "default": np.array(
+            [
+                [True, False],
+                [False, True],
+                [True, False],
+            ]
+        )
+    }
+    x_actual, y_actual = comp.xy(instance, sample)
+    assert len(x_actual.keys()) == 1
+    assert len(y_actual.keys()) == 1
+    assert_array_equal(x_actual["default"], x_expected["default"])
+    assert_array_equal(y_actual["default"], y_expected["default"])
 
 
 def test_x_y_fit() -> None:
