Mirror of https://github.com/ANL-CEEESA/MIPLearn.git (synced 2025-12-06 09:28:51 -06:00)
Rewrite ObjectiveValueComponent.sample_xy
@@ -7,7 +7,7 @@ from typing import Any, List, TYPE_CHECKING, Tuple, Dict, Hashable
 import numpy as np
 from overrides import EnforceOverrides
 
-from miplearn.features import TrainingSample, Features
+from miplearn.features import TrainingSample, Features, Sample
 from miplearn.instance.base import Instance
 from miplearn.types import LearningSolveStats
 
@@ -119,6 +119,14 @@ class Component:
         """
         pass
 
+    def sample_xy(self, sample: Sample) -> Tuple[Dict, Dict]:
+        """
+        Returns a pair of x and y dictionaries containing, respectively, the matrices
+        of ML features and the labels for the sample. If the training sample does not
+        include label information, returns (x, {}).
+        """
+        pass
+
     def xy_instances(
         self,
         instances: List[Instance],
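To make the x/y contract above concrete, here is a rough sketch (not part of the commit) of the pair a component is expected to return; the category names and values mirror the test expectations further down in this commit:

# Illustration only: keys are hashable category names, values are row-major
# matrices with one row of features (x) or labels (y) per training target.
x = {
    "Lower bound": [[1.0, 2.0, 3.0]],
    "Upper bound": [[1.0, 2.0, 3.0]],
}
y = {
    "Lower bound": [[1.0]],
    "Upper bound": [[2.0]],
}
# For a sample without solution information, sample_xy returns (x, {}).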
@@ -12,7 +12,7 @@ from sklearn.linear_model import LinearRegression
 from miplearn.classifiers import Regressor
 from miplearn.classifiers.sklearn import ScikitLearnRegressor
 from miplearn.components.component import Component
-from miplearn.features import TrainingSample, Features
+from miplearn.features import TrainingSample, Features, Sample
 from miplearn.instance.base import Instance
 from miplearn.types import LearningSolveStats
 
@@ -98,6 +98,39 @@ class ObjectiveValueComponent(Component):
         y["Upper bound"] = [[sample.upper_bound]]
         return x, y
 
+    @overrides
+    def sample_xy(
+        self,
+        sample: Sample,
+    ) -> Tuple[Dict[Hashable, List[List[float]]], Dict[Hashable, List[List[float]]]]:
+        # Instance features
+        assert sample.after_load is not None
+        assert sample.after_load.instance is not None
+        f = sample.after_load.instance.to_list()
+
+        # LP solve features
+        if sample.after_lp is not None:
+            assert sample.after_lp.lp_solve is not None
+            f.extend(sample.after_lp.lp_solve.to_list())
+
+        # Features
+        x: Dict[Hashable, List[List[float]]] = {
+            "Upper bound": [f],
+            "Lower bound": [f],
+        }
+
+        # Labels
+        y: Dict[Hashable, List[List[float]]] = {}
+        if sample.after_mip is not None:
+            mip_stats = sample.after_mip.mip_solve
+            assert mip_stats is not None
+            if mip_stats.mip_lower_bound is not None:
+                y["Lower bound"] = [[mip_stats.mip_lower_bound]]
+            if mip_stats.mip_upper_bound is not None:
+                y["Upper bound"] = [[mip_stats.mip_upper_bound]]
+
+        return x, y
+
     @overrides
     def sample_evaluate_old(
         self,
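As a rough usage sketch (not part of this commit), the per-sample (x, y) pairs produced by the method above would typically be stacked across many training samples before the component fits its "Lower bound" and "Upper bound" regressors; the helper below is hypothetical and only illustrates that concatenation:

from typing import Dict, Hashable, List, Tuple

XY = Tuple[Dict[Hashable, List[List[float]]], Dict[Hashable, List[List[float]]]]

# Hypothetical helper: merge (x, y) pairs from several samples into one
# feature matrix and one label matrix per category.
def stack_xy(pairs: List[XY]) -> XY:
    x_all: Dict[Hashable, List[List[float]]] = {}
    y_all: Dict[Hashable, List[List[float]]] = {}
    for x, y in pairs:
        for category, rows in x.items():
            x_all.setdefault(category, []).extend(rows)
        for category, rows in y.items():
            y_all.setdefault(category, []).extend(rows)
    return x_all, y_all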
@@ -36,6 +36,12 @@ class InstanceFeatures:
     user_features: Optional[List[float]] = None
     lazy_constraint_count: int = 0
 
+    def to_list(self) -> List[float]:
+        features: List[float] = []
+        if self.user_features is not None:
+            features.extend(self.user_features)
+        return features
+
 
 @dataclass
 class Variable:
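A minimal sketch of the new behavior, assuming (as the hunk suggests) that all InstanceFeatures fields have defaults:

from miplearn.features import InstanceFeatures

# to_list copies user_features when present and returns an empty list otherwise.
assert InstanceFeatures(user_features=[1.0, 2.0]).to_list() == [1.0, 2.0]
assert InstanceFeatures(user_features=None).to_list() == []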
@@ -96,6 +102,26 @@ class Constraint:
     slack: Optional[float] = None
     user_features: Optional[List[float]] = None
 
+    def to_list(self) -> List[float]:
+        features: List[float] = []
+        for attr in [
+            "dual_value",
+            "rhs",
+            "sa_rhs_down",
+            "sa_rhs_up",
+            "slack",
+        ]:
+            if getattr(self, attr) is not None:
+                features.append(getattr(self, attr))
+        for attr in ["user_features"]:
+            if getattr(self, attr) is not None:
+                features.extend(getattr(self, attr))
+        if self.lhs is not None and len(self.lhs) > 0:
+            features.append(np.max(self.lhs.values()))
+            features.append(np.average(self.lhs.values()))
+            features.append(np.min(self.lhs.values()))
+        return features
+
 
 @dataclass
 class Features:
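As a rough illustration (hypothetical values, assuming Constraint lives in miplearn.features alongside the classes above and that its remaining fields default to None), only the attributes that are actually set contribute to the feature list:

from miplearn.features import Constraint

# Scalar attributes come first, in the order listed above,
# followed by the user_features entries; unset attributes are skipped.
c = Constraint(rhs=10.0, slack=0.5, user_features=[7.0])
assert c.to_list() == [10.0, 0.5, 7.0]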
@@ -29,16 +29,23 @@ class LPSolveStats:
     lp_value: Optional[float] = None
     lp_wallclock_time: Optional[float] = None
 
+    def to_list(self) -> List[float]:
+        features: List[float] = []
+        for attr in ["lp_value", "lp_wallclock_time"]:
+            if getattr(self, attr) is not None:
+                features.append(getattr(self, attr))
+        return features
+
 
 @dataclass
 class MIPSolveStats:
-    mip_lower_bound: Optional[float]
-    mip_log: str
-    mip_nodes: Optional[int]
-    mip_sense: str
-    mip_upper_bound: Optional[float]
-    mip_wallclock_time: float
-    mip_warm_start_value: Optional[float]
+    mip_lower_bound: Optional[float] = None
+    mip_log: Optional[str] = None
+    mip_nodes: Optional[int] = None
+    mip_sense: Optional[str] = None
+    mip_upper_bound: Optional[float] = None
+    mip_wallclock_time: Optional[float] = None
+    mip_warm_start_value: Optional[float] = None
 
 
 class InternalSolver(ABC, EnforceOverrides):
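With these defaults, a partially populated MIPSolveStats becomes valid, which the new test fixture below depends on; a minimal sketch, assuming any stats fields not shown here also have defaults:

from miplearn.solvers.internal import MIPSolveStats, LPSolveStats

# Only the bounds are supplied; every other field falls back to None.
stats = MIPSolveStats(mip_lower_bound=1.0, mip_upper_bound=2.0)
assert stats.mip_wallclock_time is None

# LPSolveStats.to_list keeps just the attributes that are set.
assert LPSolveStats(lp_value=3.0).to_list() == [3.0]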
@@ -10,8 +10,9 @@ from numpy.testing import assert_array_equal
 
 from miplearn.classifiers import Regressor
 from miplearn.components.objective import ObjectiveValueComponent
-from miplearn.features import TrainingSample, InstanceFeatures, Features
+from miplearn.features import TrainingSample, InstanceFeatures, Features, Sample
 from miplearn.instance.base import Instance
+from miplearn.solvers.internal import MIPSolveStats, LPSolveStats
 from miplearn.solvers.learning import LearningSolver
 from miplearn.solvers.pyomo.gurobi import GurobiPyomoSolver
 
@@ -41,6 +42,27 @@ def sample_old() -> TrainingSample:
     )
 
 
+@pytest.fixture
+def sample() -> Sample:
+    sample = Sample(
+        after_load=Features(
+            instance=InstanceFeatures(),
+        ),
+        after_lp=Features(
+            lp_solve=LPSolveStats(),
+        ),
+        after_mip=Features(
+            mip_solve=MIPSolveStats(
+                mip_lower_bound=1.0,
+                mip_upper_bound=2.0,
+            )
+        ),
+    )
+    sample.after_load.instance.to_list = Mock(return_value=[1.0, 2.0])  # type: ignore
+    sample.after_lp.lp_solve.to_list = Mock(return_value=[3.0])  # type: ignore
+    return sample
+
+
 @pytest.fixture
 def sample_without_lp() -> TrainingSample:
     return TrainingSample(
@@ -57,10 +79,7 @@ def sample_without_ub_old() -> TrainingSample:
     )
 
 
-def test_sample_xy(
-    instance: Instance,
-    sample_old: TrainingSample,
-) -> None:
+def test_sample_xy(sample: Sample) -> None:
     x_expected = {
         "Lower bound": [[1.0, 2.0, 3.0]],
         "Upper bound": [[1.0, 2.0, 3.0]],
@@ -69,7 +88,7 @@ def test_sample_xy(
         "Lower bound": [[1.0]],
         "Upper bound": [[2.0]],
     }
-    xy = ObjectiveValueComponent().sample_xy_old(instance, sample_old)
+    xy = ObjectiveValueComponent().sample_xy(sample)
     assert xy is not None
     x_actual, y_actual = xy
     assert x_actual == x_expected