Mirror of https://github.com/ANL-CEEESA/MIPLearn.git, synced 2025-12-06 09:28:51 -06:00
Convert Features into dataclass
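
In short: this commit changes Features from a TypedDict, accessed with string keys such as features["Instance"], into a dataclass whose fields default to None and are read as attributes (features.instance, features.variables, features.constraints). That is why the hunks below add explicit "is not None" guards before indexing. A minimal self-contained sketch of the access-pattern change, using simplified stand-in types rather than the full miplearn.types definitions:

from dataclasses import dataclass
from typing import Dict, Optional

# Simplified stand-ins; the real InstanceFeatures/ConstraintFeatures are TypedDicts in miplearn.types.
InstanceFeatures = Dict
ConstraintFeatures = Dict


@dataclass
class Features:
    instance: Optional[InstanceFeatures] = None
    constraints: Optional[Dict[str, ConstraintFeatures]] = None


features = Features(instance={"Lazy constraint count": 0})

# Before: features["Instance"]["Lazy constraint count"]   (dict-style key access)
# After:  attribute access, guarded because unset fields are None.
assert features.instance is not None
print(features.instance["Lazy constraint count"])
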
@@ -61,13 +61,16 @@ class StaticLazyConstraintsComponent(Component):
         training_data: TrainingSample,
     ) -> None:
         assert solver.internal_solver is not None
-        if not features["Instance"]["Lazy constraint count"] == 0:
+        assert features.instance is not None
+        assert features.constraints is not None
+
+        if not features.instance["Lazy constraint count"] == 0:
             logger.info("Instance does not have static lazy constraints. Skipping.")
         logger.info("Predicting required lazy constraints...")
         self.enforced_cids = set(self.sample_predict(features, training_data))
         logger.info("Moving lazy constraints to the pool...")
         self.pool = {}
-        for (cid, cdict) in features["Constraints"].items():
+        for (cid, cdict) in features.constraints.items():
             if cdict["Lazy"] and cid not in self.enforced_cids:
                 self.pool[cid] = LazyConstraint(
                     cid=cid,
@@ -145,9 +148,11 @@ class StaticLazyConstraintsComponent(Component):
         features: Features,
         sample: TrainingSample,
     ) -> List[str]:
+        assert features.constraints is not None
+
         x, y = self.sample_xy(features, sample)
         category_to_cids: Dict[Hashable, List[str]] = {}
-        for (cid, cdict) in features["Constraints"].items():
+        for (cid, cdict) in features.constraints.items():
             if "Category" not in cdict or cdict["Category"] is None:
                 continue
             category = cdict["Category"]
@@ -172,9 +177,10 @@ class StaticLazyConstraintsComponent(Component):
         features: Features,
         sample: TrainingSample,
     ) -> Tuple[Dict[Hashable, List[List[float]]], Dict[Hashable, List[List[float]]]]:
+        assert features.constraints is not None
         x: Dict = {}
         y: Dict = {}
-        for (cid, cfeatures) in features["Constraints"].items():
+        for (cid, cfeatures) in features.constraints.items():
             if not cfeatures["Lazy"]:
                 continue
             category = cfeatures["Category"]
@@ -77,9 +77,10 @@ class ObjectiveValueComponent(Component):
         features: Features,
         sample: TrainingSample,
     ) -> Tuple[Dict[Hashable, List[List[float]]], Dict[Hashable, List[List[float]]]]:
+        assert features.instance is not None
         x: Dict[Hashable, List[List[float]]] = {}
         y: Dict[Hashable, List[List[float]]] = {}
-        f = list(features["Instance"]["User features"])
+        f = list(features.instance["User features"])
         if "LP value" in sample and sample["LP value"] is not None:
             f += [sample["LP value"]]
         for c in ["Upper bound", "Lower bound"]:
@@ -105,9 +105,11 @@ class PrimalSolutionComponent(Component):
         features: Features,
         sample: TrainingSample,
     ) -> Solution:
+        assert features.variables is not None
+
         # Initialize empty solution
         solution: Solution = {}
-        for (var_name, var_dict) in features["Variables"].items():
+        for (var_name, var_dict) in features.variables.items():
             solution[var_name] = {}
             for idx in var_dict.keys():
                 solution[var_name][idx] = None
@@ -132,7 +134,7 @@ class PrimalSolutionComponent(Component):
 
         # Convert y_pred into solution
         category_offset: Dict[Hashable, int] = {cat: 0 for cat in x.keys()}
-        for (var_name, var_dict) in features["Variables"].items():
+        for (var_name, var_dict) in features.variables.items():
             for (idx, var_features) in var_dict.items():
                 category = var_features["Category"]
                 offset = category_offset[category]
@@ -149,12 +151,13 @@ class PrimalSolutionComponent(Component):
         features: Features,
         sample: TrainingSample,
     ) -> Tuple[Dict[Hashable, List[List[float]]], Dict[Hashable, List[List[float]]]]:
+        assert features.variables is not None
         x: Dict = {}
         y: Dict = {}
         solution: Optional[Solution] = None
         if "Solution" in sample and sample["Solution"] is not None:
             solution = sample["Solution"]
-        for (var_name, var_dict) in features["Variables"].items():
+        for (var_name, var_dict) in features.variables.items():
             for (idx, var_features) in var_dict.items():
                 category = var_features["Category"]
                 if category is None:
@@ -19,13 +19,10 @@ class FeaturesExtractor:
     ) -> None:
         self.solver = internal_solver
 
-    def extract(self, instance: "Instance") -> Features:
-        features: Features = {
-            "Variables": self._extract_variables(instance),
-            "Constraints": self._extract_constraints(instance),
-        }
-        features["Instance"] = self._extract_instance(instance, features)
-        return features
+    def extract(self, instance: "Instance") -> None:
+        instance.features.variables = self._extract_variables(instance)
+        instance.features.constraints = self._extract_constraints(instance)
+        instance.features.instance = self._extract_instance(instance, instance.features)
 
     def _extract_variables(self, instance: "Instance") -> Dict:
         variables = self.solver.get_empty_solution()
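
The extract() hunk above changes the method's contract: instead of building and returning a Features dict, it now writes the three feature groups into instance.features in place and returns None. A minimal runnable mock of the new pattern (Instance and FeaturesExtractor here are simplified stand-ins, not the real MIPLearn classes, which also take an internal solver):

from dataclasses import dataclass
from typing import Dict, Optional


@dataclass
class Features:
    instance: Optional[Dict] = None
    variables: Optional[Dict] = None
    constraints: Optional[Dict] = None


class Instance:
    def __init__(self) -> None:
        # Every instance now starts with an empty Features object
        # (see the Instance.__init__ hunk further down).
        self.features = Features()


class FeaturesExtractor:
    def extract(self, instance: Instance) -> None:
        # Fill the dataclass fields in place instead of returning a new dict.
        instance.features.variables = {"x": {0: {"Category": "default"}}}
        instance.features.constraints = {"eq_capacity": {"Lazy": False}}
        instance.features.instance = {"Lazy constraint count": 0}


inst = Instance()
FeaturesExtractor().extract(inst)
assert inst.features.variables is not None
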
@@ -97,6 +94,7 @@ class FeaturesExtractor:
         instance: "Instance",
         features: Features,
     ) -> InstanceFeatures:
+        assert features.constraints is not None
         user_features = instance.get_instance_features()
         assert isinstance(user_features, list), (
             f"Instance features must be a list. "
@@ -108,7 +106,7 @@ class FeaturesExtractor:
                 f"Found {type(v).__name__} instead."
             )
         lazy_count = 0
-        for (cid, cdict) in features["Constraints"].items():
+        for (cid, cdict) in features.constraints.items():
             if cdict["Lazy"]:
                 lazy_count += 1
         return {
@@ -47,7 +47,7 @@ class Instance(ABC):
 
     def __init__(self) -> None:
         self.training_data: List[TrainingSample] = []
-        self.features: Features = {}
+        self.features: Features = Features()
 
     @abstractmethod
     def to_model(self) -> Any:
@@ -206,8 +206,8 @@ class PickleGzInstance(Instance):
         Path of the gzipped pickle file that should be loaded.
     """
 
-    # noinspection PyMissingConstructor
     def __init__(self, filename: str) -> None:
+        super().__init__()
         assert os.path.exists(filename), f"File not found: {filename}"
         self.instance: Optional[Instance] = None
         self.filename: str = filename
@@ -148,9 +148,7 @@ class LearningSolver:
         self.internal_solver.set_instance(instance, model)
 
         # Extract features
-        extractor = FeaturesExtractor(self.internal_solver)
-        instance.features.clear()  # type: ignore
-        instance.features.update(extractor.extract(instance))
+        FeaturesExtractor(self.internal_solver).extract(instance)
 
         callback_args = (
             self,
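
The three-line extraction block collapses to a single call because dict-style mutation no longer applies: clear() and update() only existed while Features was a dict at runtime, and the new extract() assigns the dataclass fields directly. A tiny sketch of that point, using a stand-in Features with one field:

from dataclasses import dataclass
from typing import Optional


@dataclass
class Features:
    instance: Optional[dict] = None


f = Features()
# A plain dataclass has no dict methods, so the old clear()/update() calls would fail.
assert not hasattr(f, "clear") and not hasattr(f, "update")
# Fields are replaced by plain assignment instead.
f.instance = {"User features": [1.0]}
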
@@ -3,6 +3,7 @@
 # Released under the modified BSD license. See COPYING.md for more details.
 
 from typing import Optional, Dict, Callable, Any, Union, Tuple, List, Set, Hashable
+from dataclasses import dataclass
 
 from mypy_extensions import TypedDict
 
@@ -108,15 +109,13 @@ ConstraintFeatures = TypedDict(
     total=False,
 )
 
-Features = TypedDict(
-    "Features",
-    {
-        "Instance": InstanceFeatures,
-        "Variables": Dict[str, Dict[VarIndex, VariableFeatures]],
-        "Constraints": Dict[str, ConstraintFeatures],
-    },
-    total=False,
-)
+
+
+@dataclass
+class Features:
+    instance: Optional[InstanceFeatures] = None
+    variables: Optional[Dict[str, Dict[VarIndex, VariableFeatures]]] = None
+    constraints: Optional[Dict[str, ConstraintFeatures]] = None
 
 
 IterationCallback = Callable[[], bool]
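
This is the core type change: optionality moves from the TypedDict's total=False (keys may be absent) to dataclass fields that default to None, so callers now assert before indexing, as the component hunks above show. A minimal sketch with a simplified single-field Features:

from dataclasses import dataclass
from typing import Optional


@dataclass
class Features:  # simplified; the real fields are typed with InstanceFeatures etc.
    instance: Optional[dict] = None


def lazy_count(features: Features) -> int:
    # With the dataclass, an unset field is simply None rather than a missing key,
    # so a guard is needed before indexing into it.
    assert features.instance is not None
    return features.instance["Lazy constraint count"]


print(lazy_count(Features(instance={"Lazy constraint count": 4})))
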
@@ -24,11 +24,11 @@ def sample() -> TrainingSample:
 
 @pytest.fixture
 def features() -> Features:
-    return {
-        "Instance": {
+    return Features(
+        instance={
             "Lazy constraint count": 4,
         },
-        "Constraints": {
+        constraints={
             "c1": {
                 "Category": "type-a",
                 "User features": [1.0, 1.0],
@@ -55,7 +55,7 @@ def features() -> Features:
                 "Lazy": False,
             },
         },
-    }
+    )
 
 
 def test_usage_with_solver(features: Features) -> None:
@@ -17,11 +17,11 @@ import numpy as np
 
 @pytest.fixture
 def features() -> Features:
-    return {
-        "Instance": {
+    return Features(
+        instance={
             "User features": [1.0, 2.0],
         }
-    }
+    )
 
 
 @pytest.fixture
@@ -17,8 +17,8 @@ from miplearn.types import TrainingSample, Features
 
 
 def test_xy() -> None:
-    features: Features = {
-        "Variables": {
+    features = Features(
+        variables={
             "x": {
                 0: {
                     "Category": "default",
@@ -37,7 +37,7 @@ def test_xy() -> None:
                 },
             }
         }
-    }
+    )
     sample: TrainingSample = {
         "Solution": {
             "x": {
@@ -78,8 +78,8 @@ def test_xy() -> None:
 
 
 def test_xy_without_lp_solution() -> None:
-    features: Features = {
-        "Variables": {
+    features = Features(
+        variables={
             "x": {
                 0: {
                     "Category": "default",
@@ -98,7 +98,7 @@ def test_xy_without_lp_solution() -> None:
                 },
             }
         }
-    }
+    )
     sample: TrainingSample = {
         "Solution": {
             "x": {
@@ -143,8 +143,8 @@ def test_predict() -> None:
     )
     thr = Mock(spec=Threshold)
     thr.predict = Mock(return_value=[0.75, 0.75])
-    features: Features = {
-        "Variables": {
+    features = Features(
+        variables={
             "x": {
                 0: {
                     "Category": "default",
@@ -160,7 +160,7 @@ def test_predict() -> None:
                 },
             }
         }
-    }
+    )
    sample: TrainingSample = {
         "LP solution": {
             "x": {
@@ -243,8 +243,8 @@ def test_evaluate() -> None:
             4: 1.0,
         }
     }
-    features: Features = {
-        "Variables": {
+    features = Features(
+        variables={
             "x": {
                 0: {},
                 1: {},
@@ -253,7 +253,7 @@ def test_evaluate() -> None:
                 4: {},
             }
         }
-    }
+    )
     sample: TrainingSample = {
         "Solution": {
             "x": {
@@ -91,14 +91,18 @@ def test_solve_fit_from_disk():
     solver.solve(instances[0])
     instance_loaded = read_pickle_gz(instances[0].filename)
     assert len(instance_loaded.training_data) > 0
-    assert len(instance_loaded.features) > 0
+    assert instance_loaded.features.instance is not None
+    assert instance_loaded.features.variables is not None
+    assert instance_loaded.features.constraints is not None
 
     # Test: parallel_solve
     solver.parallel_solve(instances)
     for instance in instances:
         instance_loaded = read_pickle_gz(instance.filename)
         assert len(instance_loaded.training_data) > 0
-        assert len(instance_loaded.features) > 0
+        assert instance_loaded.features.instance is not None
+        assert instance_loaded.features.variables is not None
+        assert instance_loaded.features.constraints is not None
 
     # Delete temporary files
     for instance in instances:
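
The test replaces len(instance_loaded.features) > 0 with per-field checks because a plain dataclass defines no __len__; the old assertion only made sense while Features was a dict. A tiny sketch of why:

from dataclasses import dataclass
from typing import Optional


@dataclass
class Features:
    instance: Optional[dict] = None


try:
    len(Features())  # raises: a plain dataclass has no __len__
except TypeError as err:
    print(err)  # hence the per-field "is not None" assertions above
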
@@ -13,9 +13,8 @@ def test_knapsack() -> None:
     instance = get_knapsack_instance(solver)
     model = instance.to_model()
     solver.set_instance(instance, model)
-    extractor = FeaturesExtractor(solver)
-    features = extractor.extract(instance)
-    assert features["Variables"] == {
+    FeaturesExtractor(solver).extract(instance)
+    assert instance.features.variables == {
         "x": {
             0: {
                 "Category": "default",
@@ -35,7 +34,8 @@ def test_knapsack() -> None:
             },
         }
     }
-    assert features["Constraints"]["eq_capacity"] == {
+    assert instance.features.constraints == {
+        "eq_capacity": {
         "LHS": {
             "x[0]": 23.0,
             "x[1]": 26.0,
@@ -48,7 +48,8 @@ def test_knapsack() -> None:
         "Category": "eq_capacity",
         "User features": [0.0],
     }
-    assert features["Instance"] == {
+    }
+    assert instance.features.instance == {
         "User features": [67.0, 21.75],
         "Lazy constraint count": 0,
     }