Mirror of https://github.com/ANL-CEEESA/MIPLearn.git
Convert Features into dataclass
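
This commit replaces the Features TypedDict with a dataclass, so call sites read features.instance, features.variables and features.constraints as attributes instead of indexing string keys. A minimal before/after sketch of the access pattern; the field names and the miplearn.types import come from the hunks below, while the dict payloads are purely illustrative:

# Before: Features was a TypedDict indexed by string keys.
#     count = features["Instance"]["Lazy constraint count"]
#
# After: Features is a dataclass whose fields default to None,
# so callers narrow the Optional before indexing.
from miplearn.types import Features

features = Features(
    instance={"Lazy constraint count": 0},  # illustrative payload
    constraints={},
)
assert features.instance is not None
count = features.instance["Lazy constraint count"]
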
@@ -61,13 +61,16 @@ class StaticLazyConstraintsComponent(Component):
         training_data: TrainingSample,
     ) -> None:
         assert solver.internal_solver is not None
-        if not features["Instance"]["Lazy constraint count"] == 0:
+        assert features.instance is not None
+        assert features.constraints is not None
+
+        if not features.instance["Lazy constraint count"] == 0:
             logger.info("Instance does not have static lazy constraints. Skipping.")
         logger.info("Predicting required lazy constraints...")
         self.enforced_cids = set(self.sample_predict(features, training_data))
         logger.info("Moving lazy constraints to the pool...")
         self.pool = {}
-        for (cid, cdict) in features["Constraints"].items():
+        for (cid, cdict) in features.constraints.items():
             if cdict["Lazy"] and cid not in self.enforced_cids:
                 self.pool[cid] = LazyConstraint(
                     cid=cid,
@@ -145,9 +148,11 @@ class StaticLazyConstraintsComponent(Component):
         features: Features,
         sample: TrainingSample,
     ) -> List[str]:
+        assert features.constraints is not None
+
         x, y = self.sample_xy(features, sample)
         category_to_cids: Dict[Hashable, List[str]] = {}
-        for (cid, cdict) in features["Constraints"].items():
+        for (cid, cdict) in features.constraints.items():
             if "Category" not in cdict or cdict["Category"] is None:
                 continue
             category = cdict["Category"]
@@ -172,9 +177,10 @@ class StaticLazyConstraintsComponent(Component):
         features: Features,
         sample: TrainingSample,
     ) -> Tuple[Dict[Hashable, List[List[float]]], Dict[Hashable, List[List[float]]]]:
+        assert features.constraints is not None
         x: Dict = {}
         y: Dict = {}
-        for (cid, cfeatures) in features["Constraints"].items():
+        for (cid, cfeatures) in features.constraints.items():
             if not cfeatures["Lazy"]:
                 continue
             category = cfeatures["Category"]
@@ -77,9 +77,10 @@ class ObjectiveValueComponent(Component):
         features: Features,
         sample: TrainingSample,
     ) -> Tuple[Dict[Hashable, List[List[float]]], Dict[Hashable, List[List[float]]]]:
+        assert features.instance is not None
         x: Dict[Hashable, List[List[float]]] = {}
         y: Dict[Hashable, List[List[float]]] = {}
-        f = list(features["Instance"]["User features"])
+        f = list(features.instance["User features"])
         if "LP value" in sample and sample["LP value"] is not None:
             f += [sample["LP value"]]
         for c in ["Upper bound", "Lower bound"]:
@@ -105,9 +105,11 @@ class PrimalSolutionComponent(Component):
         features: Features,
         sample: TrainingSample,
     ) -> Solution:
+        assert features.variables is not None
+
         # Initialize empty solution
         solution: Solution = {}
-        for (var_name, var_dict) in features["Variables"].items():
+        for (var_name, var_dict) in features.variables.items():
             solution[var_name] = {}
             for idx in var_dict.keys():
                 solution[var_name][idx] = None
@@ -132,7 +134,7 @@ class PrimalSolutionComponent(Component):
 
         # Convert y_pred into solution
         category_offset: Dict[Hashable, int] = {cat: 0 for cat in x.keys()}
-        for (var_name, var_dict) in features["Variables"].items():
+        for (var_name, var_dict) in features.variables.items():
             for (idx, var_features) in var_dict.items():
                 category = var_features["Category"]
                 offset = category_offset[category]
@@ -149,12 +151,13 @@ class PrimalSolutionComponent(Component):
         features: Features,
         sample: TrainingSample,
     ) -> Tuple[Dict[Hashable, List[List[float]]], Dict[Hashable, List[List[float]]]]:
+        assert features.variables is not None
         x: Dict = {}
         y: Dict = {}
         solution: Optional[Solution] = None
         if "Solution" in sample and sample["Solution"] is not None:
             solution = sample["Solution"]
-        for (var_name, var_dict) in features["Variables"].items():
+        for (var_name, var_dict) in features.variables.items():
             for (idx, var_features) in var_dict.items():
                 category = var_features["Category"]
                 if category is None:
@@ -19,13 +19,10 @@ class FeaturesExtractor:
     ) -> None:
         self.solver = internal_solver
 
-    def extract(self, instance: "Instance") -> Features:
-        features: Features = {
-            "Variables": self._extract_variables(instance),
-            "Constraints": self._extract_constraints(instance),
-        }
-        features["Instance"] = self._extract_instance(instance, features)
-        return features
+    def extract(self, instance: "Instance") -> None:
+        instance.features.variables = self._extract_variables(instance)
+        instance.features.constraints = self._extract_constraints(instance)
+        instance.features.instance = self._extract_instance(instance, instance.features)
 
     def _extract_variables(self, instance: "Instance") -> Dict:
         variables = self.solver.get_empty_solution()
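
With this change, extract() no longer returns a Features object; it fills in instance.features in place. A short usage sketch, assuming a solver that already has the instance and model loaded (mirroring the LearningSolver and test hunks elsewhere in this commit):

# Sketch: extract() now returns None and mutates instance.features.
extractor = FeaturesExtractor(solver)  # solver: internal solver with instance/model set
extractor.extract(instance)
assert instance.features.variables is not None
assert instance.features.constraints is not None
assert instance.features.instance is not None
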
@@ -97,6 +94,7 @@ class FeaturesExtractor:
         instance: "Instance",
         features: Features,
     ) -> InstanceFeatures:
+        assert features.constraints is not None
         user_features = instance.get_instance_features()
         assert isinstance(user_features, list), (
             f"Instance features must be a list. "
@@ -108,7 +106,7 @@ class FeaturesExtractor:
                 f"Found {type(v).__name__} instead."
             )
         lazy_count = 0
-        for (cid, cdict) in features["Constraints"].items():
+        for (cid, cdict) in features.constraints.items():
             if cdict["Lazy"]:
                 lazy_count += 1
         return {
@@ -47,7 +47,7 @@ class Instance(ABC):
 
     def __init__(self) -> None:
         self.training_data: List[TrainingSample] = []
-        self.features: Features = {}
+        self.features: Features = Features()
 
     @abstractmethod
     def to_model(self) -> Any:
@@ -206,8 +206,8 @@ class PickleGzInstance(Instance):
         Path of the gzipped pickle file that should be loaded.
     """
 
+    # noinspection PyMissingConstructor
     def __init__(self, filename: str) -> None:
-        super().__init__()
         assert os.path.exists(filename), f"File not found: {filename}"
         self.instance: Optional[Instance] = None
         self.filename: str = filename
@@ -148,9 +148,7 @@ class LearningSolver:
         self.internal_solver.set_instance(instance, model)
 
         # Extract features
-        extractor = FeaturesExtractor(self.internal_solver)
-        instance.features.clear()  # type: ignore
-        instance.features.update(extractor.extract(instance))
+        FeaturesExtractor(self.internal_solver).extract(instance)
 
         callback_args = (
             self,
@@ -3,6 +3,7 @@
 # Released under the modified BSD license. See COPYING.md for more details.
 
 from typing import Optional, Dict, Callable, Any, Union, Tuple, List, Set, Hashable
+from dataclasses import dataclass
 
 from mypy_extensions import TypedDict
 
@@ -108,15 +109,13 @@ ConstraintFeatures = TypedDict(
     total=False,
 )
 
-Features = TypedDict(
-    "Features",
-    {
-        "Instance": InstanceFeatures,
-        "Variables": Dict[str, Dict[VarIndex, VariableFeatures]],
-        "Constraints": Dict[str, ConstraintFeatures],
-    },
-    total=False,
-)
+
+@dataclass
+class Features:
+    instance: Optional[InstanceFeatures] = None
+    variables: Optional[Dict[str, Dict[VarIndex, VariableFeatures]]] = None
+    constraints: Optional[Dict[str, ConstraintFeatures]] = None
+
 
 IterationCallback = Callable[[], bool]
 
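
Because every field of the new Features dataclass defaults to None, the type checker requires an explicit None check before the fields are indexed, which is why the assert ... is not None guards appear throughout the components above. A self-contained sketch of that pattern; FeaturesSketch and its plain-dict fields are stand-ins for the real Features class and its TypedDict payloads:

from dataclasses import dataclass
from typing import Dict, Optional


@dataclass
class FeaturesSketch:
    # Stand-in for miplearn.types.Features; the real fields hold
    # InstanceFeatures / VariableFeatures / ConstraintFeatures TypedDicts.
    instance: Optional[Dict] = None
    constraints: Optional[Dict] = None


f = FeaturesSketch()                    # fields start as None
f.constraints = {"c1": {"Lazy": True}}  # populated later, e.g. by an extractor
assert f.constraints is not None        # narrows Optional for the type checker
lazy_count = sum(1 for c in f.constraints.values() if c["Lazy"])
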
@@ -24,11 +24,11 @@ def sample() -> TrainingSample:
 
 @pytest.fixture
 def features() -> Features:
-    return {
-        "Instance": {
+    return Features(
+        instance={
             "Lazy constraint count": 4,
         },
-        "Constraints": {
+        constraints={
             "c1": {
                 "Category": "type-a",
                 "User features": [1.0, 1.0],
@@ -55,7 +55,7 @@ def features() -> Features:
                 "Lazy": False,
             },
         },
-    }
+    )
 
 
 def test_usage_with_solver(features: Features) -> None:
@@ -17,11 +17,11 @@ import numpy as np
 
 @pytest.fixture
 def features() -> Features:
-    return {
-        "Instance": {
+    return Features(
+        instance={
             "User features": [1.0, 2.0],
         }
-    }
+    )
 
 
 @pytest.fixture
@@ -17,8 +17,8 @@ from miplearn.types import TrainingSample, Features
 
 
 def test_xy() -> None:
-    features: Features = {
-        "Variables": {
+    features = Features(
+        variables={
             "x": {
                 0: {
                     "Category": "default",
@@ -37,7 +37,7 @@ def test_xy() -> None:
                 },
             }
         }
-    }
+    )
     sample: TrainingSample = {
         "Solution": {
             "x": {
@@ -78,8 +78,8 @@ def test_xy() -> None:
 
 
 def test_xy_without_lp_solution() -> None:
-    features: Features = {
-        "Variables": {
+    features = Features(
+        variables={
             "x": {
                 0: {
                     "Category": "default",
@@ -98,7 +98,7 @@ def test_xy_without_lp_solution() -> None:
                 },
             }
         }
-    }
+    )
     sample: TrainingSample = {
         "Solution": {
             "x": {
@@ -143,8 +143,8 @@ def test_predict() -> None:
     )
     thr = Mock(spec=Threshold)
     thr.predict = Mock(return_value=[0.75, 0.75])
-    features: Features = {
-        "Variables": {
+    features = Features(
+        variables={
             "x": {
                 0: {
                     "Category": "default",
@@ -160,7 +160,7 @@ def test_predict() -> None:
                 },
             }
         }
-    }
+    )
     sample: TrainingSample = {
         "LP solution": {
             "x": {
@@ -243,8 +243,8 @@ def test_evaluate() -> None:
             4: 1.0,
         }
     }
-    features: Features = {
-        "Variables": {
+    features = Features(
+        variables={
             "x": {
                 0: {},
                 1: {},
@@ -253,7 +253,7 @@ def test_evaluate() -> None:
                 4: {},
             }
         }
-    }
+    )
     sample: TrainingSample = {
         "Solution": {
             "x": {
@@ -91,14 +91,18 @@ def test_solve_fit_from_disk():
     solver.solve(instances[0])
     instance_loaded = read_pickle_gz(instances[0].filename)
     assert len(instance_loaded.training_data) > 0
-    assert len(instance_loaded.features) > 0
+    assert instance_loaded.features.instance is not None
+    assert instance_loaded.features.variables is not None
+    assert instance_loaded.features.constraints is not None
 
     # Test: parallel_solve
     solver.parallel_solve(instances)
     for instance in instances:
         instance_loaded = read_pickle_gz(instance.filename)
         assert len(instance_loaded.training_data) > 0
-        assert len(instance_loaded.features) > 0
+        assert instance_loaded.features.instance is not None
+        assert instance_loaded.features.variables is not None
+        assert instance_loaded.features.constraints is not None
 
     # Delete temporary files
     for instance in instances:
@@ -13,9 +13,8 @@ def test_knapsack() -> None:
     instance = get_knapsack_instance(solver)
     model = instance.to_model()
     solver.set_instance(instance, model)
-    extractor = FeaturesExtractor(solver)
-    features = extractor.extract(instance)
-    assert features["Variables"] == {
+    FeaturesExtractor(solver).extract(instance)
+    assert instance.features.variables == {
         "x": {
             0: {
                 "Category": "default",
@@ -35,20 +34,22 @@ def test_knapsack() -> None:
             },
         }
     }
-    assert features["Constraints"]["eq_capacity"] == {
-        "LHS": {
-            "x[0]": 23.0,
-            "x[1]": 26.0,
-            "x[2]": 20.0,
-            "x[3]": 18.0,
-        },
-        "Sense": "<",
-        "RHS": 67.0,
-        "Lazy": False,
-        "Category": "eq_capacity",
-        "User features": [0.0],
+    assert instance.features.constraints == {
+        "eq_capacity": {
+            "LHS": {
+                "x[0]": 23.0,
+                "x[1]": 26.0,
+                "x[2]": 20.0,
+                "x[3]": 18.0,
+            },
+            "Sense": "<",
+            "RHS": 67.0,
+            "Lazy": False,
+            "Category": "eq_capacity",
+            "User features": [0.0],
+        }
     }
-    assert features["Instance"] == {
+    assert instance.features.instance == {
         "User features": [67.0, 21.75],
         "Lazy constraint count": 0,
     }