Mirror of https://github.com/ANL-CEEESA/MIPLearn.git (synced 2025-12-06 01:18:52 -06:00)
Remove {get,put}_set and deprecated functions
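
This change drops the deprecated set-based accessors (put_set, get_set, put_vector, get_vector) and stores enforced constraint names as fixed-width byte-string numpy arrays via put_array / get_array. A minimal before/after sketch of the call-site migration, using only calls that appear in this diff (the names `sample` and `enforced` are placeholders, and numpy is assumed to be imported as `np`):

    # Before (deprecated): constraint names kept as a Python set
    sample.put_set("mip_constr_lazy_enforced", set(enforced))
    enforced = sample.get_set("mip_constr_lazy_enforced")

    # After: constraint names kept as a numpy byte-string ("S") array
    sample.put_array(
        "mip_constr_lazy_enforced",
        np.array(list(enforced), dtype="S"),
    )
    enforced_np = sample.get_array("mip_constr_lazy_enforced")
    enforced = set(enforced_np) if enforced_np is not None else set()
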
@@ -56,6 +56,11 @@ class DynamicConstraintsComponent(Component):
         cids: Dict[ConstraintCategory, List[ConstraintName]] = {}
         known_cids = np.array(self.known_cids, dtype="S")

+        enforced_cids = None
+        enforced_cids_np = sample.get_array(self.attr)
+        if enforced_cids_np is not None:
+            enforced_cids = list(enforced_cids_np)
+
         # Get user-provided constraint features
         (
             constr_features,
@@ -72,13 +77,11 @@ class DynamicConstraintsComponent(Component):
                 constr_features,
             ]
         )
-        assert len(known_cids) == constr_features.shape[0]

         categories = np.unique(constr_categories)
         for c in categories:
             x[c] = constr_features[constr_categories == c].tolist()
             cids[c] = known_cids[constr_categories == c].tolist()
-            enforced_cids = np.array(list(sample.get_set(self.attr)), dtype="S")
             if enforced_cids is not None:
                 tmp = np.isin(cids[c], enforced_cids).reshape(-1, 1)
                 y[c] = np.hstack([~tmp, tmp]).tolist()  # type: ignore
@@ -99,7 +102,7 @@ class DynamicConstraintsComponent(Component):
         assert pre is not None
         known_cids: Set = set()
         for cids in pre:
-            known_cids |= cids
+            known_cids |= set(list(cids))
         self.known_cids.clear()
         self.known_cids.extend(sorted(known_cids))

@@ -128,7 +131,7 @@ class DynamicConstraintsComponent(Component):

     @overrides
     def pre_sample_xy(self, instance: Instance, sample: Sample) -> Any:
-        return sample.get_set(self.attr)
+        return sample.get_array(self.attr)

     @overrides
     def fit_xy(
@@ -150,7 +153,7 @@ class DynamicConstraintsComponent(Component):
         instance: Instance,
         sample: Sample,
     ) -> Dict[str, float]:
-        actual = sample.get_set(self.attr)
+        actual = sample.get_array(self.attr)
         assert actual is not None
         pred = set(self.sample_predict(instance, sample))
         tp, tn, fp, fn = 0, 0, 0, 0
@@ -3,6 +3,7 @@
 # Released under the modified BSD license. See COPYING.md for more details.

 import logging
+import pdb
 from typing import Dict, List, TYPE_CHECKING, Tuple, Any, Optional, Set

 import numpy as np
@@ -78,7 +79,10 @@ class DynamicLazyConstraintsComponent(Component):
         stats: LearningSolveStats,
         sample: Sample,
     ) -> None:
-        sample.put_set("mip_constr_lazy_enforced", set(self.lazy_enforced))
+        sample.put_array(
+            "mip_constr_lazy_enforced",
+            np.array(list(self.lazy_enforced), dtype="S"),
+        )

     @overrides
     def iteration_cb(
@@ -87,7 +87,10 @@ class UserCutsComponent(Component):
         stats: LearningSolveStats,
         sample: Sample,
     ) -> None:
-        sample.put_set("mip_user_cuts_enforced", set(self.enforced))
+        sample.put_array(
+            "mip_user_cuts_enforced",
+            np.array(list(self.enforced), dtype="S"),
+        )
         stats["UserCuts: Added in callback"] = self.n_added_in_callback
         if self.n_added_in_callback > 0:
             logger.info(f"{self.n_added_in_callback} user cuts added in callback")
@@ -61,7 +61,10 @@ class StaticLazyConstraintsComponent(Component):
         stats: LearningSolveStats,
         sample: Sample,
     ) -> None:
-        sample.put_set("mip_constr_lazy_enforced", self.enforced_cids)
+        sample.put_array(
+            "mip_constr_lazy_enforced",
+            np.array(list(self.enforced_cids), dtype="S"),
+        )
         stats["LazyStatic: Restored"] = self.n_restored
         stats["LazyStatic: Iterations"] = self.n_iterations

@@ -212,7 +215,7 @@ class StaticLazyConstraintsComponent(Component):
         constr_names = sample.get_array("static_constr_names")
         constr_categories = sample.get_array("static_constr_categories")
         constr_lazy = sample.get_array("static_constr_lazy")
-        lazy_enforced = sample.get_set("mip_constr_lazy_enforced")
+        lazy_enforced = sample.get_array("mip_constr_lazy_enforced")
         if constr_features is None:
             constr_features = sample.get_array("static_constr_features")

@@ -46,15 +46,6 @@ class Sample(ABC):
     def put_scalar(self, key: str, value: Scalar) -> None:
         pass

-    @abstractmethod
-    def get_vector(self, key: str) -> Optional[Any]:
-        warnings.warn("Deprecated", DeprecationWarning)
-        return None
-
-    @abstractmethod
-    def put_vector(self, key: str, value: Vector) -> None:
-        warnings.warn("Deprecated", DeprecationWarning)
-
     @abstractmethod
     def put_array(self, key: str, value: Optional[np.ndarray]) -> None:
         pass
@@ -71,19 +62,6 @@ class Sample(ABC):
     def get_sparse(self, key: str) -> Optional[coo_matrix]:
         pass

-    def get_set(self, key: str) -> Set:
-        warnings.warn("Deprecated", DeprecationWarning)
-        v = self.get_vector(key)
-        if v:
-            return set(v)
-        else:
-            return set()
-
-    def put_set(self, key: str, value: Set) -> None:
-        warnings.warn("Deprecated", DeprecationWarning)
-        v = list(value)
-        self.put_vector(key, v)
-
     def _assert_is_scalar(self, value: Any) -> None:
         if value is None:
             return
@@ -91,20 +69,13 @@ class Sample(ABC):
             return
         assert False, f"scalar expected; found instead: {value} ({value.__class__})"

-    def _assert_is_vector(self, value: Any) -> None:
-        assert isinstance(
-            value, (list, np.ndarray)
-        ), f"list or numpy array expected; found instead: {value} ({value.__class__})"
-        for v in value:
-            self._assert_is_scalar(v)
-
-    def _assert_supported(self, value: np.ndarray) -> None:
+    def _assert_is_array(self, value: np.ndarray) -> None:
         assert isinstance(value, np.ndarray)
         assert value.dtype.kind in "biufS", f"Unsupported dtype: {value.dtype}"

     def _assert_is_sparse(self, value: Any) -> None:
         assert isinstance(value, coo_matrix)
-        self._assert_supported(value.data)
+        self._assert_is_array(value.data)


 class MemorySample(Sample):
@@ -113,37 +84,22 @@ class MemorySample(Sample):
     def __init__(
         self,
         data: Optional[Dict[str, Any]] = None,
-        check_data: bool = True,
     ) -> None:
         if data is None:
             data = {}
         self._data: Dict[str, Any] = data
-        self._check_data = check_data

     @overrides
     def get_scalar(self, key: str) -> Optional[Any]:
         return self._get(key)

-    @overrides
-    def get_vector(self, key: str) -> Optional[Any]:
-        return self._get(key)
-
     @overrides
     def put_scalar(self, key: str, value: Scalar) -> None:
         if value is None:
             return
-        if self._check_data:
-            self._assert_is_scalar(value)
+        self._assert_is_scalar(value)
         self._put(key, value)

-    @overrides
-    def put_vector(self, key: str, value: Vector) -> None:
-        if value is None:
-            return
-        if self._check_data:
-            self._assert_is_vector(value)
-        self._put(key, value)
-
     def _get(self, key: str) -> Optional[Any]:
         if key in self._data:
             return self._data[key]
@@ -157,7 +113,7 @@ class MemorySample(Sample):
     def put_array(self, key: str, value: Optional[np.ndarray]) -> None:
         if value is None:
             return
-        self._assert_supported(value)
+        self._assert_is_array(value)
         self._put(key, value)

     @overrides
@@ -188,10 +144,8 @@ class Hdf5Sample(Sample):
         self,
         filename: str,
         mode: str = "r+",
-        check_data: bool = True,
     ) -> None:
         self.file = h5py.File(filename, mode, libver="latest")
-        self._check_data = check_data

     @overrides
     def get_scalar(self, key: str) -> Optional[Any]:
@@ -206,66 +160,20 @@ class Hdf5Sample(Sample):
         else:
             return ds[()].tolist()

-    @overrides
-    def get_vector(self, key: str) -> Optional[Any]:
-        if key not in self.file:
-            return None
-        ds = self.file[key]
-        assert (
-            len(ds.shape) == 1
-        ), f"1-dimensional array expected; found shape {ds.shape}"
-        if h5py.check_string_dtype(ds.dtype):
-            result = ds.asstr()[:].tolist()
-            result = [r if len(r) > 0 else None for r in result]
-            return result
-        else:
-            return ds[:].tolist()
-
     @overrides
     def put_scalar(self, key: str, value: Any) -> None:
         if value is None:
             return
-        if self._check_data:
-            self._assert_is_scalar(value)
-        self._put(key, value)
-
-    @overrides
-    def put_vector(self, key: str, value: Vector) -> None:
-        if value is None:
-            return
-        if self._check_data:
-            self._assert_is_vector(value)
-
-        for v in value:
-            # Convert strings to bytes
-            if isinstance(v, str) or v is None:
-                value = np.array(
-                    [u if u is not None else b"" for u in value],
-                    dtype="S",
-                )
-                break
-
-            # Convert all floating point numbers to half-precision
-            if isinstance(v, float):
-                value = np.array(value, dtype=np.dtype("f2"))
-                break
-
-        self._put(key, value, compress=True)
-
-    def _put(self, key: str, value: Any, compress: bool = False) -> Dataset:
+        self._assert_is_scalar(value)
         if key in self.file:
             del self.file[key]
-        if compress:
-            ds = self.file.create_dataset(key, data=value, compression="gzip")
-        else:
-            ds = self.file.create_dataset(key, data=value)
-        return ds
+        self.file.create_dataset(key, data=value)

     @overrides
     def put_array(self, key: str, value: Optional[np.ndarray]) -> None:
         if value is None:
             return
-        self._assert_supported(value)
+        self._assert_is_array(value)
         if key in self.file:
             del self.file[key]
         return self.file.create_dataset(key, data=value, compression="gzip")
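
With get_vector/put_vector and the check_data flag removed, MemorySample and Hdf5Sample accept only numpy arrays whose dtype kind is one of "biufS" (enforced by _assert_is_array above). A short round-trip sketch under that contract; the import path is an assumption, as it is not shown in this diff:

    import numpy as np
    from miplearn.features.sample import MemorySample  # path assumed

    sample = MemorySample()  # the former check_data argument no longer exists
    sample.put_array("mip_constr_lazy_enforced", np.array(["c1", "c2"], dtype="S"))
    cids = sample.get_array("mip_constr_lazy_enforced")  # array([b'c1', b'c2'], dtype='|S2')
    enforced = set(cids) if cids is not None else set()  # convert only where set semantics are needed
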
@@ -24,13 +24,13 @@ def training_instances() -> List[Instance]:
     samples_0 = [
         MemorySample(
             {
-                "mip_constr_lazy_enforced": {b"c1", b"c2"},
+                "mip_constr_lazy_enforced": np.array(["c1", "c2"], dtype="S"),
                 "static_instance_features": np.array([5.0]),
             },
         ),
         MemorySample(
             {
-                "mip_constr_lazy_enforced": {b"c2", b"c3"},
+                "mip_constr_lazy_enforced": np.array(["c2", "c3"], dtype="S"),
                 "static_instance_features": np.array([5.0]),
             },
         ),
@@ -55,7 +55,7 @@ def training_instances() -> List[Instance]:
     samples_1 = [
         MemorySample(
             {
-                "mip_constr_lazy_enforced": {b"c3", b"c4"},
+                "mip_constr_lazy_enforced": np.array(["c3", "c4"], dtype="S"),
                 "static_instance_features": np.array([8.0]),
             },
         )
@@ -81,7 +81,12 @@ def training_instances() -> List[Instance]:

 def test_sample_xy(training_instances: List[Instance]) -> None:
     comp = DynamicLazyConstraintsComponent()
-    comp.pre_fit([{b"c1", b"c2", b"c3", b"c4"}])
+    comp.pre_fit(
+        [
+            np.array(["c1", "c3", "c4"], dtype="S"),
+            np.array(["c1", "c2", "c4"], dtype="S"),
+        ]
+    )
     x_expected = {
         b"type-a": np.array([[5.0, 1.0, 2.0, 3.0], [5.0, 4.0, 5.0, 6.0]]),
         b"type-b": np.array([[5.0, 1.0, 2.0, 0.0], [5.0, 3.0, 4.0, 0.0]]),
@@ -82,7 +82,7 @@ def test_usage(
 ) -> None:
     stats_before = solver.solve(stab_instance)
     sample = stab_instance.get_samples()[0]
-    user_cuts_enforced = sample.get_set("mip_user_cuts_enforced")
+    user_cuts_enforced = sample.get_array("mip_user_cuts_enforced")
     assert user_cuts_enforced is not None
     assert len(user_cuts_enforced) > 0
     assert stats_before["UserCuts: Added ahead-of-time"] == 0
@@ -19,6 +19,7 @@ from miplearn.types import (
     LearningSolveStats,
     ConstraintCategory,
 )
+from miplearn.solvers.tests import assert_equals


 @pytest.fixture
@@ -35,7 +36,7 @@ def sample() -> Sample:
             "static_constr_lazy": np.array([True, True, True, True, False]),
             "static_constr_names": np.array(["c1", "c2", "c3", "c4", "c5"], dtype="S"),
             "static_instance_features": [5.0],
-            "mip_constr_lazy_enforced": {b"c1", b"c2", b"c4"},
+            "mip_constr_lazy_enforced": np.array(["c1", "c2", "c4"], dtype="S"),
             "lp_constr_features": np.array(
                 [
                     [1.0, 1.0, 0.0],
@@ -96,7 +97,7 @@ def test_usage_with_solver(instance: Instance) -> None:

     stats: LearningSolveStats = {}
     sample = instance.get_samples()[0]
-    assert sample.get_set("mip_constr_lazy_enforced") is not None
+    assert sample.get_array("mip_constr_lazy_enforced") is not None

     # LearningSolver calls before_solve_mip
     component.before_solve_mip(
@@ -145,8 +146,13 @@ def test_usage_with_solver(instance: Instance) -> None:
     )

     # Should update training sample
-    assert sample.get_set("mip_constr_lazy_enforced") == {b"c1", b"c2", b"c3", b"c4"}
+    mip_constr_lazy_enforced = sample.get_array("mip_constr_lazy_enforced")
+    assert mip_constr_lazy_enforced is not None
+    assert_equals(
+        sorted(mip_constr_lazy_enforced),
+        np.array(["c1", "c2", "c3", "c4"], dtype="S"),
+    )

     # Should update stats
     assert stats["LazyStatic: Removed"] == 1
     assert stats["LazyStatic: Kept"] == 3
@@ -77,7 +77,7 @@ def test_knapsack() -> None:
         np.array(["eq_capacity"], dtype="S"),
     )
     # assert_equals(
-    #     sample.get_vector("static_constr_lhs"),
+    #     sample.get_array("static_constr_lhs"),
     #     [
     #         [
     #             ("x[0]", 23.0),
@@ -89,7 +89,7 @@ def test_knapsack() -> None:
     #     ],
     # )
     assert_equals(
-        sample.get_vector("static_constr_rhs"),
+        sample.get_array("static_constr_rhs"),
         np.array([0.0]),
     )
     assert_equals(
@@ -97,11 +97,11 @@ def test_knapsack() -> None:
         np.array(["="], dtype="S"),
     )
     assert_equals(
-        sample.get_vector("static_constr_features"),
+        sample.get_array("static_constr_features"),
         np.array([[0.0]]),
     )
     assert_equals(
-        sample.get_vector("static_constr_categories"),
+        sample.get_array("static_constr_categories"),
         np.array(["eq_capacity"], dtype="S"),
     )
     assert_equals(
@@ -109,7 +109,7 @@ def test_knapsack() -> None:
         np.array([False]),
     )
     assert_equals(
-        sample.get_vector("static_instance_features"),
+        sample.get_array("static_instance_features"),
         np.array([67.0, 21.75]),
     )
     assert_equals(sample.get_scalar("static_constr_lazy_count"), 0)
@@ -17,7 +17,7 @@ def test_usage() -> None:
     # Save instance to disk
     filename = tempfile.mktemp()
    FileInstance.save(original, filename)
-    sample = Hdf5Sample(filename, check_data=True)
+    sample = Hdf5Sample(filename)
     assert len(sample.get_array("pickled")) > 0

     # Solve instance from disk
@@ -66,7 +66,7 @@ def test_subtour() -> None:
     samples = instance.get_samples()
     assert len(samples) == 1
     sample = samples[0]
-    lazy_enforced = sample.get_set("mip_constr_lazy_enforced")
+    lazy_enforced = sample.get_array("mip_constr_lazy_enforced")
     assert lazy_enforced is not None
     assert len(lazy_enforced) > 0
     assert_equals(