Use np in Constraints.lazy; replace some get_vector

Branch: master
Author: Alinson S. Xavier, 4 years ago
parent f809dd7de4
commit 5b54153a3a
GPG Key ID: DCA0DAD4D2F58624 (no known key found for this signature in the database)
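This commit replaces list-based storage with NumPy arrays for several Sample fields (for example, "static_constr_lazy" is now written with put_array as a dtype=bool array) and retypes Constraints.lazy as Optional[np.ndarray]. The payoff shows up in Constraints.__getitem__ below: a boolean mask indexes the array directly instead of going through the _filter helper. A minimal standalone sketch of that indexing pattern (the MiniConstraints class is illustrative, not part of MIPLearn):

import numpy as np
from dataclasses import dataclass
from typing import List, Optional


@dataclass
class MiniConstraints:
    # Illustrative stand-in: lazy is a NumPy bool array, as in this commit.
    names: Optional[np.ndarray] = None
    lazy: Optional[np.ndarray] = None

    def __getitem__(self, selected: List[bool]) -> "MiniConstraints":
        # Boolean-mask indexing works uniformly on ndarray fields,
        # replacing a per-field list-comprehension filter.
        return MiniConstraints(
            names=None if self.names is None else self.names[selected],
            lazy=None if self.lazy is None else self.lazy[selected],
        )


c = MiniConstraints(
    names=np.array(["c1", "c2", "c3"], dtype="S"),
    lazy=np.array([True, False, True]),
)
subset = c[[True, False, True]]            # keep rows 0 and 2
assert subset.lazy.tolist() == [True, True]
assert int(sum(subset.lazy)) == 2          # same idiom as static_constr_lazy_count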

@@ -143,7 +143,7 @@ class PrimalSolutionComponent(Component):
 x: Dict = {}
 y: Dict = {}
 instance_features = sample.get_vector("static_instance_features")
-mip_var_values = sample.get_vector("mip_var_values")
+mip_var_values = sample.get_array("mip_var_values")
 var_features = sample.get_vector_list("lp_var_features")
 var_names = sample.get_array("static_var_names")
 var_categories = sample.get_vector("static_var_categories")

@@ -183,7 +183,7 @@ class StaticLazyConstraintsComponent(Component):
 logger.info(f"Found {n_violated} violated lazy constraints found")
 if n_violated > 0:
 logger.info(
-"Enforcing {n_violated} lazy constraints; "
+f"Enforcing {n_violated} lazy constraints; "
 f"{n_satisfied} left in the pool..."
 )
 solver.internal_solver.add_constraints(violated_constraints)
@@ -208,7 +208,7 @@ class StaticLazyConstraintsComponent(Component):
 constr_features = sample.get_vector_list("lp_constr_features")
 constr_names = sample.get_array("static_constr_names")
 constr_categories = sample.get_vector("static_constr_categories")
-constr_lazy = sample.get_vector("static_constr_lazy")
+constr_lazy = sample.get_array("static_constr_lazy")
 lazy_enforced = sample.get_set("mip_constr_lazy_enforced")
 if constr_features is None:
 constr_features = sample.get_vector_list("static_constr_features")

@@ -241,7 +241,7 @@ class FeaturesExtractor:
 else:
 lazy.append(False)
 sample.put_vector_list("static_constr_features", user_features)
-sample.put_vector("static_constr_lazy", lazy)
+sample.put_array("static_constr_lazy", np.array(lazy, dtype=bool))
 sample.put_array("static_constr_categories", np.array(categories, dtype="S"))
 
 def _extract_user_features_instance(
@@ -261,18 +261,18 @@ class FeaturesExtractor:
 f"Instance features must be a list of numbers. "
 f"Found {type(v).__name__} instead."
 )
-constr_lazy = sample.get_vector("static_constr_lazy")
+constr_lazy = sample.get_array("static_constr_lazy")
 assert constr_lazy is not None
 sample.put_vector("static_instance_features", user_features)
-sample.put_scalar("static_constr_lazy_count", sum(constr_lazy))
+sample.put_scalar("static_constr_lazy_count", int(sum(constr_lazy)))
 
 # Alvarez, A. M., Louveaux, Q., & Wehenkel, L. (2017). A machine learning-based
 # approximation of strong branching. INFORMS Journal on Computing, 29(1), 185-195.
 def _extract_var_features_AlvLouWeh2017(self, sample: Sample) -> List:
-obj_coeffs = sample.get_vector("static_var_obj_coeffs")
-obj_sa_down = sample.get_vector("lp_var_sa_obj_down")
-obj_sa_up = sample.get_vector("lp_var_sa_obj_up")
-values = sample.get_vector(f"lp_var_values")
+obj_coeffs = sample.get_array("static_var_obj_coeffs")
+obj_sa_down = sample.get_array("lp_var_sa_obj_down")
+obj_sa_up = sample.get_array("lp_var_sa_obj_up")
+values = sample.get_array("lp_var_values")
 assert obj_coeffs is not None
 pos_obj_coeff_sum = 0.0

@@ -94,21 +94,21 @@ class Sample(ABC):
 def _assert_is_scalar(self, value: Any) -> None:
 if value is None:
 return
-if isinstance(value, (str, bool, int, float)):
+if isinstance(value, (str, bool, int, float, np.bytes_)):
 return
-assert False, f"scalar expected; found instead: {value}"
+assert False, f"scalar expected; found instead: {value} ({value.__class__})"
 
 def _assert_is_vector(self, value: Any) -> None:
 assert isinstance(
 value, (list, np.ndarray)
-), f"list or numpy array expected; found instead: {value}"
+), f"list or numpy array expected; found instead: {value} ({value.__class__})"
 for v in value:
 self._assert_is_scalar(v)
 
 def _assert_is_vector_list(self, value: Any) -> None:
 assert isinstance(
 value, (list, np.ndarray)
-), f"list or numpy array expected; found instead: {value}"
+), f"list or numpy array expected; found instead: {value} ({value.__class__})"
 for v in value:
 if v is None:
 continue
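One note on the widened scalar check above: values pulled out of dtype="S" arrays such as "static_constr_names" come back as np.bytes_, which the old (str, bool, int, float) tuple rejected. A quick plain-NumPy illustration, independent of the Sample classes:

import numpy as np

names = np.array(["c1", "c2"], dtype="S")       # fixed-width byte strings
element = names[0]                              # b"c1"
assert isinstance(element, np.bytes_)           # NumPy byte-string scalar
# Not covered by the old tuple, hence the extra np.bytes_ entry above:
assert not isinstance(element, (str, bool, int, float))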
@@ -125,7 +125,7 @@ class MemorySample(Sample):
 def __init__(
 self,
 data: Optional[Dict[str, Any]] = None,
-check_data: bool = False,
+check_data: bool = True,
 ) -> None:
 if data is None:
 data = {}
@@ -210,7 +210,7 @@ class Hdf5Sample(Sample):
 self,
 filename: str,
 mode: str = "r+",
-check_data: bool = False,
+check_data: bool = True,
 ) -> None:
 self.file = h5py.File(filename, mode, libver="latest")
 self._check_data = check_data
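Both sample classes now validate by default: check_data flips from False to True, so the _assert_is_* checks shown earlier presumably run on every write instead of being opt-in. A generic sketch of that gating pattern (the TinySample class and its storage are illustrative, not MIPLearn's implementation):

import numpy as np
from typing import Any, Dict


class TinySample:
    # Illustrative: validation on by default, mirroring the new defaults
    # of MemorySample and Hdf5Sample in this commit.
    def __init__(self, check_data: bool = True) -> None:
        self._data: Dict[str, Any] = {}
        self._check_data = check_data

    def put_scalar(self, key: str, value: Any) -> None:
        if self._check_data:
            assert isinstance(
                value, (str, bool, int, float, np.bytes_)
            ), f"scalar expected; found instead: {value} ({value.__class__})"
        self._data[key] = value


s = TinySample()
s.put_scalar("static_constr_lazy_count", 0)   # passes the check
# s.put_scalar("bad", [1, 2])                 # would now fail by default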

@@ -70,7 +70,7 @@ class Variables:
 class Constraints:
 basis_status: Optional[np.ndarray] = None
 dual_values: Optional[np.ndarray] = None
-lazy: Optional[List[bool]] = None
+lazy: Optional[np.ndarray] = None
 lhs: Optional[List[List[Tuple[bytes, float]]]] = None
 names: Optional[np.ndarray] = None
 rhs: Optional[np.ndarray] = None
@@ -83,15 +83,15 @@ class Constraints:
 def from_sample(sample: "Sample") -> "Constraints":
 return Constraints(
 basis_status=sample.get_array("lp_constr_basis_status"),
-dual_values=sample.get_vector("lp_constr_dual_values"),
-lazy=sample.get_vector("static_constr_lazy"),
+dual_values=sample.get_array("lp_constr_dual_values"),
+lazy=sample.get_array("static_constr_lazy"),
 # lhs=sample.get_vector("static_constr_lhs"),
 names=sample.get_array("static_constr_names"),
-rhs=sample.get_vector("static_constr_rhs"),
-sa_rhs_down=sample.get_vector("lp_constr_sa_rhs_down"),
-sa_rhs_up=sample.get_vector("lp_constr_sa_rhs_up"),
+rhs=sample.get_array("static_constr_rhs"),
+sa_rhs_down=sample.get_array("lp_constr_sa_rhs_down"),
+sa_rhs_up=sample.get_array("lp_constr_sa_rhs_up"),
 senses=sample.get_array("static_constr_senses"),
-slacks=sample.get_vector("lp_constr_slacks"),
+slacks=sample.get_array("lp_constr_slacks"),
 )
 
 def __getitem__(self, selected: List[bool]) -> "Constraints":
@@ -103,7 +103,7 @@ class Constraints:
 None if self.dual_values is None else self.dual_values[selected]
 ),
 names=(None if self.names is None else self.names[selected]),
-lazy=self._filter(self.lazy, selected),
+lazy=(None if self.lazy is None else self.lazy[selected]),
 lhs=self._filter(self.lhs, selected),
 rhs=(None if self.rhs is None else self.rhs[selected]),
 sa_rhs_down=(

@@ -31,7 +31,7 @@ def sample() -> Sample:
 "type-b",
 "type-b",
 ],
-"static_constr_lazy": [True, True, True, True, False],
+"static_constr_lazy": np.array([True, True, True, True, False]),
 "static_constr_names": np.array(["c1", "c2", "c3", "c4", "c5"], dtype="S"),
 "static_instance_features": [5.0],
 "mip_constr_lazy_enforced": {b"c1", b"c2", b"c4"},

@@ -39,7 +39,7 @@ def test_knapsack() -> None:
 sample.get_vector("static_var_lower_bounds"), [0.0, 0.0, 0.0, 0.0, 0.0]
 )
 assert_equals(
-sample.get_vector("static_var_obj_coeffs"), [505.0, 352.0, 458.0, 220.0, 0.0]
+sample.get_array("static_var_obj_coeffs"), [505.0, 352.0, 458.0, 220.0, 0.0]
 )
 assert_equals(
 sample.get_array("static_var_types"),
@@ -79,7 +79,7 @@ def test_knapsack() -> None:
 sample.get_vector("static_constr_categories"),
 np.array(["eq_capacity"], dtype="S"),
 )
-assert_equals(sample.get_vector("static_constr_lazy"), [False])
+assert_equals(sample.get_array("static_constr_lazy"), np.array([False]))
 assert_equals(sample.get_vector("static_instance_features"), [67.0, 21.75])
 assert_equals(sample.get_scalar("static_constr_lazy_count"), 0)
@@ -92,46 +92,46 @@ def test_knapsack() -> None:
 np.array(["U", "B", "U", "L", "U"], dtype="S"),
 )
 assert_equals(
-sample.get_vector("lp_var_reduced_costs"),
+sample.get_array("lp_var_reduced_costs"),
 [193.615385, 0.0, 187.230769, -23.692308, 13.538462],
 )
 assert_equals(
-sample.get_vector("lp_var_sa_lb_down"),
+sample.get_array("lp_var_sa_lb_down"),
 [-inf, -inf, -inf, -0.111111, -inf],
 )
 assert_equals(
-sample.get_vector("lp_var_sa_lb_up"),
+sample.get_array("lp_var_sa_lb_up"),
 [1.0, 0.923077, 1.0, 1.0, 67.0],
 )
 assert_equals(
-sample.get_vector("lp_var_sa_obj_down"),
+sample.get_array("lp_var_sa_obj_down"),
 [311.384615, 317.777778, 270.769231, -inf, -13.538462],
 )
 assert_equals(
-sample.get_vector("lp_var_sa_obj_up"),
+sample.get_array("lp_var_sa_obj_up"),
 [inf, 570.869565, inf, 243.692308, inf],
 )
 assert_equals(
-sample.get_vector("lp_var_sa_ub_down"), [0.913043, 0.923077, 0.9, 0.0, 43.0]
+sample.get_array("lp_var_sa_ub_down"), [0.913043, 0.923077, 0.9, 0.0, 43.0]
 )
-assert_equals(sample.get_vector("lp_var_sa_ub_up"), [2.043478, inf, 2.2, inf, 69.0])
-assert_equals(sample.get_vector("lp_var_values"), [1.0, 0.923077, 1.0, 0.0, 67.0])
+assert_equals(sample.get_array("lp_var_sa_ub_up"), [2.043478, inf, 2.2, inf, 69.0])
+assert_equals(sample.get_array("lp_var_values"), [1.0, 0.923077, 1.0, 0.0, 67.0])
 assert sample.get_vector_list("lp_var_features") is not None
 assert_equals(
 sample.get_array("lp_constr_basis_status"),
 np.array(["N"], dtype="S"),
 )
-assert_equals(sample.get_vector("lp_constr_dual_values"), [13.538462])
-assert_equals(sample.get_vector("lp_constr_sa_rhs_down"), [-24.0])
-assert_equals(sample.get_vector("lp_constr_sa_rhs_up"), [2.0])
-assert_equals(sample.get_vector("lp_constr_slacks"), [0.0])
+assert_equals(sample.get_array("lp_constr_dual_values"), [13.538462])
+assert_equals(sample.get_array("lp_constr_sa_rhs_down"), [-24.0])
+assert_equals(sample.get_array("lp_constr_sa_rhs_up"), [2.0])
+assert_equals(sample.get_array("lp_constr_slacks"), [0.0])
 
 # after-mip
 # -------------------------------------------------------
 solver.solve()
 extractor.extract_after_mip_features(solver, sample)
-assert_equals(sample.get_vector("mip_var_values"), [1.0, 0.0, 1.0, 1.0, 61.0])
-assert_equals(sample.get_vector("mip_constr_slacks"), [0.0])
+assert_equals(sample.get_array("mip_var_values"), [1.0, 0.0, 1.0, 1.0, 61.0])
+assert_equals(sample.get_array("mip_constr_slacks"), [0.0])
 
 
 def test_constraint_getindex() -> None:

@@ -28,5 +28,5 @@ def test_usage() -> None:
 sample = FileInstance(filename).get_samples()[0]
 assert sample.get_scalar("mip_lower_bound") == 1183.0
 assert sample.get_scalar("mip_upper_bound") == 1183.0
-assert len(sample.get_vector("lp_var_values")) == 5
-assert len(sample.get_vector("mip_var_values")) == 5
+assert len(sample.get_array("lp_var_values")) == 5
+assert len(sample.get_array("mip_var_values")) == 5

@@ -42,7 +42,7 @@ def test_instance() -> None:
 solver.solve(instance)
 assert len(instance.get_samples()) == 1
 sample = instance.get_samples()[0]
-assert_equals(sample.get_vector("mip_var_values"), [1.0, 0.0, 1.0, 1.0, 0.0, 1.0])
+assert_equals(sample.get_array("mip_var_values"), [1.0, 0.0, 1.0, 1.0, 0.0, 1.0])
 assert sample.get_scalar("mip_lower_bound") == 4.0
 assert sample.get_scalar("mip_upper_bound") == 4.0
@@ -70,7 +70,7 @@ def test_subtour() -> None:
 assert lazy_enforced is not None
 assert len(lazy_enforced) > 0
 assert_equals(
-sample.get_vector("mip_var_values"),
+sample.get_array("mip_var_values"),
 [
 1.0,
 0.0,

@@ -39,7 +39,7 @@ def test_learning_solver(
 sample = instance.get_samples()[0]
 assert_equals(
-sample.get_vector("mip_var_values"), [1.0, 0.0, 1.0, 1.0, 61.0]
+sample.get_array("mip_var_values"), [1.0, 0.0, 1.0, 1.0, 61.0]
 )
 assert sample.get_scalar("mip_lower_bound") == 1183.0
 assert sample.get_scalar("mip_upper_bound") == 1183.0
@@ -48,7 +48,7 @@ def test_learning_solver(
 assert len(mip_log) > 100
 assert_equals(
-sample.get_vector("lp_var_values"), [1.0, 0.923077, 1.0, 0.0, 67.0]
+sample.get_array("lp_var_values"), [1.0, 0.923077, 1.0, 0.0, 67.0]
 )
 assert_equals(sample.get_scalar("lp_value"), 1287.923077)
 lp_log = sample.get_scalar("lp_log")
