Don't include intermediary features in sample; rename some keys

Branch: master
Author: Alinson S. Xavier, 4 years ago
Parent: 865a4b2f40
Commit: 10eed9b306
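The commit renames two sample keys and stops storing intermediary feature vectors altogether. As a rough reference for the renames applied in the hunks below (the dictionary and helper here are illustrative only, not part of the MIPLearn API):

# Key renames applied throughout this commit (illustrative summary).
# Intermediary keys such as "var_features_user", "var_features_AlvLouWeh2017",
# and "lp_var_features_AlvLouWeh2017" are dropped entirely rather than renamed.
RENAMED_KEYS = {
    "instance_features_user": "instance_features",
    "constr_features_user": "constr_features",
}

def migrate_keys(sample_dict: dict) -> dict:
    # Hypothetical helper: rewrite a plain-dict sample to the new key names.
    return {RENAMED_KEYS.get(k, k): v for k, v in sample_dict.items()}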

@@ -52,7 +52,7 @@ class DynamicConstraintsComponent(Component):
         cids: Dict[str, List[str]] = {}
         constr_categories_dict = instance.get_constraint_categories()
         constr_features_dict = instance.get_constraint_features()
-        instance_features = sample.get_vector("instance_features_user")
+        instance_features = sample.get_vector("instance_features")
         assert instance_features is not None
         for cid in self.known_cids:
             # Initialize categories

@@ -79,7 +79,7 @@ class ObjectiveValueComponent(Component):
     ) -> Tuple[Dict[str, List[List[float]]], Dict[str, List[List[float]]]]:
         lp_instance_features = sample.get_vector("lp_instance_features")
         if lp_instance_features is None:
-            lp_instance_features = sample.get_vector("instance_features_user")
+            lp_instance_features = sample.get_vector("instance_features")
         assert lp_instance_features is not None
         # Features

@@ -142,7 +142,7 @@ class PrimalSolutionComponent(Component):
     ) -> Tuple[Dict[Category, List[List[float]]], Dict[Category, List[List[float]]]]:
         x: Dict = {}
         y: Dict = {}
-        instance_features = sample.get_vector("instance_features_user")
+        instance_features = sample.get_vector("instance_features")
         mip_var_values = sample.get_vector("mip_var_values")
         var_features = sample.get_vector_list("lp_var_features")
         var_names = sample.get_vector("var_names")

@@ -204,14 +204,14 @@ class StaticLazyConstraintsComponent(Component):
         x: Dict[str, List[List[float]]] = {}
         y: Dict[str, List[List[float]]] = {}
         cids: Dict[str, List[str]] = {}
-        instance_features = sample.get_vector("instance_features_user")
+        instance_features = sample.get_vector("instance_features")
         constr_features = sample.get_vector_list("lp_constr_features")
         constr_names = sample.get_vector("constr_names")
         constr_categories = sample.get_vector("constr_categories")
         constr_lazy = sample.get_vector("constr_lazy")
         lazy_enforced = sample.get_set("lazy_enforced")
         if constr_features is None:
-            constr_features = sample.get_vector_list("constr_features_user")
+            constr_features = sample.get_vector_list("constr_features")
         assert instance_features is not None
         assert constr_features is not None
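The components above now read the renamed keys directly; ObjectiveValueComponent keeps its fallback from the LP feature vector to the static one. A minimal sketch of that fallback pattern, using a plain dict in place of the real Sample object (illustrative only):

from typing import Dict, List, Optional

def get_lp_or_static_features(sample: Dict[str, Optional[List[float]]]) -> List[float]:
    # Prefer the LP-augmented vector; fall back to the renamed static key.
    features = sample.get("lp_instance_features")
    if features is None:
        features = sample.get("instance_features")  # was "instance_features_user"
    assert features is not None
    return features

# Example: a sample recorded before the LP relaxation was solved.
print(get_lp_or_static_features({"instance_features": [5.0]}))  # [5.0]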

@@ -5,7 +5,7 @@
 import collections
 import numbers
 from math import log, isfinite
-from typing import TYPE_CHECKING, Dict, Optional, List, Any
+from typing import TYPE_CHECKING, Dict, Optional, List, Any, Tuple
 import numpy as np
@@ -42,16 +42,19 @@ class FeaturesExtractor:
         # sample.put("constr_lhs", constraints.lhs)
         sample.put_vector("constr_rhs", constraints.rhs)
         sample.put_vector("constr_senses", constraints.senses)
-        self._extract_user_features_vars(instance, sample)
+        vars_features_user, var_categories = self._extract_user_features_vars(
+            instance, sample
+        )
+        sample.put_vector("var_categories", var_categories)
         self._extract_user_features_constrs(instance, sample)
         self._extract_user_features_instance(instance, sample)
-        self._extract_var_features_AlvLouWeh2017(sample)
+        alw17 = self._extract_var_features_AlvLouWeh2017(sample)
         sample.put_vector_list(
             "var_features",
             self._combine(
                 [
-                    sample.get_vector_list("var_features_AlvLouWeh2017"),
-                    sample.get_vector_list("var_features_user"),
+                    alw17,
+                    vars_features_user,
                     sample.get_vector("var_lower_bounds"),
                     sample.get_vector("var_obj_coeffs"),
                     sample.get_vector("var_upper_bounds"),
@@ -80,12 +83,12 @@ class FeaturesExtractor:
         sample.put_vector("lp_constr_sa_rhs_down", constraints.sa_rhs_down)
         sample.put_vector("lp_constr_sa_rhs_up", constraints.sa_rhs_up)
         sample.put_vector("lp_constr_slacks", constraints.slacks)
-        self._extract_var_features_AlvLouWeh2017(sample, prefix="lp_")
+        alw17 = self._extract_var_features_AlvLouWeh2017(sample)
         sample.put_vector_list(
             "lp_var_features",
             self._combine(
                 [
-                    sample.get_vector_list("lp_var_features_AlvLouWeh2017"),
+                    alw17,
                     sample.get_vector("lp_var_reduced_costs"),
                     sample.get_vector("lp_var_sa_lb_down"),
                     sample.get_vector("lp_var_sa_lb_up"),
@@ -105,7 +108,7 @@ class FeaturesExtractor:
             "lp_constr_features",
             self._combine(
                 [
-                    sample.get_vector_list("constr_features_user"),
+                    sample.get_vector_list("constr_features"),
                     sample.get_vector("lp_constr_dual_values"),
                     sample.get_vector("lp_constr_sa_rhs_down"),
                     sample.get_vector("lp_constr_sa_rhs_up"),
@@ -113,11 +116,11 @@ class FeaturesExtractor:
                 ],
             ),
         )
-        instance_features_user = sample.get_vector("instance_features_user")
-        assert instance_features_user is not None
+        instance_features = sample.get_vector("instance_features")
+        assert instance_features is not None
         sample.put_vector(
             "lp_instance_features",
-            instance_features_user
+            instance_features
             + [
                 sample.get_scalar("lp_value"),
                 sample.get_scalar("lp_wallclock_time"),
@@ -138,7 +141,7 @@ class FeaturesExtractor:
         self,
         instance: "Instance",
         sample: Sample,
-    ) -> None:
+    ) -> Tuple[List, List]:
         categories: List[Optional[str]] = []
         user_features: List[Optional[List[float]]] = []
         var_features_dict = instance.get_variable_features()
@@ -174,8 +177,7 @@ class FeaturesExtractor:
             )
             user_features_i = list(user_features_i)
             user_features.append(user_features_i)
-        sample.put_vector("var_categories", categories)
-        sample.put_vector_list("var_features_user", user_features)
+        return user_features, categories

     def _extract_user_features_constrs(
         self,
@@ -224,7 +226,7 @@ class FeaturesExtractor:
                 lazy.append(instance.is_constraint_lazy(cname))
             else:
                 lazy.append(False)
-        sample.put_vector_list("constr_features_user", user_features)
+        sample.put_vector_list("constr_features", user_features)
         sample.put_vector("constr_lazy", lazy)
         sample.put_vector("constr_categories", categories)
@@ -247,16 +249,12 @@ class FeaturesExtractor:
         )
         constr_lazy = sample.get_vector("constr_lazy")
         assert constr_lazy is not None
-        sample.put_vector("instance_features_user", user_features)
+        sample.put_vector("instance_features", user_features)
         sample.put_scalar("static_lazy_count", sum(constr_lazy))

     # Alvarez, A. M., Louveaux, Q., & Wehenkel, L. (2017). A machine learning-based
     # approximation of strong branching. INFORMS Journal on Computing, 29(1), 185-195.
-    def _extract_var_features_AlvLouWeh2017(
-        self,
-        sample: Sample,
-        prefix: str = "",
-    ) -> None:
+    def _extract_var_features_AlvLouWeh2017(self, sample: Sample) -> List:
         obj_coeffs = sample.get_vector("var_obj_coeffs")
         obj_sa_down = sample.get_vector("lp_var_sa_obj_down")
         obj_sa_up = sample.get_vector("lp_var_sa_obj_up")
@@ -328,7 +326,7 @@ class FeaturesExtractor:
             for v in f:
                 assert isfinite(v), f"non-finite elements detected: {f}"
             features.append(f)
-        sample.put_vector_list(f"{prefix}var_features_AlvLouWeh2017", features)
+        return features

     def _combine(
         self,
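Taken together, the extractor hunks above switch the helpers from writing intermediary vectors into the sample to returning them, so only the combined matrices are stored. A simplified sketch of the new flow, with stand-in helpers and made-up values rather than the real FeaturesExtractor:

from typing import Dict, List

def extract_user_features_vars(n_vars: int) -> List[List[float]]:
    # Stand-in for instance.get_variable_features(); values are made up.
    return [[1.0, 2.0] for _ in range(n_vars)]

def extract_alvlouweh2017(n_vars: int) -> List[List[float]]:
    # Stand-in for the AlvLouWeh2017 branching features; values are made up.
    return [[0.0, 0.5, 1.0] for _ in range(n_vars)]

def combine(parts: List[List[List[float]]]) -> List[List[float]]:
    # Concatenate per-variable feature blocks, mirroring the role of _combine.
    return [sum(rows, []) for rows in zip(*parts)]

sample: Dict[str, List[List[float]]] = {}
user = extract_user_features_vars(3)
alw17 = extract_alvlouweh2017(3)
# Only the combined matrix is stored; the intermediaries stay local variables.
sample["var_features"] = combine([alw17, user])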

@@ -25,13 +25,13 @@ def training_instances() -> List[Instance]:
             MemorySample(
                 {
                     "lazy_enforced": {"c1", "c2"},
-                    "instance_features_user": [5.0],
+                    "instance_features": [5.0],
                 },
             ),
             MemorySample(
                 {
                     "lazy_enforced": {"c2", "c3"},
-                    "instance_features_user": [5.0],
+                    "instance_features": [5.0],
                 },
             ),
         ]
@@ -56,7 +56,7 @@ def training_instances() -> List[Instance]:
             MemorySample(
                 {
                     "lazy_enforced": {"c3", "c4"},
-                    "instance_features_user": [8.0],
+                    "instance_features": [8.0],
                 },
             )
         ]

@@ -25,7 +25,7 @@ def sample() -> Sample:
             "var_names": ["x[0]", "x[1]", "x[2]", "x[3]"],
             "var_categories": ["default", None, "default", "default"],
             "mip_var_values": [0.0, 1.0, 1.0, 0.0],
-            "instance_features_user": [5.0],
+            "instance_features": [5.0],
             "var_features": [
                 [0.0, 0.0],
                 None,

@@ -33,7 +33,7 @@ def sample() -> Sample:
             ],
             "constr_lazy": [True, True, True, True, False],
             "constr_names": ["c1", "c2", "c3", "c4", "c5"],
-            "instance_features_user": [5.0],
+            "instance_features": [5.0],
             "lazy_enforced": {"c1", "c2", "c4"},
             "lp_constr_features": [
                 [1.0, 1.0],

@@ -35,20 +35,6 @@ def test_knapsack() -> None:
         sample.get_vector("var_categories"),
         ["default", "default", "default", "default", None],
     )
-    assert_equals(
-        sample.get_vector_list("var_features_user"),
-        [[23.0, 505.0], [26.0, 352.0], [20.0, 458.0], [18.0, 220.0], None],
-    )
-    assert_equals(
-        sample.get_vector_list("var_features_AlvLouWeh2017"),
-        [
-            [1.0, 0.32899, 0.0],
-            [1.0, 0.229316, 0.0],
-            [1.0, 0.298371, 0.0],
-            [1.0, 0.143322, 0.0],
-            [0.0, 0.0, 0.0],
-        ],
-    )
     assert sample.get_vector_list("var_features") is not None
     assert_equals(sample.get_vector("constr_names"), ["eq_capacity"])
     # assert_equals(
@@ -65,10 +51,10 @@ def test_knapsack() -> None:
     # )
     assert_equals(sample.get_vector("constr_rhs"), [0.0])
     assert_equals(sample.get_vector("constr_senses"), ["="])
-    assert_equals(sample.get_vector("constr_features_user"), [None])
+    assert_equals(sample.get_vector("constr_features"), [None])
     assert_equals(sample.get_vector("constr_categories"), ["eq_capacity"])
     assert_equals(sample.get_vector("constr_lazy"), [False])
-    assert_equals(sample.get_vector("instance_features_user"), [67.0, 21.75])
+    assert_equals(sample.get_vector("instance_features"), [67.0, 21.75])
     assert_equals(sample.get_scalar("static_lazy_count"), 0)

     # after-lp
@@ -104,16 +90,6 @@ def test_knapsack() -> None:
     )
     assert_equals(sample.get_vector("lp_var_sa_ub_up"), [2.043478, inf, 2.2, inf, 69.0])
     assert_equals(sample.get_vector("lp_var_values"), [1.0, 0.923077, 1.0, 0.0, 67.0])
-    assert_equals(
-        sample.get_vector_list("lp_var_features_AlvLouWeh2017"),
-        [
-            [1.0, 0.32899, 0.0, 0.0, 1.0, 1.0, 5.265874, 46.051702],
-            [1.0, 0.229316, 0.0, 0.076923, 1.0, 1.0, 3.532875, 5.388476],
-            [1.0, 0.298371, 0.0, 0.0, 1.0, 1.0, 5.232342, 46.051702],
-            [1.0, 0.143322, 0.0, 0.0, 1.0, -1.0, 46.051702, 3.16515],
-            [0.0, 0.0, 0.0, 0.0, 1.0, -1.0, 0.0, 0.0],
-        ],
-    )
     assert sample.get_vector_list("lp_var_features") is not None
     assert_equals(sample.get_vector("lp_constr_basis_status"), ["N"])
     assert_equals(sample.get_vector("lp_constr_dual_values"), [13.538462])

@@ -26,7 +26,7 @@ def _test_sample(sample: Sample) -> None:
     _assert_roundtrip_scalar(sample, 1.0)

     # Vector
-    _assert_roundtrip_vector(sample, ["A", "BB", "CCC", "こんにちは", None])
+    _assert_roundtrip_vector(sample, ["A", "BB", "CCC", None])
     _assert_roundtrip_vector(sample, [True, True, False])
     _assert_roundtrip_vector(sample, [1, 2, 3])
     _assert_roundtrip_vector(sample, [1.0, 2.0, 3.0])
