Implement a small subset of Alvarez2017 features

Branch: master
Author: Alinson S. Xavier, 5 years ago
Parent: 9ca4cc3c24
Commit: c39231cb18
GPG Key ID: DCA0DAD4D2F58624 (no known key found for this signature in database)

miplearn/features.py

@@ -5,6 +5,7 @@
import collections
import numbers
from dataclasses import dataclass
from math import log, isfinite
from typing import TYPE_CHECKING, Dict, Optional, Set, List, Hashable
from miplearn.types import Solution, VariableName, Category
@@ -53,6 +54,10 @@ class Variable:
    user_features: Optional[List[float]] = None
    value: Optional[float] = None

    # Alvarez, A. M., Louveaux, Q., & Wehenkel, L. (2017). A machine learning-based
    # approximation of strong branching. INFORMS Journal on Computing, 29(1), 185-195.
    alvarez_2017: Optional[List[float]] = None


@dataclass
class Constraint:
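The new alvarez_2017 field carries a per-variable feature vector in the spirit of the cited paper. As a rough sketch of how a consumer might use it (not part of this commit; the helper name and the zero-padding behavior are assumptions), the vectors can be stacked into a matrix for a downstream model:

import numpy as np

def build_alvarez_matrix(variables):
    # Collect the feature vectors computed by FeaturesExtractor below.
    rows = [
        v.alvarez_2017
        for v in variables.values()
        if v.alvarez_2017 is not None
    ]
    if not rows:
        return np.empty((0, 0))
    # Vectors can have different lengths, since features are appended
    # conditionally; shorter rows are zero-padded here (an assumption,
    # not something the extractor does).
    width = max(len(r) for r in rows)
    padded = [r + [0.0] * (width - len(r)) for r in rows]
    return np.array(padded, dtype=float)

Usage would be something like build_alvarez_matrix(instance.features.variables) after extraction.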
@@ -89,6 +94,7 @@ class FeaturesExtractor:
        self._extract_user_features_vars(instance)
        self._extract_user_features_constrs(instance)
        self._extract_user_features_instance(instance)
        self._extract_alvarez_2017(instance)

    def _extract_user_features_vars(self, instance: "Instance"):
        for (var_name, var) in instance.features.variables.items():
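extract() now finishes with _extract_alvarez_2017, which reads LP solution values and objective sensitivity ranges, so the LP relaxation must be solved before extraction. A minimal usage sketch, mirroring test_knapsack further down (the expected vector is the rounded value asserted there):

from miplearn.features import FeaturesExtractor
from miplearn.solvers.gurobi import GurobiSolver

solver = GurobiSolver()
instance = solver.build_test_instance_knapsack()
model = instance.to_model()
solver.set_instance(instance, model)
solver.solve_lp()  # required: the Alvarez features use LP values and SA ranges
FeaturesExtractor(solver).extract(instance)
print(instance.features.variables["x[0]"].alvarez_2017)
# approximately [1.0, 0.32899, 0.0, 0.0, 1.0, 1.0, 5.265874, 0.0]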
@@ -164,3 +170,68 @@ class FeaturesExtractor:
            user_features=user_features,
            lazy_constraint_count=lazy_count,
        )

    def _extract_alvarez_2017(self, instance: "Instance"):
        # Feature numbers below follow the numbering used in Alvarez,
        # Louveaux & Wehenkel (2017).
        assert instance.features is not None
        assert instance.features.variables is not None

        # Sums of positive and (absolute values of) negative objective
        # coefficients, used to normalize features 2 and 3.
        pos_obj_coeff_sum = 0.0
        neg_obj_coeff_sum = 0.0
        for (varname, var) in instance.features.variables.items():
            if var.obj_coeff is not None:
                if var.obj_coeff > 0:
                    pos_obj_coeff_sum += var.obj_coeff
                if var.obj_coeff < 0:
                    neg_obj_coeff_sum += -var.obj_coeff

        for (varname, var) in instance.features.variables.items():
            assert isinstance(var, Variable)
            features = []
            if var.obj_coeff is not None:
                # Feature 1: sign of the objective coefficient
                features.append(np.sign(var.obj_coeff))
                # Feature 2: relative magnitude among positive coefficients
                if pos_obj_coeff_sum > 0:
                    features.append(abs(var.obj_coeff) / pos_obj_coeff_sum)
                else:
                    features.append(0.0)
                # Feature 3: relative magnitude among negative coefficients
                if neg_obj_coeff_sum > 0:
                    features.append(abs(var.obj_coeff) / neg_obj_coeff_sum)
                else:
                    features.append(0.0)
            if var.value is not None:
                # Feature 37: fractionality of the LP solution value
                features.append(
                    min(
                        var.value - np.floor(var.value),
                        np.ceil(var.value) - var.value,
                    )
                )
            if var.sa_obj_up is not None:
                assert var.sa_obj_down is not None
                csign = np.sign(var.obj_coeff)
                # Features 44 and 46: signs of the objective sensitivity bounds
                features.append(np.sign(var.sa_obj_up))
                features.append(np.sign(var.sa_obj_down))
                # Feature 47: log of the sign-adjusted distance to the lower
                # objective sensitivity bound; infinite ranges map to 0.0
                f47 = log((var.obj_coeff - var.sa_obj_down) / csign)
                if isfinite(f47):
                    features.append(f47)
                else:
                    features.append(0.0)
                # Feature 48: log of the sign-adjusted distance to the upper
                # objective sensitivity bound; infinite ranges map to 0.0
                f48 = log((var.sa_obj_up - var.obj_coeff) / csign)
                if isfinite(f48):
                    features.append(f48)
                else:
                    features.append(0.0)
            var.alvarez_2017 = features
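A worked check of the formulas above, using the LP data asserted for x[1] in test_knapsack below; all numbers come from that test, only this standalone script is new:

from math import ceil, floor, log

obj_coeff = 352.0
pos_obj_coeff_sum = 505.0 + 352.0 + 458.0 + 220.0     # 1535.0; every coefficient is positive
value = 0.923077
sa_obj_down, sa_obj_up = 317.777778, 570.869565
csign = 1.0  # sign of obj_coeff

f1 = 1.0                                              # Feature 1: sign(obj_coeff)
f2 = obj_coeff / pos_obj_coeff_sum                    # Feature 2: ~0.229316
f3 = 0.0                                              # Feature 3: no negative coefficients
f37 = min(value - floor(value), ceil(value) - value)  # Feature 37: ~0.076923
f44, f46 = 1.0, 1.0                                   # Features 44, 46: signs of SA bounds
f47 = log((obj_coeff - sa_obj_down) / csign)          # Feature 47: log(34.222222) ~ 3.532875
f48 = log((sa_obj_up - obj_coeff) / csign)            # Feature 48: log(218.869565) ~ 5.388476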

miplearn/solvers/tests.py

@@ -3,6 +3,7 @@
# Released under the modified BSD license. See COPYING.md for more details.
from typing import Any, Dict
import numpy as np
from miplearn.features import Constraint, Variable
from miplearn.solvers.internal import InternalSolver
@@ -38,6 +39,8 @@ def _round_variables(vars: Dict[str, Variable]) -> Dict[str, Variable]:
        ]:
            if getattr(c, attr) is not None:
                setattr(c, attr, round(getattr(c, attr), 6))
        if c.alvarez_2017 is not None:
            c.alvarez_2017 = list(np.round(c.alvarez_2017, 6))
    return vars
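The list(np.round(...)) wrapper is there because np.round applied to a Python list returns an ndarray; converting back keeps the field a plain list, comparable to the literals asserted in the test below. A tiny illustration (the unrounded inputs are made up; the rounded outputs match x[0] in the test):

import numpy as np

rounded = np.round([5.2658741, 0.3289902], 6)  # ndarray with 6-decimal values
assert list(rounded) == [5.265874, 0.32899]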
@@ -395,4 +398,4 @@ def run_lazy_cb_tests(solver: InternalSolver) -> None:
def assert_equals(left: Any, right: Any) -> None:
    assert left == right, f"{left} != {right}"
    assert left == right, f"left:\n{left}\nright:\n{right}"
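The reworked message prints each side of the comparison on its own block, which is easier to scan when the large nested Variable and Constraint dictionaries below disagree. A hypothetical failure would read roughly:

# assert_equals({"x": 1.0}, {"x": 2.0}) now raises:
# AssertionError: left:
# {'x': 1.0}
# right:
# {'x': 2.0}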

tests/test_features.py

@@ -9,7 +9,9 @@ from miplearn.features import (
Constraint,
)
from miplearn.solvers.gurobi import GurobiSolver
from miplearn.solvers.tests import assert_equals
from miplearn.solvers.tests import assert_equals, _round_variables, _round_constraints
inf = float("inf")
def test_knapsack() -> None:
@@ -17,58 +19,127 @@ def test_knapsack() -> None:
instance = solver.build_test_instance_knapsack()
model = instance.to_model()
solver.set_instance(instance, model)
solver.solve_lp()
FeaturesExtractor(solver).extract(instance)
assert_equals(
instance.features.variables,
_round_variables(instance.features.variables),
{
"x[0]": Variable(
basis_status="U",
category="default",
lower_bound=0.0,
obj_coeff=505.0,
type="B",
reduced_cost=193.615385,
sa_lb_down=-inf,
sa_lb_up=1.0,
sa_obj_down=311.384615,
sa_obj_up=inf,
sa_ub_down=0.913043,
sa_ub_up=2.043478,
type="C",
upper_bound=1.0,
user_features=[23.0, 505.0],
value=1.0,
alvarez_2017=[1.0, 0.32899, 0.0, 0.0, 1.0, 1.0, 5.265874, 0.0],
),
"x[1]": Variable(
basis_status="B",
category="default",
lower_bound=0.0,
obj_coeff=352.0,
type="B",
reduced_cost=0.0,
sa_lb_down=-inf,
sa_lb_up=0.923077,
sa_obj_down=317.777778,
sa_obj_up=570.869565,
sa_ub_down=0.923077,
sa_ub_up=inf,
type="C",
upper_bound=1.0,
user_features=[26.0, 352.0],
value=0.923077,
alvarez_2017=[
1.0,
0.229316,
0.0,
0.076923,
1.0,
1.0,
3.532875,
5.388476,
],
),
"x[2]": Variable(
basis_status="U",
category="default",
lower_bound=0.0,
obj_coeff=458.0,
type="B",
reduced_cost=187.230769,
sa_lb_down=-inf,
sa_lb_up=1.0,
sa_obj_down=270.769231,
sa_obj_up=inf,
sa_ub_down=0.9,
sa_ub_up=2.2,
type="C",
upper_bound=1.0,
user_features=[20.0, 458.0],
value=1.0,
alvarez_2017=[
1.0,
0.298371,
0.0,
0.0,
1.0,
1.0,
5.232342,
0.0,
],
),
"x[3]": Variable(
basis_status="L",
category="default",
lower_bound=0.0,
obj_coeff=220.0,
type="B",
reduced_cost=-23.692308,
sa_lb_down=-0.111111,
sa_lb_up=1.0,
sa_obj_down=-inf,
sa_obj_up=243.692308,
sa_ub_down=0.0,
sa_ub_up=inf,
type="C",
upper_bound=1.0,
user_features=[18.0, 220.0],
value=0.0,
alvarez_2017=[
1.0,
0.143322,
0.0,
0.0,
1.0,
-1.0,
0.0,
3.16515,
],
),
},
)
assert_equals(
instance.features.constraints,
_round_constraints(instance.features.constraints),
{
"eq_capacity": Constraint(
lhs={
"x[0]": 23.0,
"x[1]": 26.0,
"x[2]": 20.0,
"x[3]": 18.0,
},
sense="<",
rhs=67.0,
lazy=False,
basis_status="N",
category="eq_capacity",
dual_value=13.538462,
lazy=False,
lhs={"x[0]": 23.0, "x[1]": 26.0, "x[2]": 20.0, "x[3]": 18.0},
rhs=67.0,
sa_rhs_down=43.0,
sa_rhs_up=69.0,
sense="<",
slack=0.0,
user_features=[0.0],
)
},
