Remove unused classes and functions

master
Alinson S. Xavier 4 years ago
parent cd9e5d4144
commit c8c29138ca

@@ -4,7 +4,6 @@
 import collections
 import numbers
-from copy import copy
 from dataclasses import dataclass
 from math import log, isfinite
 from typing import TYPE_CHECKING, Dict, Optional, List, Hashable, Tuple, Any
@@ -12,28 +11,14 @@ from typing import TYPE_CHECKING, Dict, Optional, List, Hashable, Tuple, Any
 import numpy as np

 if TYPE_CHECKING:
-    from miplearn.solvers.internal import InternalSolver, LPSolveStats, MIPSolveStats
+    from miplearn.solvers.internal import InternalSolver
     from miplearn.instance.base import Instance


-@dataclass
-class InstanceFeatures:
-    user_features: Optional[List[float]] = None
-    lazy_constraint_count: int = 0
-
-    def to_list(self) -> List[float]:
-        features: List[float] = []
-        if self.user_features is not None:
-            features.extend(self.user_features)
-        _clip(features)
-        return features
-
-
 @dataclass
 class VariableFeatures:
     names: Optional[List[str]] = None
     basis_status: Optional[List[str]] = None
-    categories: Optional[List[Optional[Hashable]]] = None
     lower_bounds: Optional[List[float]] = None
     obj_coeffs: Optional[List[float]] = None
     reduced_costs: Optional[List[float]] = None
@@ -45,42 +30,12 @@ class VariableFeatures:
     sa_ub_up: Optional[List[float]] = None
     types: Optional[List[str]] = None
     upper_bounds: Optional[List[float]] = None
-    user_features: Optional[List[Optional[List[float]]]] = None
     values: Optional[List[float]] = None

-    # Alvarez, A. M., Louveaux, Q., & Wehenkel, L. (2017). A machine learning-based
-    # approximation of strong branching. INFORMS Journal on Computing, 29(1), 185-195.
-    alvarez_2017: Optional[List[List[float]]] = None
-
-    def to_list(self, index: int) -> List[float]:
-        features: List[float] = []
-        for attr in [
-            "lower_bounds",
-            "obj_coeffs",
-            "reduced_costs",
-            "sa_lb_down",
-            "sa_lb_up",
-            "sa_obj_down",
-            "sa_obj_up",
-            "sa_ub_down",
-            "sa_ub_up",
-            "upper_bounds",
-            "values",
-        ]:
-            if getattr(self, attr) is not None:
-                features.append(getattr(self, attr)[index])
-        for attr in ["user_features", "alvarez_2017"]:
-            if getattr(self, attr) is not None:
-                if getattr(self, attr)[index] is not None:
-                    features.extend(getattr(self, attr)[index])
-        _clip(features)
-        return features
-
-
 @dataclass
 class ConstraintFeatures:
     basis_status: Optional[List[str]] = None
-    categories: Optional[List[Optional[Hashable]]] = None
     dual_values: Optional[List[float]] = None
     lazy: Optional[List[bool]] = None
     lhs: Optional[List[List[Tuple[str, float]]]] = None
@@ -90,13 +45,11 @@ class ConstraintFeatures:
     sa_rhs_up: Optional[List[float]] = None
     senses: Optional[List[str]] = None
     slacks: Optional[List[float]] = None
-    user_features: Optional[List[Optional[List[float]]]] = None

     @staticmethod
     def from_sample(sample: "Sample") -> "ConstraintFeatures":
         return ConstraintFeatures(
             basis_status=sample.get("lp_constr_basis_status"),
-            categories=sample.get("constr_categories"),
             dual_values=sample.get("lp_constr_dual_values"),
             lazy=sample.get("constr_lazy"),
             lhs=sample.get("constr_lhs"),
@@ -106,29 +59,11 @@ class ConstraintFeatures:
             sa_rhs_up=sample.get("lp_constr_sa_rhs_up"),
             senses=sample.get("constr_senses"),
             slacks=sample.get("lp_constr_slacks"),
-            user_features=sample.get("constr_features_user"),
         )

-    def to_list(self, index: int) -> List[float]:
-        features: List[float] = []
-        for attr in [
-            "dual_values",
-            "rhs",
-            "slacks",
-        ]:
-            if getattr(self, attr) is not None:
-                features.append(getattr(self, attr)[index])
-        for attr in ["user_features"]:
-            if getattr(self, attr) is not None:
-                if getattr(self, attr)[index] is not None:
-                    features.extend(getattr(self, attr)[index])
-        _clip(features)
-        return features
-
     def __getitem__(self, selected: List[bool]) -> "ConstraintFeatures":
         return ConstraintFeatures(
             basis_status=self._filter(self.basis_status, selected),
-            categories=self._filter(self.categories, selected),
             dual_values=self._filter(self.dual_values, selected),
             names=self._filter(self.names, selected),
             lazy=self._filter(self.lazy, selected),
@@ -138,7 +73,6 @@ class ConstraintFeatures:
             sa_rhs_up=self._filter(self.sa_rhs_up, selected),
             senses=self._filter(self.senses, selected),
             slacks=self._filter(self.slacks, selected),
-            user_features=self._filter(self.user_features, selected),
         )
     def _filter(
@@ -151,15 +85,6 @@ class ConstraintFeatures:
         return [obj[i] for (i, selected_i) in enumerate(selected) if selected_i]


-@dataclass
-class Features:
-    instance: Optional[InstanceFeatures] = None
-    variables: Optional[VariableFeatures] = None
-    constraints: Optional[ConstraintFeatures] = None
-    lp_solve: Optional["LPSolveStats"] = None
-    mip_solve: Optional["MIPSolveStats"] = None
-
-
 class Sample:
     def __init__(
         self,
@@ -300,29 +225,6 @@ class FeaturesExtractor:
         sample.put("mip_var_values", variables.values)
         sample.put("mip_constr_slacks", constraints.slacks)

-    def extract(
-        self,
-        instance: "Instance",
-        solver: "InternalSolver",
-        with_static: bool = True,
-    ) -> Features:
-        features = Features()
-        features.variables = solver.get_variables(
-            with_static=with_static,
-            with_sa=self.with_sa,
-        )
-        features.constraints = solver.get_constraints(
-            with_static=with_static,
-            with_sa=self.with_sa,
-            with_lhs=self.with_lhs,
-        )
-        if with_static:
-            self._extract_user_features_vars_old(instance, features)
-            self._extract_user_features_constrs_old(instance, features)
-            self._extract_user_features_instance_old(instance, features)
-            self._extract_alvarez_2017_old(features)
-        return features
-
     def _extract_user_features_vars(
         self,
         instance: "Instance",
@@ -417,101 +319,6 @@ class FeaturesExtractor:
         sample.put("constr_lazy", lazy)
         sample.put("constr_categories", categories)

-    def _extract_user_features_vars_old(
-        self,
-        instance: "Instance",
-        features: Features,
-    ) -> None:
-        assert features.variables is not None
-        assert features.variables.names is not None
-        categories: List[Optional[Hashable]] = []
-        user_features: List[Optional[List[float]]] = []
-        var_features_dict = instance.get_variable_features()
-        var_categories_dict = instance.get_variable_categories()
-        for (i, var_name) in enumerate(features.variables.names):
-            if var_name not in var_categories_dict:
-                user_features.append(None)
-                categories.append(None)
-                continue
-            category: Hashable = var_categories_dict[var_name]
-            assert isinstance(category, collections.Hashable), (
-                f"Variable category must be hashable. "
-                f"Found {type(category).__name__} instead for var={var_name}."
-            )
-            categories.append(category)
-            user_features_i: Optional[List[float]] = None
-            if var_name in var_features_dict:
-                user_features_i = var_features_dict[var_name]
-                if isinstance(user_features_i, np.ndarray):
-                    user_features_i = user_features_i.tolist()
-                assert isinstance(user_features_i, list), (
-                    f"Variable features must be a list. "
-                    f"Found {type(user_features_i).__name__} instead for "
-                    f"var={var_name}."
-                )
-                for v in user_features_i:
-                    assert isinstance(v, numbers.Real), (
-                        f"Variable features must be a list of numbers. "
-                        f"Found {type(v).__name__} instead "
-                        f"for var={var_name}."
-                    )
-                user_features_i = list(user_features_i)
-            user_features.append(user_features_i)
-        features.variables.categories = categories
-        features.variables.user_features = user_features
-
-    def _extract_user_features_constrs_old(
-        self,
-        instance: "Instance",
-        features: Features,
-    ) -> None:
-        assert features.constraints is not None
-        assert features.constraints.names is not None
-        has_static_lazy = instance.has_static_lazy_constraints()
-        user_features: List[Optional[List[float]]] = []
-        categories: List[Optional[Hashable]] = []
-        lazy: List[bool] = []
-        constr_categories_dict = instance.get_constraint_categories()
-        constr_features_dict = instance.get_constraint_features()
-        for (cidx, cname) in enumerate(features.constraints.names):
-            category: Optional[Hashable] = cname
-            if cname in constr_categories_dict:
-                category = constr_categories_dict[cname]
-            if category is None:
-                user_features.append(None)
-                categories.append(None)
-                continue
-            assert isinstance(category, collections.Hashable), (
-                f"Constraint category must be hashable. "
-                f"Found {type(category).__name__} instead for cname={cname}.",
-            )
-            categories.append(category)
-            cf: Optional[List[float]] = None
-            if cname in constr_features_dict:
-                cf = constr_features_dict[cname]
-                if isinstance(cf, np.ndarray):
-                    cf = cf.tolist()
-                assert isinstance(cf, list), (
-                    f"Constraint features must be a list. "
-                    f"Found {type(cf).__name__} instead for cname={cname}."
-                )
-                for f in cf:
-                    assert isinstance(f, numbers.Real), (
-                        f"Constraint features must be a list of numbers. "
-                        f"Found {type(f).__name__} instead for cname={cname}."
-                    )
-                cf = list(cf)
-            user_features.append(cf)
-            if has_static_lazy:
-                lazy.append(instance.is_constraint_lazy(cname))
-            else:
-                lazy.append(False)
-        features.constraints.user_features = user_features
-        features.constraints.lazy = lazy
-        features.constraints.categories = categories
-
     def _extract_user_features_instance(
         self,
         instance: "Instance",
@@ -534,106 +341,6 @@ class FeaturesExtractor:
         sample.put("instance_features_user", user_features)
         sample.put("static_lazy_count", sum(constr_lazy))

-    def _extract_user_features_instance_old(
-        self,
-        instance: "Instance",
-        features: Features,
-    ) -> None:
-        user_features = instance.get_instance_features()
-        if isinstance(user_features, np.ndarray):
-            user_features = user_features.tolist()
-        assert isinstance(user_features, list), (
-            f"Instance features must be a list. "
-            f"Found {type(user_features).__name__} instead."
-        )
-        for v in user_features:
-            assert isinstance(v, numbers.Real), (
-                f"Instance features must be a list of numbers. "
-                f"Found {type(v).__name__} instead."
-            )
-        assert features.constraints is not None
-        assert features.constraints.lazy is not None
-        features.instance = InstanceFeatures(
-            user_features=user_features,
-            lazy_constraint_count=sum(features.constraints.lazy),
-        )
-
-    def _extract_alvarez_2017_old(self, features: Features) -> None:
-        assert features.variables is not None
-        assert features.variables.names is not None
-
-        obj_coeffs = features.variables.obj_coeffs
-        obj_sa_down = features.variables.sa_obj_down
-        obj_sa_up = features.variables.sa_obj_up
-        values = features.variables.values
-
-        pos_obj_coeff_sum = 0.0
-        neg_obj_coeff_sum = 0.0
-        if obj_coeffs is not None:
-            for coeff in obj_coeffs:
-                if coeff > 0:
-                    pos_obj_coeff_sum += coeff
-                if coeff < 0:
-                    neg_obj_coeff_sum += -coeff
-
-        features.variables.alvarez_2017 = []
-        for i in range(len(features.variables.names)):
-            f: List[float] = []
-            if obj_coeffs is not None:
-                # Feature 1
-                f.append(np.sign(obj_coeffs[i]))
-                # Feature 2
-                if pos_obj_coeff_sum > 0:
-                    f.append(abs(obj_coeffs[i]) / pos_obj_coeff_sum)
-                else:
-                    f.append(0.0)
-                # Feature 3
-                if neg_obj_coeff_sum > 0:
-                    f.append(abs(obj_coeffs[i]) / neg_obj_coeff_sum)
-                else:
-                    f.append(0.0)
-            if values is not None:
-                # Feature 37
-                f.append(
-                    min(
-                        values[i] - np.floor(values[i]),
-                        np.ceil(values[i]) - values[i],
-                    )
-                )
-            if obj_sa_up is not None:
-                assert obj_sa_down is not None
-                assert obj_coeffs is not None
-
-                # Convert inf into large finite numbers
-                sd = max(-1e20, obj_sa_down[i])
-                su = min(1e20, obj_sa_up[i])
-                obj = obj_coeffs[i]
-
-                # Features 44 and 46
-                f.append(np.sign(obj_sa_up[i]))
-                f.append(np.sign(obj_sa_down[i]))
-
-                # Feature 47
-                csign = np.sign(obj)
-                if csign != 0 and ((obj - sd) / csign) > 0.001:
-                    f.append(log((obj - sd) / csign))
-                else:
-                    f.append(0.0)
-
-                # Feature 48
-                if csign != 0 and ((su - obj) / csign) > 0.001:
-                    f.append(log((su - obj) / csign))
-                else:
-                    f.append(0.0)
-
-            for v in f:
-                assert isfinite(v), f"non-finite elements detected: {f}"
-            features.variables.alvarez_2017.append(f)
-
     # Alvarez, A. M., Louveaux, Q., & Wehenkel, L. (2017). A machine learning-based
     # approximation of strong branching. INFORMS Journal on Computing, 29(1), 185-195.
     def _extract_var_features_AlvLouWeh2017(

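Note: the Alvarez et al. (2017) feature logic deleted above survives in condensed form in `_extract_var_features_AlvLouWeh2017`. For reference, here is a minimal standalone sketch of what the removed `_extract_alvarez_2017_old` computed per variable; the function name and argument layout are illustrative only (not MIPLearn API), and the feature numbering follows the paper, as in the original comments.

from math import log, isfinite
from typing import List

import numpy as np


def alvarez_2017_features(
    obj_coeffs: List[float],
    sa_obj_down: List[float],
    sa_obj_up: List[float],
    values: List[float],
    i: int,
) -> List[float]:
    # Features 2-3 normalize |c_i| by the total positive/negative coefficient mass.
    pos_sum = sum(c for c in obj_coeffs if c > 0)
    neg_sum = sum(-c for c in obj_coeffs if c < 0)
    f: List[float] = []
    # Feature 1: sign of the objective coefficient.
    f.append(float(np.sign(obj_coeffs[i])))
    f.append(abs(obj_coeffs[i]) / pos_sum if pos_sum > 0 else 0.0)
    f.append(abs(obj_coeffs[i]) / neg_sum if neg_sum > 0 else 0.0)
    # Feature 37: fractionality of the LP relaxation value.
    f.append(min(values[i] - np.floor(values[i]), np.ceil(values[i]) - values[i]))
    # Features 44 and 46: signs of the objective sensitivity range endpoints.
    f.append(float(np.sign(sa_obj_up[i])))
    f.append(float(np.sign(sa_obj_down[i])))
    # Features 47-48: log-scaled gap between c_i and each endpoint,
    # after clipping infinities to +/-1e20 as the removed code did.
    sd, su = max(-1e20, sa_obj_down[i]), min(1e20, sa_obj_up[i])
    obj, csign = obj_coeffs[i], np.sign(obj_coeffs[i])
    f.append(log((obj - sd) / csign) if csign != 0 and (obj - sd) / csign > 0.001 else 0.0)
    f.append(log((su - obj) / csign) if csign != 0 and (su - obj) / csign > 0.001 else 0.0)
    assert all(isfinite(v) for v in f), f"non-finite elements detected: {f}"
    return f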
@@ -171,7 +171,6 @@ class LearningSolver:
             self.extractor.extract_after_load_features(
                 instance, self.internal_solver, sample
             )
-            features = self.extractor.extract(instance, self.internal_solver)
             logger.info(
                 "Features (after-load) extracted in %.2f seconds"
                 % (time.time() - initial_time)

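Note: with the duplicate `extract` call removed from `LearningSolver`, feature access goes exclusively through the `Sample` key/value store that `extract_after_load_features` populates (keys such as "instance_features_user" and "static_lazy_count" appear in the surviving hunks). A toy stand-in showing the `put`/`get` contract; `MiniSample` and its dict backing are assumptions for illustration, not the real class.

from typing import Any, Dict, Optional


class MiniSample:
    # Dict-backed stand-in for miplearn.features.Sample (illustrative only).
    def __init__(self) -> None:
        self._data: Dict[str, Any] = {}

    def put(self, key: str, value: Any) -> None:
        self._data[key] = value

    def get(self, key: str) -> Optional[Any]:
        return self._data.get(key)


# Replaces the old Features-object access pattern:
#   features.instance.user_features  ->  sample.get("instance_features_user")
sample = MiniSample()
sample.put("instance_features_user", [67.0, 21.75])
assert sample.get("instance_features_user") == [67.0, 21.75]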
@@ -11,11 +11,7 @@ from miplearn.classifiers import Classifier
 from miplearn.classifiers.threshold import MinProbabilityThreshold
 from miplearn.components import classifier_evaluation_dict
 from miplearn.components.dynamic_lazy import DynamicLazyConstraintsComponent
-from miplearn.features import (
-    Features,
-    InstanceFeatures,
-    Sample,
-)
+from miplearn.features import Sample
 from miplearn.instance.base import Instance
 from miplearn.solvers.tests import assert_equals

@@ -12,12 +12,7 @@ from miplearn.classifiers import Classifier
 from miplearn.classifiers.threshold import Threshold
 from miplearn.components import classifier_evaluation_dict
 from miplearn.components.primal import PrimalSolutionComponent
-from miplearn.features import (
-    Features,
-    Sample,
-    InstanceFeatures,
-    VariableFeatures,
-)
+from miplearn.features import Sample
 from miplearn.problems.tsp import TravelingSalesmanGenerator
 from miplearn.solvers.learning import LearningSolver
 from miplearn.solvers.tests import assert_equals

@@ -11,12 +11,7 @@ from numpy.testing import assert_array_equal
 from miplearn.classifiers import Classifier
 from miplearn.classifiers.threshold import Threshold, MinProbabilityThreshold
 from miplearn.components.static_lazy import StaticLazyConstraintsComponent
-from miplearn.features import (
-    InstanceFeatures,
-    Features,
-    Sample,
-    ConstraintFeatures,
-)
+from miplearn.features import Sample, ConstraintFeatures
 from miplearn.instance.base import Instance
 from miplearn.solvers.internal import InternalSolver
 from miplearn.solvers.learning import LearningSolver

@@ -6,7 +6,6 @@ import numpy as np
 from miplearn.features import (
     FeaturesExtractor,
-    InstanceFeatures,
     VariableFeatures,
     ConstraintFeatures,
     Sample,
@@ -128,15 +127,6 @@ def test_knapsack() -> None:
     assert_equals(sample.get("mip_var_values"), [1.0, 0.0, 1.0, 1.0, 61.0])
     assert_equals(sample.get("mip_constr_slacks"), [0.0])

-    features = extractor.extract(instance, solver)
-    assert_equals(
-        features.instance,
-        InstanceFeatures(
-            user_features=[67.0, 21.75],
-            lazy_constraint_count=0,
-        ),
-    )
-

 def test_constraint_getindex() -> None:
     cf = ConstraintFeatures(

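Note: `test_constraint_getindex`, kept above, covers `ConstraintFeatures.__getitem__`, which applies one boolean mask to every per-constraint list through `_filter`. A self-contained sketch of that idiom; `filter_by_mask` is a hypothetical name, the comprehension is taken verbatim from the diff, and the `None` pass-through is assumed from `_filter`'s use on `Optional` fields.

from typing import List, Optional, TypeVar

T = TypeVar("T")


def filter_by_mask(obj: Optional[List[T]], selected: List[bool]) -> Optional[List[T]]:
    # None fields pass through; otherwise keep obj[i] wherever selected[i] is True.
    if obj is None:
        return None
    return [obj[i] for (i, selected_i) in enumerate(selected) if selected_i]


assert filter_by_mask(["c1", "c2", "c3"], [True, False, True]) == ["c1", "c3"]
assert filter_by_mask(None, [True, True, False]) is None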