commit
6a01c98c07
@ -1,384 +0,0 @@
|
|||||||
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
|
|
||||||
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
|
|
||||||
# Released under the modified BSD license. See COPYING.md for more details.
|
|
||||||
|
|
||||||
import collections
|
|
||||||
import numbers
|
|
||||||
from dataclasses import dataclass
|
|
||||||
from math import log, isfinite
|
|
||||||
from typing import TYPE_CHECKING, Dict, Optional, List, Hashable, Tuple
|
|
||||||
|
|
||||||
import numpy as np
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from miplearn.solvers.internal import InternalSolver, LPSolveStats, MIPSolveStats
|
|
||||||
from miplearn.instance.base import Instance
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
class InstanceFeatures:
    """Features describing a problem instance as a whole."""

    # User-provided instance-level features, or None if not supplied.
    user_features: Optional[List[float]] = None
    # Number of constraints marked as lazy in this instance.
    lazy_constraint_count: int = 0

    def to_list(self) -> List[float]:
        """Flatten these features into a single list of floats.

        Non-finite entries are clipped in place by ``_clip`` before returning.
        """
        result: List[float] = []
        if self.user_features is not None:
            result += self.user_features
        _clip(result)
        return result
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
class VariableFeatures:
    """Per-variable features, stored as parallel columns indexed by variable."""

    names: Optional[List[str]] = None
    basis_status: Optional[List[str]] = None
    categories: Optional[List[Optional[Hashable]]] = None
    lower_bounds: Optional[List[float]] = None
    obj_coeffs: Optional[List[float]] = None
    reduced_costs: Optional[List[float]] = None
    sa_lb_down: Optional[List[float]] = None
    sa_lb_up: Optional[List[float]] = None
    sa_obj_down: Optional[List[float]] = None
    sa_obj_up: Optional[List[float]] = None
    sa_ub_down: Optional[List[float]] = None
    sa_ub_up: Optional[List[float]] = None
    types: Optional[List[str]] = None
    upper_bounds: Optional[List[float]] = None
    user_features: Optional[List[Optional[List[float]]]] = None
    values: Optional[List[float]] = None

    # Alvarez, A. M., Louveaux, Q., & Wehenkel, L. (2017). A machine learning-based
    # approximation of strong branching. INFORMS Journal on Computing, 29(1), 185-195.
    alvarez_2017: Optional[List[List[float]]] = None

    def to_list(self, index: int) -> List[float]:
        """Collect every available numeric feature of variable `index` into a flat list.

        Scalar columns contribute one entry each; list-valued columns
        (user_features, alvarez_2017) are expanded in place. Non-finite
        entries are clipped by ``_clip``.
        """
        result: List[float] = []
        scalar_columns = (
            "lower_bounds",
            "obj_coeffs",
            "reduced_costs",
            "sa_lb_down",
            "sa_lb_up",
            "sa_obj_down",
            "sa_obj_up",
            "sa_ub_down",
            "sa_ub_up",
            "upper_bounds",
            "values",
        )
        for name in scalar_columns:
            column = getattr(self, name)
            if column is not None:
                result.append(column[index])
        for name in ("user_features", "alvarez_2017"):
            rows = getattr(self, name)
            if rows is not None and rows[index] is not None:
                result.extend(rows[index])
        _clip(result)
        return result
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
class ConstraintFeatures:
    """Per-constraint features, stored as parallel columns indexed by constraint."""

    basis_status: Optional[List[str]] = None
    categories: Optional[List[Optional[Hashable]]] = None
    dual_values: Optional[List[float]] = None
    names: Optional[List[str]] = None
    lazy: Optional[List[bool]] = None
    lhs: Optional[List[List[Tuple[str, float]]]] = None
    rhs: Optional[List[float]] = None
    sa_rhs_down: Optional[List[float]] = None
    sa_rhs_up: Optional[List[float]] = None
    senses: Optional[List[str]] = None
    slacks: Optional[List[float]] = None
    user_features: Optional[List[Optional[List[float]]]] = None

    def to_list(self, index: int) -> List[float]:
        """Collect every available numeric feature of constraint `index` into a flat list.

        Scalar columns contribute one entry each; user_features is expanded in
        place. Non-finite entries are clipped by ``_clip``.
        """
        result: List[float] = []
        for name in ("dual_values", "rhs", "slacks"):
            column = getattr(self, name)
            if column is not None:
                result.append(column[index])
        for name in ("user_features",):
            rows = getattr(self, name)
            if rows is not None and rows[index] is not None:
                result.extend(rows[index])
        _clip(result)
        return result

    def __getitem__(self, selected: List[bool]) -> "ConstraintFeatures":
        """Return a new ConstraintFeatures keeping only rows where `selected` is True."""
        keep = self._filter
        return ConstraintFeatures(
            basis_status=keep(self.basis_status, selected),
            categories=keep(self.categories, selected),
            dual_values=keep(self.dual_values, selected),
            names=keep(self.names, selected),
            lazy=keep(self.lazy, selected),
            lhs=keep(self.lhs, selected),
            rhs=keep(self.rhs, selected),
            sa_rhs_down=keep(self.sa_rhs_down, selected),
            sa_rhs_up=keep(self.sa_rhs_up, selected),
            senses=keep(self.senses, selected),
            slacks=keep(self.slacks, selected),
            user_features=keep(self.user_features, selected),
        )

    def _filter(
        self,
        obj: Optional[List],
        selected: List[bool],
    ) -> Optional[List]:
        """Keep obj[i] for each True entry of `selected`; pass None through unchanged."""
        if obj is None:
            return None
        return [obj[pos] for (pos, keep) in enumerate(selected) if keep]
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
class Features:
    """Bundle of all features describing one snapshot of the solution process."""

    # Instance-level (global) features.
    instance: Optional[InstanceFeatures] = None
    # Per-variable features.
    variables: Optional[VariableFeatures] = None
    # Per-constraint features.
    constraints: Optional[ConstraintFeatures] = None
    # Solver statistics collected after solving the LP relaxation.
    lp_solve: Optional["LPSolveStats"] = None
    # Solver statistics collected after solving the MIP.
    mip_solve: Optional["MIPSolveStats"] = None
    # Free-form additional data.
    extra: Optional[Dict] = None
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
class Sample:
    """One training sample: feature snapshots taken at three points in time."""

    # Features available right after the model is loaded (static information only).
    after_load: Optional[Features] = None
    # Features available after the LP relaxation has been solved.
    after_lp: Optional[Features] = None
    # Features available after the MIP has been solved.
    after_mip: Optional[Features] = None
|
|
||||||
|
|
||||||
|
|
||||||
class FeaturesExtractor:
    """Queries an internal solver and a problem instance, and assembles a
    ``Features`` object describing variables, constraints and the instance."""

    def __init__(
        self,
        with_sa: bool = True,
        with_lhs: bool = True,
    ) -> None:
        # Whether to query sensitivity-analysis information from the solver.
        self.with_sa = with_sa
        # Whether to query the constraint left-hand sides from the solver.
        self.with_lhs = with_lhs

    def extract(
        self,
        instance: "Instance",
        solver: "InternalSolver",
        with_static: bool = True,
    ) -> Features:
        """Build and return a Features object for `instance` using `solver`.

        When `with_static` is True, static model data and user-provided
        variable/constraint features are also extracted.
        """
        result = Features(
            variables=solver.get_variables(
                with_static=with_static,
                with_sa=self.with_sa,
            ),
            constraints=solver.get_constraints(
                with_static=with_static,
                with_sa=self.with_sa,
                with_lhs=self.with_lhs,
            ),
        )
        if with_static:
            self._extract_user_features_vars(instance, result)
            self._extract_user_features_constrs(instance, result)
        self._extract_user_features_instance(instance, result)
        self._extract_alvarez_2017(result)
        return result
|
|
||||||
|
|
||||||
def _extract_user_features_vars(
|
|
||||||
self,
|
|
||||||
instance: "Instance",
|
|
||||||
features: Features,
|
|
||||||
) -> None:
|
|
||||||
assert features.variables is not None
|
|
||||||
assert features.variables.names is not None
|
|
||||||
categories: List[Optional[Hashable]] = []
|
|
||||||
user_features: List[Optional[List[float]]] = []
|
|
||||||
var_features_dict = instance.get_variable_features()
|
|
||||||
var_categories_dict = instance.get_variable_categories()
|
|
||||||
|
|
||||||
for (i, var_name) in enumerate(features.variables.names):
|
|
||||||
if var_name not in var_categories_dict:
|
|
||||||
user_features.append(None)
|
|
||||||
categories.append(None)
|
|
||||||
continue
|
|
||||||
category: Hashable = var_categories_dict[var_name]
|
|
||||||
assert isinstance(category, collections.Hashable), (
|
|
||||||
f"Variable category must be be hashable. "
|
|
||||||
f"Found {type(category).__name__} instead for var={var_name}."
|
|
||||||
)
|
|
||||||
categories.append(category)
|
|
||||||
user_features_i: Optional[List[float]] = None
|
|
||||||
if var_name in var_features_dict:
|
|
||||||
user_features_i = var_features_dict[var_name]
|
|
||||||
if isinstance(user_features_i, np.ndarray):
|
|
||||||
user_features_i = user_features_i.tolist()
|
|
||||||
assert isinstance(user_features_i, list), (
|
|
||||||
f"Variable features must be a list. "
|
|
||||||
f"Found {type(user_features_i).__name__} instead for "
|
|
||||||
f"var={var_name}."
|
|
||||||
)
|
|
||||||
for v in user_features_i:
|
|
||||||
assert isinstance(v, numbers.Real), (
|
|
||||||
f"Variable features must be a list of numbers. "
|
|
||||||
f"Found {type(v).__name__} instead "
|
|
||||||
f"for var={var_name}."
|
|
||||||
)
|
|
||||||
user_features_i = list(user_features_i)
|
|
||||||
user_features.append(user_features_i)
|
|
||||||
features.variables.categories = categories
|
|
||||||
features.variables.user_features = user_features
|
|
||||||
|
|
||||||
def _extract_user_features_constrs(
|
|
||||||
self,
|
|
||||||
instance: "Instance",
|
|
||||||
features: Features,
|
|
||||||
) -> None:
|
|
||||||
assert features.constraints is not None
|
|
||||||
assert features.constraints.names is not None
|
|
||||||
has_static_lazy = instance.has_static_lazy_constraints()
|
|
||||||
user_features: List[Optional[List[float]]] = []
|
|
||||||
categories: List[Optional[Hashable]] = []
|
|
||||||
lazy: List[bool] = []
|
|
||||||
constr_categories_dict = instance.get_constraint_categories()
|
|
||||||
constr_features_dict = instance.get_constraint_features()
|
|
||||||
|
|
||||||
for (cidx, cname) in enumerate(features.constraints.names):
|
|
||||||
category: Optional[Hashable] = cname
|
|
||||||
if cname in constr_categories_dict:
|
|
||||||
category = constr_categories_dict[cname]
|
|
||||||
if category is None:
|
|
||||||
user_features.append(None)
|
|
||||||
categories.append(None)
|
|
||||||
continue
|
|
||||||
assert isinstance(category, collections.Hashable), (
|
|
||||||
f"Constraint category must be hashable. "
|
|
||||||
f"Found {type(category).__name__} instead for cname={cname}.",
|
|
||||||
)
|
|
||||||
categories.append(category)
|
|
||||||
cf: Optional[List[float]] = None
|
|
||||||
if cname in constr_features_dict:
|
|
||||||
cf = constr_features_dict[cname]
|
|
||||||
if isinstance(cf, np.ndarray):
|
|
||||||
cf = cf.tolist()
|
|
||||||
assert isinstance(cf, list), (
|
|
||||||
f"Constraint features must be a list. "
|
|
||||||
f"Found {type(cf).__name__} instead for cname={cname}."
|
|
||||||
)
|
|
||||||
for f in cf:
|
|
||||||
assert isinstance(f, numbers.Real), (
|
|
||||||
f"Constraint features must be a list of numbers. "
|
|
||||||
f"Found {type(f).__name__} instead for cname={cname}."
|
|
||||||
)
|
|
||||||
cf = list(cf)
|
|
||||||
user_features.append(cf)
|
|
||||||
if has_static_lazy:
|
|
||||||
lazy.append(instance.is_constraint_lazy(cname))
|
|
||||||
else:
|
|
||||||
lazy.append(False)
|
|
||||||
features.constraints.user_features = user_features
|
|
||||||
features.constraints.lazy = lazy
|
|
||||||
features.constraints.categories = categories
|
|
||||||
|
|
||||||
def _extract_user_features_instance(
|
|
||||||
self,
|
|
||||||
instance: "Instance",
|
|
||||||
features: Features,
|
|
||||||
) -> None:
|
|
||||||
user_features = instance.get_instance_features()
|
|
||||||
if isinstance(user_features, np.ndarray):
|
|
||||||
user_features = user_features.tolist()
|
|
||||||
assert isinstance(user_features, list), (
|
|
||||||
f"Instance features must be a list. "
|
|
||||||
f"Found {type(user_features).__name__} instead."
|
|
||||||
)
|
|
||||||
for v in user_features:
|
|
||||||
assert isinstance(v, numbers.Real), (
|
|
||||||
f"Instance features must be a list of numbers. "
|
|
||||||
f"Found {type(v).__name__} instead."
|
|
||||||
)
|
|
||||||
assert features.constraints is not None
|
|
||||||
assert features.constraints.lazy is not None
|
|
||||||
features.instance = InstanceFeatures(
|
|
||||||
user_features=user_features,
|
|
||||||
lazy_constraint_count=sum(features.constraints.lazy),
|
|
||||||
)
|
|
||||||
|
|
||||||
    def _extract_alvarez_2017(self, features: Features) -> None:
        """Compute, for each variable, the static branching features described in:

        Alvarez, A. M., Louveaux, Q., & Wehenkel, L. (2017). A machine
        learning-based approximation of strong branching. INFORMS Journal on
        Computing, 29(1), 185-195.

        The result is stored in ``features.variables.alvarez_2017``, one list of
        floats per variable. Feature numbers in the comments below follow the
        paper's numbering. Features are only emitted when the required inputs
        (objective coefficients, LP values, sensitivity data) are available.
        """
        assert features.variables is not None
        assert features.variables.names is not None

        obj_coeffs = features.variables.obj_coeffs
        obj_sa_down = features.variables.sa_obj_down
        obj_sa_up = features.variables.sa_obj_up
        values = features.variables.values

        # Sum of positive objective coefficients, and of magnitudes of negative
        # ones; used to normalize features 2 and 3.
        pos_obj_coeff_sum = 0.0
        neg_obj_coeff_sum = 0.0
        if obj_coeffs is not None:
            for coeff in obj_coeffs:
                if coeff > 0:
                    pos_obj_coeff_sum += coeff
                if coeff < 0:
                    neg_obj_coeff_sum += -coeff

        features.variables.alvarez_2017 = []
        for i in range(len(features.variables.names)):
            f: List[float] = []
            if obj_coeffs is not None:
                # Feature 1: sign of the objective coefficient.
                f.append(np.sign(obj_coeffs[i]))

                # Feature 2: |c_i| relative to the sum of positive coefficients.
                if pos_obj_coeff_sum > 0:
                    f.append(abs(obj_coeffs[i]) / pos_obj_coeff_sum)
                else:
                    f.append(0.0)

                # Feature 3: |c_i| relative to the sum of negative coefficients.
                if neg_obj_coeff_sum > 0:
                    f.append(abs(obj_coeffs[i]) / neg_obj_coeff_sum)
                else:
                    f.append(0.0)

            if values is not None:
                # Feature 37: fractionality of the LP value (distance to the
                # nearest integer).
                f.append(
                    min(
                        values[i] - np.floor(values[i]),
                        np.ceil(values[i]) - values[i],
                    )
                )

            if obj_sa_up is not None:
                assert obj_sa_down is not None
                assert obj_coeffs is not None

                # Convert inf into large finite numbers
                sd = max(-1e20, obj_sa_down[i])
                su = min(1e20, obj_sa_up[i])
                obj = obj_coeffs[i]

                # Features 44 and 46: signs of the objective sensitivity range.
                f.append(np.sign(obj_sa_up[i]))
                f.append(np.sign(obj_sa_down[i]))

                # Feature 47: log of the (sign-normalized) slack between the
                # coefficient and its lower sensitivity bound; 0 when the slack
                # is non-positive or too small for a meaningful logarithm.
                csign = np.sign(obj)
                if csign != 0 and ((obj - sd) / csign) > 0.001:
                    f.append(log((obj - sd) / csign))
                else:
                    f.append(0.0)

                # Feature 48: same as feature 47, for the upper sensitivity bound.
                if csign != 0 and ((su - obj) / csign) > 0.001:
                    f.append(log((su - obj) / csign))
                else:
                    f.append(0.0)

            # All emitted features must be finite; inf inputs were clamped above.
            for v in f:
                assert isfinite(v), f"non-finite elements detected: {f}"
            features.variables.alvarez_2017.append(f)
|
|
||||||
|
|
||||||
|
|
||||||
def _clip(v: List[float]) -> None:
|
|
||||||
for (i, vi) in enumerate(v):
|
|
||||||
if not isfinite(vi):
|
|
||||||
v[i] = max(min(vi, 1e20), -1e20)
|
|
@ -0,0 +1,3 @@
|
|||||||
|
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
|
||||||
|
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
|
||||||
|
# Released under the modified BSD license. See COPYING.md for more details.
|
@ -0,0 +1,432 @@
|
|||||||
|
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
|
||||||
|
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
|
||||||
|
# Released under the modified BSD license. See COPYING.md for more details.
|
||||||
|
|
||||||
|
from math import log, isfinite
|
||||||
|
from typing import TYPE_CHECKING, List, Tuple, Optional
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
from scipy.sparse import coo_matrix
|
||||||
|
|
||||||
|
from miplearn.features.sample import Sample
|
||||||
|
from miplearn.solvers.internal import LPSolveStats
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from miplearn.solvers.internal import InternalSolver
|
||||||
|
from miplearn.instance.base import Instance
|
||||||
|
|
||||||
|
|
||||||
|
# noinspection PyPep8Naming
class FeaturesExtractor:
    """Extracts feature matrices from a problem instance and solver state, and
    stores them in a :class:`Sample` under "static_*", "lp_*" and "mip_*" keys.
    """

    def __init__(
        self,
        with_sa: bool = True,
        with_lhs: bool = True,
    ) -> None:
        # Whether to query sensitivity-analysis data from the solver.
        self.with_sa = with_sa
        # Whether to query the constraint left-hand-side matrix.
        self.with_lhs = with_lhs
        # User-provided variable features, cached by extract_after_load_features
        # and reused by extract_after_lp_features.
        self.var_features_user: Optional[np.ndarray] = None
|
||||||
|
|
||||||
|
    def extract_after_load_features(
        self,
        instance: "Instance",
        solver: "InternalSolver",
        sample: Sample,
    ) -> None:
        """Extract static (model-definition) features and store them in `sample`.

        Stores raw variable/constraint data under "static_*" keys, then the
        validated user-provided instance, constraint and variable features.
        Also caches the user variable features on ``self.var_features_user``
        for later reuse by :meth:`extract_after_lp_features`.
        """
        variables = solver.get_variables(with_static=True)
        constraints = solver.get_constraints(with_static=True, with_lhs=self.with_lhs)
        assert constraints.names is not None
        sample.put_array("static_var_lower_bounds", variables.lower_bounds)
        sample.put_array("static_var_names", variables.names)
        sample.put_array("static_var_obj_coeffs", variables.obj_coeffs)
        sample.put_array("static_var_types", variables.types)
        sample.put_array("static_var_upper_bounds", variables.upper_bounds)
        sample.put_array("static_constr_names", constraints.names)
        # LHS is a sparse matrix (may be None when with_lhs is False).
        sample.put_sparse("static_constr_lhs", constraints.lhs)
        sample.put_array("static_constr_rhs", constraints.rhs)
        sample.put_array("static_constr_senses", constraints.senses)

        # Instance features
        self._extract_user_features_instance(instance, sample)

        # Constraint features
        (
            constr_features,
            constr_categories,
            constr_lazy,
        ) = FeaturesExtractor._extract_user_features_constrs(
            instance,
            constraints.names,
        )
        sample.put_array("static_constr_features", constr_features)
        sample.put_array("static_constr_categories", constr_categories)
        sample.put_array("static_constr_lazy", constr_lazy)
        sample.put_scalar("static_constr_lazy_count", int(constr_lazy.sum()))

        # Variable features. NOTE: _extract_user_features_vars reads
        # "static_var_names" from the sample, so it must run after the
        # put_array calls above.
        (
            vars_features_user,
            var_categories,
        ) = self._extract_user_features_vars(instance, sample)
        # Cache for extract_after_lp_features.
        self.var_features_user = vars_features_user
        sample.put_array("static_var_categories", var_categories)
        assert variables.lower_bounds is not None
        assert variables.obj_coeffs is not None
        assert variables.upper_bounds is not None
        # Static variable feature matrix: user features plus the
        # Alvarez-Louveaux-Wehenkel (2017) branching features.
        sample.put_array(
            "static_var_features",
            np.hstack(
                [
                    vars_features_user,
                    self._compute_AlvLouWeh2017(
                        A=constraints.lhs,
                        b=constraints.rhs,
                        c=variables.obj_coeffs,
                    ),
                ]
            ),
        )
|
||||||
|
|
||||||
|
    def extract_after_lp_features(
        self,
        solver: "InternalSolver",
        sample: Sample,
        lp_stats: LPSolveStats,
    ) -> None:
        """Extract LP-relaxation features and store them in `sample`.

        Requires :meth:`extract_after_load_features` to have been called first
        (reads "static_*" entries and ``self.var_features_user``). Stores raw
        LP data under "lp_var_*"/"lp_constr_*" keys and assembles the combined
        feature matrices "lp_var_features", "lp_constr_features" and the vector
        "lp_instance_features".
        """
        # Store every LP statistic (e.g. lp_value, lp_wallclock_time) as a scalar.
        for (k, v) in lp_stats.__dict__.items():
            sample.put_scalar(k, v)
        variables = solver.get_variables(with_static=False, with_sa=self.with_sa)
        constraints = solver.get_constraints(with_static=False, with_sa=self.with_sa)
        sample.put_array("lp_var_basis_status", variables.basis_status)
        sample.put_array("lp_var_reduced_costs", variables.reduced_costs)
        sample.put_array("lp_var_sa_lb_down", variables.sa_lb_down)
        sample.put_array("lp_var_sa_lb_up", variables.sa_lb_up)
        sample.put_array("lp_var_sa_obj_down", variables.sa_obj_down)
        sample.put_array("lp_var_sa_obj_up", variables.sa_obj_up)
        sample.put_array("lp_var_sa_ub_down", variables.sa_ub_down)
        sample.put_array("lp_var_sa_ub_up", variables.sa_ub_up)
        sample.put_array("lp_var_values", variables.values)
        sample.put_array("lp_constr_basis_status", constraints.basis_status)
        sample.put_array("lp_constr_dual_values", constraints.dual_values)
        sample.put_array("lp_constr_sa_rhs_down", constraints.sa_rhs_down)
        sample.put_array("lp_constr_sa_rhs_up", constraints.sa_rhs_up)
        sample.put_array("lp_constr_slacks", constraints.slacks)

        # Variable features: user features, AlvLouWeh2017 features (now with LP
        # information available), then each LP column as an extra feature column.
        # None entries (data not available) are skipped.
        lp_var_features_list = []
        for f in [
            self.var_features_user,
            self._compute_AlvLouWeh2017(
                A=sample.get_sparse("static_constr_lhs"),
                b=sample.get_array("static_constr_rhs"),
                c=sample.get_array("static_var_obj_coeffs"),
                c_sa_up=variables.sa_obj_up,
                c_sa_down=variables.sa_obj_down,
                values=variables.values,
            ),
        ]:
            if f is not None:
                lp_var_features_list.append(f)
        for f in [
            variables.reduced_costs,
            variables.sa_lb_down,
            variables.sa_lb_up,
            variables.sa_obj_down,
            variables.sa_obj_up,
            variables.sa_ub_down,
            variables.sa_ub_up,
            variables.values,
        ]:
            if f is not None:
                # Vectors become single-column matrices so hstack can combine them.
                lp_var_features_list.append(f.reshape(-1, 1))
        lp_var_features = np.hstack(lp_var_features_list)
        _fix_infinity(lp_var_features)
        sample.put_array("lp_var_features", lp_var_features)

        # Constraint features: static user features plus each LP column.
        lp_constr_features_list = []
        for f in [sample.get_array("static_constr_features")]:
            if f is not None:
                lp_constr_features_list.append(f)
        for f in [
            sample.get_array("lp_constr_dual_values"),
            sample.get_array("lp_constr_sa_rhs_down"),
            sample.get_array("lp_constr_sa_rhs_up"),
            sample.get_array("lp_constr_slacks"),
        ]:
            if f is not None:
                lp_constr_features_list.append(f.reshape(-1, 1))
        lp_constr_features = np.hstack(lp_constr_features_list)
        _fix_infinity(lp_constr_features)
        sample.put_array("lp_constr_features", lp_constr_features)

        # Build lp_instance_features: static instance features extended with
        # the LP objective value and wallclock time.
        static_instance_features = sample.get_array("static_instance_features")
        assert static_instance_features is not None
        assert lp_stats.lp_value is not None
        assert lp_stats.lp_wallclock_time is not None
        sample.put_array(
            "lp_instance_features",
            np.hstack(
                [
                    static_instance_features,
                    lp_stats.lp_value,
                    lp_stats.lp_wallclock_time,
                ]
            ),
        )
|
||||||
|
|
||||||
|
def extract_after_mip_features(
|
||||||
|
self,
|
||||||
|
solver: "InternalSolver",
|
||||||
|
sample: Sample,
|
||||||
|
) -> None:
|
||||||
|
variables = solver.get_variables(with_static=False, with_sa=False)
|
||||||
|
constraints = solver.get_constraints(with_static=False, with_sa=False)
|
||||||
|
sample.put_array("mip_var_values", variables.values)
|
||||||
|
sample.put_array("mip_constr_slacks", constraints.slacks)
|
||||||
|
|
||||||
|
# noinspection DuplicatedCode
|
||||||
|
def _extract_user_features_vars(
|
||||||
|
self,
|
||||||
|
instance: "Instance",
|
||||||
|
sample: Sample,
|
||||||
|
) -> Tuple[np.ndarray, np.ndarray]:
|
||||||
|
# Query variable names
|
||||||
|
var_names = sample.get_array("static_var_names")
|
||||||
|
assert var_names is not None
|
||||||
|
|
||||||
|
# Query variable features
|
||||||
|
var_features = instance.get_variable_features(var_names)
|
||||||
|
assert isinstance(var_features, np.ndarray), (
|
||||||
|
f"Variable features must be a numpy array. "
|
||||||
|
f"Found {var_features.__class__} instead."
|
||||||
|
)
|
||||||
|
assert len(var_features.shape) == 2, (
|
||||||
|
f"Variable features must be 2-dimensional array. "
|
||||||
|
f"Found array with shape {var_features.shape} instead."
|
||||||
|
)
|
||||||
|
assert var_features.shape[0] == len(var_names), (
|
||||||
|
f"Variable features must have exactly {len(var_names)} rows. "
|
||||||
|
f"Found {var_features.shape[0]} rows instead."
|
||||||
|
)
|
||||||
|
assert var_features.dtype.kind in ["f"], (
|
||||||
|
f"Variable features must be floating point numbers. "
|
||||||
|
f"Found {var_features.dtype} instead."
|
||||||
|
)
|
||||||
|
|
||||||
|
# Query variable categories
|
||||||
|
var_categories = instance.get_variable_categories(var_names)
|
||||||
|
assert isinstance(var_categories, np.ndarray), (
|
||||||
|
f"Variable categories must be a numpy array. "
|
||||||
|
f"Found {var_categories.__class__} instead."
|
||||||
|
)
|
||||||
|
assert len(var_categories.shape) == 1, (
|
||||||
|
f"Variable categories must be a vector. "
|
||||||
|
f"Found array with shape {var_categories.shape} instead."
|
||||||
|
)
|
||||||
|
assert len(var_categories) == len(var_names), (
|
||||||
|
f"Variable categories must have exactly {len(var_names)} elements. "
|
||||||
|
f"Found {var_categories.shape[0]} elements instead."
|
||||||
|
)
|
||||||
|
assert var_categories.dtype.kind == "S", (
|
||||||
|
f"Variable categories must be a numpy array with dtype='S'. "
|
||||||
|
f"Found {var_categories.dtype} instead."
|
||||||
|
)
|
||||||
|
return var_features, var_categories
|
||||||
|
|
||||||
|
# noinspection DuplicatedCode
|
||||||
|
@classmethod
|
||||||
|
def _extract_user_features_constrs(
|
||||||
|
cls,
|
||||||
|
instance: "Instance",
|
||||||
|
constr_names: np.ndarray,
|
||||||
|
) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
|
||||||
|
# Query constraint features
|
||||||
|
constr_features = instance.get_constraint_features(constr_names)
|
||||||
|
assert isinstance(constr_features, np.ndarray), (
|
||||||
|
f"get_constraint_features must return a numpy array. "
|
||||||
|
f"Found {constr_features.__class__} instead."
|
||||||
|
)
|
||||||
|
assert len(constr_features.shape) == 2, (
|
||||||
|
f"get_constraint_features must return a 2-dimensional array. "
|
||||||
|
f"Found array with shape {constr_features.shape} instead."
|
||||||
|
)
|
||||||
|
assert constr_features.shape[0] == len(constr_names), (
|
||||||
|
f"get_constraint_features must return an array with {len(constr_names)} "
|
||||||
|
f"rows. Found {constr_features.shape[0]} rows instead."
|
||||||
|
)
|
||||||
|
assert constr_features.dtype.kind in ["f"], (
|
||||||
|
f"get_constraint_features must return floating point numbers. "
|
||||||
|
f"Found {constr_features.dtype} instead."
|
||||||
|
)
|
||||||
|
|
||||||
|
# Query constraint categories
|
||||||
|
constr_categories = instance.get_constraint_categories(constr_names)
|
||||||
|
assert isinstance(constr_categories, np.ndarray), (
|
||||||
|
f"get_constraint_categories must return a numpy array. "
|
||||||
|
f"Found {constr_categories.__class__} instead."
|
||||||
|
)
|
||||||
|
assert len(constr_categories.shape) == 1, (
|
||||||
|
f"get_constraint_categories must return a vector. "
|
||||||
|
f"Found array with shape {constr_categories.shape} instead."
|
||||||
|
)
|
||||||
|
assert len(constr_categories) == len(constr_names), (
|
||||||
|
f"get_constraint_categories must return a vector with {len(constr_names)} "
|
||||||
|
f"elements. Found {constr_categories.shape[0]} elements instead."
|
||||||
|
)
|
||||||
|
assert constr_categories.dtype.kind == "S", (
|
||||||
|
f"get_constraint_categories must return a numpy array with dtype='S'. "
|
||||||
|
f"Found {constr_categories.dtype} instead."
|
||||||
|
)
|
||||||
|
|
||||||
|
# Query constraint lazy attribute
|
||||||
|
constr_lazy = instance.are_constraints_lazy(constr_names)
|
||||||
|
assert isinstance(constr_lazy, np.ndarray), (
|
||||||
|
f"are_constraints_lazy must return a numpy array. "
|
||||||
|
f"Found {constr_lazy.__class__} instead."
|
||||||
|
)
|
||||||
|
assert len(constr_lazy.shape) == 1, (
|
||||||
|
f"are_constraints_lazy must return a vector. "
|
||||||
|
f"Found array with shape {constr_lazy.shape} instead."
|
||||||
|
)
|
||||||
|
assert constr_lazy.shape[0] == len(constr_names), (
|
||||||
|
f"are_constraints_lazy must return a vector with {len(constr_names)} "
|
||||||
|
f"elements. Found {constr_lazy.shape[0]} elements instead."
|
||||||
|
)
|
||||||
|
assert constr_lazy.dtype.kind == "b", (
|
||||||
|
f"are_constraints_lazy must return a boolean array. "
|
||||||
|
f"Found {constr_lazy.dtype} instead."
|
||||||
|
)
|
||||||
|
|
||||||
|
return constr_features, constr_categories, constr_lazy
|
||||||
|
|
||||||
|
def _extract_user_features_instance(
|
||||||
|
self,
|
||||||
|
instance: "Instance",
|
||||||
|
sample: Sample,
|
||||||
|
) -> None:
|
||||||
|
features = instance.get_instance_features()
|
||||||
|
assert isinstance(features, np.ndarray), (
|
||||||
|
f"Instance features must be a numpy array. "
|
||||||
|
f"Found {features.__class__} instead."
|
||||||
|
)
|
||||||
|
assert len(features.shape) == 1, (
|
||||||
|
f"Instance features must be a vector. "
|
||||||
|
f"Found array with shape {features.shape} instead."
|
||||||
|
)
|
||||||
|
assert features.dtype.kind in [
|
||||||
|
"f"
|
||||||
|
], f"Instance features have unsupported {features.dtype}"
|
||||||
|
sample.put_array("static_instance_features", features)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _compute_AlvLouWeh2017(
|
||||||
|
cls,
|
||||||
|
A: Optional[coo_matrix] = None,
|
||||||
|
b: Optional[np.ndarray] = None,
|
||||||
|
c: Optional[np.ndarray] = None,
|
||||||
|
c_sa_down: Optional[np.ndarray] = None,
|
||||||
|
c_sa_up: Optional[np.ndarray] = None,
|
||||||
|
values: Optional[np.ndarray] = None,
|
||||||
|
) -> np.ndarray:
|
||||||
|
"""
|
||||||
|
Computes static variable features described in:
|
||||||
|
Alvarez, A. M., Louveaux, Q., & Wehenkel, L. (2017). A machine learning-based
|
||||||
|
approximation of strong branching. INFORMS Journal on Computing, 29(1),
|
||||||
|
185-195.
|
||||||
|
"""
|
||||||
|
assert b is not None
|
||||||
|
assert c is not None
|
||||||
|
nvars = len(c)
|
||||||
|
|
||||||
|
c_pos_sum = c[c > 0].sum()
|
||||||
|
c_neg_sum = -c[c < 0].sum()
|
||||||
|
|
||||||
|
curr = 0
|
||||||
|
max_n_features = 30
|
||||||
|
features = np.zeros((nvars, max_n_features))
|
||||||
|
|
||||||
|
def push(v: np.ndarray) -> None:
|
||||||
|
nonlocal curr
|
||||||
|
features[:, curr] = v
|
||||||
|
curr += 1
|
||||||
|
|
||||||
|
with np.errstate(divide="ignore", invalid="ignore"):
|
||||||
|
# Feature 1
|
||||||
|
push(np.sign(c))
|
||||||
|
|
||||||
|
# Feature 2
|
||||||
|
push(np.abs(c) / c_pos_sum)
|
||||||
|
|
||||||
|
# Feature 3
|
||||||
|
push(np.abs(c) / c_neg_sum)
|
||||||
|
|
||||||
|
if A is not None:
|
||||||
|
assert A.shape[1] == nvars
|
||||||
|
assert A.shape[0] == len(b)
|
||||||
|
|
||||||
|
M1 = A.T.multiply(1.0 / np.abs(b)).T.tocsr()
|
||||||
|
M1_pos = M1[b > 0, :]
|
||||||
|
if M1_pos.shape[0] > 0:
|
||||||
|
M1_pos_max = M1_pos.max(axis=0).todense()
|
||||||
|
M1_pos_min = M1_pos.min(axis=0).todense()
|
||||||
|
else:
|
||||||
|
M1_pos_max = np.zeros(nvars)
|
||||||
|
M1_pos_min = np.zeros(nvars)
|
||||||
|
M1_neg = M1[b < 0, :]
|
||||||
|
if M1_neg.shape[0] > 0:
|
||||||
|
M1_neg_max = M1_neg.max(axis=0).todense()
|
||||||
|
M1_neg_min = M1_neg.min(axis=0).todense()
|
||||||
|
else:
|
||||||
|
M1_neg_max = np.zeros(nvars)
|
||||||
|
M1_neg_min = np.zeros(nvars)
|
||||||
|
|
||||||
|
# Features 4-11
|
||||||
|
push(np.sign(M1_pos_min))
|
||||||
|
push(np.sign(M1_pos_max))
|
||||||
|
push(np.abs(M1_pos_min))
|
||||||
|
push(np.abs(M1_pos_max))
|
||||||
|
push(np.sign(M1_neg_min))
|
||||||
|
push(np.sign(M1_neg_max))
|
||||||
|
push(np.abs(M1_neg_min))
|
||||||
|
push(np.abs(M1_neg_max))
|
||||||
|
|
||||||
|
# Feature 37
|
||||||
|
if values is not None:
|
||||||
|
push(
|
||||||
|
np.minimum(
|
||||||
|
values - np.floor(values),
|
||||||
|
np.ceil(values) - values,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Feature 44
|
||||||
|
if c_sa_up is not None:
|
||||||
|
push(np.sign(c_sa_up))
|
||||||
|
|
||||||
|
# Feature 46
|
||||||
|
if c_sa_down is not None:
|
||||||
|
push(np.sign(c_sa_down))
|
||||||
|
|
||||||
|
# Feature 47
|
||||||
|
if c_sa_down is not None:
|
||||||
|
push(np.log(c - c_sa_down / np.sign(c)))
|
||||||
|
|
||||||
|
# Feature 48
|
||||||
|
if c_sa_up is not None:
|
||||||
|
push(np.log(c - c_sa_up / np.sign(c)))
|
||||||
|
|
||||||
|
features = features[:, 0:curr]
|
||||||
|
_fix_infinity(features)
|
||||||
|
return features
|
||||||
|
|
||||||
|
|
||||||
|
def _fix_infinity(m: Optional[np.ndarray]) -> None:
|
||||||
|
if m is None:
|
||||||
|
return
|
||||||
|
masked = np.ma.masked_invalid(m)
|
||||||
|
max_values = np.max(masked, axis=0)
|
||||||
|
min_values = np.min(masked, axis=0)
|
||||||
|
m[:] = np.maximum(np.minimum(m, max_values), min_values)
|
||||||
|
m[~np.isfinite(m)] = 0.0
|
@ -0,0 +1,226 @@
|
|||||||
|
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
|
||||||
|
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
|
||||||
|
# Released under the modified BSD license. See COPYING.md for more details.
|
||||||
|
import warnings
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from copy import deepcopy
|
||||||
|
from typing import Dict, Optional, Any, Union, List, Tuple, cast, Set
|
||||||
|
from scipy.sparse import coo_matrix
|
||||||
|
|
||||||
|
import h5py
|
||||||
|
import numpy as np
|
||||||
|
from h5py import Dataset
|
||||||
|
from overrides import overrides
|
||||||
|
|
||||||
|
# Raw byte payloads accepted by Hdf5Sample.put_bytes / get_bytes.
Bytes = Union[bytes, bytearray]
# Single values accepted by Sample.put_scalar.
Scalar = Union[None, bool, str, int, float]
# One-dimensional collections of homogeneous scalars.
# NOTE(review): Vector/VectorList are not referenced by the code visible in
# this file — presumably used by other modules; confirm before removing.
Vector = Union[
    None,
    List[bool],
    List[str],
    List[int],
    List[float],
    List[Optional[str]],
    np.ndarray,
]
# Two-dimensional collections, where inner rows may be missing (None).
VectorList = Union[
    List[List[bool]],
    List[List[str]],
    List[List[int]],
    List[List[float]],
    List[Optional[List[bool]]],
    List[Optional[List[str]]],
    List[Optional[List[int]]],
    List[Optional[List[float]]],
]
|
||||||
|
|
||||||
|
|
||||||
|
class Sample(ABC):
    """Abstract dictionary-like class that stores training data.

    Concrete backends (in-memory, HDF5) implement put/get pairs for three
    kinds of payload: scalars, numpy arrays, and scipy COO sparse matrices.
    """

    @abstractmethod
    def get_scalar(self, key: str) -> Optional[Any]:
        pass

    @abstractmethod
    def put_scalar(self, key: str, value: Scalar) -> None:
        pass

    @abstractmethod
    def put_array(self, key: str, value: Optional[np.ndarray]) -> None:
        pass

    @abstractmethod
    def get_array(self, key: str) -> Optional[np.ndarray]:
        pass

    @abstractmethod
    def put_sparse(self, key: str, value: coo_matrix) -> None:
        pass

    @abstractmethod
    def get_sparse(self, key: str) -> Optional[coo_matrix]:
        pass

    def _assert_is_scalar(self, value: Any) -> None:
        # None is accepted so callers can silently skip missing entries.
        accepted = (str, bool, int, float, bytes, np.bytes_)
        if value is not None and not isinstance(value, accepted):
            assert False, f"scalar expected; found instead: {value} ({value.__class__})"

    def _assert_is_array(self, value: np.ndarray) -> None:
        is_ndarray = isinstance(value, np.ndarray)
        assert is_ndarray, f"np.ndarray expected; found instead: {value.__class__}"
        # bool, int, unsigned, float and fixed-width bytes dtypes only.
        assert value.dtype.kind in "biufS", f"Unsupported dtype: {value.dtype}"

    def _assert_is_sparse(self, value: Any) -> None:
        is_coo = isinstance(value, coo_matrix)
        assert is_coo, f"coo_matrix expected; found: {value.__class__}"
        # The matrix payload must itself be a supported ndarray.
        self._assert_is_array(value.data)
|
||||||
|
|
||||||
|
|
||||||
|
class MemorySample(Sample):
    """Dictionary-like class that stores training data in-memory."""

    def __init__(
        self,
        data: Optional[Dict[str, Any]] = None,
    ) -> None:
        # Adopt the caller-provided dictionary when given; otherwise start empty.
        self._data: Dict[str, Any] = {} if data is None else data

    def _get(self, key: str) -> Optional[Any]:
        # Missing keys map to None rather than raising KeyError.
        return self._data.get(key)

    def _put(self, key: str, value: Any) -> None:
        self._data[key] = value

    @overrides
    def get_scalar(self, key: str) -> Optional[Any]:
        return self._get(key)

    @overrides
    def put_scalar(self, key: str, value: Scalar) -> None:
        if value is None:
            return
        self._assert_is_scalar(value)
        self._put(key, value)

    @overrides
    def put_array(self, key: str, value: Optional[np.ndarray]) -> None:
        if value is None:
            return
        self._assert_is_array(value)
        self._put(key, value)

    @overrides
    def get_array(self, key: str) -> Optional[np.ndarray]:
        return cast(Optional[np.ndarray], self._get(key))

    @overrides
    def put_sparse(self, key: str, value: coo_matrix) -> None:
        if value is None:
            return
        self._assert_is_sparse(value)
        self._put(key, value)

    @overrides
    def get_sparse(self, key: str) -> Optional[coo_matrix]:
        return cast(Optional[coo_matrix], self._get(key))
|
||||||
|
|
||||||
|
|
||||||
|
class Hdf5Sample(Sample):
    """
    Dictionary-like class that stores training data in an HDF5 file.

    Unlike MemorySample, this class only loads to memory the parts of the data set that
    are actually accessed, and therefore it is more scalable.
    """

    def __init__(
        self,
        filename: str,
        mode: str = "r+",
    ) -> None:
        # libver="latest" opts into the newest HDF5 file format features.
        self.file = h5py.File(filename, mode, libver="latest")

    @overrides
    def get_scalar(self, key: str) -> Optional[Any]:
        """Return the scalar stored under `key`, or None when absent."""
        if key not in self.file:
            return None
        ds = self.file[key]
        assert (
            len(ds.shape) == 0
        ), f"0-dimensional array expected; found shape {ds.shape}"
        if h5py.check_string_dtype(ds.dtype):
            # Decode HDF5 string storage back into a Python str.
            return ds.asstr()[()]
        else:
            # tolist() converts the 0-d numpy scalar into a native Python scalar.
            return ds[()].tolist()

    @overrides
    def put_scalar(self, key: str, value: Any) -> None:
        """Store a scalar under `key`, replacing any previous value."""
        if value is None:
            return
        self._assert_is_scalar(value)
        if key in self.file:
            del self.file[key]
        self.file.create_dataset(key, data=value)

    @overrides
    def put_array(self, key: str, value: Optional[np.ndarray]) -> None:
        """Store an array (gzip-compressed) under `key`, replacing any previous value."""
        if value is None:
            return
        self._assert_is_array(value)
        if value.dtype.kind == "f":
            # Store floats in single precision to halve the file size.
            value = value.astype("float32")
        if key in self.file:
            del self.file[key]
        # Bug fix: previously returned the h5py.Dataset handle, contradicting the
        # declared `-> None` and the behavior of MemorySample.put_array.
        self.file.create_dataset(key, data=value, compression="gzip")

    @overrides
    def get_array(self, key: str) -> Optional[np.ndarray]:
        """Return the full array stored under `key`, or None when absent."""
        if key not in self.file:
            return None
        return self.file[key][:]

    @overrides
    def put_sparse(self, key: str, value: coo_matrix) -> None:
        """Store a COO matrix as three parallel arrays: row, col and data."""
        if value is None:
            return
        self._assert_is_sparse(value)
        self.put_array(f"{key}_row", value.row)
        self.put_array(f"{key}_col", value.col)
        self.put_array(f"{key}_data", value.data)

    @overrides
    def get_sparse(self, key: str) -> Optional[coo_matrix]:
        """Reassemble the COO matrix stored by put_sparse, or None when absent."""
        row = self.get_array(f"{key}_row")
        if row is None:
            return None
        col = self.get_array(f"{key}_col")
        data = self.get_array(f"{key}_data")
        assert col is not None
        assert data is not None
        return coo_matrix((data, (row, col)))

    def get_bytes(self, key: str) -> Optional[Bytes]:
        """Return the raw bytes stored under `key`, or None when absent."""
        if key not in self.file:
            return None
        ds = self.file[key]
        assert (
            len(ds.shape) == 1
        ), f"1-dimensional array expected; found shape {ds.shape}"
        return ds[()].tobytes()

    def put_bytes(self, key: str, value: Bytes) -> None:
        """Store raw bytes under `key` as a uint8 array."""
        assert isinstance(
            value, (bytes, bytearray)
        ), f"bytes expected; found: {value.__class__}"  # type: ignore
        self.put_array(key, np.frombuffer(value, dtype="uint8"))
|
@ -0,0 +1,131 @@
|
|||||||
|
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
|
||||||
|
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
|
||||||
|
# Released under the modified BSD license. See COPYING.md for more details.
|
||||||
|
import gc
|
||||||
|
import os
|
||||||
|
from typing import Any, Optional, List, Dict, TYPE_CHECKING
|
||||||
|
import pickle
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
from overrides import overrides
|
||||||
|
|
||||||
|
from miplearn.features.sample import Hdf5Sample, Sample
|
||||||
|
from miplearn.instance.base import Instance
|
||||||
|
from miplearn.types import ConstraintName, ConstraintCategory
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from miplearn.solvers.learning import InternalSolver
|
||||||
|
|
||||||
|
|
||||||
|
class FileInstance(Instance):
    """Instance wrapper whose pickled model and training data live in an HDF5 file.

    The wrapped instance is deserialized lazily by `load`; all Instance methods
    are then delegated to it. Training data written through `create_sample`
    goes directly to the same HDF5 file.
    """

    def __init__(self, filename: str) -> None:
        super().__init__()
        # Bug fix: the message was an f-string with no placeholder, so the
        # offending path was never shown; interpolate the actual filename.
        assert os.path.exists(filename), f"File not found: {filename}"
        self.h5 = Hdf5Sample(filename)
        # Populated by load(); None while the pickled instance is not in memory.
        self.instance: Optional[Instance] = None

    # Delegation
    # -------------------------------------------------------------------------
    @overrides
    def to_model(self) -> Any:
        assert self.instance is not None
        return self.instance.to_model()

    @overrides
    def get_instance_features(self) -> np.ndarray:
        assert self.instance is not None
        return self.instance.get_instance_features()

    @overrides
    def get_variable_features(self, names: np.ndarray) -> np.ndarray:
        assert self.instance is not None
        return self.instance.get_variable_features(names)

    @overrides
    def get_variable_categories(self, names: np.ndarray) -> np.ndarray:
        assert self.instance is not None
        return self.instance.get_variable_categories(names)

    @overrides
    def get_constraint_features(self, names: np.ndarray) -> np.ndarray:
        assert self.instance is not None
        return self.instance.get_constraint_features(names)

    @overrides
    def get_constraint_categories(self, names: np.ndarray) -> np.ndarray:
        assert self.instance is not None
        return self.instance.get_constraint_categories(names)

    @overrides
    def has_dynamic_lazy_constraints(self) -> bool:
        assert self.instance is not None
        return self.instance.has_dynamic_lazy_constraints()

    @overrides
    def are_constraints_lazy(self, names: np.ndarray) -> np.ndarray:
        assert self.instance is not None
        return self.instance.are_constraints_lazy(names)

    @overrides
    def find_violated_lazy_constraints(
        self,
        solver: "InternalSolver",
        model: Any,
    ) -> List[ConstraintName]:
        assert self.instance is not None
        return self.instance.find_violated_lazy_constraints(solver, model)

    @overrides
    def enforce_lazy_constraint(
        self,
        solver: "InternalSolver",
        model: Any,
        violation: ConstraintName,
    ) -> None:
        assert self.instance is not None
        self.instance.enforce_lazy_constraint(solver, model, violation)

    @overrides
    def find_violated_user_cuts(self, model: Any) -> List[ConstraintName]:
        assert self.instance is not None
        return self.instance.find_violated_user_cuts(model)

    @overrides
    def enforce_user_cut(
        self,
        solver: "InternalSolver",
        model: Any,
        violation: ConstraintName,
    ) -> None:
        assert self.instance is not None
        self.instance.enforce_user_cut(solver, model, violation)

    # Input & Output
    # -------------------------------------------------------------------------
    @overrides
    def free(self) -> None:
        """Drop the in-memory instance and reclaim its memory."""
        self.instance = None
        gc.collect()

    @overrides
    def load(self) -> None:
        """Deserialize the pickled instance from the HDF5 file (idempotent)."""
        if self.instance is not None:
            return
        pkl = self.h5.get_bytes("pickled")
        assert pkl is not None
        # NOTE(review): pickle.loads is only safe on trusted files; these files
        # are produced by FileInstance.save, so this is acceptable here.
        self.instance = pickle.loads(pkl)
        assert isinstance(self.instance, Instance)

    @classmethod
    def save(cls, instance: Instance, filename: str) -> None:
        """Pickle `instance` and write it into a fresh HDF5 file at `filename`."""
        h5 = Hdf5Sample(filename, mode="w")
        instance_pkl = pickle.dumps(instance)
        h5.put_bytes("pickled", instance_pkl)

    @overrides
    def create_sample(self) -> Sample:
        # All samples share the single backing HDF5 file.
        return self.h5

    @overrides
    def get_samples(self) -> List[Sample]:
        return [self.h5]
|
@ -0,0 +1,456 @@
|
|||||||
|
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
|
||||||
|
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
|
||||||
|
# Released under the modified BSD license. See COPYING.md for more details.
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
import gurobipy as gp
|
||||||
|
from scipy.sparse import coo_matrix
|
||||||
|
|
||||||
|
from miplearn.features.extractor import FeaturesExtractor
|
||||||
|
from miplearn.features.sample import Hdf5Sample, MemorySample
|
||||||
|
from miplearn.instance.base import Instance
|
||||||
|
from miplearn.solvers.gurobi import GurobiSolver
|
||||||
|
from miplearn.solvers.internal import Variables, Constraints
|
||||||
|
from miplearn.solvers.tests import assert_equals
|
||||||
|
import cProfile
|
||||||
|
|
||||||
|
inf = float("inf")
|
||||||
|
|
||||||
|
|
||||||
|
def test_knapsack() -> None:
    """End-to-end check of FeaturesExtractor on the toy knapsack instance.

    Verifies the exact values written into the sample at each of the three
    extraction stages: after loading the model, after solving the LP
    relaxation, and after solving the MIP. Expected numbers below were
    produced by Gurobi on this fixed instance.
    """
    solver = GurobiSolver()
    instance = solver.build_test_instance_knapsack()
    model = instance.to_model()
    solver.set_instance(instance, model)
    extractor = FeaturesExtractor()
    sample = MemorySample()

    # after-load
    # -------------------------------------------------------
    extractor.extract_after_load_features(instance, solver, sample)
    assert_equals(
        sample.get_array("static_instance_features"),
        np.array([67.0, 21.75]),
    )
    assert_equals(
        sample.get_array("static_var_names"),
        np.array(["x[0]", "x[1]", "x[2]", "x[3]", "z"], dtype="S"),
    )
    assert_equals(
        sample.get_array("static_var_lower_bounds"),
        np.array([0.0, 0.0, 0.0, 0.0, 0.0]),
    )
    assert_equals(
        sample.get_array("static_var_obj_coeffs"),
        np.array([505.0, 352.0, 458.0, 220.0, 0.0]),
    )
    assert_equals(
        sample.get_array("static_var_types"),
        np.array(["B", "B", "B", "B", "C"], dtype="S"),
    )
    assert_equals(
        sample.get_array("static_var_upper_bounds"),
        np.array([1.0, 1.0, 1.0, 1.0, 67.0]),
    )
    # The continuous variable "z" has no category (empty string).
    assert_equals(
        sample.get_array("static_var_categories"),
        np.array(["default", "default", "default", "default", ""], dtype="S"),
    )
    # One row per variable; the last row (z) has no user features.
    assert_equals(
        sample.get_array("static_var_features"),
        np.array(
            [
                [23.0, 505.0, 1.0, 0.32899, 1e20,
                 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                [26.0, 352.0, 1.0, 0.229316, 1e20,
                 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                [20.0, 458.0, 1.0, 0.298371, 1e20,
                 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                [18.0, 220.0, 1.0, 0.143322, 1e20,
                 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
            ]
        ),
    )
    assert_equals(
        sample.get_array("static_constr_names"),
        np.array(["eq_capacity"], dtype="S"),
    )
    assert_equals(
        sample.get_sparse("static_constr_lhs"),
        [[23.0, 26.0, 20.0, 18.0, -1.0]],
    )
    assert_equals(
        sample.get_array("static_constr_rhs"),
        np.array([0.0]),
    )
    assert_equals(
        sample.get_array("static_constr_senses"),
        np.array(["="], dtype="S"),
    )
    assert_equals(
        sample.get_array("static_constr_features"),
        np.array([[0.0]]),
    )
    assert_equals(
        sample.get_array("static_constr_categories"),
        np.array(["eq_capacity"], dtype="S"),
    )
    assert_equals(
        sample.get_array("static_constr_lazy"),
        np.array([False]),
    )
    assert_equals(
        sample.get_array("static_instance_features"),
        np.array([67.0, 21.75]),
    )
    assert_equals(sample.get_scalar("static_constr_lazy_count"), 0)

    # after-lp
    # -------------------------------------------------------
    lp_stats = solver.solve_lp()
    extractor.extract_after_lp_features(solver, sample, lp_stats)
    assert_equals(
        sample.get_array("lp_var_basis_status"),
        np.array(["U", "B", "U", "L", "U"], dtype="S"),
    )
    assert_equals(
        sample.get_array("lp_var_reduced_costs"),
        [193.615385, 0.0, 187.230769, -23.692308, 13.538462],
    )
    assert_equals(
        sample.get_array("lp_var_sa_lb_down"),
        [-inf, -inf, -inf, -0.111111, -inf],
    )
    assert_equals(
        sample.get_array("lp_var_sa_lb_up"),
        [1.0, 0.923077, 1.0, 1.0, 67.0],
    )
    assert_equals(
        sample.get_array("lp_var_sa_obj_down"),
        [311.384615, 317.777778, 270.769231, -inf, -13.538462],
    )
    assert_equals(
        sample.get_array("lp_var_sa_obj_up"),
        [inf, 570.869565, inf, 243.692308, inf],
    )
    assert_equals(
        sample.get_array("lp_var_sa_ub_down"),
        np.array([0.913043, 0.923077, 0.9, 0.0, 43.0]),
    )
    assert_equals(
        sample.get_array("lp_var_sa_ub_up"),
        np.array([2.043478, inf, 2.2, inf, 69.0]),
    )
    assert_equals(
        sample.get_array("lp_var_values"),
        np.array([1.0, 0.923077, 1.0, 0.0, 67.0]),
    )
    # Static features (13 columns) followed by LP-derived features per variable.
    assert_equals(
        sample.get_array("lp_var_features"),
        np.array(
            [
                [23.0, 505.0, 1.0, 0.32899, 1e20,
                 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                 1.0, 1.0, 5.265874, 0.0, 193.615385, -0.111111,
                 1.0, 311.384615, 570.869565, 0.913043, 2.043478, 1.0],
                [26.0, 352.0, 1.0, 0.229316, 1e20,
                 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.076923,
                 1.0, 1.0, 3.532875, 0.0, 0.0, -0.111111,
                 0.923077, 317.777778, 570.869565, 0.923077, 69.0, 0.923077],
                [20.0, 458.0, 1.0, 0.298371, 1e20,
                 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                 1.0, 1.0, 5.232342, 0.0, 187.230769, -0.111111,
                 1.0, 270.769231, 570.869565, 0.9, 2.2, 1.0],
                [18.0, 220.0, 1.0, 0.143322, 1e20,
                 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                 1.0, -1.0, 5.265874, 0.0, -23.692308, -0.111111,
                 1.0, -13.538462, 243.692308, 0.0, 69.0, 0.0],
                [0.0, 0.0, 0.0, 0.0, 0.0,
                 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                 1.0, -1.0, 5.265874, 0.0, 13.538462, -0.111111,
                 67.0, -13.538462, 570.869565, 43.0, 69.0, 67.0],
            ]
        ),
    )
    assert_equals(
        sample.get_array("lp_constr_basis_status"),
        np.array(["N"], dtype="S"),
    )
    assert_equals(
        sample.get_array("lp_constr_dual_values"),
        np.array([13.538462]),
    )
    assert_equals(
        sample.get_array("lp_constr_sa_rhs_down"),
        np.array([-24.0]),
    )
    assert_equals(
        sample.get_array("lp_constr_sa_rhs_up"),
        np.array([2.0]),
    )
    assert_equals(
        sample.get_array("lp_constr_slacks"),
        np.array([0.0]),
    )
    assert_equals(
        sample.get_array("lp_constr_features"),
        np.array([[0.0, 13.538462, -24.0, 2.0, 0.0]]),
    )

    # after-mip
    # -------------------------------------------------------
    solver.solve()
    extractor.extract_after_mip_features(solver, sample)
    assert_equals(
        sample.get_array("mip_var_values"), np.array([1.0, 0.0, 1.0, 1.0, 61.0])
    )
    assert_equals(sample.get_array("mip_constr_slacks"), np.array([0.0]))
|
||||||
|
|
||||||
|
|
||||||
|
def test_constraint_getindex() -> None:
    """Boolean indexing on Constraints must filter every field row-wise."""
    original = Constraints(
        names=np.array(["c1", "c2", "c3"], dtype="S"),
        rhs=np.array([1.0, 2.0, 3.0]),
        senses=np.array(["=", "<", ">"], dtype="S"),
        lhs=coo_matrix([[1, 2, 3], [4, 5, 6], [7, 8, 9]]),
    )
    # Keep the first and third constraints only.
    expected = Constraints(
        names=np.array(["c1", "c3"], dtype="S"),
        rhs=np.array([1.0, 3.0]),
        senses=np.array(["=", ">"], dtype="S"),
        lhs=coo_matrix([[1, 2, 3], [7, 8, 9]]),
    )
    assert_equals(original[[True, False, True]], expected)
|
||||||
|
|
||||||
|
|
||||||
|
def test_assert_equals() -> None:
    """Sanity-check the assert_equals helper across all supported types."""
    # Plain scalars and sequences.
    assert_equals("hello", "hello")
    assert_equals([1.0, 2.0], [1.0, 2.0])
    # 1-D and 2-D numpy arrays are compared element-wise.
    assert_equals(np.array([1.0, 2.0]), np.array([1.0, 2.0]))
    assert_equals(
        np.array([[1.0, 2.0], [3.0, 4.0]]),
        np.array([[1.0, 2.0], [3.0, 4.0]]),
    )
    # Project dataclass-like objects — presumably compared field by field;
    # verify against the assert_equals implementation in solvers.tests.
    assert_equals(
        Variables(values=np.array([1.0, 2.0])),  # type: ignore
        Variables(values=np.array([1.0, 2.0])),  # type: ignore
    )
    # Mixed numpy/list comparison, plus tuples and dicts.
    assert_equals(np.array([True, True]), [True, True])
    assert_equals((1.0,), (1.0,))
    assert_equals({"x": 10}, {"x": 10})
|
||||||
|
|
||||||
|
|
||||||
|
class MpsInstance(Instance):
    """Minimal Instance implementation backed by an MPS file on disk."""

    def __init__(self, filename: str) -> None:
        super().__init__()
        self.filename = filename

    def to_model(self) -> Any:
        # Gurobi parses the MPS file and returns the corresponding model object.
        return gp.read(self.filename)
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
    """Profiling driver: extract features for the MPS instance named on argv[1].

    Writes the extracted features to tmp/prof.h5. Intended to be run under
    cProfile (see the __main__ guard below), not as part of the test suite.
    """
    solver = GurobiSolver()
    instance = MpsInstance(sys.argv[1])
    solver.set_instance(instance)
    # LHS extraction disabled to keep the profile focused on the other features.
    extractor = FeaturesExtractor(with_lhs=False)
    sample = Hdf5Sample("tmp/prof.h5", mode="w")
    extractor.extract_after_load_features(instance, solver, sample)
    lp_stats = solver.solve_lp(tee=True)
    extractor.extract_after_lp_features(solver, sample, lp_stats)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Profile the extraction pipeline, then render a flame graph from the
    # profile data (requires the external `flameprof` tool on PATH).
    cProfile.run("main()", filename="tmp/prof")
    os.system("flameprof tmp/prof > tmp/prof.svg")
|
@ -0,0 +1,71 @@
|
|||||||
|
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
|
||||||
|
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
|
||||||
|
# Released under the modified BSD license. See COPYING.md for more details.
|
||||||
|
from tempfile import NamedTemporaryFile
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
from scipy.sparse import coo_matrix
|
||||||
|
|
||||||
|
from miplearn.features.sample import MemorySample, Sample, Hdf5Sample
|
||||||
|
|
||||||
|
|
||||||
|
def test_memory_sample() -> None:
    """Run the shared round-trip suite against the in-memory backend."""
    _test_sample(MemorySample())
|
||||||
|
|
||||||
|
|
||||||
|
def test_hdf5_sample() -> None:
    """Run the shared round-trip suite against the HDF5 backend."""
    # NOTE(review): reopening a NamedTemporaryFile by name while it is still
    # open may fail on Windows — confirm this test only targets POSIX.
    file = NamedTemporaryFile()
    _test_sample(Hdf5Sample(file.name))
|
||||||
|
|
||||||
|
|
||||||
|
def _test_sample(sample: Sample) -> None:
    """Exercise the full put/get round-trip API of a Sample implementation."""
    # Scalars of every supported type; unknown keys read back as None.
    _assert_roundtrip_scalar(sample, "A")
    _assert_roundtrip_scalar(sample, True)
    _assert_roundtrip_scalar(sample, 1)
    _assert_roundtrip_scalar(sample, 1.0)
    assert sample.get_scalar("unknown-key") is None

    # Arrays of every supported dtype (bool, int, float, fixed-width bytes).
    _assert_roundtrip_array(sample, np.array([True, False]))
    _assert_roundtrip_array(sample, np.array([1, 2, 3]))
    _assert_roundtrip_array(sample, np.array([1.0, 2.0, 3.0]))
    _assert_roundtrip_array(sample, np.array(["A", "BB", "CCC"], dtype="S"))
    assert sample.get_array("unknown-key") is None

    # Sparse matrices round-trip through the COO representation.
    matrix = coo_matrix(
        [
            [1.0, 0.0, 0.0],
            [0.0, 2.0, 3.0],
            [0.0, 0.0, 4.0],
        ],
    )
    _assert_roundtrip_sparse(sample, matrix)
    assert sample.get_sparse("unknown-key") is None
|
||||||
|
|
||||||
|
|
||||||
|
def _assert_roundtrip_array(sample: Sample, original: np.ndarray) -> None:
    """Store `original` under a fixed key and check it reads back unchanged."""
    sample.put_array("key", original)
    restored = sample.get_array("key")
    assert restored is not None
    assert isinstance(restored, np.ndarray)
    # Element-wise equality over the whole array.
    assert np.array_equal(restored, original)
|
||||||
|
|
||||||
|
|
||||||
|
def _assert_roundtrip_scalar(sample: Sample, original: Any) -> None:
    """Store `original` under a fixed key and check both value and type survive."""
    sample.put_scalar("key", original)
    recovered = sample.get_scalar("key")
    assert recovered == original
    assert recovered is not None
    # The backend must not silently change the scalar's Python type.
    assert isinstance(
        recovered, original.__class__
    ), f"Expected {original.__class__}, found {recovered.__class__} instead"
|
||||||
|
|
||||||
|
|
||||||
|
def _assert_roundtrip_sparse(sample: Sample, original: coo_matrix) -> None:
    """Store `original` under a fixed key and check it reads back unchanged."""
    sample.put_sparse("key", original)
    recovered = sample.get_sparse("key")
    assert recovered is not None
    assert isinstance(recovered, coo_matrix)
    # Sparse inequality yields a matrix of mismatches; zero mismatches == equal.
    assert (original != recovered).sum() == 0
|
@ -0,0 +1,32 @@
|
|||||||
|
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
|
||||||
|
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
|
||||||
|
# Released under the modified BSD license. See COPYING.md for more details.
|
||||||
|
|
||||||
|
import tempfile
|
||||||
|
|
||||||
|
from miplearn.solvers.learning import LearningSolver
|
||||||
|
from miplearn.solvers.gurobi import GurobiSolver
|
||||||
|
from miplearn.features.sample import Hdf5Sample
|
||||||
|
from miplearn.instance.file import FileInstance
|
||||||
|
|
||||||
|
|
||||||
|
def test_usage() -> None:
    """End-to-end: save an instance to HDF5, solve it from disk, read results back."""
    # Create original instance
    original = GurobiSolver().build_test_instance_knapsack()

    # Save instance to disk
    filename = tempfile.mktemp()
    FileInstance.save(original, filename)
    # The pickled payload must have been written to the HDF5 file.
    sample = Hdf5Sample(filename)
    assert len(sample.get_array("pickled")) > 0

    # Solve instance from disk
    solver = LearningSolver(solver=GurobiSolver())
    solver.solve(FileInstance(filename))

    # Assert HDF5 contains training data
    # 1183.0 is the known optimal value of the test knapsack instance.
    sample = FileInstance(filename).get_samples()[0]
    assert sample.get_scalar("mip_lower_bound") == 1183.0
    assert sample.get_scalar("mip_upper_bound") == 1183.0
    assert len(sample.get_array("lp_var_values")) == 5
    assert len(sample.get_array("mip_var_values")) == 5
|
@ -1,158 +0,0 @@
|
|||||||
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
|
|
||||||
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
|
|
||||||
# Released under the modified BSD license. See COPYING.md for more details.
|
|
||||||
|
|
||||||
import numpy as np
|
|
||||||
|
|
||||||
from miplearn.features import (
|
|
||||||
FeaturesExtractor,
|
|
||||||
InstanceFeatures,
|
|
||||||
VariableFeatures,
|
|
||||||
ConstraintFeatures,
|
|
||||||
)
|
|
||||||
from miplearn.solvers.gurobi import GurobiSolver
|
|
||||||
from miplearn.solvers.tests import assert_equals
|
|
||||||
|
|
||||||
inf = float("inf")
|
|
||||||
|
|
||||||
|
|
||||||
def test_knapsack() -> None:
    """Check every extracted feature field for the test knapsack instance.

    Solves the LP relaxation first so that basis status, reduced costs
    and sensitivity-analysis information are available to the extractor,
    then compares variable, constraint and instance features against
    known-good values.
    """
    solver = GurobiSolver()
    instance = solver.build_test_instance_knapsack()
    model = instance.to_model()
    solver.set_instance(instance, model)
    solver.solve_lp()

    features = FeaturesExtractor().extract(instance, solver)
    assert features.variables is not None
    assert features.instance is not None

    # Expected per-variable features (4 binary items plus the continuous
    # capacity variable "z").
    expected_variables = VariableFeatures(
        names=["x[0]", "x[1]", "x[2]", "x[3]", "z"],
        basis_status=["U", "B", "U", "L", "U"],
        categories=["default", "default", "default", "default", None],
        lower_bounds=[0.0, 0.0, 0.0, 0.0, 0.0],
        obj_coeffs=[505.0, 352.0, 458.0, 220.0, 0.0],
        reduced_costs=[193.615385, 0.0, 187.230769, -23.692308, 13.538462],
        sa_lb_down=[-inf, -inf, -inf, -0.111111, -inf],
        sa_lb_up=[1.0, 0.923077, 1.0, 1.0, 67.0],
        sa_obj_down=[311.384615, 317.777778, 270.769231, -inf, -13.538462],
        sa_obj_up=[inf, 570.869565, inf, 243.692308, inf],
        sa_ub_down=[0.913043, 0.923077, 0.9, 0.0, 43.0],
        sa_ub_up=[2.043478, inf, 2.2, inf, 69.0],
        types=["B", "B", "B", "B", "C"],
        upper_bounds=[1.0, 1.0, 1.0, 1.0, 67.0],
        user_features=[
            [23.0, 505.0],
            [26.0, 352.0],
            [20.0, 458.0],
            [18.0, 220.0],
            None,
        ],
        values=[1.0, 0.923077, 1.0, 0.0, 67.0],
        alvarez_2017=[
            [1.0, 0.32899, 0.0, 0.0, 1.0, 1.0, 5.265874, 46.051702],
            [1.0, 0.229316, 0.0, 0.076923, 1.0, 1.0, 3.532875, 5.388476],
            [1.0, 0.298371, 0.0, 0.0, 1.0, 1.0, 5.232342, 46.051702],
            [1.0, 0.143322, 0.0, 0.0, 1.0, -1.0, 46.051702, 3.16515],
            [0.0, 0.0, 0.0, 0.0, 1.0, -1.0, 0.0, 0.0],
        ],
    )
    assert_equals(features.variables, expected_variables)

    # Expected features for the single capacity constraint.
    expected_constraints = ConstraintFeatures(
        basis_status=["N"],
        categories=["eq_capacity"],
        dual_values=[13.538462],
        names=["eq_capacity"],
        lazy=[False],
        lhs=[
            [
                ("x[0]", 23.0),
                ("x[1]", 26.0),
                ("x[2]", 20.0),
                ("x[3]", 18.0),
                ("z", -1.0),
            ],
        ],
        rhs=[0.0],
        sa_rhs_down=[-24.0],
        sa_rhs_up=[2.0],
        senses=["="],
        slacks=[0.0],
        user_features=[None],
    )
    assert_equals(features.constraints, expected_constraints)

    # Expected instance-level features.
    expected_instance = InstanceFeatures(
        user_features=[67.0, 21.75],
        lazy_constraint_count=0,
    )
    assert_equals(features.instance, expected_instance)
|
|
||||||
|
|
||||||
|
|
||||||
def test_constraint_getindex() -> None:
    """Verify boolean-mask indexing on ConstraintFeatures.

    Selecting with [True, False, True] must keep constraints c1 and c3
    and drop c2 across all parallel fields (names, rhs, senses, lhs).
    """
    cf = ConstraintFeatures(
        names=["c1", "c2", "c3"],
        rhs=[1.0, 2.0, 3.0],
        senses=["=", "<", ">"],
        lhs=[
            [("x1", 1.0), ("x2", 1.0)],
            [("x2", 2.0), ("x3", 2.0)],
            [("x3", 3.0), ("x4", 3.0)],
        ],
    )
    selected = cf[[True, False, True]]
    assert_equals(
        selected,
        ConstraintFeatures(
            names=["c1", "c3"],
            rhs=[1.0, 3.0],
            senses=["=", ">"],
            lhs=[
                [("x1", 1.0), ("x2", 1.0)],
                [("x3", 3.0), ("x4", 3.0)],
            ],
        ),
    )
|
|
||||||
|
|
||||||
|
|
||||||
def test_assert_equals() -> None:
    """Sanity-check assert_equals across the value types it must support."""
    # Plain scalars and built-in containers.
    assert_equals("hello", "hello")
    assert_equals([1.0, 2.0], [1.0, 2.0])
    assert_equals((1.0,), (1.0,))
    assert_equals({"x": 10}, {"x": 10})

    # NumPy arrays: 1-D, 2-D, and comparison against a plain list.
    assert_equals(np.array([1.0, 2.0]), np.array([1.0, 2.0]))
    assert_equals(
        np.array([[1.0, 2.0], [3.0, 4.0]]),
        np.array([[1.0, 2.0], [3.0, 4.0]]),
    )
    assert_equals(np.array([True, True]), [True, True])

    # Dataclasses holding NumPy arrays.
    assert_equals(
        VariableFeatures(values=np.array([1.0, 2.0])),  # type: ignore
        VariableFeatures(values=np.array([1.0, 2.0])),  # type: ignore
    )