Mirror of https://github.com/ANL-CEEESA/MIPLearn.git (synced 2025-12-06 09:28:51 -06:00)

Commit: Refer to variables by varname instead of (vname, index)
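In short, the commit flattens variable addressing: every method that used to take a (var_name, index) pair now takes a single variable name such as "x[3]", and solutions become flat dictionaries keyed by those names. Below is a minimal sketch of the before/after call shapes, assuming the type aliases that the diff imports from miplearn.types (VariableName, Category, Solution); the alias definitions shown here are an assumption, since miplearn/types.py itself is not part of this excerpt.

from typing import Dict, Hashable, Optional

# Assumed aliases, mirroring how the diff below uses them:
VariableName = str
Category = Hashable
Solution = Dict[VariableName, Optional[float]]

# Before: variables were addressed by a (name, index) pair, and solutions were nested:
#     instance.get_variable_features("x", (3,))
#     solution["x"][(3,)] = 1.0
# After: the index is folded into the variable name, and solutions are flat:
#     instance.get_variable_features("x[3]")
#     solution["x[3]"] = 1.0

solution: Solution = {"x[0]": 1.0, "x[1]": 0.0, "x[2]": None}  # None = value left free
for var_name, value in solution.items():
    print(var_name, value)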
@@ -7,7 +7,6 @@ from typing import (
     Dict,
     List,
     Hashable,
-    Optional,
    Any,
     TYPE_CHECKING,
     Tuple,

@@ -23,8 +22,9 @@ from miplearn.components.component import Component
 from miplearn.features import TrainingSample, Features
 from miplearn.instance.base import Instance
 from miplearn.types import (
-    Solution,
     LearningSolveStats,
+    Category,
+    Solution,
 )
 
 logger = logging.getLogger(__name__)

@@ -84,15 +84,14 @@ class PrimalSolutionComponent(Component):
         stats["Primal: Free"] = 0
         stats["Primal: Zero"] = 0
         stats["Primal: One"] = 0
-        for (var, var_dict) in solution.items():
-            for (idx, value) in var_dict.items():
-                if value is None:
-                    stats["Primal: Free"] += 1
-                else:
-                    if value < 0.5:
-                        stats["Primal: Zero"] += 1
-                    else:
-                        stats["Primal: One"] += 1
+        for (var_name, value) in solution.items():
+            if value is None:
+                stats["Primal: Free"] += 1
+            else:
+                if value < 0.5:
+                    stats["Primal: Zero"] += 1
+                else:
+                    stats["Primal: One"] += 1
         logger.info(
             f"Predicted: free: {stats['Primal: Free']}, "
             f"zero: {stats['Primal: Zero']}, "

@@ -106,13 +105,6 @@ class PrimalSolutionComponent(Component):
     ) -> Solution:
         assert instance.features.variables is not None
 
-        # Initialize empty solution
-        solution: Solution = {}
-        for (var_name, var_dict) in instance.features.variables.items():
-            solution[var_name] = {}
-            for idx in var_dict.keys():
-                solution[var_name][idx] = None
-
         # Compute y_pred
         x, _ = self.sample_xy(instance, sample)
         y_pred = {}

@@ -132,56 +124,52 @@ class PrimalSolutionComponent(Component):
         ).T
 
         # Convert y_pred into solution
+        solution: Solution = {v: None for v in instance.features.variables.keys()}
         category_offset: Dict[Hashable, int] = {cat: 0 for cat in x.keys()}
-        for (var_name, var_dict) in instance.features.variables.items():
-            for (idx, var_features) in var_dict.items():
-                category = var_features.category
-                offset = category_offset[category]
-                category_offset[category] += 1
-                if y_pred[category][offset, 0]:
-                    solution[var_name][idx] = 0.0
-                if y_pred[category][offset, 1]:
-                    solution[var_name][idx] = 1.0
+        for (var_name, var_features) in instance.features.variables.items():
+            category = var_features.category
+            offset = category_offset[category]
+            category_offset[category] += 1
+            if y_pred[category][offset, 0]:
+                solution[var_name] = 0.0
+            if y_pred[category][offset, 1]:
+                solution[var_name] = 1.0
 
         return solution
 
-    @staticmethod
     def sample_xy(
+        self,
         instance: Instance,
         sample: TrainingSample,
-    ) -> Tuple[Dict[Hashable, List[List[float]]], Dict[Hashable, List[List[float]]]]:
+    ) -> Tuple[Dict[Category, List[List[float]]], Dict[Category, List[List[float]]]]:
         assert instance.features.variables is not None
         x: Dict = {}
         y: Dict = {}
-        solution: Optional[Solution] = None
-        if sample.solution is not None:
-            solution = sample.solution
-        for (var_name, var_dict) in instance.features.variables.items():
-            for (idx, var_features) in var_dict.items():
-                category = var_features.category
-                if category is None:
-                    continue
-                if category not in x.keys():
-                    x[category] = []
-                    y[category] = []
-                f: List[float] = []
-                assert var_features.user_features is not None
-                f += var_features.user_features
-                if sample.lp_solution is not None:
-                    lp_value = sample.lp_solution[var_name][idx]
-                    if lp_value is not None:
-                        f += [lp_value]
-                x[category] += [f]
-                if solution is not None:
-                    opt_value = solution[var_name][idx]
-                    assert opt_value is not None
-                    assert 0.0 - 1e-5 <= opt_value <= 1.0 + 1e-5, (
-                        f"Variable {var_name} has non-binary value {opt_value} in the "
-                        "optimal solution. Predicting values of non-binary "
-                        "variables is not currently supported. Please set its "
-                        "category to None."
-                    )
-                    y[category] += [[opt_value < 0.5, opt_value >= 0.5]]
+        for (var_name, var_features) in instance.features.variables.items():
+            category = var_features.category
+            if category is None:
+                continue
+            if category not in x.keys():
+                x[category] = []
+                y[category] = []
+            f: List[float] = []
+            assert var_features.user_features is not None
+            f += var_features.user_features
+            if sample.lp_solution is not None:
+                lp_value = sample.lp_solution[var_name]
+                if lp_value is not None:
+                    f += [lp_value]
+            x[category] += [f]
+            if sample.solution is not None:
+                opt_value = sample.solution[var_name]
+                assert opt_value is not None
+                assert 0.0 - 1e-5 <= opt_value <= 1.0 + 1e-5, (
+                    f"Variable {var_name} has non-binary value {opt_value} in the "
+                    "optimal solution. Predicting values of non-binary "
+                    "variables is not currently supported. Please set its "
+                    "category to None."
+                )
+                y[category] += [[opt_value < 0.5, opt_value >= 0.5]]
         return x, y
 
     def sample_evaluate(

@@ -194,22 +182,19 @@ class PrimalSolutionComponent(Component):
         solution_pred = self.sample_predict(instance, sample)
         vars_all, vars_one, vars_zero = set(), set(), set()
         pred_one_positive, pred_zero_positive = set(), set()
-        for (varname, var_dict) in solution_actual.items():
-            if varname not in solution_pred.keys():
-                continue
-            for (idx, value_actual) in var_dict.items():
-                assert value_actual is not None
-                vars_all.add((varname, idx))
-                if value_actual > 0.5:
-                    vars_one.add((varname, idx))
-                else:
-                    vars_zero.add((varname, idx))
-                value_pred = solution_pred[varname][idx]
-                if value_pred is not None:
-                    if value_pred > 0.5:
-                        pred_one_positive.add((varname, idx))
-                    else:
-                        pred_zero_positive.add((varname, idx))
+        for (var_name, value_actual) in solution_actual.items():
+            assert value_actual is not None
+            vars_all.add(var_name)
+            if value_actual > 0.5:
+                vars_one.add(var_name)
+            else:
+                vars_zero.add(var_name)
+            value_pred = solution_pred[var_name]
+            if value_pred is not None:
+                if value_pred > 0.5:
+                    pred_one_positive.add(var_name)
+                else:
+                    pred_zero_positive.add(var_name)
         pred_one_negative = vars_all - pred_one_positive
         pred_zero_negative = vars_all - pred_zero_positive
         return {
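The PrimalSolutionComponent hunks above now iterate over a flat solution dictionary. The following standalone sketch reproduces the same counting logic for illustration only; count_predictions is a hypothetical helper, not a MIPLearn function.

from typing import Dict, Optional

Solution = Dict[str, Optional[float]]

def count_predictions(solution: Solution) -> Dict[str, int]:
    # Mirrors the counting loop in sample_evaluate above:
    # one entry per variable name, None meaning "left free".
    stats = {"Primal: Free": 0, "Primal: Zero": 0, "Primal: One": 0}
    for _var_name, value in solution.items():
        if value is None:
            stats["Primal: Free"] += 1
        elif value < 0.5:
            stats["Primal: Zero"] += 1
        else:
            stats["Primal: One"] += 1
    return stats

print(count_predictions({"x[0]": 1.0, "x[1]": 0.0, "x[2]": None}))
# {'Primal: Free': 1, 'Primal: Zero': 1, 'Primal: One': 1}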
@@ -7,7 +7,7 @@ import numbers
 from dataclasses import dataclass
 from typing import TYPE_CHECKING, Dict, Optional, Set, List, Hashable
 
-from miplearn.types import VarIndex, Solution
+from miplearn.types import Solution, VariableName, Category
 
 if TYPE_CHECKING:
     from miplearn.solvers.internal import InternalSolver

@@ -53,7 +53,7 @@ class ConstraintFeatures:
 @dataclass
 class Features:
     instance: Optional[InstanceFeatures] = None
-    variables: Optional[Dict[str, Dict[VarIndex, VariableFeatures]]] = None
+    variables: Optional[Dict[str, VariableFeatures]] = None
     constraints: Optional[Dict[str, ConstraintFeatures]] = None
 
 

@@ -72,35 +72,32 @@ class FeaturesExtractor:
     def _extract_variables(
         self,
         instance: "Instance",
-    ) -> Dict[str, Dict[VarIndex, VariableFeatures]]:
-        result: Dict[str, Dict[VarIndex, VariableFeatures]] = {}
-        empty_solution = self.solver.get_empty_solution()
-        for (var_name, var_dict) in empty_solution.items():
-            result[var_name] = {}
-            for idx in var_dict.keys():
-                user_features = None
-                category = instance.get_variable_category(var_name, idx)
-                if category is not None:
-                    assert isinstance(category, collections.Hashable), (
-                        f"Variable category must be be hashable. "
-                        f"Found {type(category).__name__} instead for var={var_name}."
-                    )
-                    user_features = instance.get_variable_features(var_name, idx)
-                    assert isinstance(user_features, list), (
-                        f"Variable features must be a list. "
-                        f"Found {type(user_features).__name__} instead for "
-                        f"var={var_name}[{idx}]."
-                    )
-                    for v in user_features:
-                        assert isinstance(v, numbers.Real), (
-                            f"Variable features must be a list of numbers. "
-                            f"Found {type(v).__name__} instead "
-                            f"for var={var_name}[{idx}]."
-                        )
-                result[var_name][idx] = VariableFeatures(
-                    category=category,
-                    user_features=user_features,
-                )
+    ) -> Dict[VariableName, VariableFeatures]:
+        result: Dict[VariableName, VariableFeatures] = {}
+        for var_name in self.solver.get_variable_names():
+            user_features: Optional[List[float]] = None
+            category: Category = instance.get_variable_category(var_name)
+            if category is not None:
+                assert isinstance(category, collections.Hashable), (
+                    f"Variable category must be be hashable. "
+                    f"Found {type(category).__name__} instead for var={var_name}."
+                )
+                user_features = instance.get_variable_features(var_name)
+                assert isinstance(user_features, list), (
+                    f"Variable features must be a list. "
+                    f"Found {type(user_features).__name__} instead for "
+                    f"var={var_name}."
+                )
+                for v in user_features:
+                    assert isinstance(v, numbers.Real), (
+                        f"Variable features must be a list of numbers. "
+                        f"Found {type(v).__name__} instead "
+                        f"for var={var_name}."
+                    )
+            result[var_name] = VariableFeatures(
+                category=category,
+                user_features=user_features,
+            )
         return result
 
     def _extract_constraints(
@@ -6,14 +6,16 @@ import logging
 from abc import ABC, abstractmethod
 from typing import Any, List, Optional, Hashable
 
+from overrides import EnforceOverrides
+
 from miplearn.features import TrainingSample, Features
-from miplearn.types import VarIndex
+from miplearn.types import VariableName, Category
 
 logger = logging.getLogger(__name__)
 
 
 # noinspection PyMethodMayBeStatic
-class Instance(ABC):
+class Instance(ABC, EnforceOverrides):
     """
     Abstract class holding all the data necessary to generate a concrete model of the
     proble.

@@ -60,9 +62,9 @@ class Instance(ABC):
         """
         return [0]
 
-    def get_variable_features(self, var_name: str, index: VarIndex) -> List[float]:
+    def get_variable_features(self, var_name: VariableName) -> List[float]:
         """
-        Returns a 1-dimensional array of (numerical) features describing a particular
+        Returns a (1-dimensional) list of numerical features describing a particular
         decision variable.
 
         In combination with instance features, variable features are used by

@@ -79,11 +81,7 @@ class Instance(ABC):
         """
         return [0]
 
-    def get_variable_category(
-        self,
-        var_name: str,
-        index: VarIndex,
-    ) -> Optional[Hashable]:
+    def get_variable_category(self, var_name: VariableName) -> Optional[Category]:
         """
         Returns the category for each decision variable.
 

@@ -91,6 +89,7 @@ class Instance(ABC):
         internal ML model to predict the values of both variables. If the returned
         category is None, ML models will ignore the variable.
 
+        A category can be any hashable type, such as strings, numbers or tuples.
         By default, returns "default".
         """
         return "default"
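With the new Instance API above, user subclasses implement the two variable hooks with a single var_name argument. Below is a minimal, hypothetical subclass sketch; the class name and return values are placeholders, not part of the library.

from typing import Hashable, List, Optional

class MyInstance:  # illustrative only; the real base class is miplearn.instance.base.Instance
    def get_variable_features(self, var_name: str) -> List[float]:
        # One feature vector per variable name, e.g. "x[3]".
        return [0.0]

    def get_variable_category(self, var_name: str) -> Optional[Hashable]:
        # Any hashable category; returning None tells the ML components to skip the variable.
        return "default"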
@@ -2,14 +2,16 @@
 # Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
 # Released under the modified BSD license. See COPYING.md for more details.
 
+import gc
 import gzip
 import os
 import pickle
-import gc
-from typing import Optional, Any, List, Hashable, cast, IO, Callable
+from typing import Optional, Any, List, Hashable, cast, IO
+
+from overrides import overrides
 
 from miplearn.instance.base import logger, Instance
-from miplearn.types import VarIndex
+from miplearn.types import VariableName, Category
 
 
 class PickleGzInstance(Instance):

@@ -31,62 +33,72 @@ class PickleGzInstance(Instance):
         self.instance: Optional[Instance] = None
         self.filename: str = filename
 
+    @overrides
     def to_model(self) -> Any:
         assert self.instance is not None
         return self.instance.to_model()
 
+    @overrides
     def get_instance_features(self) -> List[float]:
         assert self.instance is not None
         return self.instance.get_instance_features()
 
-    def get_variable_features(self, var_name: str, index: VarIndex) -> List[float]:
+    @overrides
+    def get_variable_features(self, var_name: VariableName) -> List[float]:
         assert self.instance is not None
-        return self.instance.get_variable_features(var_name, index)
+        return self.instance.get_variable_features(var_name)
 
-    def get_variable_category(
-        self,
-        var_name: str,
-        index: VarIndex,
-    ) -> Optional[Hashable]:
+    @overrides
+    def get_variable_category(self, var_name: VariableName) -> Optional[Category]:
         assert self.instance is not None
-        return self.instance.get_variable_category(var_name, index)
+        return self.instance.get_variable_category(var_name)
 
+    @overrides
     def get_constraint_features(self, cid: str) -> Optional[List[float]]:
         assert self.instance is not None
         return self.instance.get_constraint_features(cid)
 
+    @overrides
     def get_constraint_category(self, cid: str) -> Optional[Hashable]:
         assert self.instance is not None
         return self.instance.get_constraint_category(cid)
 
+    @overrides
     def has_static_lazy_constraints(self) -> bool:
         assert self.instance is not None
         return self.instance.has_static_lazy_constraints()
 
+    @overrides
     def has_dynamic_lazy_constraints(self) -> bool:
         assert self.instance is not None
         return self.instance.has_dynamic_lazy_constraints()
 
+    @overrides
     def is_constraint_lazy(self, cid: str) -> bool:
         assert self.instance is not None
         return self.instance.is_constraint_lazy(cid)
 
+    @overrides
     def find_violated_lazy_constraints(self, model: Any) -> List[Hashable]:
         assert self.instance is not None
         return self.instance.find_violated_lazy_constraints(model)
 
+    @overrides
     def build_lazy_constraint(self, model: Any, violation: Hashable) -> Any:
         assert self.instance is not None
         return self.instance.build_lazy_constraint(model, violation)
 
+    @overrides
     def find_violated_user_cuts(self, model: Any) -> List[Hashable]:
         assert self.instance is not None
         return self.instance.find_violated_user_cuts(model)
 
+    @overrides
     def build_user_cut(self, model: Any, violation: Hashable) -> Any:
         assert self.instance is not None
         return self.instance.build_user_cut(model, violation)
 
+    @overrides
     def load(self) -> None:
         if self.instance is None:
             obj = read_pickle_gz(self.filename)

@@ -95,12 +107,14 @@ class PickleGzInstance(Instance):
         self.features = self.instance.features
         self.training_data = self.instance.training_data
 
+    @overrides
     def free(self) -> None:
         self.instance = None  # type: ignore
         self.features = None  # type: ignore
         self.training_data = None  # type: ignore
         gc.collect()
 
+    @overrides
     def flush(self) -> None:
         write_pickle_gz(self.instance, self.filename)
 
@@ -1,14 +1,16 @@
 # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
 # Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
 # Released under the modified BSD license. See COPYING.md for more details.
-from typing import List
+from typing import List, Dict
 
 import numpy as np
 import pyomo.environ as pe
+from overrides import overrides
 from scipy.stats import uniform, randint
 from scipy.stats.distributions import rv_frozen
 
 from miplearn.instance.base import Instance
+from miplearn.types import VariableName
 
 
 class ChallengeA:

@@ -67,7 +69,9 @@ class MultiKnapsackInstance(Instance):
         self.prices = prices
         self.capacities = capacities
         self.weights = weights
+        self.varname_to_index = {f"x[{i}]": i for i in range(self.n)}
 
+    @overrides
     def to_model(self):
         model = pe.ConcreteModel()
         model.x = pe.Var(range(self.n), domain=pe.Binary)

@@ -84,10 +88,13 @@ class MultiKnapsackInstance(Instance):
 
         return model
 
+    @overrides
     def get_instance_features(self):
         return [np.mean(self.prices)] + list(self.capacities)
 
-    def get_variable_features(self, var, index):
+    @overrides
+    def get_variable_features(self, var_name: VariableName) -> List[float]:
+        index = self.varname_to_index[var_name]
         return [self.prices[index]] + list(self.weights[:, index])
 
 

@@ -237,7 +244,11 @@ class KnapsackInstance(Instance):
         self.weights = weights
         self.prices = prices
         self.capacity = capacity
+        self.varname_to_item: Dict[VariableName, int] = {
+            f"x[{i}]": i for i in range(len(self.weights))
+        }
 
+    @overrides
     def to_model(self):
         model = pe.ConcreteModel()
         items = range(len(self.weights))

@@ -250,16 +261,19 @@ class KnapsackInstance(Instance):
         )
         return model
 
+    @overrides
     def get_instance_features(self):
         return [
             self.capacity,
             np.average(self.weights),
         ]
 
-    def get_variable_features(self, var, index):
+    @overrides
+    def get_variable_features(self, var_name):
+        item = self.varname_to_item[var_name]
         return [
-            self.weights[index],
-            self.prices[index],
+            self.weights[item],
+            self.prices[item],
         ]
 
 

@@ -277,6 +291,7 @@ class GurobiKnapsackInstance(KnapsackInstance):
     ) -> None:
         super().__init__(weights, prices, capacity)
 
+    @overrides
     def to_model(self):
         import gurobipy as gp
         from gurobipy import GRB
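The knapsack instances above illustrate the migration pattern for problems that previously relied on the numeric index: build a varname-to-index dictionary once in the constructor, then translate inside the feature callback. A self-contained sketch with made-up sample data:

# Standalone illustration of the mapping trick used by KnapsackInstance above.
prices = [10.0, 7.5, 3.0]
weights = [4.0, 2.0, 1.0]
varname_to_item = {f"x[{i}]": i for i in range(len(weights))}

def get_variable_features(var_name: str):
    # Translate "x[i]" back to the item index, then build the feature vector.
    item = varname_to_item[var_name]
    return [weights[item], prices[item]]

print(get_variable_features("x[1]"))  # [2.0, 7.5]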
@@ -5,6 +5,7 @@
 import networkx as nx
 import numpy as np
 import pyomo.environ as pe
+from overrides import overrides
 from scipy.stats import uniform, randint
 from scipy.stats.distributions import rv_frozen
 

@@ -104,32 +105,38 @@ class MaxWeightStableSetInstance(Instance):
         super().__init__()
         self.graph = graph
         self.weights = weights
+        self.nodes = list(self.graph.nodes)
+        self.varname_to_node = {f"x[{v}]": v for v in self.nodes}
 
+    @overrides
     def to_model(self):
-        nodes = list(self.graph.nodes)
         model = pe.ConcreteModel()
-        model.x = pe.Var(nodes, domain=pe.Binary)
+        model.x = pe.Var(self.nodes, domain=pe.Binary)
         model.OBJ = pe.Objective(
-            expr=sum(model.x[v] * self.weights[v] for v in nodes), sense=pe.maximize
+            expr=sum(model.x[v] * self.weights[v] for v in self.nodes),
+            sense=pe.maximize,
         )
         model.clique_eqs = pe.ConstraintList()
         for clique in nx.find_cliques(self.graph):
-            model.clique_eqs.add(sum(model.x[i] for i in clique) <= 1)
+            model.clique_eqs.add(sum(model.x[v] for v in clique) <= 1)
         return model
 
-    def get_variable_features(self, var, index):
+    @overrides
+    def get_variable_features(self, var_name):
+        v1 = self.varname_to_node[var_name]
         neighbor_weights = [0] * 15
         neighbor_degrees = [100] * 15
-        for n in self.graph.neighbors(index):
-            neighbor_weights += [self.weights[n] / self.weights[index]]
-            neighbor_degrees += [self.graph.degree(n) / self.graph.degree(index)]
+        for v2 in self.graph.neighbors(v1):
+            neighbor_weights += [self.weights[v2] / self.weights[v1]]
+            neighbor_degrees += [self.graph.degree(v2) / self.graph.degree(v1)]
         neighbor_weights.sort(reverse=True)
         neighbor_degrees.sort()
         features = []
         features += neighbor_weights[:5]
         features += neighbor_degrees[:5]
-        features += [self.graph.degree(index)]
+        features += [self.graph.degree(v1)]
         return features
 
-    def get_variable_category(self, var, index):
+    @overrides
+    def get_variable_category(self, var):
         return "default"
@@ -5,6 +5,7 @@
 import networkx as nx
 import numpy as np
 import pyomo.environ as pe
+from overrides import overrides
 from scipy.spatial.distance import pdist, squareform
 from scipy.stats import uniform, randint
 from scipy.stats.distributions import rv_frozen

@@ -133,15 +134,17 @@ class TravelingSalesmanInstance(Instance):
         assert distances.shape == (n_cities, n_cities)
         self.n_cities = n_cities
         self.distances = distances
-
-    def to_model(self):
-        model = pe.ConcreteModel()
-        model.edges = edges = [
+        self.edges = [
             (i, j) for i in range(self.n_cities) for j in range(i + 1, self.n_cities)
         ]
-        model.x = pe.Var(edges, domain=pe.Binary)
+        self.varname_to_index = {f"x[{e}]": e for e in self.edges}
+
+    @overrides
+    def to_model(self):
+        model = pe.ConcreteModel()
+        model.x = pe.Var(self.edges, domain=pe.Binary)
         model.obj = pe.Objective(
-            expr=sum(model.x[i, j] * self.distances[i, j] for (i, j) in edges),
+            expr=sum(model.x[i, j] * self.distances[i, j] for (i, j) in self.edges),
             sense=pe.minimize,
         )
         model.eq_degree = pe.ConstraintList()

@@ -157,17 +160,13 @@ class TravelingSalesmanInstance(Instance):
             )
         return model
 
-    def get_instance_features(self):
-        return [0.0]
-
-    def get_variable_features(self, var_name, index):
-        return [0.0]
-
-    def get_variable_category(self, var_name, index):
-        return index
+    @overrides
+    def get_variable_category(self, var_name):
+        return self.varname_to_index[var_name]
 
+    @overrides
     def find_violated_lazy_constraints(self, model):
-        selected_edges = [e for e in model.edges if model.x[e].value > 0.5]
+        selected_edges = [e for e in self.edges if model.x[e].value > 0.5]
         graph = nx.Graph()
         graph.add_edges_from(selected_edges)
         components = [frozenset(c) for c in list(nx.connected_components(graph))]

@@ -177,10 +176,11 @@ class TravelingSalesmanInstance(Instance):
                 violations += [c]
         return violations
 
+    @overrides
     def build_lazy_constraint(self, model, component):
         cut_edges = [
             e
-            for e in model.edges
+            for e in self.edges
             if (e[0] in component and e[1] not in component)
             or (e[0] not in component and e[1] in component)
         ]
@@ -6,7 +6,9 @@ import re
 import sys
 from io import StringIO
 from random import randint
-from typing import List, Any, Dict, Optional, cast, Tuple, Union
+from typing import List, Any, Dict, Optional
 
+from overrides import overrides
+
 from miplearn.instance.base import Instance
 from miplearn.solvers import _RedirectOutput

@@ -17,7 +19,12 @@ from miplearn.solvers.internal import (
     LazyCallback,
     MIPSolveStats,
 )
-from miplearn.types import VarIndex, SolverParams, Solution, UserCutCallback
+from miplearn.types import (
+    SolverParams,
+    UserCutCallback,
+    Solution,
+    VariableName,
+)
 
 logger = logging.getLogger(__name__)
 

@@ -52,8 +59,8 @@ class GurobiSolver(InternalSolver):
         self.instance: Optional[Instance] = None
         self.model: Optional["gurobipy.Model"] = None
         self.params: SolverParams = params
-        self._all_vars: Dict = {}
-        self._bin_vars: Optional[Dict[str, Dict[VarIndex, "gurobipy.Var"]]] = None
+        self.varname_to_var: Dict[str, "gurobipy.Var"] = {}
+        self.bin_vars: List["gurobipy.Var"] = []
         self.cb_where: Optional[int] = None
 
         assert lazy_cb_frequency in [1, 2]

@@ -65,6 +72,7 @@ class GurobiSolver(InternalSolver):
             self.gp.GRB.Callback.MIPNODE,
         ]
 
+    @overrides
     def set_instance(
         self,
         instance: Instance,

@@ -85,30 +93,20 @@ class GurobiSolver(InternalSolver):
 
     def _update_vars(self) -> None:
         assert self.model is not None
-        self._all_vars = {}
-        self._bin_vars = {}
-        idx: VarIndex
-        for var in self.model.getVars():
-            m = re.search(r"([^[]*)\[(.*)]", var.varName)
-            if m is None:
-                name = var.varName
-                idx = (0,)
-            else:
-                name = m.group(1)
-                parts = m.group(2).split(",")
-                idx = cast(
-                    Tuple[Union[str, int]],
-                    tuple(int(k) if k.isdecimal() else str(k) for k in parts),
-                )
-                if len(idx) == 1:
-                    idx = idx[0]
-            if name not in self._all_vars:
-                self._all_vars[name] = {}
-            self._all_vars[name][idx] = var
-            if var.vtype != "C":
-                if name not in self._bin_vars:
-                    self._bin_vars[name] = {}
-                self._bin_vars[name][idx] = var
+        self.varname_to_var.clear()
+        self.bin_vars.clear()
+        for var in self.model.getVars():
+            assert var.varName not in self.varname_to_var, (
+                f"Duplicated variable name detected: {var.varName}. "
+                f"Unique variable names are currently required."
+            )
+            self.varname_to_var[var.varName] = var
+            assert var.vtype in ["B", "C"], (
+                "Only binary and continuous variables are currently supported. "
+                "Variable {var.varName} has type {var.vtype}."
+            )
+            if var.vtype == "B":
+                self.bin_vars.append(var)
 
     def _apply_params(self, streams: List[Any]) -> None:
         assert self.model is not None

@@ -118,6 +116,7 @@ class GurobiSolver(InternalSolver):
         if "seed" not in [k.lower() for k in self.params.keys()]:
             self.model.setParam("Seed", randint(0, 1_000_000))
 
+    @overrides
     def solve_lp(
         self,
         tee: bool = False,

@@ -128,17 +127,14 @@ class GurobiSolver(InternalSolver):
             streams += [sys.stdout]
         self._apply_params(streams)
         assert self.model is not None
-        assert self._bin_vars is not None
-        for (varname, vardict) in self._bin_vars.items():
-            for (idx, var) in vardict.items():
-                var.vtype = self.gp.GRB.CONTINUOUS
-                var.lb = 0.0
-                var.ub = 1.0
+        for var in self.bin_vars:
+            var.vtype = self.gp.GRB.CONTINUOUS
+            var.lb = 0.0
+            var.ub = 1.0
         with _RedirectOutput(streams):
             self.model.optimize()
-        for (varname, vardict) in self._bin_vars.items():
-            for (idx, var) in vardict.items():
-                var.vtype = self.gp.GRB.BINARY
+        for var in self.bin_vars:
+            var.vtype = self.gp.GRB.BINARY
         log = streams[0].getvalue()
         opt_value = None
         if not self.is_infeasible():

@@ -148,6 +144,7 @@ class GurobiSolver(InternalSolver):
             "LP log": log,
         }
 
+    @overrides
     def solve(
         self,
         tee: bool = False,

@@ -218,33 +215,30 @@ class GurobiSolver(InternalSolver):
         }
         return stats
 
+    @overrides
     def get_solution(self) -> Optional[Solution]:
         self._raise_if_callback()
         assert self.model is not None
         if self.model.solCount == 0:
             return None
-        solution: Solution = {}
-        for (varname, vardict) in self._all_vars.items():
-            solution[varname] = {}
-            for (idx, var) in vardict.items():
-                solution[varname][idx] = var.x
-        return solution
+        return {v.varName: v.x for v in self.model.getVars()}
 
+    @overrides
+    def get_variable_names(self) -> List[VariableName]:
+        self._raise_if_callback()
+        assert self.model is not None
+        return [v.varName for v in self.model.getVars()]
+
+    @overrides
     def set_warm_start(self, solution: Solution) -> None:
         self._raise_if_callback()
         self._clear_warm_start()
-        count_fixed, count_total = 0, 0
-        for (varname, vardict) in solution.items():
-            for (idx, value) in vardict.items():
-                count_total += 1
-                if value is not None:
-                    count_fixed += 1
-                    self._all_vars[varname][idx].start = value
-        logger.info(
-            "Setting start values for %d variables (out of %d)"
-            % (count_fixed, count_total)
-        )
+        for (var_name, value) in solution.items():
+            var = self.varname_to_var[var_name]
+            if value is not None:
+                var.start = value
 
+    @overrides
     def get_sense(self) -> str:
         assert self.model is not None
         if self.model.modelSense == 1:

@@ -252,18 +246,12 @@ class GurobiSolver(InternalSolver):
         else:
             return "max"
 
-    def get_value(
-        self,
-        var_name: str,
-        index: VarIndex,
-    ) -> Optional[float]:
-        var = self._all_vars[var_name][index]
-        return self._get_value(var)
-
+    @overrides
     def is_infeasible(self) -> bool:
         assert self.model is not None
         return self.model.status in [self.gp.GRB.INFEASIBLE, self.gp.GRB.INF_OR_UNBD]
 
+    @overrides
     def get_dual(self, cid: str) -> float:
         assert self.model is not None
         c = self.model.getConstrByName(cid)

@@ -288,15 +276,7 @@ class GurobiSolver(InternalSolver):
                 "get_value cannot be called from cb_where=%s" % self.cb_where
             )
 
-    def get_empty_solution(self) -> Solution:
-        self._raise_if_callback()
-        solution: Solution = {}
-        for (varname, vardict) in self._all_vars.items():
-            solution[varname] = {}
-            for (idx, var) in vardict.items():
-                solution[varname][idx] = None
-        return solution
-
+    @overrides
     def add_constraint(
         self,
         constraint: Any,

@@ -321,36 +301,39 @@ class GurobiSolver(InternalSolver):
         else:
             self.model.addConstr(constraint, name=name)
 
+    @overrides
     def add_cut(self, cobj: Any) -> None:
         assert self.model is not None
         assert self.cb_where == self.gp.GRB.Callback.MIPNODE
         self.model.cbCut(cobj)
 
     def _clear_warm_start(self) -> None:
-        for (varname, vardict) in self._all_vars.items():
-            for (idx, var) in vardict.items():
-                var.start = self.gp.GRB.UNDEFINED
+        for var in self.varname_to_var.values():
+            var.start = self.gp.GRB.UNDEFINED
 
+    @overrides
     def fix(self, solution: Solution) -> None:
         self._raise_if_callback()
-        for (varname, vardict) in solution.items():
-            for (idx, value) in vardict.items():
-                if value is None:
-                    continue
-                var = self._all_vars[varname][idx]
-                var.vtype = self.gp.GRB.CONTINUOUS
-                var.lb = value
-                var.ub = value
+        for (varname, value) in solution.items():
+            if value is None:
+                continue
+            var = self.varname_to_var[varname]
+            var.vtype = self.gp.GRB.CONTINUOUS
+            var.lb = value
+            var.ub = value
 
+    @overrides
     def get_constraint_ids(self):
         self._raise_if_callback()
         self.model.update()
         return [c.ConstrName for c in self.model.getConstrs()]
 
+    @overrides
     def get_constraint_rhs(self, cid: str) -> float:
         assert self.model is not None
         return self.model.getConstrByName(cid).rhs
 
+    @overrides
     def get_constraint_lhs(self, cid: str) -> Dict[str, float]:
         assert self.model is not None
         constr = self.model.getConstrByName(cid)

@@ -360,6 +343,7 @@ class GurobiSolver(InternalSolver):
             lhs[expr.getVar(i).varName] = expr.getCoeff(i)
         return lhs
 
+    @overrides
     def extract_constraint(self, cid):
         self._raise_if_callback()
         constr = self.model.getConstrByName(cid)

@@ -367,6 +351,7 @@ class GurobiSolver(InternalSolver):
         self.model.remove(constr)
         return cobj
 
+    @overrides
     def is_constraint_satisfied(self, cobj, tol=1e-6):
         lhs, sense, rhs, name = cobj
         if self.cb_where is not None:

@@ -386,21 +371,25 @@ class GurobiSolver(InternalSolver):
         else:
             raise Exception("Unknown sense: %s" % sense)
 
+    @overrides
     def get_inequality_slacks(self) -> Dict[str, float]:
         assert self.model is not None
         ineqs = [c for c in self.model.getConstrs() if c.sense != "="]
         return {c.ConstrName: c.Slack for c in ineqs}
 
+    @overrides
     def set_constraint_sense(self, cid: str, sense: str) -> None:
         assert self.model is not None
         c = self.model.getConstrByName(cid)
         c.Sense = sense
 
+    @overrides
     def get_constraint_sense(self, cid: str) -> str:
         assert self.model is not None
         c = self.model.getConstrByName(cid)
         return c.Sense
 
+    @overrides
     def relax(self) -> None:
         assert self.model is not None
         self.model.update()

@@ -438,6 +427,4 @@ class GurobiSolver(InternalSolver):
         self.lazy_cb_where = state["lazy_cb_where"]
         self.instance = None
         self.model = None
-        self._all_vars = None
-        self._bin_vars = None
         self.cb_where = None
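After the GurobiSolver changes above, the solver wrappers exchange only flat structures: get_variable_names() returns a list of names, get_solution() a name-to-value dictionary, and _update_vars() now rejects duplicate names. A rough, solver-free sketch of those shapes, with sample values only:

# Illustrative shapes of the data returned by the new wrapper methods.
variable_names = ["x[0]", "x[1]", "x[2]"]
solution = {"x[0]": 1.0, "x[1]": 0.0, "x[2]": 1.0}

# The uniqueness requirement enforced in _update_vars(), roughly:
seen = set()
for name in variable_names:
    assert name not in seen, f"Duplicated variable name detected: {name}."
    seen.add(name)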
@@ -6,23 +6,25 @@ import logging
 from abc import ABC, abstractmethod
 from typing import Any, Dict, List, Optional
 
+from overrides import EnforceOverrides
+
 from miplearn.instance.base import Instance
 from miplearn.types import (
     LPSolveStats,
     IterationCallback,
     LazyCallback,
     MIPSolveStats,
-    VarIndex,
-    Solution,
     BranchPriorities,
     Constraint,
     UserCutCallback,
+    Solution,
+    VariableName,
 )
 
 logger = logging.getLogger(__name__)
 
 
-class InternalSolver(ABC):
+class InternalSolver(ABC, EnforceOverrides):
     """
     Abstract class representing the MIP solver used internally by LearningSolver.
     """

@@ -90,9 +92,6 @@ class InternalSolver(ABC):
         If called after `solve`, returns the best primal solution found during
         the search. If called after `solve_lp`, returns the optimal solution
         to the LP relaxation. If no primal solution is available, return None.
-
-        The solution is a dictionary `sol`, where the optimal value of `var[idx]`
-        is given by `sol[var][idx]`.
         """
         pass
 

@@ -235,14 +234,6 @@ class InternalSolver(ABC):
         """
         pass
 
-    @abstractmethod
-    def get_value(self, var_name: str, index: VarIndex) -> Optional[float]:
-        """
-        Returns the value of a given variable in the current solution. If no
-        solution is available, returns None.
-        """
-        pass
-
     @abstractmethod
     def relax(self) -> None:
         """

@@ -286,11 +277,10 @@ class InternalSolver(ABC):
         pass
 
     @abstractmethod
-    def get_empty_solution(self) -> Dict[str, Dict[VarIndex, Optional[float]]]:
+    def get_variable_names(self) -> List[VariableName]:
         """
-        Returns a dictionary with the same shape as the one produced by
-        `get_solution`, but with all values set to None. This method is
-        used by the ML components to query what variables are there in
-        the model before a solution is available.
+        Returns a list containing the names of all variables in the model. This
+        method is used by the ML components to query what variables are there in the
+        model before a solution is available.
         """
         pass
@@ -9,6 +9,7 @@ from io import StringIO
 from typing import Any, List, Dict, Optional
 
 import pyomo
+from overrides import overrides
 from pyomo import environ as pe
 from pyomo.core import Var, Constraint
 from pyomo.opt import TerminationCondition

@@ -23,7 +24,12 @@ from miplearn.solvers.internal import (
     LazyCallback,
     MIPSolveStats,
 )
-from miplearn.types import VarIndex, SolverParams, Solution, UserCutCallback
+from miplearn.types import (
+    SolverParams,
+    UserCutCallback,
+    Solution,
+    VariableName,
+)
 
 logger = logging.getLogger(__name__)
 

@@ -52,6 +58,7 @@ class BasePyomoSolver(InternalSolver):
         for (key, value) in params.items():
             self._pyomo_solver.options[key] = value
 
+    @overrides
     def solve_lp(
         self,
         tee: bool = False,

@@ -76,6 +83,7 @@ class BasePyomoSolver(InternalSolver):
             var.domain = pyomo.core.base.set_types.Binary
             self._pyomo_solver.update_var(var)
 
+    @overrides
     def solve(
         self,
         tee: bool = False,

@@ -123,36 +131,44 @@ class BasePyomoSolver(InternalSolver):
         }
         return stats
 
+    @overrides
     def get_solution(self) -> Optional[Solution]:
         assert self.model is not None
         if self.is_infeasible():
             return None
         solution: Solution = {}
         for var in self.model.component_objects(Var):
-            solution[str(var)] = {}
             for index in var:
                 if var[index].fixed:
                     continue
-                solution[str(var)][index] = var[index].value
+                solution[f"{var}[{index}]"] = var[index].value
         return solution
 
+    @overrides
+    def get_variable_names(self) -> List[VariableName]:
+        assert self.model is not None
+        variables: List[VariableName] = []
+        for var in self.model.component_objects(Var):
+            for index in var:
+                if var[index].fixed:
+                    continue
+                variables += [f"{var}[{index}]"]
+        return variables
+
+    @overrides
     def set_warm_start(self, solution: Solution) -> None:
         self._clear_warm_start()
-        count_total, count_fixed = 0, 0
-        for var_name in solution:
+        count_fixed = 0
+        for (var_name, value) in solution.items():
+            if value is None:
+                continue
             var = self._varname_to_var[var_name]
-            for index in solution[var_name]:
-                count_total += 1
-                var[index].value = solution[var_name][index]
-                if solution[var_name][index] is not None:
-                    count_fixed += 1
+            var.value = solution[var_name]
+            count_fixed += 1
         if count_fixed > 0:
             self._is_warm_start_available = True
-        logger.info(
-            "Setting start values for %d variables (out of %d)"
-            % (count_fixed, count_total)
-        )
 
+    @overrides
     def set_instance(
         self,
         instance: Instance,

@@ -168,25 +184,6 @@ class BasePyomoSolver(InternalSolver):
         self._update_vars()
         self._update_constrs()
 
-    def get_value(self, var_name: str, index: VarIndex) -> Optional[float]:
-        if self.is_infeasible():
-            return None
-        else:
-            var = self._varname_to_var[var_name]
-            return var[index].value
-
-    def get_empty_solution(self) -> Solution:
-        assert self.model is not None
-        solution: Solution = {}
-        for var in self.model.component_objects(Var):
-            svar = str(var)
-            solution[svar] = {}
-            for index in var:
-                if var[index].fixed:
-                    continue
-                solution[svar][index] = None
-        return solution
-
     def _clear_warm_start(self) -> None:
         for var in self._all_vars:
             if not var.fixed:

@@ -204,8 +201,8 @@ class BasePyomoSolver(InternalSolver):
         self._bin_vars = []
         self._varname_to_var = {}
         for var in self.model.component_objects(Var):
-            self._varname_to_var[var.name] = var
             for idx in var:
+                self._varname_to_var[f"{var.name}[{idx}]"] = var[idx]
                 self._all_vars += [var[idx]]
                 if var[idx].domain == pyomo.core.base.set_types.Binary:
                     self._bin_vars += [var[idx]]

@@ -220,25 +217,16 @@ class BasePyomoSolver(InternalSolver):
         else:
             self._cname_to_constr[constr.name] = constr
 
-    def fix(self, solution):
-        count_total, count_fixed = 0, 0
-        for varname in solution:
-            for index in solution[varname]:
-                var = self._varname_to_var[varname]
-                count_total += 1
-                if solution[varname][index] is None:
-                    continue
-                count_fixed += 1
-                var[index].fix(solution[varname][index])
-                self._pyomo_solver.update_var(var[index])
-        logger.info(
-            "Fixing values for %d variables (out of %d)"
-            % (
-                count_fixed,
-                count_total,
-            )
-        )
+    @overrides
+    def fix(self, solution: Solution) -> None:
+        for (varname, value) in solution.items():
+            if value is None:
+                continue
+            var = self._varname_to_var[varname]
+            var.fix(value)
+            self._pyomo_solver.update_var(var)
 
+    @overrides
     def add_constraint(self, constraint):
         self._pyomo_solver.add_constraint(constraint)
         self._update_constrs()

@@ -271,6 +259,7 @@ class BasePyomoSolver(InternalSolver):
             return None
         return int(value)
 
+    @overrides
     def get_constraint_ids(self):
         return list(self._cname_to_constr.keys())
 

@@ -280,6 +269,7 @@ class BasePyomoSolver(InternalSolver):
     def _get_node_count_regexp(self) -> Optional[str]:
         return None
 
+    @overrides
     def relax(self) -> None:
        for var in self._bin_vars:
            lb, ub = var.bounds

@@ -288,6 +278,7 @@ class BasePyomoSolver(InternalSolver):
             var.domain = pyomo.core.base.set_types.Reals
             self._pyomo_solver.update_var(var)
 
+    @overrides
     def get_inequality_slacks(self) -> Dict[str, float]:
         result: Dict[str, float] = {}
         for (cname, cobj) in self._cname_to_constr.items():

@@ -296,6 +287,7 @@ class BasePyomoSolver(InternalSolver):
             result[cname] = cobj.slack()
         return result
 
+    @overrides
     def get_constraint_sense(self, cid: str) -> str:
         cobj = self._cname_to_constr[cid]
         has_ub = cobj.has_ub()

@@ -310,6 +302,7 @@ class BasePyomoSolver(InternalSolver):
         else:
             return "="
 
+    @overrides
     def get_constraint_rhs(self, cid: str) -> float:
         cobj = self._cname_to_constr[cid]
         if cobj.has_ub:

@@ -317,23 +310,30 @@
         else:
             return cobj.lower()
 
+    @overrides
     def get_constraint_lhs(self, cid: str) -> Dict[str, float]:
|
def get_constraint_lhs(self, cid: str) -> Dict[str, float]:
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
|
@overrides
|
||||||
def set_constraint_sense(self, cid: str, sense: str) -> None:
|
def set_constraint_sense(self, cid: str, sense: str) -> None:
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
@overrides
|
||||||
def extract_constraint(self, cid: str) -> Constraint:
|
def extract_constraint(self, cid: str) -> Constraint:
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
@overrides
|
||||||
def is_constraint_satisfied(self, cobj: Constraint, tol: float = 1e-6) -> bool:
|
def is_constraint_satisfied(self, cobj: Constraint, tol: float = 1e-6) -> bool:
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
@overrides
|
||||||
def is_infeasible(self) -> bool:
|
def is_infeasible(self) -> bool:
|
||||||
return self._termination_condition == TerminationCondition.infeasible
|
return self._termination_condition == TerminationCondition.infeasible
|
||||||
|
|
||||||
|
@overrides
|
||||||
def get_dual(self, cid):
|
def get_dual(self, cid):
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
@overrides
|
||||||
def get_sense(self) -> str:
|
def get_sense(self) -> str:
|
||||||
return self._obj_sense
|
return self._obj_sense
|
||||||
|
|||||||
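Note on the new lookup scheme: `_varname_to_var` is now keyed by the fully qualified Pyomo name (for example `"x[0]"`), and `set_warm_start`/`fix` consume a flat, name-keyed solution dictionary. The sketch below is only an illustration of that convention, not code from this commit; the small model and variable names are invented for the example.

# Illustration only (assumes Pyomo is installed); not part of this commit.
import pyomo.environ as pe

model = pe.ConcreteModel()
model.x = pe.Var([0, 1], domain=pe.Binary)

# Flat mapping, mirroring the new Solution type; None means "leave free".
solution = {"x[0]": 1.0, "x[1]": None}

# Same style of lookup table that _update_vars now builds.
varname_to_var = {
    f"{var.name}[{idx}]": var[idx]
    for var in model.component_objects(pe.Var)
    for idx in var
}

for name, value in solution.items():
    if value is None:
        continue
    varname_to_var[name].value = value  # same idea as set_warm_start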
@@ -3,6 +3,7 @@
 # Released under the modified BSD license. See COPYING.md for more details.
 from typing import Optional
 
+from overrides import overrides
 from pyomo import environ as pe
 from scipy.stats import randint
 
@@ -36,8 +37,10 @@ class CplexPyomoSolver(BasePyomoSolver):
             params=params,
         )
 
+    @overrides
     def _get_warm_start_regexp(self):
         return "MIP start .* with objective ([0-9.e+-]*)\\."
 
+    @overrides
     def _get_node_count_regexp(self):
         return "^[ *] *([0-9]+)"
@@ -5,6 +5,7 @@
 import logging
 from typing import Optional
 
+from overrides import overrides
 from pyomo import environ as pe
 from scipy.stats import randint
 
@@ -38,22 +39,25 @@ class GurobiPyomoSolver(BasePyomoSolver):
             params=params,
         )
 
+    @overrides
     def _extract_node_count(self, log: str) -> int:
         return max(1, int(self._pyomo_solver._solver_model.getAttr("NodeCount")))
 
+    @overrides
     def _get_warm_start_regexp(self) -> str:
         return "MIP start with objective ([0-9.e+-]*)"
 
+    @overrides
     def _get_node_count_regexp(self) -> Optional[str]:
         return None
 
+    @overrides
     def set_branching_priorities(self, priorities: BranchPriorities) -> None:
         from gurobipy import GRB
 
-        for varname in priorities.keys():
+        for (varname, priority) in priorities.items():
+            if priority is None:
+                continue
             var = self._varname_to_var[varname]
-            for (index, priority) in priorities[varname].items():
-                if priority is None:
-                    continue
-                gvar = self._pyomo_solver._pyomo_var_to_solver_var_map[var[index]]
-                gvar.setAttr(GRB.Attr.BranchPriority, int(round(priority)))
+            gvar = self._pyomo_solver._pyomo_var_to_solver_var_map[var]
+            gvar.setAttr(GRB.Attr.BranchPriority, int(round(priority)))
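Branching priorities follow the same flattening: `BranchPriorities` becomes a plain mapping from qualified variable name to an optional priority, and `None` entries are skipped. A hypothetical call, where the `solver` object and the values are invented for illustration only:

# Illustration only; assumes `solver` is an already-configured GurobiPyomoSolver.
priorities = {"x[0]": 10.0, "x[1]": None, "x[2]": 1.0}
solver.set_branching_priorities(priorities)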
@@ -2,7 +2,7 @@
 # Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
 # Released under the modified BSD license. See COPYING.md for more details.
 
-from typing import Optional, Dict, Callable, Any, Union, Tuple, TYPE_CHECKING
+from typing import Optional, Dict, Callable, Any, Union, Tuple, TYPE_CHECKING, Hashable
 
 from mypy_extensions import TypedDict
 
@@ -10,9 +10,15 @@ if TYPE_CHECKING:
     # noinspection PyUnresolvedReferences
     from miplearn.solvers.learning import InternalSolver
 
-VarIndex = Union[str, int, Tuple[Union[str, int]]]
-
-Solution = Dict[str, Dict[VarIndex, Optional[float]]]
+BranchPriorities = Dict[str, Optional[float]]
+Category = Hashable
+Constraint = Any
+IterationCallback = Callable[[], bool]
+LazyCallback = Callable[[Any, Any], None]
+SolverParams = Dict[str, Any]
+UserCutCallback = Callable[["InternalSolver", Any], None]
+VariableName = str
+Solution = Dict[VariableName, Optional[float]]
 
 LPSolveStats = TypedDict(
     "LPSolveStats",
@@ -65,17 +71,3 @@ LearningSolveStats = TypedDict(
     },
     total=False,
 )
-
-IterationCallback = Callable[[], bool]
-
-LazyCallback = Callable[[Any, Any], None]
-
-UserCutCallback = Callable[["InternalSolver", Any], None]
-
-SolverParams = Dict[str, Any]
-
-BranchPriorities = Solution
-
-
-class Constraint:
-    pass
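The net effect of the type changes: a solution is now a single flat dictionary keyed by qualified variable name instead of a nested name-to-index-to-value mapping. A minimal before/after sketch, with values invented purely for illustration:

# Old shape (pre-commit): nested by variable name, then index.
old_solution = {"x": {0: 1.0, 1: None}}

# New shape: flat, keyed by qualified name; values are Optional[float].
new_solution = {"x[0]": 1.0, "x[1]": None}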
@@ -17,3 +17,4 @@ pre-commit~=2.9
 mypy==0.790
 pdoc3==0.7.*
 decorator~=4.4
+overrides
@@ -10,6 +10,7 @@ import networkx as nx
 import pytest
 from gurobipy import GRB
 from networkx import Graph
+from overrides import overrides
 
 from miplearn.components.dynamic_user_cuts import UserCutsComponent
 from miplearn.instance.base import Instance
@@ -24,6 +25,7 @@ class GurobiStableSetProblem(Instance):
         super().__init__()
         self.graph: Graph = graph
 
+    @overrides
     def to_model(self) -> Any:
         model = gp.Model()
         x = [model.addVar(vtype=GRB.BINARY) for _ in range(len(self.graph.nodes))]
@@ -32,9 +34,11 @@ class GurobiStableSetProblem(Instance):
             model.addConstr(x[e[0]] + x[e[1]] <= 1)
         return model
 
+    @overrides
     def has_user_cuts(self) -> bool:
         return True
 
+    @overrides
     def find_violated_user_cuts(self, model):
         assert isinstance(model, gp.Model)
         vals = model.cbGetNodeRel(model.getVars())
@@ -44,6 +48,7 @@ class GurobiStableSetProblem(Instance):
                 violations += [frozenset(clique)]
         return violations
 
+    @overrides
     def build_user_cut(self, model: Any, cid: Hashable) -> Any:
         assert isinstance(cid, FrozenSet)
         x = model.getVars()
|||||||
@@ -20,43 +20,37 @@ from miplearn.solvers.learning import LearningSolver
|
|||||||
def test_xy() -> None:
|
def test_xy() -> None:
|
||||||
features = Features(
|
features = Features(
|
||||||
variables={
|
variables={
|
||||||
"x": {
|
"x[0]": VariableFeatures(
|
||||||
0: VariableFeatures(
|
category="default",
|
||||||
category="default",
|
user_features=[0.0, 0.0],
|
||||||
user_features=[0.0, 0.0],
|
),
|
||||||
),
|
"x[1]": VariableFeatures(
|
||||||
1: VariableFeatures(
|
category=None,
|
||||||
category=None,
|
),
|
||||||
),
|
"x[2]": VariableFeatures(
|
||||||
2: VariableFeatures(
|
category="default",
|
||||||
category="default",
|
user_features=[1.0, 0.0],
|
||||||
user_features=[1.0, 0.0],
|
),
|
||||||
),
|
"x[3]": VariableFeatures(
|
||||||
3: VariableFeatures(
|
category="default",
|
||||||
category="default",
|
user_features=[1.0, 1.0],
|
||||||
user_features=[1.0, 1.0],
|
),
|
||||||
),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
instance = Mock(spec=Instance)
|
instance = Mock(spec=Instance)
|
||||||
instance.features = features
|
instance.features = features
|
||||||
sample = TrainingSample(
|
sample = TrainingSample(
|
||||||
solution={
|
solution={
|
||||||
"x": {
|
"x[0]": 0.0,
|
||||||
0: 0.0,
|
"x[1]": 1.0,
|
||||||
1: 1.0,
|
"x[2]": 1.0,
|
||||||
2: 1.0,
|
"x[3]": 0.0,
|
||||||
3: 0.0,
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
lp_solution={
|
lp_solution={
|
||||||
"x": {
|
"x[0]": 0.1,
|
||||||
0: 0.1,
|
"x[1]": 0.1,
|
||||||
1: 0.1,
|
"x[2]": 0.1,
|
||||||
2: 0.1,
|
"x[3]": 0.1,
|
||||||
3: 0.1,
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
x_expected = {
|
x_expected = {
|
||||||
@@ -73,7 +67,7 @@ def test_xy() -> None:
|
|||||||
[True, False],
|
[True, False],
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
xy = PrimalSolutionComponent.sample_xy(instance, sample)
|
xy = PrimalSolutionComponent().sample_xy(instance, sample)
|
||||||
assert xy is not None
|
assert xy is not None
|
||||||
x_actual, y_actual = xy
|
x_actual, y_actual = xy
|
||||||
assert x_actual == x_expected
|
assert x_actual == x_expected
|
||||||
@@ -83,35 +77,31 @@ def test_xy() -> None:
|
|||||||
def test_xy_without_lp_solution() -> None:
|
def test_xy_without_lp_solution() -> None:
|
||||||
features = Features(
|
features = Features(
|
||||||
variables={
|
variables={
|
||||||
"x": {
|
"x[0]": VariableFeatures(
|
||||||
0: VariableFeatures(
|
category="default",
|
||||||
category="default",
|
user_features=[0.0, 0.0],
|
||||||
user_features=[0.0, 0.0],
|
),
|
||||||
),
|
"x[1]": VariableFeatures(
|
||||||
1: VariableFeatures(
|
category=None,
|
||||||
category=None,
|
),
|
||||||
),
|
"x[2]": VariableFeatures(
|
||||||
2: VariableFeatures(
|
category="default",
|
||||||
category="default",
|
user_features=[1.0, 0.0],
|
||||||
user_features=[1.0, 0.0],
|
),
|
||||||
),
|
"x[3]": VariableFeatures(
|
||||||
3: VariableFeatures(
|
category="default",
|
||||||
category="default",
|
user_features=[1.0, 1.0],
|
||||||
user_features=[1.0, 1.0],
|
),
|
||||||
),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
instance = Mock(spec=Instance)
|
instance = Mock(spec=Instance)
|
||||||
instance.features = features
|
instance.features = features
|
||||||
sample = TrainingSample(
|
sample = TrainingSample(
|
||||||
solution={
|
solution={
|
||||||
"x": {
|
"x[0]": 0.0,
|
||||||
0: 0.0,
|
"x[1]": 1.0,
|
||||||
1: 1.0,
|
"x[2]": 1.0,
|
||||||
2: 1.0,
|
"x[3]": 0.0,
|
||||||
3: 0.0,
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
x_expected = {
|
x_expected = {
|
||||||
@@ -128,7 +118,7 @@ def test_xy_without_lp_solution() -> None:
|
|||||||
[True, False],
|
[True, False],
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
xy = PrimalSolutionComponent.sample_xy(instance, sample)
|
xy = PrimalSolutionComponent().sample_xy(instance, sample)
|
||||||
assert xy is not None
|
assert xy is not None
|
||||||
x_actual, y_actual = xy
|
x_actual, y_actual = xy
|
||||||
assert x_actual == x_expected
|
assert x_actual == x_expected
|
||||||
@@ -150,48 +140,42 @@ def test_predict() -> None:
|
|||||||
thr.predict = Mock(return_value=[0.75, 0.75])
|
thr.predict = Mock(return_value=[0.75, 0.75])
|
||||||
features = Features(
|
features = Features(
|
||||||
variables={
|
variables={
|
||||||
"x": {
|
"x[0]": VariableFeatures(
|
||||||
0: VariableFeatures(
|
category="default",
|
||||||
category="default",
|
user_features=[0.0, 0.0],
|
||||||
user_features=[0.0, 0.0],
|
),
|
||||||
),
|
"x[1]": VariableFeatures(
|
||||||
1: VariableFeatures(
|
category="default",
|
||||||
category="default",
|
user_features=[0.0, 2.0],
|
||||||
user_features=[0.0, 2.0],
|
),
|
||||||
),
|
"x[2]": VariableFeatures(
|
||||||
2: VariableFeatures(
|
category="default",
|
||||||
category="default",
|
user_features=[2.0, 0.0],
|
||||||
user_features=[2.0, 0.0],
|
),
|
||||||
),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
instance = Mock(spec=Instance)
|
instance = Mock(spec=Instance)
|
||||||
instance.features = features
|
instance.features = features
|
||||||
sample = TrainingSample(
|
sample = TrainingSample(
|
||||||
lp_solution={
|
lp_solution={
|
||||||
"x": {
|
"x[0]": 0.1,
|
||||||
0: 0.1,
|
"x[1]": 0.5,
|
||||||
1: 0.5,
|
"x[2]": 0.9,
|
||||||
2: 0.9,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
x, _ = PrimalSolutionComponent.sample_xy(instance, sample)
|
x, _ = PrimalSolutionComponent().sample_xy(instance, sample)
|
||||||
comp = PrimalSolutionComponent()
|
comp = PrimalSolutionComponent()
|
||||||
comp.classifiers = {"default": clf}
|
comp.classifiers = {"default": clf}
|
||||||
comp.thresholds = {"default": thr}
|
comp.thresholds = {"default": thr}
|
||||||
solution_actual = comp.sample_predict(instance, sample)
|
pred = comp.sample_predict(instance, sample)
|
||||||
clf.predict_proba.assert_called_once()
|
clf.predict_proba.assert_called_once()
|
||||||
assert_array_equal(x["default"], clf.predict_proba.call_args[0][0])
|
assert_array_equal(x["default"], clf.predict_proba.call_args[0][0])
|
||||||
thr.predict.assert_called_once()
|
thr.predict.assert_called_once()
|
||||||
assert_array_equal(x["default"], thr.predict.call_args[0][0])
|
assert_array_equal(x["default"], thr.predict.call_args[0][0])
|
||||||
assert solution_actual == {
|
assert pred == {
|
||||||
"x": {
|
"x[0]": 0.0,
|
||||||
0: 0.0,
|
"x[1]": None,
|
||||||
1: None,
|
"x[2]": 1.0,
|
||||||
2: 1.0,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -242,36 +226,30 @@ def test_usage():
|
|||||||
def test_evaluate() -> None:
|
def test_evaluate() -> None:
|
||||||
comp = PrimalSolutionComponent()
|
comp = PrimalSolutionComponent()
|
||||||
comp.sample_predict = lambda _, __: { # type: ignore
|
comp.sample_predict = lambda _, __: { # type: ignore
|
||||||
"x": {
|
"x[0]": 1.0,
|
||||||
0: 1.0,
|
"x[1]": 0.0,
|
||||||
1: 0.0,
|
"x[2]": 0.0,
|
||||||
2: 0.0,
|
"x[3]": None,
|
||||||
3: None,
|
"x[4]": 1.0,
|
||||||
4: 1.0,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
features: Features = Features(
|
features: Features = Features(
|
||||||
variables={
|
variables={
|
||||||
"x": {
|
"x[0]": VariableFeatures(),
|
||||||
0: VariableFeatures(),
|
"x[1]": VariableFeatures(),
|
||||||
1: VariableFeatures(),
|
"x[2]": VariableFeatures(),
|
||||||
2: VariableFeatures(),
|
"x[3]": VariableFeatures(),
|
||||||
3: VariableFeatures(),
|
"x[4]": VariableFeatures(),
|
||||||
4: VariableFeatures(),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
instance = Mock(spec=Instance)
|
instance = Mock(spec=Instance)
|
||||||
instance.features = features
|
instance.features = features
|
||||||
sample: TrainingSample = TrainingSample(
|
sample: TrainingSample = TrainingSample(
|
||||||
solution={
|
solution={
|
||||||
"x": {
|
"x[0]": 1.0,
|
||||||
0: 1.0,
|
"x[1]": 1.0,
|
||||||
1: 1.0,
|
"x[2]": 0.0,
|
||||||
2: 0.0,
|
"x[3]": 1.0,
|
||||||
3: 1.0,
|
"x[4]": 1.0,
|
||||||
4: 1.0,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
ev = comp.sample_evaluate(instance, sample)
|
ev = comp.sample_evaluate(instance, sample)
|
||||||
|
|||||||
tests/fixtures/infeasible.py (vendored)
@@ -4,6 +4,7 @@
 
 from typing import Any
 
+from overrides import overrides
 from pyomo import environ as pe
 
 from miplearn.instance.base import Instance
@@ -13,6 +14,7 @@ from tests.solvers import _is_subclass_or_instance
 
 
 class InfeasiblePyomoInstance(Instance):
+    @overrides
     def to_model(self) -> pe.ConcreteModel:
         model = pe.ConcreteModel()
         model.x = pe.Var([0], domain=pe.Binary)
@@ -22,6 +24,7 @@ class InfeasiblePyomoInstance(Instance):
 
 
 class InfeasibleGurobiInstance(Instance):
+    @overrides
     def to_model(self) -> Any:
         import gurobipy as gp
         from gurobipy import GRB
@@ -39,13 +39,13 @@ def test_instance():
     instance = TravelingSalesmanInstance(n_cities, distances)
     solver = LearningSolver()
     stats = solver.solve(instance)
-    x = instance.training_data[0].solution["x"]
-    assert x[0, 1] == 1.0
-    assert x[0, 2] == 0.0
-    assert x[0, 3] == 1.0
-    assert x[1, 2] == 1.0
-    assert x[1, 3] == 0.0
-    assert x[2, 3] == 1.0
+    solution = instance.training_data[0].solution
+    assert solution["x[(0, 1)]"] == 1.0
+    assert solution["x[(0, 2)]"] == 0.0
+    assert solution["x[(0, 3)]"] == 1.0
+    assert solution["x[(1, 2)]"] == 1.0
+    assert solution["x[(1, 3)]"] == 0.0
+    assert solution["x[(2, 3)]"] == 1.0
     assert stats["Lower bound"] == 4.0
     assert stats["Upper bound"] == 4.0
 
@@ -67,12 +67,12 @@ def test_subtour():
     solver = LearningSolver()
     solver.solve(instance)
     assert len(instance.training_data[0].lazy_enforced) > 0
-    x = instance.training_data[0].solution["x"]
-    assert x[0, 1] == 1.0
-    assert x[0, 4] == 1.0
-    assert x[1, 2] == 1.0
-    assert x[2, 3] == 1.0
-    assert x[3, 5] == 1.0
-    assert x[4, 5] == 1.0
+    solution = instance.training_data[0].solution
+    assert solution["x[(0, 1)]"] == 1.0
+    assert solution["x[(0, 4)]"] == 1.0
+    assert solution["x[(1, 2)]"] == 1.0
+    assert solution["x[(2, 3)]"] == 1.0
+    assert solution["x[(3, 5)]"] == 1.0
+    assert solution["x[(4, 5)]"] == 1.0
     solver.fit([instance])
     solver.solve(instance)
@@ -38,45 +38,18 @@ def test_internal_solver_warm_starts():
     model = instance.to_model()
     solver = solver_class()
     solver.set_instance(instance, model)
-    solver.set_warm_start(
-        {
-            "x": {
-                0: 1.0,
-                1: 0.0,
-                2: 0.0,
-                3: 1.0,
-            }
-        }
-    )
+    solver.set_warm_start({"x[0]": 1.0, "x[1]": 0.0, "x[2]": 0.0, "x[3]": 1.0})
     stats = solver.solve(tee=True)
     if stats["Warm start value"] is not None:
         assert stats["Warm start value"] == 725.0
     else:
         warn(f"{solver_class.__name__} should set warm start value")
 
-    solver.set_warm_start(
-        {
-            "x": {
-                0: 1.0,
-                1: 1.0,
-                2: 1.0,
-                3: 1.0,
-            }
-        }
-    )
+    solver.set_warm_start({"x[0]": 1.0, "x[1]": 1.0, "x[2]": 1.0, "x[3]": 1.0})
     stats = solver.solve(tee=True)
     assert stats["Warm start value"] is None
 
-    solver.fix(
-        {
-            "x": {
-                0: 1.0,
-                1: 0.0,
-                2: 0.0,
-                3: 1.0,
-            }
-        }
-    )
+    solver.fix({"x[0]": 1.0, "x[1]": 0.0, "x[2]": 0.0, "x[3]": 1.0})
     stats = solver.solve(tee=True)
     assert stats["Lower bound"] == 725.0
     assert stats["Upper bound"] == 725.0
@@ -91,16 +64,18 @@ def test_internal_solver():
     solver = solver_class()
     solver.set_instance(instance, model)
 
+    assert solver.get_variable_names() == ["x[0]", "x[1]", "x[2]", "x[3]"]
+
     stats = solver.solve_lp()
     assert not solver.is_infeasible()
     assert round(stats["LP value"], 3) == 1287.923
     assert len(stats["LP log"]) > 100
 
     solution = solver.get_solution()
-    assert round(solution["x"][0], 3) == 1.000
-    assert round(solution["x"][1], 3) == 0.923
-    assert round(solution["x"][2], 3) == 1.000
-    assert round(solution["x"][3], 3) == 0.000
+    assert round(solution["x[0]"], 3) == 1.000
+    assert round(solution["x[1]"], 3) == 0.923
+    assert round(solution["x[2]"], 3) == 1.000
+    assert round(solution["x[3]"], 3) == 0.000
 
     stats = solver.solve(tee=True)
     assert not solver.is_infeasible()
@@ -111,10 +86,10 @@ def test_internal_solver():
     assert isinstance(stats["Wallclock time"], float)
 
     solution = solver.get_solution()
-    assert solution["x"][0] == 1.0
-    assert solution["x"][1] == 0.0
-    assert solution["x"][2] == 1.0
-    assert solution["x"][3] == 1.0
+    assert solution["x[0]"] == 1.0
+    assert solution["x[1]"] == 0.0
+    assert solution["x[2]"] == 1.0
+    assert solution["x[3]"] == 1.0
 
     # Add a brand new constraint
     if isinstance(solver, BasePyomoSolver):
@@ -199,7 +174,6 @@ def test_infeasible_instance():
     stats = solver.solve_lp()
     assert solver.get_solution() is None
     assert stats["LP value"] is None
-    assert solver.get_value("x", 0) is None
 
 
 def test_iteration_cb():
@@ -16,7 +16,6 @@ def test_lazy_cb():
     model = instance.to_model()
 
     def lazy_cb(cb_solver, cb_model):
-        logger.info("x[0] = %.f" % cb_solver.get_value("x", 0))
         cobj = (cb_model.getVarByName("x[0]") * 1.0, "<", 0.0, "cut")
         if not cb_solver.is_constraint_satisfied(cobj):
             cb_solver.add_constraint(cobj)
@@ -24,4 +23,4 @@ def test_lazy_cb():
     solver.set_instance(instance, model)
     solver.solve(lazy_cb=lazy_cb)
     solution = solver.get_solution()
-    assert solution["x"][0] == 0.0
+    assert solution["x[0]"] == 0.0
@@ -30,16 +30,16 @@ def test_learning_solver():
     assert hasattr(instance, "features")
 
     sample = instance.training_data[0]
-    assert sample.solution["x"][0] == 1.0
-    assert sample.solution["x"][1] == 0.0
-    assert sample.solution["x"][2] == 1.0
-    assert sample.solution["x"][3] == 1.0
+    assert sample.solution["x[0]"] == 1.0
+    assert sample.solution["x[1]"] == 0.0
+    assert sample.solution["x[2]"] == 1.0
+    assert sample.solution["x[3]"] == 1.0
     assert sample.lower_bound == 1183.0
     assert sample.upper_bound == 1183.0
-    assert round(sample.lp_solution["x"][0], 3) == 1.000
-    assert round(sample.lp_solution["x"][1], 3) == 0.923
-    assert round(sample.lp_solution["x"][2], 3) == 1.000
-    assert round(sample.lp_solution["x"][3], 3) == 0.000
+    assert round(sample.lp_solution["x[0]"], 3) == 1.000
+    assert round(sample.lp_solution["x[1]"], 3) == 0.923
+    assert round(sample.lp_solution["x[2]"], 3) == 1.000
+    assert round(sample.lp_solution["x[3]"], 3) == 0.000
     assert round(sample.lp_value, 3) == 1287.923
     assert len(sample.mip_log) > 100
 
@@ -72,7 +72,7 @@ def test_parallel_solve():
     assert len(results) == 10
     for instance in instances:
         data = instance.training_data[0]
-        assert len(data.solution["x"].keys()) == 4
+        assert len(data.solution.keys()) == 4
 
 
 def test_solve_fit_from_disk():
@@ -20,24 +20,22 @@ def test_knapsack() -> None:
     solver.set_instance(instance, model)
     FeaturesExtractor(solver).extract(instance)
     assert instance.features.variables == {
-        "x": {
-            0: VariableFeatures(
-                category="default",
-                user_features=[23.0, 505.0],
-            ),
-            1: VariableFeatures(
-                category="default",
-                user_features=[26.0, 352.0],
-            ),
-            2: VariableFeatures(
-                category="default",
-                user_features=[20.0, 458.0],
-            ),
-            3: VariableFeatures(
-                category="default",
-                user_features=[18.0, 220.0],
-            ),
-        }
+        "x[0]": VariableFeatures(
+            category="default",
+            user_features=[23.0, 505.0],
+        ),
+        "x[1]": VariableFeatures(
+            category="default",
+            user_features=[26.0, 352.0],
+        ),
+        "x[2]": VariableFeatures(
+            category="default",
+            user_features=[20.0, 458.0],
+        ),
+        "x[3]": VariableFeatures(
+            category="default",
+            user_features=[18.0, 220.0],
+        ),
     }
     assert instance.features.constraints == {
         "eq_capacity": ConstraintFeatures(