mirror of https://github.com/ANL-CEEESA/MIPLearn.git (synced 2025-12-06 01:18:52 -06:00)
Improve logging
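Summary of the changes below: the dynamic constraints component gains a module-level logger and a log-and-skip guard for unfitted classifiers; the dynamic and static lazy constraint messages now tell the two apart; the user cuts component only logs when cuts were actually added; the primal solution component announces the prediction step, logs when it skips, and ignores unknown variable categories; and LearningSolver logs "Extracting features..." before feature extraction.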
@@ -13,6 +13,10 @@ from miplearn.components import classifier_evaluation_dict
 from miplearn.components.component import Component
 from miplearn.features import TrainingSample
 
+import logging
+
+logger = logging.getLogger(__name__)
+
 if TYPE_CHECKING:
     from miplearn.solvers.learning import Instance
 
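The `logger = logging.getLogger(__name__)` line added above is the standard per-module logger pattern; the messages it produces only appear once the application installs a handler. A minimal standalone sketch (not MIPLearn code) of how such a message surfaces:

    import logging

    # Per-module logger, named after the module, mirroring the pattern added above.
    logger = logging.getLogger(__name__)

    def do_work() -> None:
        logger.info("Predicting violated lazy constraints...")

    if __name__ == "__main__":
        # Without this call, INFO records are discarded by the default WARNING level.
        logging.basicConfig(level=logging.INFO, format="%(name)s: %(message)s")
        do_work()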
@@ -89,6 +93,9 @@ class DynamicConstraintsComponent(Component):
         sample: TrainingSample,
     ) -> List[Hashable]:
         pred: List[Hashable] = []
+        if len(self.known_cids) == 0:
+            logger.info("Classifiers not fitted. Skipping.")
+            return pred
         x, _, cids = self.sample_xy_with_cids(instance, sample)
         for category in x.keys():
             assert category in self.classifiers
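The added block is an early-return guard: when no constraint ids have been learned yet, the method logs the fact and returns an empty prediction instead of tripping the `assert` below. A hypothetical, self-contained sketch of the same pattern (class and attribute names are illustrative, not the MIPLearn API):

    import logging
    from typing import Hashable, List

    logger = logging.getLogger(__name__)

    class ExampleComponent:
        def __init__(self) -> None:
            self.known_cids: List[Hashable] = []

        def sample_predict(self) -> List[Hashable]:
            pred: List[Hashable] = []
            if len(self.known_cids) == 0:
                # Nothing has been learned yet; skip quietly instead of failing.
                logger.info("Classifiers not fitted. Skipping.")
                return pred
            # Placeholder for the real per-category prediction logic.
            pred.extend(self.known_cids)
            return pred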
@@ -65,7 +65,7 @@ class DynamicLazyConstraintsComponent(Component):
         training_data: TrainingSample,
     ) -> None:
         training_data.lazy_enforced = set()
-        logger.info("Predicting violated lazy constraints...")
+        logger.info("Predicting violated (dynamic) lazy constraints...")
         cids = self.dynamic.sample_predict(instance, training_data)
         logger.info("Enforcing %d lazy constraints..." % len(cids))
         self.enforce(cids, instance, model, solver)
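A side note on the unchanged line `logger.info("Enforcing %d lazy constraints..." % len(cids))`: the `%` operator builds the string before logging decides whether the record is emitted. Passing the argument to the call itself defers the formatting; both styles shown as a sketch:

    import logging

    logger = logging.getLogger(__name__)
    cids = ["cut[1]", "cut[2]", "cut[3]"]

    # Eager: the message string is built even if INFO logging is disabled.
    logger.info("Enforcing %d lazy constraints..." % len(cids))

    # Deferred: logging formats the message only if the record is actually handled.
    logger.info("Enforcing %d lazy constraints...", len(cids))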
@@ -93,6 +93,7 @@ class UserCutsComponent(Component):
     ) -> None:
         training_data.user_cuts_enforced = set(self.enforced)
         stats["UserCuts: Added in callback"] = self.n_added_in_callback
-        logger.info(f"{self.n_added_in_callback} user cuts added in callback")
+        if self.n_added_in_callback > 0:
+            logger.info(f"{self.n_added_in_callback} user cuts added in callback")
 
     # Delegate ML methods to self.dynamic
@@ -69,12 +69,14 @@ class PrimalSolutionComponent(Component):
         features: Features,
         training_data: TrainingSample,
     ) -> None:
+        logger.info("Predicting primal solution...")
+
         # Do nothing if models are not trained
         if len(self.classifiers) == 0:
+            logger.info("Classifiers not fitted. Skipping.")
             return
 
         # Predict solution and provide it to the solver
-        logger.info("Predicting MIP solution...")
         solution = self.sample_predict(instance, training_data)
         assert solver.internal_solver is not None
         if self.mode == "heuristic":
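The reshuffled messages follow a simple ordering: announce the step, bail out with an explanatory message if no models are trained, and only then run the prediction (the old "Predicting MIP solution..." line becomes redundant). A hypothetical sketch of that flow (illustrative names, not the MIPLearn API):

    import logging
    from typing import Dict, Optional

    logger = logging.getLogger(__name__)

    class ExamplePrimalComponent:
        def __init__(self) -> None:
            self.classifiers: Dict[str, object] = {}

        def before_solve(self) -> Optional[Dict[str, float]]:
            logger.info("Predicting primal solution...")

            # Do nothing if models are not trained
            if len(self.classifiers) == 0:
                logger.info("Classifiers not fitted. Skipping.")
                return None

            # Placeholder for the actual prediction and warm-start step.
            return {"x[0]": 1.0}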
@@ -130,6 +132,8 @@ class PrimalSolutionComponent(Component):
         category_offset: Dict[Hashable, int] = {cat: 0 for cat in x.keys()}
         for (var_name, var_features) in instance.features.variables.items():
             category = var_features.category
+            if category not in category_offset:
+                continue
             offset = category_offset[category]
             category_offset[category] += 1
             if y_pred[category][offset, 0]:
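Context for the offset bookkeeping: predictions are stored per category, and each variable consumes the next row of its category's prediction matrix; the added guard skips variables whose category has no trained classifier, which would otherwise raise a KeyError on the lookup below. A minimal sketch of the idea using plain lists (illustrative data, not the actual structures):

    from typing import Dict, Hashable, List, Tuple

    # Per-category predictions: one boolean entry per variable of that category.
    y_pred: Dict[Hashable, List[bool]] = {
        "binary": [True, False, True],
        "setup": [False],
    }
    variables: List[Tuple[str, Hashable]] = [
        ("x[0]", "binary"),
        ("y[0]", "setup"),
        ("z[0]", "continuous"),  # no classifier for this category
        ("x[1]", "binary"),
    ]

    category_offset: Dict[Hashable, int] = {cat: 0 for cat in y_pred.keys()}
    fixed: Dict[str, bool] = {}
    for (var_name, category) in variables:
        if category not in category_offset:
            continue  # skip categories without predictions
        offset = category_offset[category]
        category_offset[category] += 1
        fixed[var_name] = y_pred[category][offset]

    print(fixed)  # {'x[0]': True, 'y[0]': False, 'x[1]': False}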
@@ -64,9 +64,9 @@ class StaticLazyConstraintsComponent(Component):
         assert features.instance is not None
         assert features.constraints is not None
 
-        if not features.instance.lazy_constraint_count == 0:
+        logger.info("Predicting violated (static) lazy constraints...")
+        if features.instance.lazy_constraint_count == 0:
             logger.info("Instance does not have static lazy constraints. Skipping.")
-        logger.info("Predicting required lazy constraints...")
         self.enforced_cids = set(self.sample_predict(instance, training_data))
         logger.info("Moving lazy constraints to the pool...")
         self.pool = {}
@@ -155,6 +155,7 @@ class LearningSolver:
 
         # Extract features
         # -------------------------------------------------------
+        logger.info("Extracting features...")
         FeaturesExtractor(self.internal_solver).extract(instance)
 
         callback_args = (
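All of the modules touched here name their loggers after the module (`logging.getLogger(__name__)`), so the messages live under the "miplearn" logger hierarchy. A sketch of how a user could surface them, assuming the package is installed and imported as `miplearn`:

    import logging

    # Send log records to the console with a simple format.
    logging.basicConfig(format="%(levelname)s %(name)s: %(message)s")

    # Show INFO-level messages (e.g. "Extracting features...") from miplearn only.
    logging.getLogger("miplearn").setLevel(logging.INFO)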