Improve logging

master
Alinson S. Xavier 5 years ago
parent 6330354c47
commit 3edc8139e9

@@ -13,6 +13,10 @@ from miplearn.components import classifier_evaluation_dict
from miplearn.components.component import Component
from miplearn.features import TrainingSample
import logging
logger = logging.getLogger(__name__)
if TYPE_CHECKING:
    from miplearn.solvers.learning import Instance
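
Note (not part of the commit): the module-level logger added above only produces visible output if the application configures logging handlers. A minimal sketch, using only the standard library, of a script-side setup that surfaces these new INFO messages:

# Minimal sketch (not from this commit): route INFO records from the
# miplearn.* loggers to the console so the new messages actually show up.
import logging

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s %(name)s: %(message)s",
)

# Loggers created with logging.getLogger(__name__), as in the hunk above,
# propagate to the root logger configured by basicConfig.
logging.getLogger("miplearn").setLevel(logging.INFO)
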
@@ -89,6 +93,9 @@ class DynamicConstraintsComponent(Component):
        sample: TrainingSample,
    ) -> List[Hashable]:
        pred: List[Hashable] = []
        if len(self.known_cids) == 0:
            logger.info("Classifiers not fitted. Skipping.")
            return pred
        x, _, cids = self.sample_xy_with_cids(instance, sample)
        for category in x.keys():
            assert category in self.classifiers

@@ -65,7 +65,7 @@ class DynamicLazyConstraintsComponent(Component):
        training_data: TrainingSample,
    ) -> None:
        training_data.lazy_enforced = set()
        logger.info("Predicting violated lazy constraints...")
        logger.info("Predicting violated (dynamic) lazy constraints...")
        cids = self.dynamic.sample_predict(instance, training_data)
        logger.info("Enforcing %d lazy constraints..." % len(cids))
        self.enforce(cids, instance, model, solver)
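
Aside on the unchanged line `logger.info("Enforcing %d lazy constraints..." % len(cids))`: the standard-library logger can also receive the arguments separately, deferring the string formatting until the record is actually emitted. A small sketch of the two variants (illustrative only; `cids` here is made-up data):

import logging

logger = logging.getLogger(__name__)
cids = ["cut[1]", "cut[2]"]  # hypothetical constraint ids, for illustration only

# Eager: the message string is built even when INFO is disabled.
logger.info("Enforcing %d lazy constraints..." % len(cids))

# Deferred: logging interpolates the arguments only if the record is emitted.
logger.info("Enforcing %d lazy constraints...", len(cids))
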

@@ -93,6 +93,7 @@ class UserCutsComponent(Component):
    ) -> None:
        training_data.user_cuts_enforced = set(self.enforced)
        stats["UserCuts: Added in callback"] = self.n_added_in_callback
        if self.n_added_in_callback > 0:
            logger.info(f"{self.n_added_in_callback} user cuts added in callback")

    # Delegate ML methods to self.dynamic

@@ -69,12 +69,14 @@ class PrimalSolutionComponent(Component):
        features: Features,
        training_data: TrainingSample,
    ) -> None:
        logger.info("Predicting primal solution...")
        # Do nothing if models are not trained
        if len(self.classifiers) == 0:
            logger.info("Classifiers not fitted. Skipping.")
            return
        # Predict solution and provide it to the solver
        logger.info("Predicting MIP solution...")
        solution = self.sample_predict(instance, training_data)
        assert solver.internal_solver is not None
        if self.mode == "heuristic":
@@ -130,6 +132,8 @@ class PrimalSolutionComponent(Component):
        category_offset: Dict[Hashable, int] = {cat: 0 for cat in x.keys()}
        for (var_name, var_features) in instance.features.variables.items():
            category = var_features.category
            if category not in category_offset:
                continue
            offset = category_offset[category]
            category_offset[category] += 1
            if y_pred[category][offset, 0]:
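
The `if category not in category_offset: continue` guard added above skips variables whose category has no corresponding predictions instead of raising a `KeyError`. Out of context the offset bookkeeping is easy to misread, so here is a self-contained sketch with made-up data (names and shapes are illustrative, not MIPLearn's API): predictions are stored per category as a matrix with one row per variable of that category, and a running offset maps each variable back to its row.

from typing import Dict, Hashable

import numpy as np

# Made-up predictions: one boolean row per "binary" variable.
y_pred: Dict[Hashable, np.ndarray] = {
    "binary": np.array([[True], [False], [True]]),
}
# Made-up variable-to-category map; x[1] has no category, hence no predictions.
variable_categories = {"x[0]": "binary", "x[1]": None, "x[2]": "binary", "x[3]": "binary"}

category_offset: Dict[Hashable, int] = {cat: 0 for cat in y_pred.keys()}
solution = {}
for (var_name, category) in variable_categories.items():
    if category not in category_offset:
        continue  # no predictions for this category: leave the variable free
    offset = category_offset[category]
    category_offset[category] += 1
    if y_pred[category][offset, 0]:
        solution[var_name] = 1.0

print(solution)  # {'x[0]': 1.0, 'x[3]': 1.0}
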

@@ -64,9 +64,9 @@ class StaticLazyConstraintsComponent(Component):
        assert features.instance is not None
        assert features.constraints is not None
        if not features.instance.lazy_constraint_count == 0:
            logger.info("Predicting violated (static) lazy constraints...")
        if features.instance.lazy_constraint_count == 0:
            logger.info("Instance does not have static lazy constraints. Skipping.")
        logger.info("Predicting required lazy constraints...")
        self.enforced_cids = set(self.sample_predict(instance, training_data))
        logger.info("Moving lazy constraints to the pool...")
        self.pool = {}

@@ -155,6 +155,7 @@ class LearningSolver:
        # Extract features
        # -------------------------------------------------------
        logger.info("Extracting features...")
        FeaturesExtractor(self.internal_solver).extract(instance)
        callback_args = (
