Replace InstanceIterator by PickleGzInstance

2021-04-04 14:48:46 -05:00
parent b4770c6c0a
commit 08e808690e
14 changed files with 253 additions and 257 deletions
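The change replaces the on-the-fly unpickling previously done by InstanceIterator with instance objects that arrive already materialized, or wrapped in a pickle.gz-backed class. The sketch below only illustrates the general idea of such a wrapper using the standard gzip and pickle modules; the class and method names are placeholders and are not the actual PickleGzInstance API.

import gzip
import pickle

class LazyPickledInstance:
    """Illustrative stand-in for a pickle.gz-backed instance wrapper."""

    def __init__(self, filename):
        self.filename = filename
        self._instance = None

    def load(self):
        # Unpickle the wrapped instance from its .pkl.gz file on first use.
        if self._instance is None:
            with gzip.open(self.filename, "rb") as f:
                self._instance = pickle.load(f)
        return self._instance

# Hypothetical usage:
# instance = LazyPickledInstance("problem-001.pkl.gz").load()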

View File

@@ -2,10 +2,10 @@
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
import numpy as np
from typing import Any, List, Union, TYPE_CHECKING, Tuple, Dict, Optional, Hashable
from typing import Any, List, TYPE_CHECKING, Tuple, Dict, Hashable
import numpy as np
from miplearn.extractors import InstanceIterator
from miplearn.instance import Instance
from miplearn.types import LearningSolveStats, TrainingSample, Features
@@ -120,11 +120,11 @@ class Component:
def xy_instances(
self,
instances: Union[List[str], List[Instance]],
instances: List[Instance],
) -> Tuple[Dict, Dict]:
x_combined: Dict = {}
y_combined: Dict = {}
for instance in InstanceIterator(instances):
for instance in instances:
assert isinstance(instance, Instance)
for sample in instance.training_data:
xy = self.sample_xy(instance.features, sample)
@@ -141,7 +141,7 @@ class Component:
def fit(
self,
training_instances: Union[List[str], List[Instance]],
training_instances: List[Instance],
) -> None:
x, y = self.xy_instances(training_instances)
for cat in x.keys():
@@ -198,9 +198,9 @@ class Component:
) -> None:
return
def evaluate(self, instances: Union[List[str], List[Instance]]) -> List:
def evaluate(self, instances: List[Instance]) -> List:
ev = []
for instance in InstanceIterator(instances):
for instance in instances:
for sample in instance.training_data:
ev += [self.sample_evaluate(instance.features, sample)]
return ev
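Since xy_instances, fit and evaluate now take List[Instance] rather than a mix of file names and instances, callers are expected to materialize their training data before handing it to the component. A hypothetical calling sequence under that assumption; component, the file names, and LazyPickledInstance (from the sketch above) are placeholders, not miplearn API:

# Materialize Instance objects first, then train and evaluate directly on them.
training_instances = [
    LazyPickledInstance(f).load()
    for f in ["train-001.pkl.gz", "train-002.pkl.gz"]
]
component.fit(training_instances)
ev = component.evaluate(training_instances)  # one evaluation dict per training sample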

View File

@@ -13,7 +13,7 @@ from miplearn.classifiers import Classifier
from miplearn.classifiers.counting import CountingClassifier
from miplearn.components import classifier_evaluation_dict
from miplearn.components.component import Component
from miplearn.extractors import InstanceFeaturesExtractor, InstanceIterator
from miplearn.extractors import InstanceFeaturesExtractor
logger = logging.getLogger(__name__)
@@ -68,7 +68,7 @@ class DynamicLazyConstraintsComponent(Component):
self.classifiers = {}
violation_to_instance_idx = {}
for (idx, instance) in enumerate(InstanceIterator(training_instances)):
for (idx, instance) in enumerate(training_instances):
for v in instance.found_violated_lazy_constraints:
if isinstance(v, list):
v = tuple(v)
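The list-to-tuple conversion above matters because the violations are presumably used as dictionary keys (e.g. in violation_to_instance_idx, initialized just before the loop), and only hashable values can key a dict. A tiny illustration with a made-up violation value:

violation_to_instance_idx = {}
violation_to_instance_idx[("cut", 3, 7)] = [0]    # tuples are hashable: works
# violation_to_instance_idx[["cut", 3, 7]] = [0]  # TypeError: unhashable type: 'list'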

View File

@@ -3,22 +3,14 @@
# Released under the modified BSD license. See COPYING.md for more details.
import logging
from typing import List, Dict, Union, Optional, Any, TYPE_CHECKING, Tuple, Hashable
from typing import List, Dict, Any, TYPE_CHECKING, Tuple, Hashable
import numpy as np
from sklearn.linear_model import LinearRegression
from sklearn.metrics import (
mean_squared_error,
explained_variance_score,
max_error,
mean_absolute_error,
r2_score,
)
from miplearn.classifiers import Regressor
from miplearn.classifiers.sklearn import ScikitLearnRegressor
from miplearn.components.component import Component
from miplearn.extractors import InstanceIterator
from miplearn.instance import Instance
from miplearn.types import TrainingSample, LearningSolveStats, Features

View File

@@ -13,7 +13,6 @@ from miplearn.classifiers.counting import CountingClassifier
from miplearn.components import classifier_evaluation_dict
from miplearn.components.component import Component
from miplearn.components.steps.drop_redundant import DropRedundantInequalitiesStep
from miplearn.extractors import InstanceIterator
logger = logging.getLogger(__name__)
@@ -116,7 +115,7 @@ class ConvertTightIneqsIntoEqsStep(Component):
def _x_train(instances):
x = {}
for instance in tqdm(
InstanceIterator(instances),
instances,
desc="Extract (drop:x)",
disable=len(instances) < 5,
):
@@ -139,7 +138,7 @@ class ConvertTightIneqsIntoEqsStep(Component):
def y(self, instances):
y = {}
for instance in tqdm(
InstanceIterator(instances),
instances,
desc="Extract (rlx:conv_ineqs:y)",
disable=len(instances) < 5,
):

View File

@@ -6,14 +6,13 @@ import logging
from copy import deepcopy
import numpy as np
from tqdm import tqdm
from p_tqdm import p_umap
from tqdm import tqdm
from miplearn.classifiers.counting import CountingClassifier
from miplearn.components import classifier_evaluation_dict
from miplearn.components.component import Component
from miplearn.components.lazy_static import LazyConstraint
from miplearn.extractors import InstanceIterator
logger = logging.getLogger(__name__)
@@ -131,31 +130,24 @@ class DropRedundantInequalitiesStep(Component):
def _extract(instance):
x = {}
y = {}
for instance in InstanceIterator([instance]):
for training_data in instance.training_data:
for (cid, slack) in training_data["slacks"].items():
category = instance.get_constraint_category(cid)
if category is None:
continue
if category not in x:
x[category] = []
if category not in y:
y[category] = []
if slack > self.slack_tolerance:
y[category] += [[False, True]]
else:
y[category] += [[True, False]]
x[category] += [instance.get_constraint_features(cid)]
for training_data in instance.training_data:
for (cid, slack) in training_data["slacks"].items():
category = instance.get_constraint_category(cid)
if category is None:
continue
if category not in x:
x[category] = []
if category not in y:
y[category] = []
if slack > self.slack_tolerance:
y[category] += [[False, True]]
else:
y[category] += [[True, False]]
x[category] += [instance.get_constraint_features(cid)]
return x, y
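# Illustrative note, not part of the original diff: _extract builds, per
# constraint category, a feature list x and one-hot labels y from the slack
# recorded in each training sample:
#     slack >  slack_tolerance  ->  y gets [False, True]   (constraint was loose)
#     slack <= slack_tolerance  ->  y gets [True, False]   (constraint was tight)
# For example, with slack_tolerance = 0.0, a slack of 0.5 yields [False, True],
# while a slack of 0.0 yields [True, False].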
if n_jobs == 1:
results = [
_extract(i)
for i in tqdm(
instances,
desc="Extract (drop 1/3)",
)
]
results = [_extract(i) for i in tqdm(instances, desc="Extract (drop 1/3)")]
else:
results = p_umap(
_extract,