Update PrimalSolutionComponent

Branch: master
Author: Alinson S. Xavier (5 years ago)
parent d7aa31f3eb
commit a9dcdb8e4e

@@ -266,6 +266,13 @@ class Component(EnforceOverrides):
     ) -> Dict[Hashable, Dict[str, float]]:
         return {}
 
+    def sample_evaluate(
+        self,
+        instance: Optional[Instance],
+        sample: Sample,
+    ) -> Dict[Hashable, Dict[str, float]]:
+        return {}
+
     def sample_xy(
         self,
         instance: Optional[Instance],

@@ -61,14 +61,13 @@ class PrimalSolutionComponent(Component):
         self.classifier_prototype = classifier
 
     @overrides
-    def before_solve_mip_old(
+    def before_solve_mip(
         self,
         solver: "LearningSolver",
         instance: Instance,
         model: Any,
         stats: LearningSolveStats,
-        features: Features,
-        training_data: TrainingSample,
+        sample: Sample,
     ) -> None:
         logger.info("Predicting primal solution...")

@@ -78,7 +77,7 @@
             return
 
         # Predict solution and provide it to the solver
-        solution = self.sample_predict(instance, training_data)
+        solution = self.sample_predict(sample)
         assert solver.internal_solver is not None
         if self.mode == "heuristic":
             solver.internal_solver.fix(solution)

@@ -103,15 +102,12 @@
                 f"one: {stats['Primal: One']}"
             )
 
-    def sample_predict(
-        self,
-        instance: Instance,
-        sample: TrainingSample,
-    ) -> Solution:
-        assert instance.features.variables is not None
+    def sample_predict(self, sample: Sample) -> Solution:
+        assert sample.after_load is not None
+        assert sample.after_load.variables is not None
 
         # Compute y_pred
-        x, _ = self.sample_xy_old(instance, sample)
+        x, _ = self.sample_xy(None, sample)
         y_pred = {}
         for category in x.keys():
             assert category in self.classifiers, (

@@ -129,9 +125,9 @@
         ).T
 
         # Convert y_pred into solution
-        solution: Solution = {v: None for v in instance.features.variables.keys()}
+        solution: Solution = {v: None for v in sample.after_load.variables.keys()}
         category_offset: Dict[Hashable, int] = {cat: 0 for cat in x.keys()}
-        for (var_name, var_features) in instance.features.variables.items():
+        for (var_name, var_features) in sample.after_load.variables.items():
             category = var_features.category
             if category not in category_offset:
                 continue

@@ -144,42 +140,6 @@
         return solution
 
-    @overrides
-    def sample_xy_old(
-        self,
-        instance: Instance,
-        sample: TrainingSample,
-    ) -> Tuple[Dict[Category, List[List[float]]], Dict[Category, List[List[float]]]]:
-        assert instance.features.variables is not None
-        x: Dict = {}
-        y: Dict = {}
-        for (var_name, var_features) in instance.features.variables.items():
-            category = var_features.category
-            if category is None:
-                continue
-            if category not in x.keys():
-                x[category] = []
-                y[category] = []
-            f: List[float] = []
-            assert var_features.user_features is not None
-            f += var_features.user_features
-            if sample.lp_solution is not None:
-                lp_value = sample.lp_solution[var_name]
-                if lp_value is not None:
-                    f += [lp_value]
-            x[category] += [f]
-            if sample.solution is not None:
-                opt_value = sample.solution[var_name]
-                assert opt_value is not None
-                assert 0.0 - 1e-5 <= opt_value <= 1.0 + 1e-5, (
-                    f"Variable {var_name} has non-binary value {opt_value} in the "
-                    "optimal solution. Predicting values of non-binary "
-                    "variables is not currently supported. Please set its "
-                    "category to None."
-                )
-                y[category] += [[opt_value < 0.5, opt_value >= 0.5]]
-        return x, y
-
     @overrides
     def sample_xy(
         self,

@@ -226,18 +186,21 @@
         return x, y
 
     @overrides
-    def sample_evaluate_old(
+    def sample_evaluate(
         self,
-        instance: Instance,
-        sample: TrainingSample,
+        _: Optional[Instance],
+        sample: Sample,
     ) -> Dict[Hashable, Dict[str, float]]:
-        solution_actual = sample.solution
-        assert solution_actual is not None
-        solution_pred = self.sample_predict(instance, sample)
+        assert sample.after_mip is not None
+        assert sample.after_mip.variables is not None
+        solution_actual = sample.after_mip.variables
+        solution_pred = self.sample_predict(sample)
         vars_all, vars_one, vars_zero = set(), set(), set()
         pred_one_positive, pred_zero_positive = set(), set()
-        for (var_name, value_actual) in solution_actual.items():
-            assert value_actual is not None
+        for (var_name, var) in solution_actual.items():
+            assert var.value is not None
+            value_actual = var.value
             vars_all.add(var_name)
             if value_actual > 0.5:
                 vars_one.add(var_name)

@@ -279,10 +242,3 @@
             thr.fit(clf, x[category], y[category])
             self.classifiers[category] = clf
             self.thresholds[category] = thr
-
-    @overrides
-    def fit(
-        self,
-        training_instances: List[Instance],
-    ) -> None:
-        return

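For orientation only (not part of the diff): a minimal sketch of how the reworked Sample-based API above is meant to be driven, mirroring test_predict and test_evaluate further down. The sample object, the "default" category, and the pre-trained clf/thr objects are assumptions borrowed from the test fixture below.

    comp = PrimalSolutionComponent()
    x, y = comp.sample_xy(None, sample)       # features/labels grouped by variable category
    comp.classifiers = {"default": clf}       # assumed pre-trained Classifier per category
    comp.thresholds = {"default": thr}        # assumed matching Threshold per category
    solution = comp.sample_predict(sample)    # e.g. {"x[0]": 0.0, "x[1]": None, ...}
    ev = comp.sample_evaluate(None, sample)   # per-label tp/fp/tn/fn statistics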

@@ -1,7 +1,6 @@
 # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
 # Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
 # Released under the modified BSD license. See COPYING.md for more details.
-from typing import cast
 from unittest.mock import Mock
 
 import numpy as np

@@ -14,15 +13,14 @@ from miplearn.classifiers.threshold import Threshold
 from miplearn.components import classifier_evaluation_dict
 from miplearn.components.primal import PrimalSolutionComponent
 from miplearn.features import (
-    TrainingSample,
     Variable,
     Features,
     Sample,
     InstanceFeatures,
 )
 from miplearn.instance.base import Instance
 from miplearn.problems.tsp import TravelingSalesmanGenerator
 from miplearn.solvers.learning import LearningSolver
 from miplearn.solvers.tests import assert_equals
 
 
 @pytest.fixture

@@ -48,7 +46,7 @@ def sample() -> Sample:
         after_mip=Features(
             variables={
                 "x[0]": Variable(value=0.0),
-                "x[1]": Variable(value=0.0),
+                "x[1]": Variable(value=1.0),
                 "x[2]": Variable(value=1.0),
                 "x[3]": Variable(value=0.0),
             }

@@ -89,168 +87,6 @@ def test_xy(sample: Sample) -> None:
     assert y_actual == y_expected
 
 
-def test_xy_old() -> None:
-    features = Features(
-        variables={
-            "x[0]": Variable(
-                category="default",
-                user_features=[0.0, 0.0],
-            ),
-            "x[1]": Variable(
-                category=None,
-            ),
-            "x[2]": Variable(
-                category="default",
-                user_features=[1.0, 0.0],
-            ),
-            "x[3]": Variable(
-                category="default",
-                user_features=[1.0, 1.0],
-            ),
-        }
-    )
-    instance = Mock(spec=Instance)
-    instance.features = features
-    sample = TrainingSample(
-        solution={
-            "x[0]": 0.0,
-            "x[1]": 1.0,
-            "x[2]": 1.0,
-            "x[3]": 0.0,
-        },
-        lp_solution={
-            "x[0]": 0.1,
-            "x[1]": 0.1,
-            "x[2]": 0.1,
-            "x[3]": 0.1,
-        },
-    )
-    x_expected = {
-        "default": [
-            [0.0, 0.0, 0.1],
-            [1.0, 0.0, 0.1],
-            [1.0, 1.0, 0.1],
-        ]
-    }
-    y_expected = {
-        "default": [
-            [True, False],
-            [False, True],
-            [True, False],
-        ]
-    }
-    xy = PrimalSolutionComponent().sample_xy_old(instance, sample)
-    assert xy is not None
-    x_actual, y_actual = xy
-    assert x_actual == x_expected
-    assert y_actual == y_expected
-
-
-def test_xy_without_lp_solution_old() -> None:
-    features = Features(
-        variables={
-            "x[0]": Variable(
-                category="default",
-                user_features=[0.0, 0.0],
-            ),
-            "x[1]": Variable(
-                category=None,
-            ),
-            "x[2]": Variable(
-                category="default",
-                user_features=[1.0, 0.0],
-            ),
-            "x[3]": Variable(
-                category="default",
-                user_features=[1.0, 1.0],
-            ),
-        }
-    )
-    instance = Mock(spec=Instance)
-    instance.features = features
-    sample = TrainingSample(
-        solution={
-            "x[0]": 0.0,
-            "x[1]": 1.0,
-            "x[2]": 1.0,
-            "x[3]": 0.0,
-        },
-    )
-    x_expected = {
-        "default": [
-            [0.0, 0.0],
-            [1.0, 0.0],
-            [1.0, 1.0],
-        ]
-    }
-    y_expected = {
-        "default": [
-            [True, False],
-            [False, True],
-            [True, False],
-        ]
-    }
-    xy = PrimalSolutionComponent().sample_xy_old(instance, sample)
-    assert xy is not None
-    x_actual, y_actual = xy
-    assert x_actual == x_expected
-    assert y_actual == y_expected
-
-
-def test_predict_old() -> None:
-    clf = Mock(spec=Classifier)
-    clf.predict_proba = Mock(
-        return_value=np.array(
-            [
-                [0.9, 0.1],
-                [0.5, 0.5],
-                [0.1, 0.9],
-            ]
-        )
-    )
-    thr = Mock(spec=Threshold)
-    thr.predict = Mock(return_value=[0.75, 0.75])
-    features = Features(
-        variables={
-            "x[0]": Variable(
-                category="default",
-                user_features=[0.0, 0.0],
-            ),
-            "x[1]": Variable(
-                category="default",
-                user_features=[0.0, 2.0],
-            ),
-            "x[2]": Variable(
-                category="default",
-                user_features=[2.0, 0.0],
-            ),
-        }
-    )
-    instance = Mock(spec=Instance)
-    instance.features = features
-    sample = TrainingSample(
-        lp_solution={
-            "x[0]": 0.1,
-            "x[1]": 0.5,
-            "x[2]": 0.9,
-        }
-    )
-    x, _ = PrimalSolutionComponent().sample_xy_old(instance, sample)
-    comp = PrimalSolutionComponent()
-    comp.classifiers = {"default": clf}
-    comp.thresholds = {"default": thr}
-    pred = comp.sample_predict(instance, sample)
-    clf.predict_proba.assert_called_once()
-    assert_array_equal(x["default"], clf.predict_proba.call_args[0][0])
-    thr.predict.assert_called_once()
-    assert_array_equal(x["default"], thr.predict.call_args[0][0])
-    assert pred == {
-        "x[0]": 0.0,
-        "x[1]": None,
-        "x[2]": 1.0,
-    }
 
 
 def test_fit_xy() -> None:
     clf = Mock(spec=Classifier)
     clf.clone = lambda: Mock(spec=Classifier)  # type: ignore

@@ -295,37 +131,49 @@ def test_usage() -> None:
     assert stats["mip_lower_bound"] == stats["mip_warm_start_value"]
 
 
-def test_evaluate_old() -> None:
+def test_evaluate(sample: Sample) -> None:
     comp = PrimalSolutionComponent()
-    comp.sample_predict = lambda _, __: {  # type: ignore
+    comp.sample_predict = lambda _: {  # type: ignore
         "x[0]": 1.0,
-        "x[1]": 0.0,
+        "x[1]": 1.0,
         "x[2]": 0.0,
         "x[3]": None,
-        "x[4]": 1.0,
     }
-    features: Features = Features(
-        variables={
-            "x[0]": Variable(),
-            "x[1]": Variable(),
-            "x[2]": Variable(),
-            "x[3]": Variable(),
-            "x[4]": Variable(),
-        }
-    )
-    instance = Mock(spec=Instance)
-    instance.features = features
-    sample: TrainingSample = TrainingSample(
-        solution={
-            "x[0]": 1.0,
-            "x[1]": 1.0,
-            "x[2]": 0.0,
-            "x[3]": 1.0,
-            "x[4]": 1.0,
-        }
-    )
-    ev = comp.sample_evaluate_old(instance, sample)
-    assert ev == {
-        0: classifier_evaluation_dict(tp=1, fp=1, tn=3, fn=0),
-        1: classifier_evaluation_dict(tp=2, fp=0, tn=1, fn=2),
-    }
+    ev = comp.sample_evaluate(None, sample)
+    assert_equals(
+        ev,
+        {
+            0: classifier_evaluation_dict(tp=0, fp=1, tn=1, fn=2),
+            1: classifier_evaluation_dict(tp=1, fp=1, tn=1, fn=1),
+        },
+    )
+
+
+def test_predict(sample: Sample) -> None:
+    clf = Mock(spec=Classifier)
+    clf.predict_proba = Mock(
+        return_value=np.array(
+            [
+                [0.9, 0.1],
+                [0.5, 0.5],
+                [0.1, 0.9],
+            ]
+        )
+    )
+    thr = Mock(spec=Threshold)
+    thr.predict = Mock(return_value=[0.75, 0.75])
+    comp = PrimalSolutionComponent()
+    x, _ = comp.sample_xy(None, sample)
+    comp.classifiers = {"default": clf}
+    comp.thresholds = {"default": thr}
+    pred = comp.sample_predict(sample)
+    clf.predict_proba.assert_called_once()
+    thr.predict.assert_called_once()
+    assert_array_equal(x["default"], clf.predict_proba.call_args[0][0])
+    assert_array_equal(x["default"], thr.predict.call_args[0][0])
+    assert pred == {
+        "x[0]": 0.0,
+        "x[1]": None,
+        "x[2]": None,
+        "x[3]": 1.0,
+    }
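As a sanity check on the expected counts in test_evaluate above: the fixture's after_mip solution is x[0]=0, x[1]=1, x[2]=1, x[3]=0, while the mocked prediction is x[0]=1, x[1]=1, x[2]=0, x[3]=None. For label 1, only x[1] is a true positive, x[0] a false positive, x[2] a false negative, and x[3] a true negative (tp=1, fp=1, tn=1, fn=1); for label 0, the lone zero prediction x[2] is actually one (fp=1), the true zeros x[0] and x[3] are missed (fn=2), and x[1] is a true negative (tp=0, fp=1, tn=1, fn=2).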
