Mirror of https://github.com/ANL-CEEESA/MIPLearn.git
Implement KnnWarmStartPredictor; make it the default method
@@ -3,7 +3,7 @@
 # Written by Alinson S. Xavier <axavier@anl.gov>
 
 from .transformers import PerVariableTransformer
-from .warmstart import LogisticWarmStartPredictor
+from .warmstart import KnnWarmStartPredictor
 import pyomo.environ as pe
 import numpy as np
 from copy import deepcopy
@@ -18,7 +18,7 @@ class LearningSolver:
     def __init__(self,
                  threads=4,
                  parent_solver=pe.SolverFactory('cbc'),
-                 ws_predictor=LogisticWarmStartPredictor(),
+                 ws_predictor=KnnWarmStartPredictor(),
                  mode="exact"):
         self.parent_solver = parent_solver
         self.parent_solver.options["threads"] = threads
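
The only behavioral change in this hunk is the default ws_predictor. A minimal usage sketch, assuming LearningSolver is importable from the package root (that import path is an assumption, not shown in the diff):

    from miplearn import LearningSolver
    from miplearn.warmstart import LogisticWarmStartPredictor

    solver = LearningSolver()  # now defaults to KnnWarmStartPredictor

    # The previous behavior remains available by passing the predictor explicitly:
    solver = LearningSolver(ws_predictor=LogisticWarmStartPredictor())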
miplearn/tests/test_warmstart_knn.py (new file, +88 lines)
@@ -0,0 +1,88 @@
+# MIPLearn, an extensible framework for Learning-Enhanced Mixed-Integer Optimization
+# Copyright (C) 2019-2020 Argonne National Laboratory. All rights reserved.
+# Written by Alinson S. Xavier <axavier@anl.gov>
+
+from miplearn.warmstart import KnnWarmStartPredictor
+from sklearn.metrics import accuracy_score, precision_score
+import numpy as np
+
+
+def test_knn_with_consensus():
+    x_train = np.array([
+        [0.0, 0.0],
+        [0.1, 0.0],
+        [0.0, 0.1],
+        [1.0, 1.0],
+    ])
+    y_train = np.array([
+        [0., 1.],
+        [0., 1.],
+        [0., 1.],
+        [1., 0.],
+    ])
+    ws = KnnWarmStartPredictor(k=3, thr_clip=[0.75, 0.75])
+    ws.fit(x_train, y_train)
+
+    x_test = np.array([[0.0, 0.0]])
+    y_test = np.array([[0, 1]])
+    assert (ws.predict(x_test) == y_test).all()
+
+
+def test_knn_without_consensus():
+    x_train = np.array([
+        [0.0, 0.0],
+        [0.1, 0.1],
+        [0.9, 0.9],
+        [1.0, 1.0],
+    ])
+    y_train = np.array([
+        [0., 1.],
+        [0., 1.],
+        [1., 0.],
+        [1., 0.],
+    ])
+    ws = KnnWarmStartPredictor(k=4, thr_clip=[0.75, 0.75])
+    ws.fit(x_train, y_train)
+
+    x_test = np.array([[0.5, 0.5]])
+    y_test = np.array([[0, 0]])
+    assert (ws.predict(x_test) == y_test).all()
+
+
+def test_knn_always_true():
+    x_train = np.array([
+        [0.0, 0.0],
+        [0.1, 0.1],
+        [0.9, 0.9],
+        [1.0, 1.0],
+    ])
+    y_train = np.array([
+        [1., 0.],
+        [1., 0.],
+        [1., 0.],
+        [1., 0.],
+    ])
+    ws = KnnWarmStartPredictor(k=4, thr_clip=[0.75, 0.75])
+    ws.fit(x_train, y_train)
+
+    x_test = np.array([[0.5, 0.5]])
+    y_test = np.array([[1, 0]])
+    assert (ws.predict(x_test) == y_test).all()
+
+
+def test_knn_always_false():
+    x_train = np.array([
+        [0.0, 0.0],
+        [0.1, 0.1],
+        [0.9, 0.9],
+        [1.0, 1.0],
+    ])
+    y_train = np.array([
+        [0., 1.],
+        [0., 1.],
+        [0., 1.],
+        [0., 1.],
+    ])
+    ws = KnnWarmStartPredictor(k=4, thr_clip=[0.75, 0.75])
+    ws.fit(x_train, y_train)
+
+    x_test = np.array([[0.5, 0.5]])
+    y_test = np.array([[0, 1]])
+    assert (ws.predict(x_test) == y_test).all()
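
For intuition on the consensus these tests exercise: a warm-start value is proposed for a label only when the neighbor vote clears thr_clip. A standalone sketch of that vote-and-clip arithmetic for test_knn_without_consensus (plain NumPy, not part of the commit):

    import numpy as np

    votes = np.array([0., 0., 1., 1.])  # first label column of the k=4 neighbors of [0.5, 0.5]
    prob = votes.mean()                 # 0.5: the neighbors are split
    prediction = int(prob >= 0.75)      # 0: below thr_clip, so nothing is predicted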
@@ -8,10 +8,12 @@ from sklearn.pipeline import make_pipeline
 from sklearn.linear_model import LogisticRegression
 from sklearn.preprocessing import StandardScaler
 from sklearn.model_selection import cross_val_score
+from sklearn.neighbors import KNeighborsClassifier
 
 class WarmStartPredictor(ABC):
-    def __init__(self):
+    def __init__(self, thr_clip=[0.50, 0.50]):
         self.models = [None, None]
+        self.thr_clip = thr_clip
 
     def fit(self, x_train, y_train):
         assert isinstance(x_train, np.ndarray)
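
With the default thr_clip of [0.50, 0.50], the clipping in predict below reduces to a per-label majority vote (an exact tie resolves to 1). A one-line check of that equivalence, using hypothetical probabilities:

    import numpy as np

    probs = np.array([0.4, 0.5, 0.6])      # hypothetical predicted probabilities
    majority = (probs >= 0.5).astype(int)  # [0, 1, 1] -- same result as clipping at 0.50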
@@ -23,13 +25,16 @@ class WarmStartPredictor(ABC):
 
     def predict(self, x_test):
         assert isinstance(x_test, np.ndarray)
-        y_pred = np.zeros((x_test.shape[0], 2), dtype=np.int)
+        y_pred = np.zeros((x_test.shape[0], 2))
         for i in [0,1]:
             if isinstance(self.models[i], int):
                 y_pred[:, i] = self.models[i]
             else:
-                y_pred[:, i] = self.models[i].predict(x_test)
-        return y_pred
+                y = self.models[i].predict_proba(x_test)[:,1]
+                y[y < self.thr_clip[i]] = 0.
+                y[y > 0.] = 1.
+                y_pred[:, i] = y
+        return y_pred.astype(int)
 
     @abstractmethod
     def _fit(self, x_train, y_train, label):
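
The y_pred buffer is now float-typed because predict_proba returns probabilities; the two clipping lines then binarize it before the final astype(int). A quick illustration with hypothetical values:

    import numpy as np

    y = np.array([0.95, 0.60, 0.10])  # probabilities for one label column
    thr = 0.90                        # per-label threshold from thr_clip
    y[y < thr] = 0.                   # -> [0.95, 0.0, 0.0]
    y[y > 0.] = 1.                    # -> [1.0, 0.0, 0.0]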
@@ -71,4 +76,32 @@ class LogisticWarmStartPredictor(WarmStartPredictor):
             return 0
 
         reg.fit(x_train, y_train.astype(int))
         return reg
+
+
+class KnnWarmStartPredictor(WarmStartPredictor):
+    def __init__(self, k=50,
+                 thr_clip=[0.90, 0.90],
+                 thr_fix=[0.99, 0.99]):
+        super().__init__(thr_clip=thr_clip)
+        self.k = k
+        self.thr_fix = thr_fix
+
+    def _fit(self, x_train, y_train, label):
+        y_train_avg = np.average(y_train)
+
+        # If number of training samples is too small, don't predict anything.
+        if x_train.shape[0] < self.k:
+            return 0
+
+        # If vast majority of observations are true, always return true.
+        if y_train_avg > self.thr_fix[label]:
+            return 1
+
+        # If vast majority of observations are false, always return false.
+        if y_train_avg < (1 - self.thr_fix[label]):
+            return 0
+
+        knn = KNeighborsClassifier(n_neighbors=self.k)
+        knn.fit(x_train, y_train)
+        return knn
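
When _fit returns the constant 0 or 1 instead of a trained classifier, predict (see the earlier hunk) broadcasts that constant into the corresponding output column. A hedged usage sketch of that path, with hypothetical shapes and the default k=50:

    import numpy as np
    from miplearn.warmstart import KnnWarmStartPredictor

    x_train = np.random.rand(100, 2)         # 100 samples, 2 features (hypothetical)
    y_train = np.ones((100, 2))              # both label columns always true
    ws = KnnWarmStartPredictor()             # k=50, thr_fix=[0.99, 0.99]
    ws.fit(x_train, y_train)                 # column averages of 1.0 exceed thr_fix,
                                             # so both models collapse to the constant 1
    print(ws.predict(np.random.rand(5, 2)))  # five rows of [1, 1]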