mirror of https://github.com/ANL-CEEESA/MIPLearn.git

Move python files to root folder; remove built docs

miplearn/classifiers/tests/__init__.py (Normal file, 3 lines)
@@ -0,0 +1,3 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

miplearn/classifiers/tests/test_counting.py (Normal file, 18 lines)
@@ -0,0 +1,18 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
from miplearn.classifiers.counting import CountingClassifier

import numpy as np
from numpy.linalg import norm

E = 0.1


def test_counting():
    clf = CountingClassifier()
    clf.fit(np.zeros((8, 25)), [0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0])
    expected_proba = np.array([[0.375, 0.625],
                               [0.375, 0.625]])
    actual_proba = clf.predict_proba(np.zeros((2, 25)))
    assert norm(actual_proba - expected_proba) < E
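
The assertion above pins down the semantics that test_counting relies on: CountingClassifier ignores the features entirely and predicts the empirical label frequencies observed during fit, here 3/8 = 0.375 zeros and 5/8 = 0.625 ones. A minimal sketch of that behavior follows; the class name FrequencyClassifier is a hypothetical stand-in, not the actual implementation in miplearn/classifiers/counting.py.

import numpy as np


class FrequencyClassifier:
    """Hypothetical stand-in that mimics what test_counting asserts."""

    def fit(self, x_train, y_train):
        # Ignore the features; remember only the fraction of positive labels.
        self.mean_ = float(np.mean(y_train))

    def predict_proba(self, x_test):
        # Every sample receives the same [P(y=0), P(y=1)] row.
        n_samples = np.asarray(x_test).shape[0]
        return np.tile([1.0 - self.mean_, self.mean_], (n_samples, 1))

With the training labels above, predict_proba returns rows of [0.375, 0.625] for any input, which is exactly the expected_proba matrix in the test.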

miplearn/classifiers/tests/test_cv.py (Normal file, 46 lines)
@@ -0,0 +1,46 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

import numpy as np
from miplearn.classifiers.cv import CrossValidatedClassifier
from numpy.linalg import norm
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVC

E = 0.1


def test_cv():
    # Training set: label is true if point is inside a 2D circle
    x_train = np.array([[x1, x2]
                        for x1 in range(-10, 11)
                        for x2 in range(-10, 11)])
    x_train = StandardScaler().fit_transform(x_train)
    n_samples = x_train.shape[0]

    y_train = np.array([1.0 if x1*x1 + x2*x2 <= 100 else 0.0
                        for x1 in range(-10, 11)
                        for x2 in range(-10, 11)])

    # Support vector machines with linear kernels do not perform well on this
    # data set, so predictor should return the given constant.
    clf = CrossValidatedClassifier(classifier=SVC(probability=True,
                                                  random_state=42),
                                   threshold=0.90,
                                   constant=0.0,
                                   cv=30)
    clf.fit(x_train, y_train)
    assert norm(np.zeros(n_samples) - clf.predict(x_train)) < E

    # Support vector machines with quadratic kernels perform almost perfectly
    # on this data set, so predictor should return their prediction.
    clf = CrossValidatedClassifier(classifier=SVC(probability=True,
                                                  kernel='poly',
                                                  degree=2,
                                                  random_state=42),
                                   threshold=0.90,
                                   cv=30)
    clf.fit(x_train, y_train)
    print(y_train - clf.predict(x_train))
    assert norm(y_train - clf.predict(x_train)) < E
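
The two scenarios above describe the contract that test_cv checks: CrossValidatedClassifier cross-validates the wrapped scikit-learn model on the training data, keeps it only if the cross-validation score reaches threshold, and otherwise answers constant for every sample. Below is a sketch of that contract; the class name, attribute names, and the use of accuracy as the score are assumptions for illustration, not the miplearn implementation in miplearn/classifiers/cv.py.

import numpy as np
from sklearn.base import clone
from sklearn.model_selection import cross_val_score


class ThresholdedClassifier:
    """Hypothetical stand-in for the behavior exercised by test_cv."""

    def __init__(self, classifier, threshold=0.75, constant=0.0, cv=5):
        self.classifier = classifier
        self.threshold = threshold
        self.constant = constant
        self.cv = cv

    def fit(self, x_train, y_train):
        # Estimate out-of-sample performance with k-fold cross-validation.
        score = np.mean(cross_val_score(clone(self.classifier),
                                        x_train, y_train, cv=self.cv))
        # Keep the inner model only if it clears the threshold; otherwise
        # fall back to predicting the constant for every sample.
        self.use_inner_ = score >= self.threshold
        if self.use_inner_:
            self.classifier.fit(x_train, y_train)

    def predict(self, x_test):
        if self.use_inner_:
            return self.classifier.predict(x_test)
        return np.full(np.asarray(x_test).shape[0], self.constant)

In the test, a linear SVC scores well below 0.90 on the circle data, so the wrapper returns the constant 0.0 everywhere; the degree-2 polynomial SVC clears the threshold, so its own predictions are returned.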

miplearn/classifiers/tests/test_evaluator.py (Normal file, 20 lines)
@@ -0,0 +1,20 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

import numpy as np
from miplearn.classifiers.evaluator import ClassifierEvaluator
from sklearn.neighbors import KNeighborsClassifier


def test_evaluator():
    clf_a = KNeighborsClassifier(n_neighbors=1)
    clf_b = KNeighborsClassifier(n_neighbors=2)
    x_train = np.array([[0, 0], [1, 0]])
    y_train = np.array([0, 1])
    clf_a.fit(x_train, y_train)
    clf_b.fit(x_train, y_train)
    ev = ClassifierEvaluator()
    assert ev.evaluate(clf_a, x_train, y_train) == 1.0
    assert ev.evaluate(clf_b, x_train, y_train) == 0.5
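
Both assertions are consistent with ClassifierEvaluator.evaluate returning plain accuracy on the supplied samples: the 1-nearest-neighbour model reproduces its own training labels exactly (1.0), while the 2-nearest-neighbour model ties on every query, falls back to class 0, and gets one of the two points right (0.5). A one-line sketch of that reading, using a hypothetical helper rather than the miplearn API in miplearn/classifiers/evaluator.py:

from sklearn.metrics import accuracy_score


def evaluate(clf, x_test, y_test):
    # Fraction of samples whose predicted label matches the true label.
    return accuracy_score(y_test, clf.predict(x_test))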

miplearn/classifiers/tests/test_threshold.py (Normal file, 34 lines)
@@ -0,0 +1,34 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

from unittest.mock import Mock

import numpy as np
from miplearn.classifiers import Classifier
from miplearn.classifiers.threshold import MinPrecisionThreshold


def test_threshold_dynamic():
    clf = Mock(spec=Classifier)
    clf.predict_proba = Mock(return_value=np.array([
        [0.10, 0.90],
        [0.10, 0.90],
        [0.20, 0.80],
        [0.30, 0.70],
    ]))
    x_train = np.array([0, 1, 2, 3])
    y_train = np.array([1, 1, 0, 0])

    threshold = MinPrecisionThreshold(min_precision=1.0)
    assert threshold.find(clf, x_train, y_train) == 0.90

    threshold = MinPrecisionThreshold(min_precision=0.65)
    assert threshold.find(clf, x_train, y_train) == 0.80

    threshold = MinPrecisionThreshold(min_precision=0.50)
    assert threshold.find(clf, x_train, y_train) == 0.70

    threshold = MinPrecisionThreshold(min_precision=0.00)
    assert threshold.find(clf, x_train, y_train) == 0.70
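
The four assertions pin down what MinPrecisionThreshold.find returns: the lowest class-1 probability cutoff that still achieves the requested precision on the given samples, falling back to the lowest observed probability when any precision is acceptable. A sketch of that search, assuming a hypothetical helper built on scikit-learn's precision_recall_curve rather than the actual code in miplearn/classifiers/threshold.py:

from sklearn.metrics import precision_recall_curve


def find_min_precision_threshold(clf, x_train, y_train, min_precision):
    proba = clf.predict_proba(x_train)[:, 1]  # P(label = 1) for each sample
    precision, _, thresholds = precision_recall_curve(y_train, proba)
    # thresholds is sorted in increasing order, and precision[i] is the
    # precision obtained when predicting positive for proba >= thresholds[i].
    for p, t in zip(precision, thresholds):
        if p >= min_precision:
            return t
    return thresholds[-1]  # even the strictest cutoff misses the target

With the mocked probabilities [0.90, 0.90, 0.80, 0.70] and labels [1, 1, 0, 0], this search yields 0.90 for min_precision=1.0, 0.80 for 0.65, and 0.70 for both 0.50 and 0.00, matching the assertions above.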