Move python files to root folder; remove built docs

2020-08-29 11:42:02 -05:00
parent 741af8506b
commit 5663ced0be
116 changed files with 8 additions and 12408 deletions

View File

@@ -0,0 +1,4 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

View File

@@ -0,0 +1,25 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
from miplearn import LearningSolver
from miplearn.problems.knapsack import MultiKnapsackGenerator, MultiKnapsackInstance
from scipy.stats import uniform, randint
import numpy as np


def test_knapsack_generator():
    gen = MultiKnapsackGenerator(n=randint(low=100, high=101),
                                 m=randint(low=30, high=31),
                                 w=randint(low=0, high=1000),
                                 K=randint(low=500, high=501),
                                 u=uniform(loc=1.0, scale=1.0),
                                 alpha=uniform(loc=0.50, scale=0.0),
                                 )
    instances = gen.generate(100)
    # Average weights, prices and capacities across the generated instances
    # (despite the *_sum names, these are per-instance averages).
    w_sum = sum(instance.weights for instance in instances) / len(instances)
    p_sum = sum(instance.prices for instance in instances) / len(instances)
    b_sum = sum(instance.capacities for instance in instances) / len(instances)
    # Weights come from randint(0, 1000), so their mean should be close to 500.
    assert round(np.mean(w_sum), -1) == 500.
    # assert round(np.mean(p_sum), -1) == 1200.  # flaky
    assert round(np.mean(b_sum), -3) == 25000.
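
The generator is exercised above only for its summary statistics. For context, here is a minimal sketch of how such instances would typically feed a LearningSolver, following the generate/solve/fit pattern used in the TSP tests later in this commit; the small generator parameters are illustrative and not part of the committed file.

from miplearn import LearningSolver
from miplearn.problems.knapsack import MultiKnapsackGenerator
from scipy.stats import uniform, randint

# Generate a few small training instances in the same parameter style as the test above.
gen = MultiKnapsackGenerator(n=randint(low=10, high=11),
                             m=randint(low=5, high=6),
                             w=randint(low=0, high=1000),
                             K=randint(low=500, high=501),
                             u=uniform(loc=1.0, scale=1.0),
                             alpha=uniform(loc=0.50, scale=0.0))
training_instances = gen.generate(5)

# Solve the training instances, fit the solver on them, then solve a fresh instance,
# mirroring the solver.fit([instance]) / solver.solve(instance) calls in test_subtour.
solver = LearningSolver()
for instance in training_instances:
    solver.solve(instance)
solver.fit(training_instances)
solver.solve(gen.generate(1)[0])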

View File

@@ -0,0 +1,46 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
import networkx as nx
import numpy as np
from miplearn import LearningSolver
from miplearn.problems.stab import MaxWeightStableSetInstance
from scipy.stats import uniform, randint


def test_stab():
    # A maximum-weight stable set in the 5-cycle with unit weights has size 2.
    graph = nx.cycle_graph(5)
    weights = [1., 1., 1., 1., 1.]
    instance = MaxWeightStableSetInstance(graph, weights)
    solver = LearningSolver()
    solver.solve(instance)
    assert instance.lower_bound == 2.


def test_stab_generator_fixed_graph():
    np.random.seed(42)
    from miplearn.problems.stab import MaxWeightStableSetGenerator
    gen = MaxWeightStableSetGenerator(w=uniform(loc=50., scale=10.),
                                      n=randint(low=10, high=11),
                                      p=uniform(loc=0.05, scale=0.),
                                      fix_graph=True)
    instances = gen.generate(1_000)
    # Weights come from U[50, 60], so each node's average weight should be about 55.
    weights = np.array([instance.weights for instance in instances])
    weights_avg_actual = np.round(np.average(weights, axis=0))
    weights_avg_expected = [55.0] * 10
    assert list(weights_avg_actual) == weights_avg_expected


def test_stab_generator_random_graph():
    np.random.seed(42)
    from miplearn.problems.stab import MaxWeightStableSetGenerator
    gen = MaxWeightStableSetGenerator(w=uniform(loc=50., scale=10.),
                                      n=randint(low=30, high=41),
                                      p=uniform(loc=0.5, scale=0.),
                                      fix_graph=False)
    instances = gen.generate(1_000)
    # With n drawn from {30, ..., 40} and edge probability 0.5, the average graph
    # has about 35 nodes and roughly 300 edges.
    n_nodes = [instance.graph.number_of_nodes() for instance in instances]
    n_edges = [instance.graph.number_of_edges() for instance in instances]
    assert np.round(np.mean(n_nodes)) == 35.
    assert np.round(np.mean(n_edges), -1) == 300.
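
As a side note on the magic numbers asserted above, here is a small check of the distribution arithmetic, assuming SciPy's convention that uniform(loc, scale) is supported on [loc, loc + scale] and randint(low, high) on {low, ..., high - 1}; this snippet is not part of the committed file.

from scipy.stats import randint, uniform

# Node weights come from uniform(loc=50, scale=10), i.e. U[50, 60], whose mean is 55,
# matching the per-node average asserted in test_stab_generator_fixed_graph.
assert uniform(loc=50., scale=10.).mean() == 55.0

# Node counts come from randint(low=30, high=41), i.e. {30, ..., 40}, whose mean is 35,
# matching the average node count asserted in test_stab_generator_random_graph.
assert randint(low=30, high=41).mean() == 35.0

# With edge probability 0.5 and about 35 nodes, the expected edge count is roughly
# 0.5 * 35 * 34 / 2 = 297.5, which rounds to 300 at the nearest ten.
print(0.5 * 35 * 34 / 2)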

View File

@@ -0,0 +1,74 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
from miplearn import LearningSolver
from miplearn.problems.tsp import TravelingSalesmanGenerator, TravelingSalesmanInstance
import numpy as np
from numpy.linalg import norm
from scipy.spatial.distance import pdist, squareform
from scipy.stats import uniform, randint


def test_generator():
    instances = TravelingSalesmanGenerator(x=uniform(loc=0.0, scale=1000.0),
                                           y=uniform(loc=0.0, scale=1000.0),
                                           n=randint(low=100, high=101),
                                           gamma=uniform(loc=0.95, scale=0.1),
                                           fix_cities=True).generate(100)
    assert len(instances) == 100
    assert instances[0].n_cities == 100
    # Distance matrices should be symmetric.
    assert norm(instances[0].distances - instances[0].distances.T) < 1e-6
    # Even with fixed cities, the gamma perturbation should vary distances across instances.
    d = [instance.distances[0, 1] for instance in instances]
    assert np.std(d) > 0
def test_instance():
    n_cities = 4
    distances = np.array([
        [0., 1., 2., 1.],
        [1., 0., 1., 2.],
        [2., 1., 0., 1.],
        [1., 2., 1., 0.],
    ])
    instance = TravelingSalesmanInstance(n_cities, distances)
    for solver_name in ['gurobi', 'cplex']:
        solver = LearningSolver(solver=solver_name)
        solver.solve(instance)
        # The optimal tour is 0-1-2-3-0, with total length 4.
        x = instance.solution["x"]
        assert x[0, 1] == 1.0
        assert x[0, 2] == 0.0
        assert x[0, 3] == 1.0
        assert x[1, 2] == 1.0
        assert x[1, 3] == 0.0
        assert x[2, 3] == 1.0
        assert instance.lower_bound == 4.0
        assert instance.upper_bound == 4.0
def test_subtour():
    n_cities = 6
    cities = np.array([
        [0., 0.],
        [1., 0.],
        [2., 0.],
        [3., 0.],
        [0., 1.],
        [3., 1.],
    ])
    distances = squareform(pdist(cities))
    instance = TravelingSalesmanInstance(n_cities, distances)
    for solver_name in ['gurobi', 'cplex']:
        solver = LearningSolver(solver=solver_name)
        solver.solve(instance)
        # The solver should record any violated lazy constraints and user cuts
        # (subtour elimination) on the instance.
        assert hasattr(instance, "found_violated_lazy_constraints")
        assert hasattr(instance, "found_violated_user_cuts")
        # The optimal tour is the cycle 0-1-2-3-5-4-0.
        x = instance.solution["x"]
        assert x[0, 1] == 1.0
        assert x[0, 4] == 1.0
        assert x[1, 2] == 1.0
        assert x[2, 3] == 1.0
        assert x[3, 5] == 1.0
        assert x[4, 5] == 1.0
        # Fitting on the collected training data and re-solving should still work.
        solver.fit([instance])
        solver.solve(instance)
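
As a quick sanity check on the edges asserted in test_subtour (not part of the committed file): they form the cycle 0-1-2-3-5-4-0, whose length under the given coordinates is 8.

import numpy as np
from scipy.spatial.distance import pdist, squareform

cities = np.array([[0., 0.], [1., 0.], [2., 0.], [3., 0.], [0., 1.], [3., 1.]])
distances = squareform(pdist(cities))

# Read the six asserted edges as the tour 0-1-2-3-5-4-0 and add up its length.
tour = [0, 1, 2, 3, 5, 4, 0]
length = sum(distances[a, b] for a, b in zip(tour, tour[1:]))
print(length)  # 8.0 = 1 + 1 + 1 + 1 + 3 + 1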