Mirror of https://github.com/ANL-CEEESA/MIPLearn.git (synced 2025-12-07 01:48:51 -06:00)

MIPLearn v0.3
@@ -1,3 +1,3 @@
 # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
-# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
+# Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
 # Released under the modified BSD license. See COPYING.md for more details.

tests/problems/test_binpack.py (Normal file, 58 lines)
@@ -0,0 +1,58 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

import numpy as np
from scipy.stats import uniform, randint

from miplearn.problems.binpack import build_binpack_model, BinPackData, BinPackGenerator


def test_binpack_generator() -> None:
    np.random.seed(42)
    gen = BinPackGenerator(
        n=randint(low=10, high=11),
        sizes=uniform(loc=0, scale=10),
        capacity=uniform(loc=100, scale=0),
        sizes_jitter=uniform(loc=0.9, scale=0.2),
        capacity_jitter=uniform(loc=0.9, scale=0.2),
        fix_items=True,
    )
    data = gen.generate(2)
    assert data[0].sizes.tolist() == [
        3.39,
        10.4,
        7.81,
        5.64,
        1.46,
        1.46,
        0.56,
        8.7,
        5.93,
        6.79,
    ]
    assert data[0].capacity == 102.24
    assert data[1].sizes.tolist() == [
        3.48,
        9.11,
        7.12,
        5.93,
        1.65,
        1.47,
        0.58,
        8.82,
        5.47,
        7.23,
    ]
    assert data[1].capacity == 93.41


def test_binpack() -> None:
    model = build_binpack_model(
        BinPackData(
            sizes=np.array([4, 8, 1, 4, 2, 1]),
            capacity=10,
        )
    )
    model.optimize()
    assert model.inner.objVal == 2.0

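The new test above exercises the pattern used throughout this commit: a scenario generator produces a list of data objects, and a build_*_model function turns one data object into a solvable model. Below is a minimal usage sketch of that workflow, assembled only from the calls exercised in test_binpack.py (the loop and print are illustrative, not part of the commit):

import numpy as np
from scipy.stats import randint, uniform
from miplearn.problems.binpack import BinPackGenerator, build_binpack_model

np.random.seed(42)
gen = BinPackGenerator(
    n=randint(low=10, high=11),          # number of items per instance
    sizes=uniform(loc=0, scale=10),      # item sizes
    capacity=uniform(loc=100, scale=0),  # bin capacity
    sizes_jitter=uniform(loc=0.9, scale=0.2),
    capacity_jitter=uniform(loc=0.9, scale=0.2),
    fix_items=True,                      # perturb a single fixed base instance
)
for data in gen.generate(5):             # five correlated instances
    model = build_binpack_model(data)    # build the MIP for one instance
    model.optimize()                     # solve it
    print(data.capacity, model.inner.objVal)  # objective = number of bins used
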
@@ -1,39 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

import numpy as np
from scipy.stats import uniform, randint

from miplearn import LearningSolver
from miplearn.problems.knapsack import MultiKnapsackGenerator, MultiKnapsackInstance


def test_knapsack_generator() -> None:
    gen = MultiKnapsackGenerator(
        n=randint(low=100, high=101),
        m=randint(low=30, high=31),
        w=randint(low=0, high=1000),
        K=randint(low=500, high=501),
        u=uniform(loc=1.0, scale=1.0),
        alpha=uniform(loc=0.50, scale=0.0),
    )
    data = gen.generate(100)
    w_sum = sum(d.weights for d in data) / len(data)
    b_sum = sum(d.capacities for d in data) / len(data)
    assert round(float(np.mean(w_sum)), -1) == 500.0
    assert round(float(np.mean(b_sum)), -3) == 25000.0


def test_knapsack() -> None:
    data = MultiKnapsackGenerator(
        n=randint(low=5, high=6),
        m=randint(low=5, high=6),
    ).generate(1)
    instance = MultiKnapsackInstance(
        prices=data[0].prices,
        capacities=data[0].capacities,
        weights=data[0].weights,
    )
    solver = LearningSolver()
    solver._solve(instance)

tests/problems/test_multiknapsack.py (Normal file, 61 lines)
@@ -0,0 +1,61 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

import numpy as np
from scipy.stats import uniform, randint

from miplearn.problems.multiknapsack import (
    MultiKnapsackGenerator,
    MultiKnapsackData,
    build_multiknapsack_model,
)


def test_knapsack_generator() -> None:
    np.random.seed(42)
    gen = MultiKnapsackGenerator(
        n=randint(low=5, high=6),
        m=randint(low=3, high=4),
        w=randint(low=0, high=1000),
        K=randint(low=500, high=501),
        u=uniform(loc=0.0, scale=1.0),
        alpha=uniform(loc=0.25, scale=0.0),
        fix_w=True,
        w_jitter=uniform(loc=0.9, scale=0.2),
        p_jitter=uniform(loc=0.9, scale=0.2),
        round=True,
    )
    data = gen.generate(2)
    assert data[0].prices.tolist() == [433.0, 477.0, 802.0, 494.0, 458.0]
    assert data[0].capacities.tolist() == [458.0, 357.0, 392.0]
    assert data[0].weights.tolist() == [
        [111.0, 392.0, 945.0, 276.0, 108.0],
        [64.0, 633.0, 20.0, 602.0, 110.0],
        [510.0, 203.0, 303.0, 469.0, 85.0],
    ]

    assert data[1].prices.tolist() == [344.0, 527.0, 658.0, 519.0, 460.0]
    assert data[1].capacities.tolist() == [449.0, 377.0, 380.0]
    assert data[1].weights.tolist() == [
        [92.0, 473.0, 871.0, 264.0, 96.0],
        [67.0, 664.0, 21.0, 628.0, 129.0],
        [436.0, 209.0, 309.0, 481.0, 86.0],
    ]


def test_knapsack_model() -> None:
    data = MultiKnapsackData(
        prices=np.array([344.0, 527.0, 658.0, 519.0, 460.0]),
        capacities=np.array([449.0, 377.0, 380.0]),
        weights=np.array(
            [
                [92.0, 473.0, 871.0, 264.0, 96.0],
                [67.0, 664.0, 21.0, 628.0, 129.0],
                [436.0, 209.0, 309.0, 481.0, 86.0],
            ]
        ),
    )
    model = build_multiknapsack_model(data)
    model.optimize()
    assert model.inner.objVal == -460.0

tests/problems/test_pmedian.py (Normal file, 53 lines)
@@ -0,0 +1,53 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

import numpy as np
from scipy.stats import uniform, randint

from miplearn.problems.pmedian import PMedianGenerator, build_pmedian_model


def test_pmedian() -> None:
    np.random.seed(42)
    gen = PMedianGenerator(
        x=uniform(loc=0.0, scale=100.0),
        y=uniform(loc=0.0, scale=100.0),
        n=randint(low=5, high=6),
        p=randint(low=2, high=3),
        demands=uniform(loc=0, scale=20),
        capacities=uniform(loc=0, scale=100),
        distances_jitter=uniform(loc=0.95, scale=0.1),
        demands_jitter=uniform(loc=0.95, scale=0.1),
        capacities_jitter=uniform(loc=0.95, scale=0.1),
        fixed=True,
    )
    data = gen.generate(2)

    assert data[0].p == 2
    assert data[0].demands.tolist() == [0.41, 19.4, 16.65, 4.25, 3.64]
    assert data[0].capacities.tolist() == [18.34, 30.42, 52.48, 43.19, 29.12]
    assert data[0].distances.tolist() == [
        [0.0, 50.17, 82.42, 32.76, 33.2],
        [50.17, 0.0, 72.64, 72.51, 17.06],
        [82.42, 72.64, 0.0, 71.69, 70.92],
        [32.76, 72.51, 71.69, 0.0, 56.56],
        [33.2, 17.06, 70.92, 56.56, 0.0],
    ]

    assert data[1].p == 2
    assert data[1].demands.tolist() == [0.42, 19.03, 16.68, 4.27, 3.53]
    assert data[1].capacities.tolist() == [19.2, 31.26, 54.79, 44.9, 29.41]
    assert data[1].distances.tolist() == [
        [0.0, 51.6, 83.31, 33.77, 31.95],
        [51.6, 0.0, 70.25, 71.09, 17.05],
        [83.31, 70.25, 0.0, 68.81, 67.62],
        [33.77, 71.09, 68.81, 0.0, 58.88],
        [31.95, 17.05, 67.62, 58.88, 0.0],
    ]

    model = build_pmedian_model(data[0])
    assert model.inner.numVars == 30
    assert model.inner.numConstrs == 11
    model.optimize()
    assert round(model.inner.objVal) == 107

tests/problems/test_setcover.py (Normal file, 91 lines)
@@ -0,0 +1,91 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

from tempfile import NamedTemporaryFile

import numpy as np
from scipy.stats import randint, uniform

from miplearn.h5 import H5File
from miplearn.problems.setcover import (
    SetCoverData,
    build_setcover_model_gurobipy,
    SetCoverGenerator,
    build_setcover_model_pyomo,
)


def test_set_cover_generator() -> None:
    np.random.seed(42)
    gen = SetCoverGenerator(
        n_elements=randint(low=3, high=4),
        n_sets=randint(low=5, high=6),
        costs=uniform(loc=0.0, scale=100.0),
        costs_jitter=uniform(loc=0.95, scale=0.10),
        density=uniform(loc=0.5, scale=0),
        K=uniform(loc=25, scale=0),
        fix_sets=False,
    )
    data = gen.generate(2)

    assert data[0].costs.round(1).tolist() == [136.8, 86.2, 25.7, 27.3, 102.5]
    assert data[0].incidence_matrix.tolist() == [
        [1, 0, 1, 0, 1],
        [1, 1, 0, 0, 0],
        [1, 0, 0, 1, 1],
    ]
    assert data[1].costs.round(1).tolist() == [63.5, 76.6, 48.1, 74.1, 93.3]
    assert data[1].incidence_matrix.tolist() == [
        [1, 1, 0, 1, 1],
        [0, 1, 0, 1, 0],
        [0, 1, 1, 0, 0],
    ]


def test_set_cover_generator_with_fixed_sets() -> None:
    np.random.seed(42)
    gen = SetCoverGenerator(
        n_elements=randint(low=3, high=4),
        n_sets=randint(low=5, high=6),
        costs=uniform(loc=0.0, scale=100.0),
        costs_jitter=uniform(loc=0.95, scale=0.10),
        density=uniform(loc=0.5, scale=0.00),
        fix_sets=True,
    )
    data = gen.generate(3)

    assert data[0].costs.tolist() == [136.75, 86.17, 25.71, 27.31, 102.48]
    assert data[1].costs.tolist() == [135.38, 82.26, 26.92, 26.58, 98.28]
    assert data[2].costs.tolist() == [138.37, 85.15, 26.95, 27.22, 106.17]

    print(data[0].incidence_matrix)

    for i in range(3):
        assert data[i].incidence_matrix.tolist() == [
            [1, 0, 1, 0, 1],
            [1, 1, 0, 0, 0],
            [1, 0, 0, 1, 1],
        ]


def test_set_cover() -> None:
    data = SetCoverData(
        costs=np.array([5, 10, 12, 6, 8]),
        incidence_matrix=np.array(
            [
                [1, 0, 0, 1, 0],
                [1, 1, 0, 0, 0],
                [0, 0, 1, 1, 1],
            ],
        ),
    )
    for model in [
        build_setcover_model_pyomo(data),
        build_setcover_model_gurobipy(data),
    ]:
        with NamedTemporaryFile() as tempfile:
            with H5File(tempfile.name) as h5:
                model.optimize()
                model.extract_after_mip(h5)
                assert h5.get_scalar("mip_obj_value") == 11.0

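Both set-cover model builders above are checked through the same HDF5 round trip: the model is optimized, extract_after_mip dumps the solver output into an H5File, and the assertion reads the objective back with get_scalar("mip_obj_value"). The sketch below condenses that round trip for a freshly generated instance; it reuses only calls that appear in this test file, and the printed value is simply whatever the solver returns:

from tempfile import NamedTemporaryFile

import numpy as np
from scipy.stats import randint, uniform

from miplearn.h5 import H5File
from miplearn.problems.setcover import SetCoverGenerator, build_setcover_model_gurobipy

np.random.seed(42)
data = SetCoverGenerator(
    n_elements=randint(low=3, high=4),
    n_sets=randint(low=5, high=6),
    costs=uniform(loc=0.0, scale=100.0),
    costs_jitter=uniform(loc=0.95, scale=0.10),
    density=uniform(loc=0.5, scale=0),
    K=uniform(loc=25, scale=0),
    fix_sets=False,
).generate(1)[0]

model = build_setcover_model_gurobipy(data)
with NamedTemporaryFile() as tempfile:
    with H5File(tempfile.name) as h5:
        model.optimize()                        # solve the instance
        model.extract_after_mip(h5)             # write solution data into the HDF5 file
        print(h5.get_scalar("mip_obj_value"))   # read the objective back
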
tests/problems/test_setpack.py (Normal file, 26 lines)
@@ -0,0 +1,26 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

import numpy as np

from miplearn.problems.setpack import (
    SetPackData,
    build_setpack_model,
)


def test_setpack() -> None:
    data = SetPackData(
        costs=np.array([5, 10, 12, 6, 8]),
        incidence_matrix=np.array(
            [
                [1, 0, 0, 1, 0],
                [1, 1, 0, 0, 0],
                [0, 0, 1, 1, 1],
            ],
        ),
    )
    model = build_setpack_model(data)
    model.optimize()
    assert model.inner.objval == -22.0

@@ -1,53 +1,30 @@
 # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
-# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
+# Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
 # Released under the modified BSD license. See COPYING.md for more details.
+from tempfile import NamedTemporaryFile

 import networkx as nx
 import numpy as np
-from scipy.stats import uniform, randint

-from miplearn.problems.stab import MaxWeightStableSetInstance
-from miplearn.solvers.learning import LearningSolver
+from miplearn.h5 import H5File
+from miplearn.problems.stab import (
+    MaxWeightStableSetData,
+    build_stab_model_pyomo,
+    build_stab_model_gurobipy,
+)


 def test_stab() -> None:
-    graph = nx.cycle_graph(5)
-    weights = np.array([1.0, 1.0, 1.0, 1.0, 1.0])
-    instance = MaxWeightStableSetInstance(graph, weights)
-    solver = LearningSolver()
-    stats = solver._solve(instance)
-    assert stats["mip_lower_bound"] == 2.0
-
-
-def test_stab_generator_fixed_graph() -> None:
-    np.random.seed(42)
-    from miplearn.problems.stab import MaxWeightStableSetGenerator
-
-    gen = MaxWeightStableSetGenerator(
-        w=uniform(loc=50.0, scale=10.0),
-        n=randint(low=10, high=11),
-        p=uniform(loc=0.05, scale=0.0),
-        fix_graph=True,
+    data = MaxWeightStableSetData(
+        graph=nx.cycle_graph(5),
+        weights=np.array([1.0, 1.0, 1.0, 1.0, 1.0]),
     )
-    data = gen.generate(1_000)
-    weights = np.array([d.weights for d in data])
-    weights_avg_actual = np.round(np.average(weights, axis=0))
-    weights_avg_expected = [55.0] * 10
-    assert list(weights_avg_actual) == weights_avg_expected
-
-
-def test_stab_generator_random_graph() -> None:
-    np.random.seed(42)
-    from miplearn.problems.stab import MaxWeightStableSetGenerator
-
-    gen = MaxWeightStableSetGenerator(
-        w=uniform(loc=50.0, scale=10.0),
-        n=randint(low=30, high=41),
-        p=uniform(loc=0.5, scale=0.0),
-        fix_graph=False,
-    )
-    data = gen.generate(1_000)
-    n_nodes = [d.graph.number_of_nodes() for d in data]
-    n_edges = [d.graph.number_of_edges() for d in data]
-    assert np.round(np.mean(n_nodes)) == 35.0
-    assert np.round(np.mean(n_edges), -1) == 300.0
+    for model in [
+        build_stab_model_pyomo(data),
+        build_stab_model_gurobipy(data),
+    ]:
+        with NamedTemporaryFile() as tempfile:
+            with H5File(tempfile.name) as h5:
+                model.optimize()
+                model.extract_after_mip(h5)
+                assert h5.get_scalar("mip_obj_value") == -2.0

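The test_stab.py rewrite above is representative of the API change in this release: the Instance/LearningSolver pair is replaced by a plain data class plus explicit model builders, with results read back through the HDF5 interface. A rough before/after sketch, restricted to calls visible in the old and new versions of this file (variable names are illustrative):

# Before (removed): wrap the problem in an Instance and solve through LearningSolver.
#     instance = MaxWeightStableSetInstance(graph, weights)
#     stats = LearningSolver()._solve(instance)
#     assert stats["mip_lower_bound"] == 2.0
#
# After (added): build a concrete model from a data object and solve it directly.
import networkx as nx
import numpy as np
from miplearn.problems.stab import MaxWeightStableSetData, build_stab_model_gurobipy

data = MaxWeightStableSetData(
    graph=nx.cycle_graph(5),
    weights=np.array([1.0, 1.0, 1.0, 1.0, 1.0]),
)
model = build_stab_model_gurobipy(data)
model.optimize()  # the test above expects an objective value of -2.0 for this instance
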
@@ -1,100 +1,72 @@
 # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
-# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
+# Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
 # Released under the modified BSD license. See COPYING.md for more details.
-import json

 import numpy as np
-from numpy.linalg import norm
+from miplearn.problems.tsp import (
+    TravelingSalesmanData,
+    TravelingSalesmanGenerator,
+    build_tsp_model,
+)
 from scipy.spatial.distance import pdist, squareform
+from scipy.stats import uniform, randint

-from miplearn.problems.tsp import TravelingSalesmanGenerator, TravelingSalesmanInstance
-from miplearn.solvers.learning import LearningSolver
-from miplearn.solvers.tests import assert_equals
-from scipy.stats import randint, uniform


-def test_generator() -> None:
-    data = TravelingSalesmanGenerator(
+def test_tsp_generator() -> None:
+    np.random.seed(42)
+    gen = TravelingSalesmanGenerator(
         x=uniform(loc=0.0, scale=1000.0),
         y=uniform(loc=0.0, scale=1000.0),
-        n=randint(low=100, high=101),
-        gamma=uniform(loc=0.95, scale=0.1),
+        n=randint(low=3, high=4),
+        gamma=uniform(loc=1.0, scale=0.25),
         fix_cities=True,
-    ).generate(100)
-    assert len(data) == 100
-    assert data[0].n_cities == 100
-    assert norm(data[0].distances - data[0].distances.T) < 1e-6
-    d = [d.distances[0, 1] for d in data]
-    assert np.std(d) > 0
-
-
-def test_instance() -> None:
-    n_cities = 4
-    distances = np.array(
-        [
-            [0.0, 1.0, 2.0, 1.0],
-            [1.0, 0.0, 1.0, 2.0],
-            [2.0, 1.0, 0.0, 1.0],
-            [1.0, 2.0, 1.0, 0.0],
-        ]
+        round=True,
     )
-    instance = TravelingSalesmanInstance(n_cities, distances)
-    solver = LearningSolver()
-    solver._solve(instance)
-    assert len(instance.get_samples()) == 1
-    sample = instance.get_samples()[0]
-    assert_equals(sample.get_array("mip_var_values"), [1.0, 0.0, 1.0, 1.0, 0.0, 1.0])
-    assert sample.get_scalar("mip_lower_bound") == 4.0
-    assert sample.get_scalar("mip_upper_bound") == 4.0
+    data = gen.generate(2)
+    assert data[0].distances.tolist() == [
+        [0.0, 591.0, 996.0],
+        [591.0, 0.0, 765.0],
+        [996.0, 765.0, 0.0],
+    ]
+    assert data[1].distances.tolist() == [
+        [0.0, 556.0, 853.0],
+        [556.0, 0.0, 779.0],
+        [853.0, 779.0, 0.0],
+    ]


-def test_subtour() -> None:
-    n_cities = 6
-    cities = np.array(
-        [
-            [0.0, 0.0],
-            [1.0, 0.0],
-            [2.0, 0.0],
-            [3.0, 0.0],
-            [0.0, 1.0],
-            [3.0, 1.0],
-        ]
+def test_tsp() -> None:
+    data = TravelingSalesmanData(
+        n_cities=6,
+        distances=squareform(
+            pdist(
+                [
+                    [0.0, 0.0],
+                    [1.0, 0.0],
+                    [2.0, 0.0],
+                    [3.0, 0.0],
+                    [0.0, 1.0],
+                    [3.0, 1.0],
+                ]
+            )
+        ),
     )
-    distances = squareform(pdist(cities))
-    instance = TravelingSalesmanInstance(n_cities, distances)
-    solver = LearningSolver()
-    solver._solve(instance)
-    samples = instance.get_samples()
-    assert len(samples) == 1
-    sample = samples[0]
-
-    lazy_encoded = sample.get_scalar("mip_constr_lazy")
-    assert lazy_encoded is not None
-    lazy = json.loads(lazy_encoded)
-    assert lazy == {
-        "st[0,1,4]": [0, 1, 4],
-        "st[2,3,5]": [2, 3, 5],
-    }
-
-    assert_equals(
-        sample.get_array("mip_var_values"),
-        [
-            1.0,
-            0.0,
-            0.0,
-            1.0,
-            0.0,
-            1.0,
-            0.0,
-            0.0,
-            0.0,
-            1.0,
-            0.0,
-            0.0,
-            0.0,
-            1.0,
-            1.0,
-        ],
-    )
-    solver._fit([instance])
-    solver._solve(instance)
+    model = build_tsp_model(data)
+    model.optimize()
+    assert model.inner.getAttr("x", model.inner.getVars()) == [
+        1.0,
+        0.0,
+        0.0,
+        1.0,
+        0.0,
+        1.0,
+        0.0,
+        0.0,
+        0.0,
+        1.0,
+        0.0,
+        0.0,
+        0.0,
+        1.0,
+        1.0,
+    ]

tests/problems/test_uc.py (Normal file, 71 lines)
@@ -0,0 +1,71 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

import numpy as np
from scipy.stats import uniform, randint

from miplearn.problems.uc import (
    UnitCommitmentData,
    build_uc_model,
    UnitCommitmentGenerator,
)


def test_generator() -> None:
    np.random.seed(42)
    gen = UnitCommitmentGenerator(
        n_units=randint(low=3, high=4),
        n_periods=randint(low=4, high=5),
        max_power=uniform(loc=50, scale=450),
        min_power=uniform(loc=0.25, scale=0.5),
        cost_startup=uniform(loc=1, scale=1),
        cost_prod=uniform(loc=1, scale=1),
        cost_fixed=uniform(loc=1, scale=1),
        min_uptime=randint(low=1, high=8),
        min_downtime=randint(low=1, high=8),
        cost_jitter=uniform(loc=0.75, scale=0.5),
        demand_jitter=uniform(loc=0.9, scale=0.2),
        fix_units=True,
    )
    data = gen.generate(2)

    assert data[0].demand.tolist() == [430.3, 518.65, 448.16, 860.61]
    assert data[0].min_power.tolist() == [120.05, 156.73, 124.44]
    assert data[0].max_power.tolist() == [218.54, 477.82, 379.4]
    assert data[0].min_uptime.tolist() == [3, 3, 5]
    assert data[0].min_downtime.tolist() == [4, 3, 6]
    assert data[0].cost_startup.tolist() == [1.06, 1.72, 1.94]
    assert data[0].cost_prod.tolist() == [1.0, 1.99, 1.62]
    assert data[0].cost_fixed.tolist() == [1.61, 1.01, 1.02]

    assert data[1].demand.tolist() == [407.3, 476.18, 458.77, 840.38]
    assert data[1].min_power.tolist() == [120.05, 156.73, 124.44]
    assert data[1].max_power.tolist() == [218.54, 477.82, 379.4]
    assert data[1].min_uptime.tolist() == [3, 3, 5]
    assert data[1].min_downtime.tolist() == [4, 3, 6]
    assert data[1].cost_startup.tolist() == [1.32, 1.69, 2.29]
    assert data[1].cost_prod.tolist() == [1.09, 1.94, 1.23]
    assert data[1].cost_fixed.tolist() == [1.97, 1.04, 0.96]


def test_uc() -> None:
    data = UnitCommitmentData(
        demand=np.array([10, 12, 15, 10, 8, 5]),
        min_power=np.array([5, 5, 10]),
        max_power=np.array([10, 8, 20]),
        min_uptime=np.array([4, 3, 2]),
        min_downtime=np.array([4, 3, 2]),
        cost_startup=np.array([100, 120, 200]),
        cost_prod=np.array([1.0, 1.25, 1.5]),
        cost_fixed=np.array([10, 12, 9]),
    )
    model = build_uc_model(data)
    model.optimize()
    assert model.inner.objVal == 154.5


if __name__ == "__main__":
    data = UnitCommitmentGenerator().generate(1)[0]
    model = build_uc_model(data)
    model.optimize()

tests/problems/test_vertexcover.py (Normal file, 21 lines)
@@ -0,0 +1,21 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2022, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.

import networkx as nx
import numpy as np

from miplearn.problems.vertexcover import (
    MinWeightVertexCoverData,
    build_vertexcover_model,
)


def test_stab() -> None:
    data = MinWeightVertexCoverData(
        graph=nx.cycle_graph(5),
        weights=np.array([1.0, 1.0, 1.0, 1.0, 1.0]),
    )
    model = build_vertexcover_model(data)
    model.optimize()
    assert model.inner.objVal == 3.0