mirror of
https://github.com/ANL-CEEESA/MIPLearn.git
synced 2025-12-06 01:18:52 -06:00
Initial version
This commit is contained in:
3
miplearn/__init__.py
Normal file
3
miplearn/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
# MIPLearn: A Machine-Learning Framework for Mixed-Integer Optimization
|
||||
# Copyright (C) 2019-2020 Argonne National Laboratory. All rights reserved.
|
||||
# Written by Alinson S. Xavier <axavier@anl.gov>
|
||||
41
miplearn/core.py
Normal file
41
miplearn/core.py
Normal file
@@ -0,0 +1,41 @@
|
||||
# MIPLearn: A Machine-Learning Framework for Mixed-Integer Optimization
|
||||
# Copyright (C) 2019-2020 Argonne National Laboratory. All rights reserved.
|
||||
# Written by Alinson S. Xavier <axavier@anl.gov>
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
class Parameters(ABC):
    """
    Abstract base class for the data that distinguishes one relevant instance
    of the problem from another.

    In the knapsack problem, for example, a subclass could hold the number of
    items, their weights and costs, as well as the size of the knapsack.
    Objects implementing this class can convert themselves into a concrete
    optimization model, which can be solved by a MIP solver, or into a
    1-dimensional numpy array, which can be given to a machine learning model.
    """

    @abstractmethod
    def to_model(self):
        """Convert the parameters into a concrete optimization model."""

    @abstractmethod
    def to_array(self):
        """
        Convert the parameters into a 1-dimensional numerical array.

        The array is used by the LearningEnhancedSolver to determine how
        similar two instances are. After some normalization or embedding, it
        may also serve as input to the machine learning models.

        There is not necessarily a one-to-one correspondence between
        parameters and arrays: the array may encode only part of the data
        needed to generate a concrete optimization model, and its entries may
        be reductions of the original data. In the knapsack problem, for
        example, an implementation may encode only the average weights, the
        average prices and the size of the knapsack. This technique can
        guarantee that arrays corresponding to instances of different sizes
        share the same dimension.
        """
|
||||
25
miplearn/solvers.py
Normal file
25
miplearn/solvers.py
Normal file
@@ -0,0 +1,25 @@
|
||||
# MIPLearn: A Machine-Learning Framework for Mixed-Integer Optimization
|
||||
# Copyright (C) 2019-2020 Argonne National Laboratory. All rights reserved.
|
||||
# Written by Alinson S. Xavier <axavier@anl.gov>
|
||||
|
||||
import pyomo.environ as pe
|
||||
|
||||
class LearningSolver:
    """
    LearningSolver is a Mixed-Integer Linear Programming (MIP) solver that
    uses information from previous runs to accelerate the solution of new,
    unseen instances.
    """

    def __init__(self, internal_solver="cplex_persistent", threads=4):
        """
        Initialize the solver.

        Parameters
        ----------
        internal_solver : str
            Name of the persistent Pyomo solver used to solve each model.
            Defaults to "cplex_persistent", which preserves the original
            hard-coded behavior.
        threads : int
            Number of threads made available to the internal solver.
        """
        self.parent_solver = pe.SolverFactory(internal_solver)
        self.parent_solver.options["threads"] = threads

    def solve(self, params):
        """
        Solve the optimization problem represented by the given parameters.

        The model is built via params.to_model(), loaded into the persistent
        internal solver, and solved with solver output echoed (tee=True).

        NOTE(review): an earlier docstring claimed the parameters and the
        obtained solution are recorded, but no recording happens here yet --
        confirm intended behavior before relying on it.
        """
        model = params.to_model()
        self.parent_solver.set_instance(model)
        self.parent_solver.solve(tee=True)
|
||||
|
||||
51
miplearn/test_stab.py
Normal file
51
miplearn/test_stab.py
Normal file
@@ -0,0 +1,51 @@
|
||||
# MIPLearn: A Machine-Learning Framework for Mixed-Integer Optimization
|
||||
# Copyright (C) 2019-2020 Argonne National Laboratory. All rights reserved.
|
||||
# Written by Alinson S. Xavier <axavier@anl.gov>
|
||||
|
||||
from .solvers import LearningSolver
|
||||
from .core import Parameters
|
||||
import numpy as np
|
||||
import pyomo.environ as pe
|
||||
import networkx as nx
|
||||
|
||||
|
||||
class MaxStableSetGenerator:
    """Generates random instances of the Maximum Stable Set (MSS) Problem."""

    def __init__(self, n_vertices, density=0.1, seed=42):
        """
        Build the fixed random graph and base vertex weights.

        Parameters
        ----------
        n_vertices : int
            Number of vertices in the generated graph.
        density : float
            Edge probability of the binomial (Erdos-Renyi) random graph.
        seed : int
            Seed controlling both the graph topology and the weights.
        """
        # Bug fix: previously only the graph was seeded, while the base
        # weights (and the per-instance perturbations) came from the global
        # numpy RNG, so two generators built with the same seed produced
        # different instances. A dedicated RandomState makes the generator
        # fully reproducible.
        self.rng = np.random.RandomState(seed)
        self.graph = nx.generators.random_graphs.binomial_graph(n_vertices, density, seed)
        self.base_weights = self.rng.rand(self.graph.number_of_nodes()) * 10

    def generate(self):
        """Return a new MaxStableSetParameters with slightly perturbed weights."""
        perturbation = self.rng.rand(self.graph.number_of_nodes()) * 0.1
        return MaxStableSetParameters(self.graph, self.base_weights + perturbation)
|
||||
|
||||
|
||||
class MaxStableSetParameters(Parameters):
    """
    Parameters describing one instance of the Maximum Stable Set problem:
    a graph plus one weight per vertex.
    """

    def __init__(self, graph, weights):
        # graph: a networkx graph; weights: 1-d numpy array indexed by vertex.
        self.graph = graph
        self.weights = weights

    def to_model(self):
        """
        Build a Pyomo model for this instance: maximize the total weight of
        the selected vertices, subject to no edge having both of its
        endpoints selected.
        """
        nodes = list(self.graph.nodes)
        # Fix: the original bound the model to two names ("model = m = ...")
        # and materialized an "edges" list; both were unnecessary.
        m = pe.ConcreteModel()
        m.x = pe.Var(nodes, domain=pe.Binary)
        m.OBJ = pe.Objective(
            rule=lambda m: sum(m.x[v] * self.weights[v] for v in nodes),
            sense=pe.maximize,
        )
        m.edge_eqs = pe.ConstraintList()
        for v, w in self.graph.edges:
            m.edge_eqs.add(m.x[v] + m.x[w] <= 1)
        return m

    def to_array(self):
        """Return the vertex weights as the machine-learning feature array."""
        return self.weights
|
||||
|
||||
|
||||
def test_stab():
    """Smoke test: solve several perturbed MSS instances with one solver."""
    generator = MaxStableSetGenerator(n_vertices=100)
    # Bug fix: the solver was previously re-created on every iteration,
    # which discards any information gathered from earlier instances and
    # defeats the stated purpose of LearningSolver.
    solver = LearningSolver()
    for _ in range(5):
        params = generator.generate()
        solver.solve(params)
|
||||
Reference in New Issue
Block a user