Make simple examples work

Branch: master
Author: Alinson S. Xavier
Parent: fcb536a657
Commit: 241d1d4157

@@ -156,7 +156,7 @@ solver = LearningSolver(
)
```
-### 2.2 Adjusting component aggresiveness
+### 2.2 Adjusting component aggressiveness
```julia
using MIPLearn

deps/build.jl (vendored)

@@ -5,7 +5,7 @@ function install_miplearn()
    Conda.update()
    pip = joinpath(dirname(pyimport("sys").executable), "pip")
    isfile(pip) || error("$pip: invalid path")
-    run(`$pip install miplearn==0.2.0.dev6`)
+    run(`$pip install miplearn==0.2.0.dev8`)
end
install_miplearn()
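After a rebuild, the pinned version can be double-checked from the same Conda environment; a small sketch reusing the pip lookup from build.jl:

```julia
using PyCall

# Same pip that build.jl uses (the one next to Conda's python executable).
pip = joinpath(dirname(pyimport("sys").executable), "pip")
run(`$pip show miplearn`)   # the output should list "Version: 0.2.0.dev8"
```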

@@ -15,6 +15,7 @@ export @category
miplearn = pyimport("miplearn")
include("utils/log.jl")
include("utils/exceptions.jl")
include("modeling/jump_instance.jl")
include("modeling/jump_solver.jl")
include("modeling/learning_solver.jl")

@@ -2,51 +2,57 @@
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
-@pydef mutable struct JuMPInstance <: miplearn.Instance
+using JuMP
+@pydef mutable struct PyJuMPInstance <: miplearn.Instance
    function __init__(self, model)
        self.model = model
        # init_miplearn_ext(model)
        # features = model.ext[:miplearn][:features]
        # # Copy training data
        # training_data = []
        # for sample in self.model.ext[:miplearn][:training_samples]
        #     pysample = miplearn.TrainingSample()
        #     pysample.__dict__ = sample
        #     push!(training_data, pysample)
        # end
        # self.training_data = training_data
        # # Copy features to data classes
        # self.features = miplearn.Features(
        #     instance=miplearn.InstanceFeatures(
        #         user_features=PyCall.array2py(
        #             features[:instance][:user_features],
        #         ),
        #         lazy_constraint_count=0,
        #     ),
        #     variables=Dict(
        #         varname => miplearn.VariableFeatures(
        #             category=vfeatures[:category],
        #             user_features=PyCall.array2py(
        #                 vfeatures[:user_features],
        #             ),
        #         )
        #         for (varname, vfeatures) in features[:variables]
        #     ),
        #     constraints=Dict(
        #         cname => miplearn.ConstraintFeatures(
        #             category=cfeat[:category],
        #             user_features=PyCall.array2py(
        #                 cfeat[:user_features],
        #             ),
        #         )
        #         for (cname, cfeat) in features[:constraints]
        #     ),
        # )
        self.samples = []
    end
    function to_model(self)
        return self.model
    end
    function get_instance_features(self)
        return self.model.ext[:miplearn][:instance_features]
    end
    function get_variable_features(self, var_name)
        model = self.model
        v = variable_by_name(model, var_name)
        return model.ext[:miplearn][:variable_features][v]
    end
    function get_variable_category(self, var_name)
        model = self.model
        v = variable_by_name(model, var_name)
        return model.ext[:miplearn][:variable_categories][v]
    end
    function get_constraint_features(self, cname)
        model = self.model
        c = constraint_by_name(model, cname)
        return model.ext[:miplearn][:constraint_features][c]
    end
    function get_constraint_category(self, cname)
        model = self.model
        c = constraint_by_name(model, cname)
        return model.ext[:miplearn][:constraint_categories][c]
    end
end
struct JuMPInstance
    py::PyCall.PyObject
end
function JuMPInstance(model::Model)
    return JuMPInstance(PyJuMPInstance(model))
end
export JuMPInstance
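All of the callbacks above read from `model.ext[:miplearn]`, so that dictionary must be filled in before the instance crosses to Python (the commented-out `init_miplearn_ext(model)` call hints at the helper that normally does this). A minimal hand-rolled sketch with made-up feature values:

```julia
using JuMP
using MIPLearn

model = Model()
@variable(model, x, Bin)
@objective(model, Max, x)
@constraint(model, c1, x <= 1)

# Keys match exactly what the get_* callbacks above look up;
# the feature vectors themselves are invented for illustration.
model.ext[:miplearn] = Dict(
    :instance_features => [5.0],
    :variable_features => Dict(x => [1.0, 2.0]),
    :variable_categories => Dict(x => "default"),
    :constraint_features => Dict(c1 => [3.0]),
    :constraint_categories => Dict(c1 => "c1"),
)

instance = JuMPInstance(model)   # wraps a PyJuMPInstance around the model
```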

@@ -148,7 +148,7 @@ function build_test_instance_knapsack()
    @objective(model, Max, sum(x[i-1] * prices[i] for i in 1:n))
    @constraint(model, eq_capacity, sum(x[i-1] * weights[i] for i in 1:n) - z == 0)
-    return JuMPInstance(model)
+    return PyJuMPInstance(model)
end
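Only the tail of this builder is visible in the hunk. For orientation, a plausible reconstruction of the whole function; the data values and variable declarations are guesses, and only the objective, constraint, and return lines are confirmed by the diff:

```julia
function build_test_instance_knapsack()
    # Assumed test data; not shown in the hunk.
    weights = [23.0, 26.0, 20.0, 18.0]
    prices = [505.0, 352.0, 458.0, 220.0]
    capacity = 67.0
    n = length(weights)
    model = Model()
    @variable(model, x[0:n-1], Bin)       # 0-based names match the x[i-1] uses below
    @variable(model, 0 <= z <= capacity)
    @objective(model, Max, sum(x[i-1] * prices[i] for i in 1:n))
    @constraint(model, eq_capacity, sum(x[i-1] * weights[i] for i in 1:n) - z == 0)
    return PyJuMPInstance(model)
end
```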
@@ -157,7 +157,7 @@ function build_test_instance_infeasible()
    @variable(model, x, Bin)
    @objective(model, Max, x)
    @constraint(model, x >= 2)
-    return JuMPInstance(model)
+    return PyJuMPInstance(model)
end
@@ -528,8 +528,11 @@ end
"user_features",
]
get_variables(self; with_static=true) =
get_variables(self.data; with_static=with_static)
get_variables(
self;
with_static=true,
with_sa=true,
) = get_variables(self.data; with_static=with_static)
get_variable_attrs(self) = [
"names",

@@ -6,27 +6,21 @@ struct LearningSolver
    py::PyCall.PyObject
end
-function LearningSolver(
-    ;
-    optimizer,
-)::LearningSolver
-    py = miplearn.LearningSolver(solver=JuMPSolver(optimizer=optimizer))
+function LearningSolver(optimizer_factory)::LearningSolver
+    py = miplearn.LearningSolver(solver=JuMPSolver(optimizer_factory))
    return LearningSolver(py)
end
-function solve!(solver::LearningSolver, model::Model)
-    instance = JuMPInstance(model)
-    mip_stats = solver.py.solve(instance)
-    push!(
-        model.ext[:miplearn][:training_samples],
-        instance.training_data[1].__dict__,
-    )
-    return mip_stats
+function solve!(solver::LearningSolver, instance::JuMPInstance)
+    return @python_call solver.py.solve(instance.py)
end
-function fit!(solver::LearningSolver, models::Array{Model})
-    instances = [JuMPInstance(m) for m in models]
-    solver.py.fit(instances)
+function fit!(solver::LearningSolver, instances::Vector{JuMPInstance})
+    @python_call solver.py.fit([instance.py for instance in instances])
end
-export LearningSolver
+export LearningSolver, solve!, fit!
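Put together, the reworked API now looks like this (mirroring the updated test further down; `Cbc.Optimizer` stands in for any JuMP optimizer factory, and `build_model()` is a placeholder for your own model construction):

```julia
using JuMP
using MIPLearn
using Cbc

model = build_model()                # placeholder: any annotated JuMP model
instance = JuMPInstance(model)

solver = LearningSolver(Cbc.Optimizer)
stats = solve!(solver, instance)     # first solve also collects a training sample
fit!(solver, [instance])             # train the ML components on those samples
solve!(solver, instance)             # solve again with trained components
```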

@@ -0,0 +1,21 @@
+# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
+# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
+# Released under the modified BSD license. See COPYING.md for more details.
+using PyCall
+traceback = pyimport("traceback")
+macro python_call(expr)
+    quote
+        try
+            return $(esc(expr))
+        catch e
+            if isa(e, PyCall.PyError)
+                printstyled("Uncaught Python exception:\n", bold=true, color=:red)
+                traceback.print_exception(e.T, e.val, e.traceback)
+            end
+            rethrow()
+        end
+    end
+end
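The macro prints a readable Python traceback before rethrowing, which is otherwise buried inside a bare `PyCall.PyError`. A behavioral sketch — note the `return` in the expansion means the macro must sit inside a function, and it is not exported, so this applies within the package:

```julia
# Inside the MIPLearn module, where @python_call is in scope:
function py_import(name)
    # On success this returns the Python module; on PyError it prints
    # "Uncaught Python exception:" with the traceback, then rethrows.
    @python_call pyimport(name)
end

py_import("math")             # returns the Python math module
py_import("no_such_module")   # prints the traceback, then throws PyError
```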

@@ -4,7 +4,7 @@
using JuMP
using MIPLearn
-using Cbc
+using Gurobi

@testset "macros" begin
    weights = [1.0, 2.0, 3.0]
@@ -38,19 +38,11 @@ using Cbc
    @test model.ext[:miplearn][:constraint_categories][c1] == "c1"
    @test model.ext[:miplearn][:instance_features] == [5.0]
-    # solver = LearningSolver(optimizer=Cbc.Optimizer)
-    # # Should return correct stats
-    # stats = solve!(solver, model)
-    # @test stats["Lower bound"] == 11.0
-    # # Should add a sample to the training data
-    # @test length(model.ext[:miplearn][:training_samples]) == 1
-    # sample = model.ext[:miplearn][:training_samples][1]
-    # @test sample["lower_bound"] == 11.0
-    # @test sample["solution"]["x[1]"] == 1.0
-    # fit!(solver, [model])
-    # solve!(solver, model)
+    solver = LearningSolver(Gurobi.Optimizer)
+    instance = JuMPInstance(model)
+    stats = solve!(solver, instance)
+    @test stats["mip_lower_bound"] == 11.0
+    @test length(instance.py.samples) == 1
+    fit!(solver, [instance])
+    solve!(solver, instance)
end

@@ -8,6 +8,6 @@ using MIPLearn
MIPLearn.setup_logger()
@testset "MIPLearn" begin
-    # include("modeling/jump_solver_test.jl")
+    include("modeling/jump_solver_test.jl")
    include("modeling/learning_solver_test.jl")
end
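With jump_solver_test.jl re-enabled, both modeling test files run through the standard Pkg workflow:

```julia
# From the repository root:
using Pkg
Pkg.activate(".")
Pkg.test()   # runs test/runtests.jl, which now includes both test files
```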
