Replace tuples; make it work with plain JuMP models

master
Alinson S. Xavier 4 years ago
parent 241d1d4157
commit f94eb598c9

deps/build.jl vendored

@@ -5,7 +5,7 @@ function install_miplearn()
     Conda.update()
     pip = joinpath(dirname(pyimport("sys").executable), "pip")
     isfile(pip) || error("$pip: invalid path")
-    run(`$pip install miplearn==0.2.0.dev8`)
+    run(`$pip install miplearn==0.2.0.dev9`)
 end
 install_miplearn()

@@ -7,6 +7,7 @@ using JuMP
 @pydef mutable struct PyJuMPInstance <: miplearn.Instance
     function __init__(self, model)
+        init_miplearn_ext(model)
         self.model = model
         self.samples = []
     end
@@ -22,25 +23,25 @@ using JuMP
     function get_variable_features(self, var_name)
         model = self.model
         v = variable_by_name(model, var_name)
-        return model.ext[:miplearn][:variable_features][v]
+        return get(model.ext[:miplearn][:variable_features], v, [0.0])
     end
     function get_variable_category(self, var_name)
         model = self.model
         v = variable_by_name(model, var_name)
-        return model.ext[:miplearn][:variable_categories][v]
+        return get(model.ext[:miplearn][:variable_categories], v, "default")
     end
     function get_constraint_features(self, cname)
         model = self.model
         c = constraint_by_name(model, cname)
-        return model.ext[:miplearn][:constraint_features][c]
+        return get(model.ext[:miplearn][:constraint_features], c, [0.0])
     end
     function get_constraint_category(self, cname)
         model = self.model
         c = constraint_by_name(model, cname)
-        return model.ext[:miplearn][:constraint_categories][c]
+        return get(model.ext[:miplearn][:constraint_categories], c, "default")
     end
 end
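
The switch to the three-argument `get` is what allows models without annotations to work: a missing key falls back to a default feature vector or category instead of raising a `KeyError`. A minimal sketch of that fallback behavior (dictionary contents are hypothetical):

    features = Dict("x[1]" => [1.0, 2.0])
    get(features, "x[1]", [0.0])   # => [1.0, 2.0]  (annotated key)
    get(features, "x[2]", [0.0])   # => [0.0]       (unannotated key, default)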
@@ -50,7 +51,8 @@ struct JuMPInstance
 end
-function JuMPInstance(model::Model)
+function JuMPInstance(model)
+    model isa Model || error("model should be a JuMP.Model. Found $(typeof(model)) instead.")
     return JuMPInstance(PyJuMPInstance(model))
 end
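
With the looser signature above, any plain JuMP model can be wrapped, and the explicit `isa` check keeps a clear error for everything else. A minimal usage sketch (assuming MIPLearn.jl and its Python backend are installed):

    using JuMP, MIPLearn

    model = Model()
    @variable(model, x, Bin)
    @objective(model, Max, x)
    instance = JuMPInstance(model)   # plain model, no annotations required

    # JuMPInstance(42) would now raise:
    # "model should be a JuMP.Model. Found Int64 instead."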

@@ -316,49 +316,49 @@ function get_variables(
     values, rc = nothing, nothing
     # Variable names
-    names = Tuple(JuMP.name.(vars))
+    names = JuMP.name.(vars)
     # Primal values
     if !isempty(data.solution)
-        values = Tuple([data.solution[v] for v in vars])
+        values = [data.solution[v] for v in vars]
     end
     if with_static
         # Lower bounds
-        lb = Tuple(
+        lb = [
             JuMP.is_binary(v) ? 0.0 :
             JuMP.has_lower_bound(v) ? JuMP.lower_bound(v) :
             -Inf
             for v in vars
-        )
+        ]
         # Upper bounds
-        ub = Tuple(
+        ub = [
             JuMP.is_binary(v) ? 1.0 :
             JuMP.has_upper_bound(v) ? JuMP.upper_bound(v) :
             Inf
             for v in vars
-        )
+        ]
         # Variable types
-        types = Tuple(
+        types = [
             JuMP.is_binary(v) ? "B" :
             JuMP.is_integer(v) ? "I" :
             "C"
             for v in vars
-        )
+        ]
         # Objective function coefficients
         obj = objective_function(data.model)
-        obj_coeffs = Tuple(
+        obj_coeffs = [
             v ∈ keys(obj.terms) ? obj.terms[v] : 0.0
             for v in vars
-        )
+        ]
     end
-    rc = isempty(data.reduced_costs) ? nothing : Tuple(data.reduced_costs)
-    return miplearn.features.VariableFeatures(
+    rc = isempty(data.reduced_costs) ? nothing : data.reduced_costs
+    vf = miplearn.features.VariableFeatures(
         names=names,
         lower_bounds=lb,
         upper_bounds=ub,
@@ -367,6 +367,7 @@ function get_variables(
         reduced_costs=rc,
         values=values,
     )
+    return vf
 end
@@ -406,7 +407,7 @@ function get_constraints(
     if ftype == JuMP.AffExpr
         push!(
             lhs,
-            Tuple(
+            [
                 (
                     MOI.get(
                         constr.model.moi_backend,
@@ -420,7 +421,7 @@ function get_constraints(
                         MOI.ConstraintFunction(),
                         constr.index,
                     ).terms
-            )
+            ]
         )
         if stype == MOI.EqualTo{Float64}
             push!(senses, "=")
@@ -441,17 +442,12 @@ function get_constraints(
         end
     end
-    function to_tuple(x)
-        x !== nothing || return nothing
-        return Tuple(x)
-    end
     return miplearn.features.ConstraintFeatures(
-        names=to_tuple(names),
-        senses=to_tuple(senses),
-        lhs=to_tuple(lhs),
-        rhs=to_tuple(rhs),
-        dual_values=to_tuple(dual_values),
+        names=names,
+        senses=senses,
+        lhs=lhs,
+        rhs=rhs,
+        dual_values=dual_values,
     )
 end
@@ -471,23 +467,35 @@ end
     )
 end
-add_constraints(self, cf) =
+function add_constraints(self, cf)
+    lhs = cf.lhs
+    if lhs isa Matrix
+        # Undo incorrect automatic conversion performed by PyCall
+        lhs = [col[:] for col in eachcol(lhs)]
+    end
     add_constraints(
         self.data,
-        lhs=[[term for term in constr] for constr in cf.lhs],
-        rhs=[r for r in cf.rhs],
-        senses=[s for s in cf.senses],
-        names=[n for n in cf.names],
+        lhs=lhs,
+        rhs=cf.rhs,
+        senses=cf.senses,
+        names=cf.names,
     )
+end
-are_constraints_satisfied(self, cf; tol=1e-5) =
-    tuple(are_constraints_satisfied(
+function are_constraints_satisfied(self, cf; tol=1e-5)
+    lhs = cf.lhs
+    if lhs isa Matrix
+        # Undo incorrect automatic conversion performed by PyCall
+        lhs = [col[:] for col in eachcol(lhs)]
+    end
+    return are_constraints_satisfied(
         self.data,
-        lhs=[[term for term in constr] for constr in cf.lhs],
-        rhs=[r for r in cf.rhs],
-        senses=[s for s in cf.senses],
+        lhs=lhs,
+        rhs=cf.rhs,
+        senses=cf.senses,
         tol=tol,
-    )...)
+    )
+end
 build_test_instance_infeasible(self) =
     build_test_instance_infeasible()
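
The `eachcol` idiom above rebuilds a vector of per-column coefficient vectors from a `Matrix`. A small self-contained illustration (example values only):

    lhs = [1.0 3.0; 2.0 4.0]                 # 2×2 Matrix, as PyCall may deliver it
    lhs = [col[:] for col in eachcol(lhs)]   # => [[1.0, 2.0], [3.0, 4.0]]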

@@ -5,6 +5,7 @@
 function init_miplearn_ext(model)::Dict
     if :miplearn ∉ keys(model.ext)
         model.ext[:miplearn] = Dict{Symbol, Any}()
+        model.ext[:miplearn][:instance_features] = [0.0]
         model.ext[:miplearn][:variable_features] = Dict{VariableRef, Vector{Float64}}()
         model.ext[:miplearn][:variable_categories] = Dict{VariableRef, String}()
         model.ext[:miplearn][:constraint_features] = Dict{ConstraintRef, Vector{Float64}}()

@@ -6,13 +6,14 @@ using JuMP
 using MIPLearn
 using Gurobi
-@testset "macros" begin
+@testset "LearningSolver" begin
+    @testset "model with annotations" begin
+        # Create standard JuMP model
         weights = [1.0, 2.0, 3.0]
         prices = [5.0, 6.0, 7.0]
         capacity = 3.0
-        # Create standard JuMP model
         model = Model()
         n = length(weights)
         @variable(model, x[1:n], Bin)
         @objective(model, Max, sum(x[i] * prices[i] for i in 1:n))
@@ -46,3 +47,14 @@ using Gurobi
         fit!(solver, [instance])
         solve!(solver, instance)
     end
+    @testset "plain model" begin
+        model = Model()
+        @variable(model, x, Bin)
+        @variable(model, y, Bin)
+        @objective(model, Max, x + y)
+        solver = LearningSolver(Gurobi.Optimizer)
+        instance = JuMPInstance(model)
+        stats = solve!(solver, instance)
+    end
+end
