Remove obsolete methods

master
Alinson S. Xavier 4 years ago
parent e9fbf4a3c7
commit b5c45966d3

2
deps/build.jl vendored

@ -5,7 +5,7 @@ function install_miplearn()
Conda.update()
pip = joinpath(dirname(pyimport("sys").executable), "pip")
isfile(pip) || error("$pip: invalid path")
run(`$pip install miplearn==0.2.0.dev5`)
run(`$pip install miplearn==0.2.0.dev6`)
end
install_miplearn()

@ -15,11 +15,9 @@ export @category
miplearn = pyimport("miplearn")
include("utils/log.jl")
include("utils/pycall.jl")
include("modeling/jump_instance.jl")
include("modeling/jump_solver.jl")
include("modeling/learning_solver.jl")
include("modeling/macros.jl")
include("problems/knapsack.jl")
end # module

@ -22,12 +22,12 @@ end
"""
optimize_and_capture_output!(model; tee=tee)
_optimize_and_capture_output!(model; tee=tee)
Optimizes a given JuMP model while capturing the solver log, then returns that log.
If tee=true, prints the solver log to the standard output as the optimization takes place.
"""
function optimize_and_capture_output!(model; tee::Bool=false)
function _optimize_and_capture_output!(model; tee::Bool=false)
logname = tempname()
logfile = open(logname, "w")
redirect_stdout(logfile) do
@ -44,6 +44,42 @@ function optimize_and_capture_output!(model; tee::Bool=false)
end
# Refresh the solution caches in `data` after a solve.
#
# Always stores the primal value of every variable (in `JuMP.all_variables`
# order) into `data.solution`. When the solver exposes duals (e.g. after an
# LP solve), additionally fills:
#   * `data.reduced_costs` — per-variable reduced cost, assembled from the
#     shadow prices of the variable's upper-bound, lower-bound and fixing
#     constraints;
#   * `data.dual_values`  — dual value for every structural constraint,
#     keyed by constraint reference.
# When no duals are available, both fields are reset to `nothing`.
function _update_solution!(data::JuMPSolverData)
    vars = JuMP.all_variables(data.model)
    data.solution = [JuMP.value(var) for var in vars]
    # Reduced costs: combine shadow prices of whichever bound constraints
    # exist for each variable.
    if has_duals(data.model)
        data.reduced_costs = []
        for var in vars
            rc = 0.0
            if has_upper_bound(var)
                rc += shadow_price(UpperBoundRef(var))
            end
            if has_lower_bound(var)
                # FIXME: Remove negative sign
                # NOTE(review): sign convention flagged by the original
                # author; `shadow_price` sign differs from `dual` for
                # min/max problems — confirm before changing.
                rc -= shadow_price(LowerBoundRef(var))
            end
            if is_fixed(var)
                rc += shadow_price(FixRef(var))
            end
            push!(data.reduced_costs, rc)
        end
        data.dual_values = Dict()
        for (ftype, stype) in JuMP.list_of_constraint_types(data.model)
            for constr in JuMP.all_constraints(data.model, ftype, stype)
                # FIXME: Remove negative sign
                data.dual_values[constr] = -JuMP.dual(constr)
            end
        end
    else
        # No duals (e.g. after a MIP solve): mark both caches as absent.
        data.reduced_costs = nothing
        data.dual_values = nothing
    end
end
function solve(
data::JuMPSolverData;
tee::Bool=false,
@ -53,7 +89,7 @@ function solve(
wallclock_time = 0
log = ""
while true
log *= optimize_and_capture_output!(model, tee=tee)
log *= _optimize_and_capture_output!(model, tee=tee)
wallclock_time += JuMP.solve_time(model)
if iteration_cb !== nothing
iteration_cb() || break
@ -61,7 +97,7 @@ function solve(
break
end
end
update_solution!(data)
_update_solution!(data)
primal_bound = JuMP.objective_value(model)
dual_bound = JuMP.objective_bound(model)
if JuMP.objective_sense(model) == MOI.MIN_SENSE
@ -93,9 +129,9 @@ function solve_lp(data::JuMPSolverData; tee::Bool=false)
JuMP.set_lower_bound(var, 0.0)
end
wallclock_time = @elapsed begin
log = optimize_and_capture_output!(model, tee=tee)
log = _optimize_and_capture_output!(model, tee=tee)
end
update_solution!(data)
_update_solution!(data)
obj_value = JuMP.objective_value(model)
for var in bin_vars
JuMP.set_binary(var)
@ -108,42 +144,6 @@ function solve_lp(data::JuMPSolverData; tee::Bool=false)
end
# Obsolete duplicate of `_update_solution!` (the un-prefixed name this commit
# removes). Behavior is identical: cache primal values for all variables and,
# when duals are available, reduced costs and constraint duals.
function update_solution!(data::JuMPSolverData)
    vars = JuMP.all_variables(data.model)
    data.solution = [JuMP.value(var) for var in vars]
    # Reduced costs
    if has_duals(data.model)
        data.reduced_costs = []
        for var in vars
            rc = 0.0
            if has_upper_bound(var)
                rc += shadow_price(UpperBoundRef(var))
            end
            if has_lower_bound(var)
                # FIXME: Remove negative sign
                rc -= shadow_price(LowerBoundRef(var))
            end
            if is_fixed(var)
                rc += shadow_price(FixRef(var))
            end
            push!(data.reduced_costs, rc)
        end
        data.dual_values = Dict()
        for (ftype, stype) in JuMP.list_of_constraint_types(data.model)
            for constr in JuMP.all_constraints(data.model, ftype, stype)
                # FIXME: Remove negative sign
                data.dual_values[constr] = -JuMP.dual(constr)
            end
        end
    else
        data.reduced_costs = nothing
        data.dual_values = nothing
    end
end
function set_instance!(data::JuMPSolverData, instance, model)
data.instance = instance
data.model = model
@ -188,39 +188,11 @@ function set_warm_start!(data::JuMPSolverData, solution)
end
# Return the name of every variable in the wrapped JuMP model, in
# `JuMP.all_variables` order.
function get_variable_names(data::JuMPSolverData)
    all_vars = JuMP.all_variables(data.model)
    return map(JuMP.name, all_vars)
end
# Report whether the most recent solve terminated with an INFEASIBLE status.
function is_infeasible(data::JuMPSolverData)
    status = JuMP.termination_status(data.model)
    return status == MOI.INFEASIBLE
end
# Return the names of all constraints registered in `data.cname_to_constr`.
#
# Uses `collect(keys(...))` instead of the original identity comprehension
# `[cname for cname in keys(...)]` — same `Vector` result, idiomatic form.
function get_constraint_ids(data::JuMPSolverData)
    return collect(keys(data.cname_to_constr))
end
# Look up the constraint registered under `cname` and delegate to the
# type-specialized `get_constraint_rhs` method for its right-hand side.
function get_constraint_rhs(data::JuMPSolverData, cname)
    return get_constraint_rhs(data.cname_to_constr[cname])
end
# Look up the constraint registered under `cname` and delegate to the
# type-specialized `get_constraint_lhs` method for its coefficient map.
function get_constraint_lhs(data::JuMPSolverData, cname)
    return get_constraint_lhs(data.cname_to_constr[cname])
end
# Look up the constraint registered under `cname` and delegate to the
# type-specialized `get_constraint_sense` method ("<", ">" or "=").
function get_constraint_sense(data::JuMPSolverData, cname)
    return get_constraint_sense(data.cname_to_constr[cname])
end
function get_variables(
data::JuMPSolverData;
with_static::Bool,
@ -366,168 +338,6 @@ function get_constraints(
end
# Constraints: ScalarAffineFunction, LessThan
# -------------------------------------------------------------------------
# RHS of an affine `a'x <= b` constraint: the `upper` field of its
# `MOI.LessThan` set.
function get_constraint_rhs(
    constr::ConstraintRef{
        Model,
        MathOptInterface.ConstraintIndex{
            MathOptInterface.ScalarAffineFunction{T},
            MathOptInterface.LessThan{T},
        },
        ScalarShape,
    },
)::T where T
    moi_set = MOI.get(
        constr.model.moi_backend,
        MOI.ConstraintSet(),
        constr.index,
    )
    return moi_set.upper
end
# Build a `Dict` mapping variable name => coefficient for the affine
# function of `constr`, queried directly from the MOI backend.
function _terms_dict(constr)
    backend = constr.model.moi_backend
    affine_terms = MOI.get(
        backend,
        MOI.ConstraintFunction(),
        constr.index,
    ).terms
    name_of(term) = MOI.get(backend, MOI.VariableName(), term.variable_index)
    return Dict(name_of(term) => term.coefficient for term in affine_terms)
end
# LHS (variable name => coefficient) of an affine `a'x <= b` constraint;
# delegates to `_terms_dict`.
function get_constraint_lhs(
    constr::ConstraintRef{
        Model,
        MathOptInterface.ConstraintIndex{
            MathOptInterface.ScalarAffineFunction{T},
            MathOptInterface.LessThan{T},
        },
        ScalarShape,
    },
)::Dict{String, T} where T
    return _terms_dict(constr)
end
# Sense string for a `LessThan` constraint: always "<".
function get_constraint_sense(
    constr::ConstraintRef{
        Model,
        MathOptInterface.ConstraintIndex{
            MathOptInterface.ScalarAffineFunction{T},
            MathOptInterface.LessThan{T},
        },
        ScalarShape,
    },
)::String where T
    return "<"
end
# Constraints: ScalarAffineFunction, GreaterThan
# -------------------------------------------------------------------------
# RHS of an affine `a'x >= b` constraint: the `lower` field of its
# `MOI.GreaterThan` set.
function get_constraint_rhs(
    constr::ConstraintRef{
        Model,
        MathOptInterface.ConstraintIndex{
            MathOptInterface.ScalarAffineFunction{T},
            MathOptInterface.GreaterThan{T},
        },
        ScalarShape,
    },
)::T where T
    moi_set = MOI.get(
        constr.model.moi_backend,
        MOI.ConstraintSet(),
        constr.index,
    )
    return moi_set.lower
end
# LHS (variable name => coefficient) of an affine `a'x >= b` constraint;
# delegates to `_terms_dict`.
function get_constraint_lhs(
    constr::ConstraintRef{
        Model,
        MathOptInterface.ConstraintIndex{
            MathOptInterface.ScalarAffineFunction{T},
            MathOptInterface.GreaterThan{T},
        },
        ScalarShape,
    },
)::Dict{String, T} where T
    return _terms_dict(constr)
end
# Sense string for a `GreaterThan` constraint: always ">".
function get_constraint_sense(
    constr::ConstraintRef{
        Model,
        MathOptInterface.ConstraintIndex{
            MathOptInterface.ScalarAffineFunction{T},
            MathOptInterface.GreaterThan{T},
        },
        ScalarShape,
    },
)::String where T
    return ">"
end
# Constraints: ScalarAffineFunction, EqualTo
# -------------------------------------------------------------------------
# RHS of an affine `a'x == b` constraint: the `value` field of its
# `MOI.EqualTo` set.
function get_constraint_rhs(
    constr::ConstraintRef{
        Model,
        MathOptInterface.ConstraintIndex{
            MathOptInterface.ScalarAffineFunction{T},
            MathOptInterface.EqualTo{T},
        },
        ScalarShape,
    },
)::T where T
    moi_set = MOI.get(
        constr.model.moi_backend,
        MOI.ConstraintSet(),
        constr.index,
    )
    return moi_set.value
end
# LHS (variable name => coefficient) of an affine `a'x == b` constraint;
# delegates to `_terms_dict`.
function get_constraint_lhs(
    constr::ConstraintRef{
        Model,
        MathOptInterface.ConstraintIndex{
            MathOptInterface.ScalarAffineFunction{T},
            MathOptInterface.EqualTo{T},
        },
        ScalarShape,
    },
)::Dict{String, T} where T
    return _terms_dict(constr)
end
# Sense string for an `EqualTo` constraint: always "=".
function get_constraint_sense(
    constr::ConstraintRef{
        Model,
        MathOptInterface.ConstraintIndex{
            MathOptInterface.ScalarAffineFunction{T},
            MathOptInterface.EqualTo{T},
        },
        ScalarShape,
    },
)::String where T
    return "="
end
# Test instances
# ---------------------------------------------
function build_test_instance_knapsack()
weights = [23.0, 26.0, 20.0, 18.0]
prices = [505.0, 352.0, 458.0, 220.0]
@ -559,68 +369,55 @@ end
)
end
set_warm_start(self, solution) =
set_warm_start!(self.data, solution)
add_constraints(self, cf) =
error("not implemented")
are_constraints_satisfied(self, cf; tol=1e-5) =
error("not implemented")
build_test_instance_infeasible(self) =
error("not implemented")
build_test_instance_knapsack(self) =
build_test_instance_knapsack()
# FIXME: Actually clone instead of returning self
clone(self) = self
fix(self, solution) =
fix!(self.data, solution)
set_instance(self, instance, model) =
set_instance!(self.data, instance, model)
solve(
get_solution(self) =
self.data.solution
get_constraints(
self;
tee=false,
iteration_cb=nothing,
lazy_cb=nothing,
user_cut_cb=nothing,
) = solve(
with_static=true,
with_sa=true,
with_lhs=true,
) = get_constraints(
self.data,
tee=tee,
iteration_cb=iteration_cb,
with_static=with_static,
)
solve_lp(self; tee=false) =
solve_lp(self.data, tee=tee)
get_solution(self) =
self.data.solution
get_constraint_attrs(self) = [
# "basis_status",
"categories",
"dual_values",
"lazy",
"lhs",
"names",
"rhs",
# "sa_rhs_down",
# "sa_rhs_up",
"senses",
# "slacks",
"user_features",
]
get_variables(self; with_static=true) =
get_variables(self.data; with_static=with_static)
set_branching_priorities(self, priorities) =
@warn "JuMPSolver: set_branching_priorities not implemented"
add_constraint(self, constraint) =
nothing
get_variable_names(self) =
get_variable_names(self.data)
is_infeasible(self) =
is_infeasible(self.data)
get_constraints(self; with_static=true) =
get_constraints(self.data; with_static=with_static)
get_constraint_ids(self) =
get_constraint_ids(self.data)
get_constraint_rhs(self, cname) =
get_constraint_rhs(self.data, cname)
get_constraint_lhs(self, cname) =
get_constraint_lhs(self.data, cname)
get_constraint_sense(self, cname) =
get_constraint_sense(self.data, cname)
build_test_instance_knapsack(self) =
build_test_instance_knapsack()
clone(self) = self
get_variable_attrs(self) = [
"names",
# "basis_status",
@ -640,34 +437,33 @@ end
"values",
]
get_constraint_attrs(self) = [
# "basis_status",
"categories",
"dual_values",
"lazy",
"lhs",
"names",
"rhs",
# "sa_rhs_down",
# "sa_rhs_up",
"senses",
# "slacks",
"user_features",
]
is_infeasible(self) =
is_infeasible(self.data)
add_cut(self) = error("not implemented")
extract_constraint(self) = error("not implemented")
is_constraint_satisfied(self) = error("not implemented")
set_constraint_sense(self) = error("not implemented")
relax(self) = error("not implemented")
get_inequality_slacks(self) = error("not implemented")
get_dual(self) = error("not implemented")
get_sense(self) = error("not implemented")
build_test_instance_infeasible(self) = error("not implemented")
build_test_instance_redundancy(self) = error("not implemented")
get_constraints_old(self) = error("not implemented")
is_constraint_satisfied_old(self) = error("not implemented")
remove_constraint(self) = error("not implemented")
remove_constraints(self, names) =
error("not implemented")
set_instance(self, instance, model) =
set_instance!(self.data, instance, model)
set_warm_start(self, solution) =
set_warm_start!(self.data, solution)
solve(
self;
tee=false,
iteration_cb=nothing,
lazy_cb=nothing,
user_cut_cb=nothing,
) = solve(
self.data,
tee=tee,
iteration_cb=iteration_cb,
)
solve_lp(self; tee=false) =
solve_lp(self.data, tee=tee)
end
export JuMPSolver, solve!, fit!, add!

@ -1,25 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using JuMP
# Build a 0-1 knapsack JuMP model (no optimizer attached).
#
# Variables `x[0:(n-1)]` are binary item-selection indicators; the objective
# maximizes total price, and constraint `eq_capacity` keeps total weight
# within `capacity`. Indexing is 0-based to match the Python-side instance.
#
# Arguments:
#   * `weights`  — item weights, length n
#   * `prices`   — item prices, must have the same length as `weights`
#   * `capacity` — knapsack capacity
#
# Throws `DimensionMismatch` when `weights` and `prices` differ in length
# (previously a mismatch surfaced as a BoundsError inside the macros).
function knapsack_model(
    weights::Array{Float64, 1},
    prices::Array{Float64, 1},
    capacity::Float64,
)
    n = length(weights)
    length(prices) == n || throw(DimensionMismatch(
        "weights has $n entries but prices has $(length(prices))"
    ))
    model = Model()
    @variable(model, x[0:(n-1)], Bin)
    @objective(model, Max, sum(x[i] * prices[i+1] for i in 0:(n-1)))
    @constraint(
        model,
        eq_capacity,
        sum(
            x[i] * weights[i+1]
            for i in 0:(n-1)
        ) <= capacity,
    )
    return model
end
Loading…
Cancel
Save