diff --git a/src/instance/file.jl b/src/instance/file.jl
index e01043b..0a1fb32 100644
--- a/src/instance/file.jl
+++ b/src/instance/file.jl
@@ -2,72 +2,66 @@
 # Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
 # Released under the modified BSD license. See COPYING.md for more details.
 
+import Base: flush
 
-function __init_PyFileInstance__()
-    @pydef mutable struct Class <: miplearn.Instance
-        function __init__(self, filename)
-            self.filename = filename
-            self.loaded = nothing
-            self.samples = nothing
-        end
-
-        function to_model(self)
-            return self.loaded.py.to_model()
-        end
-
-        function get_instance_features(self)
-            return self.loaded.py.get_instance_features()
-        end
-
-        function get_variable_features(self, var_name)
-            return self.loaded.py.get_variable_features(var_name)
-        end
-
-        function get_variable_category(self, var_name)
-            return self.loaded.py.get_variable_category(var_name)
-        end
-
-        function get_constraint_features(self, cname)
-            return self.loaded.py.get_constraint_features(cname)
-        end
+mutable struct FileInstance <: Instance
+    py::Union{Nothing,PyCall.PyObject}
+    loaded::Union{Nothing, JuMPInstance}
+    filename::AbstractString
 
-        function get_constraint_category(self, cname)
-            return self.loaded.py.get_constraint_category(cname)
-        end
+    function FileInstance(filename::String)::FileInstance
+        instance = new(nothing, nothing, filename)
+        instance.py = PyFileInstance(instance)
+        return instance
+    end
+end
 
-        function load(self)
-            if self.loaded === nothing
-                self.loaded = load_instance(self.filename)
-                self.samples = self.loaded.py.samples
-            end
-        end
+to_model(instance::FileInstance) = to_model(instance.loaded)
+get_instance_features(instance::FileInstance) = get_instance_features(instance.loaded)
+get_variable_features(instance::FileInstance) = get_variable_features(instance.loaded)
+get_variable_categories(instance::FileInstance) = get_variable_categories(instance.loaded)
+get_constraint_features(instance::FileInstance) = get_constraint_features(instance.loaded)
+get_samples(instance::FileInstance) = get_samples(instance.loaded)
+push_sample!(instance::FileInstance, sample::PyCall.PyObject) = push_sample!(instance.loaded, sample)
 
-        function free(self)
-            self.loaded = nothing
-            self.samples = nothing
-        end
+function get_constraint_categories(instance::FileInstance)
+    return get_constraint_categories(instance.loaded)
+end
 
-        function flush(self)
-            self.loaded.py.samples = self.samples
-            save(self.filename, self.loaded)
-        end
+function load(instance::FileInstance)
+    if instance.loaded === nothing
+        instance.loaded = load_instance(instance.filename)
     end
-    copy!(PyFileInstance, Class)
 end
 
-struct FileInstance <: Instance
-    py::PyCall.PyObject
-    filename::AbstractString
+function free(instance::FileInstance)
+    instance.loaded.samples = []
+    instance.loaded = nothing
+    GC.gc()
 end
-
-function FileInstance(filename)::FileInstance
-    filename isa AbstractString || error("filename should be a string. Found $(typeof(filename)) instead.")
-    return FileInstance(
-        PyFileInstance(filename),
-        filename,
-    )
+function flush(instance::FileInstance)
+    save(instance.filename, instance.loaded)
 end
 
+function __init_PyFileInstance__()
+    @pydef mutable struct Class <: miplearn.Instance
+        function __init__(self, jl)
+            self.jl = jl
+        end
+        to_model(self) = to_model(self.jl)
+        get_instance_features(self) = get_instance_features(self.jl)
+        get_variable_features(self) = get_variable_features(self.jl)
+        get_variable_categories(self) = get_variable_categories(self.jl)
+        get_constraint_features(self) = get_constraint_features(self.jl)
+        get_constraint_categories(self) = get_constraint_categories(self.jl)
+        get_samples(self) = get_samples(self.jl)
+        push_sample(self, sample) = push_sample!(self.jl, sample)
+        load(self) = load(self.jl)
+        free(self) = free(self.jl)
+        flush(self) = flush(self.jl)
+    end
+    copy!(PyFileInstance, Class)
+end
 
 export FileInstance
diff --git a/src/instance/jump.jl b/src/instance/jump.jl
index 4930f78..74d0ba5 100644
--- a/src/instance/jump.jl
+++ b/src/instance/jump.jl
@@ -5,85 +5,88 @@
 using JuMP
 using JLD2
 
-function __init_PyJuMPInstance__()
-    @pydef mutable struct Class <: miplearn.Instance
-        function __init__(self, model)
-            init_miplearn_ext(model)
-            self.model = model
-            self.samples = []
-        end
-
-        function to_model(self)
-            return self.model
-        end
-
-        function get_instance_features(self)
-            return self.model.ext[:miplearn][:instance_features]
-        end
-
-        function get_variable_features(self, var_name)
-            model = self.model
-            return get(model.ext[:miplearn][:variable_features], var_name, nothing)
-        end
-
-        function get_variable_category(self, var_name)
-            model = self.model
-            return get(model.ext[:miplearn][:variable_categories], var_name, nothing)
-        end
-
-        function get_constraint_features(self, cname)
-            model = self.model
-            return get(model.ext[:miplearn][:constraint_features], cname, nothing)
-        end
+mutable struct JuMPInstance <: Instance
+    py::Union{Nothing,PyCall.PyObject}
+    model::Union{Nothing,JuMP.Model}
+    mps::Union{Nothing,Vector{UInt8}}
+    ext::AbstractDict
+    samples::Vector{PyCall.PyObject}
+
+    function JuMPInstance(model::JuMP.Model)
+        init_miplearn_ext(model)
+        instance = new(nothing, model, nothing, model.ext[:miplearn], [])
+        py = PyJuMPInstance(instance)
+        instance.py = py
+        return instance
+    end
 
-        function get_constraint_category(self, cname)
-            model = self.model
-            return get(model.ext[:miplearn][:constraint_categories], cname, nothing)
-        end
+    function JuMPInstance(mps::Vector{UInt8}, ext::AbstractDict)
+        "instance_features" in keys(ext) || error("provided ext is not initialized")
+        instance = new(nothing, nothing, mps, ext, [])
+        instance.py = PyJuMPInstance(instance)
+        return instance
     end
-    copy!(PyJuMPInstance, Class)
 end
 
-struct JuMPInstance <: Instance
-    py::PyCall.PyObject
-    model::Model
+function to_model(instance::JuMPInstance)::JuMP.Model
+    if instance.model === nothing
+        mps_filename = "$(tempname()).mps.gz"
+        write(mps_filename, instance.mps)
+        instance.model = read_from_file(mps_filename)
+        instance.model.ext[:miplearn] = instance.ext
+    end
+    return instance.model
 end
 
+get_instance_features(instance::JuMPInstance) = instance.ext["instance_features"]
+get_variable_features(instance::JuMPInstance) = instance.ext["variable_features"]
+get_variable_categories(instance::JuMPInstance) = instance.ext["variable_categories"]
+get_constraint_features(instance::JuMPInstance) = instance.ext["constraint_features"]
+get_constraint_categories(instance::JuMPInstance) =
+    instance.ext["constraint_categories"]
+get_samples(instance::JuMPInstance) = instance.samples
 
-function JuMPInstance(model)
-    model isa Model || error("model should be a JuMP.Model. Found $(typeof(model)) instead.")
-    return JuMPInstance(
-        PyJuMPInstance(model),
-        model,
-    )
+function push_sample!(instance::JuMPInstance, sample::PyCall.PyObject)
+    push!(instance.samples, sample)
 end
 
+function __init_PyJuMPInstance__()
+    @pydef mutable struct Class <: miplearn.Instance
+        function __init__(self, jl)
+            self.jl = jl
+        end
+        to_model(self) = to_model(self.jl)
+        get_instance_features(self) = get_instance_features(self.jl)
+        get_variable_features(self) = get_variable_features(self.jl)
+        get_variable_categories(self) = get_variable_categories(self.jl)
+        get_constraint_features(self,) = get_constraint_features(self.jl)
+        get_constraint_categories(self) = get_constraint_categories(self.jl)
+        get_samples(self) = get_samples(self.jl)
+        push_sample(self, sample) = push_sample!(self.jl, sample)
+    end
+    copy!(PyJuMPInstance, Class)
+end
 
 function save(filename::AbstractString, instance::JuMPInstance)::Nothing
-    @info "Writing: $filename"
-    time = @elapsed begin
-        # Convert JuMP model to MPS
-        mps_filename = "$(tempname()).mps.gz"
-        write_to_file(instance.model, mps_filename)
-        mps = read(mps_filename)
-
-        # Pickle instance.py.samples. Ideally, we would use dumps and loads, but this
-        # causes some issues with PyCall, probably due to automatic type conversions.
-        py_samples_filename = tempname()
-        miplearn.write_pickle_gz(instance.py.samples, py_samples_filename, quiet=true)
-        py_samples = read(py_samples_filename)
-
-        # Generate JLD2 file
-        jldsave(
-            filename;
-            miplearn_version="0.2",
-            mps=mps,
-            ext=instance.model.ext[:miplearn],
-            py_samples=py_samples,
-        )
-    end
-    @info @sprintf("File written in %.2f seconds", time)
+    # Convert JuMP model to MPS
+    mps_filename = "$(tempname()).mps.gz"
+    model = instance.py.to_model()
+    write_to_file(model, mps_filename)
+    mps = read(mps_filename)
+
+    # Pickle instance.py.samples. Ideally, we would use dumps and loads, but this
+    # causes some issues with PyCall, probably due to automatic type conversions.
+    samples_filename = tempname()
+    miplearn.write_pickle_gz(instance.samples, samples_filename)
+    samples = read(samples_filename)
+
+    # Generate JLD2 file
+    jldsave(
+        filename;
+        miplearn_version="0.2",
+        mps=mps,
+        ext=model.ext[:miplearn],
+        samples=samples,
+    )
     return
 end
@@ -97,32 +100,15 @@ function _check_miplearn_version(file)
     )
 end
 
-
 function load_instance(filename::AbstractString)::JuMPInstance
-    @info "Reading: $filename"
-    instance = nothing
-    time = @elapsed begin
-        jldopen(filename, "r") do file
-            _check_miplearn_version(file)
-
-            # Convert MPS to JuMP
-            mps_filename = "$(tempname()).mps.gz"
-            write(mps_filename, file["mps"])
-            model = read_from_file(mps_filename)
-            model.ext[:miplearn] = file["ext"]
-
-            # Unpickle instance.py.samples
-            py_samples_filename = tempname()
-            write(py_samples_filename, file["py_samples"])
-            py_samples = miplearn.read_pickle_gz(py_samples_filename, quiet=true)
-
-            instance = JuMPInstance(model)
-            instance.py.samples = py_samples
-        end
+    jldopen(filename, "r") do file
+        _check_miplearn_version(file)
+        instance = JuMPInstance(file["mps"], file["ext"])
+        samples_filename = tempname()
+        write(samples_filename, file["samples"])
+        @time instance.samples = miplearn.read_pickle_gz(samples_filename)
+        return instance
     end
-    @info @sprintf("File read in %.2f seconds", time)
-    return instance
 end
 
-
 export JuMPInstance, save, load_instance
diff --git a/src/solvers/jump.jl b/src/solvers/jump.jl
index 643b0f5..83f1726 100644
--- a/src/solvers/jump.jl
+++ b/src/solvers/jump.jl
@@ -41,6 +41,8 @@ function _optimize_and_capture_output!(model; tee::Bool=false)
     rm(logname)
     if tee
         println(log)
+        flush(stdout)
+        Base.Libc.flush_cstdio()
     end
     return log
 end
@@ -150,7 +152,7 @@ function build_test_instance_knapsack()
 
     @objective(model, Max, sum(x[i-1] * prices[i] for i in 1:n))
     @constraint(model, eq_capacity, sum(x[i-1] * weights[i] for i in 1:n) - z == 0)
-    return PyJuMPInstance(model)
+    return JuMPInstance(model).py
 end
 
 
@@ -159,7 +161,7 @@ function build_test_instance_infeasible()
     @variable(model, x, Bin)
     @objective(model, Max, x)
    @constraint(model, x >= 2)
-    return PyJuMPInstance(model)
+    return JuMPInstance(model).py
 end
 
 
@@ -389,6 +391,8 @@ end
 function get_constraints(
     data::JuMPSolverData;
     with_static::Bool,
+    with_sa::Bool,
+    with_lhs::Bool,
 )
     names = []
     senses, lhs, rhs = nothing, nothing, nothing
@@ -420,24 +424,26 @@ function get_constraints(
 
         if with_static
             if ftype == JuMP.AffExpr
-                push!(
-                    lhs,
-                    [
-                        (
-                            MOI.get(
+                if with_lhs
+                    push!(
+                        lhs,
+                        [
+                            (
+                                MOI.get(
+                                    constr.model.moi_backend,
+                                    MOI.VariableName(),
+                                    term.variable_index
+                                ),
+                                term.coefficient,
+                            )
+                            for term in MOI.get(
                                 constr.model.moi_backend,
-                                MOI.VariableName(),
-                                term.variable_index
-                            ),
-                            term.coefficient,
-                        )
-                        for term in MOI.get(
-                            constr.model.moi_backend,
-                            MOI.ConstraintFunction(),
-                            constr.index,
-                        ).terms
-                    ]
-                )
+                                MOI.ConstraintFunction(),
+                                constr.index,
+                            ).terms
+                        ]
+                    )
+                end
                 if stype == MOI.EqualTo{Float64}
                     push!(senses, "=")
                     push!(rhs, cset.value)
@@ -535,6 +541,8 @@ function __init_JuMPSolver__()
         ) = get_constraints(
             self.data,
             with_static=with_static,
+            with_sa=with_sa,
+            with_lhs=with_lhs,
         )
 
         get_constraint_attrs(self) = [
diff --git a/src/solvers/learning.jl b/src/solvers/learning.jl
index f3cb1f7..75762f9 100644
--- a/src/solvers/learning.jl
+++ b/src/solvers/learning.jl
@@ -17,6 +17,8 @@ function LearningSolver(
     mode::AbstractString = "exact",
     simulate_perfect::Bool = false,
     solve_lp::Bool = true,
+    extract_sa::Bool = true,
+    extract_lhs::Bool = true,
 )::LearningSolver
     return LearningSolver(
         miplearn.LearningSolver(
@@ -25,6 +27,8 @@ function LearningSolver(
             solve_lp=solve_lp,
             simulate_perfect=simulate_perfect,
             components=components,
+            extract_lhs=extract_lhs,
+            extract_sa=extract_sa,
         ),
         optimizer_factory,
     )
@@ -47,66 +51,83 @@ end
 
 function fit!(solver::LearningSolver, instances::Vector{<:Instance})
     @python_call solver.py.fit([instance.py for instance in instances])
+    return
+end
+
+
+function _solve(
+    solver_filename,
+    instance_filename;
+    discard_output::Bool,
+)
+    @info "solve $instance_filename"
+    solver = load_solver(solver_filename)
+    solver.py._silence_miplearn_logger()
+    stats = solve!(
+        solver,
+        FileInstance(instance_filename),
+        discard_output = discard_output,
+    )
+    solver.py._restore_miplearn_logger()
+    GC.gc()
+    @info "solve $instance_filename [done]"
+    return stats
 end
 
 
-function parallel_solve!(solver::LearningSolver, instances::Vector{FileInstance})
-    filenames = [instance.filename for instance in instances]
+function parallel_solve!(
+    solver::LearningSolver,
+    instances::Vector{FileInstance};
+    discard_output::Bool = false,
+)
+    instance_filenames = [instance.filename for instance in instances]
     solver_filename = tempname()
     save(solver_filename, solver)
-    @sync @distributed for filename in filenames
-        s = load_solver(solver_filename)
-        solve!(s, FileInstance(filename))
-        nothing
-    end
+    return pmap(
+        instance_filename -> _solve(
+            solver_filename,
+            instance_filename,
+            discard_output = discard_output,
+        ),
+        instance_filenames,
+        on_error=identity,
+    )
 end
 
 
 function save(filename::AbstractString, solver::LearningSolver)
-    @info "Writing: $filename"
-    time = @elapsed begin
-        # Pickle solver.py
-        internal_solver = solver.py.internal_solver
-        internal_solver_prototype = solver.py.internal_solver_prototype
-        solver.py.internal_solver = nothing
-        solver.py.internal_solver_prototype = nothing
-        solver_py_filename = tempname()
-        miplearn.write_pickle_gz(solver.py, solver_py_filename, quiet=true)
-        solver_py = read(solver_py_filename)
-        solver.py.internal_solver = internal_solver
-        solver.py.internal_solver_prototype = internal_solver_prototype
-
-        jldsave(
-            filename;
-            miplearn_version="0.2",
-            solver_py=solver_py,
-            optimizer_factory=solver.optimizer_factory,
-        )
-    end
-    @info @sprintf("File written in %.2f seconds", time)
+    internal_solver = solver.py.internal_solver
+    internal_solver_prototype = solver.py.internal_solver_prototype
+    solver.py.internal_solver = nothing
+    solver.py.internal_solver_prototype = nothing
+    solver_py_filename = tempname()
+    miplearn.write_pickle_gz(solver.py, solver_py_filename)
+    solver_py = read(solver_py_filename)
+    solver.py.internal_solver = internal_solver
+    solver.py.internal_solver_prototype = internal_solver_prototype
+    jldsave(
+        filename;
+        miplearn_version="0.2",
+        solver_py=solver_py,
+        optimizer_factory=solver.optimizer_factory,
+    )
     return
 end
 
 
 function load_solver(filename::AbstractString)::LearningSolver
-    @info "Reading: $filename"
-    solver = nothing
-    time = @elapsed begin
-        jldopen(filename, "r") do file
-            _check_miplearn_version(file)
-            solve_py_filename = tempname()
-            write(solve_py_filename, file["solver_py"])
-            solver_py = miplearn.read_pickle_gz(solve_py_filename, quiet=true)
-            internal_solver = JuMPSolver(file["optimizer_factory"])
-            solver_py.internal_solver_prototype = internal_solver
-            solver = LearningSolver(
-                solver_py,
-                file["optimizer_factory"],
-            )
-        end
+    jldopen(filename, "r") do file
+        _check_miplearn_version(file)
+        solve_py_filename = tempname()
+        write(solve_py_filename, file["solver_py"])
+        solver_py = miplearn.read_pickle_gz(solve_py_filename)
+        internal_solver = JuMPSolver(file["optimizer_factory"])
+        solver_py.internal_solver_prototype = internal_solver
+        return LearningSolver(
+            solver_py,
+            file["optimizer_factory"],
+        )
     end
-    @info @sprintf("File read in %.2f seconds", time)
-    return solver
 end
diff --git a/src/solvers/macros.jl b/src/solvers/macros.jl
index f7f6340..fb57ecc 100644
--- a/src/solvers/macros.jl
+++ b/src/solvers/macros.jl
@@ -4,12 +4,12 @@
 
 function init_miplearn_ext(model)::Dict
     if :miplearn ∉ keys(model.ext)
-        model.ext[:miplearn] = Dict{Symbol, Any}()
-        model.ext[:miplearn][:instance_features] = [0.0]
-        model.ext[:miplearn][:variable_features] = Dict{AbstractString, Vector{Float64}}()
-        model.ext[:miplearn][:variable_categories] = Dict{AbstractString, String}()
-        model.ext[:miplearn][:constraint_features] = Dict{AbstractString, Vector{Float64}}()
-        model.ext[:miplearn][:constraint_categories] = Dict{AbstractString, String}()
+        model.ext[:miplearn] = Dict()
+        model.ext[:miplearn]["instance_features"] = [0.0]
+        model.ext[:miplearn]["variable_features"] = Dict{AbstractString, Vector{Float64}}()
+        model.ext[:miplearn]["variable_categories"] = Dict{AbstractString, String}()
+        model.ext[:miplearn]["constraint_features"] = Dict{AbstractString, Vector{Float64}}()
+        model.ext[:miplearn]["constraint_categories"] = Dict{AbstractString, String}()
     end
     return model.ext[:miplearn]
 end
@@ -17,7 +17,7 @@ end
 
 function set_features!(m::Model, f::Array{Float64})::Nothing
     ext = init_miplearn_ext(m)
-    ext[:instance_features] = f
+    ext["instance_features"] = f
     return
 end
 
@@ -25,7 +25,7 @@ end
 function set_features!(v::VariableRef, f::Array{Float64})::Nothing
     ext = init_miplearn_ext(v.model)
     n = _get_and_check_name(v)
-    ext[:variable_features][n] = f
+    ext["variable_features"][n] = f
     return
 end
 
@@ -33,7 +33,7 @@ end
 function set_category!(v::VariableRef, category::String)::Nothing
     ext = init_miplearn_ext(v.model)
     n = _get_and_check_name(v)
-    ext[:variable_categories][n] = category
+    ext["variable_categories"][n] = category
     return
 end
 
@@ -41,7 +41,7 @@ end
 function set_features!(c::ConstraintRef, f::Array{Float64})::Nothing
     ext = init_miplearn_ext(c.model)
     n = _get_and_check_name(c)
-    ext[:constraint_features][n] = f
+    ext["constraint_features"][n] = f
     return
 end
 
@@ -49,7 +49,7 @@ end
 function set_category!(c::ConstraintRef, category::String)::Nothing
     ext = init_miplearn_ext(c.model)
     n = _get_and_check_name(c)
-    ext[:constraint_categories][n] = category
+    ext["constraint_categories"][n] = category
     return
 end
diff --git a/src/utils/benchmark.jl b/src/utils/benchmark.jl
index ce98cce..19ae467 100644
--- a/src/utils/benchmark.jl
+++ b/src/utils/benchmark.jl
@@ -9,61 +9,60 @@ using DataFrames
 mutable struct BenchmarkRunner
     solvers::Dict
     results::Union{Nothing,DataFrame}
-end
-
+    py::PyCall.PyObject
 
-function BenchmarkRunner(; solvers::Dict)
-    return BenchmarkRunner(
-        solvers,
-        nothing, # results
-    )
+    function BenchmarkRunner(; solvers::Dict)
+        return new(
+            solvers,
+            nothing, # results
+            miplearn.BenchmarkRunner(
+                Dict(
+                    sname => solver.py
+                    for (sname, solver) in solvers
+                )
+            )
+        )
+    end
 end
 
-
 function parallel_solve!(
     runner::BenchmarkRunner,
     instances::Vector{FileInstance};
     n_trials::Int = 3,
 )::Nothing
+    instances = repeat(instances, n_trials)
     for (solver_name, solver) in runner.solvers
-        @info "Benchmarking: $solver_name"
-        for i in 1:n_trials
-            for instance in instances
-                stats = solve!(solver, instance, discard_output=true)
-                instance.py.free()
-                stats["Solver"] = solver_name
-                stats = Dict(k => isnothing(v) ? missing : v for (k, v) in stats)
-                if runner.results === nothing
-                    runner.results = DataFrame(stats)
-                else
-                    push!(runner.results, stats, cols=:union)
-                end
+        @info "benchmark $solver_name"
+        stats = parallel_solve!(solver, instances, discard_output=true)
+        for (i, s) in enumerate(stats)
+            s["Solver"] = solver_name
+            s["Instance"] = instances[i].filename
+            s = Dict(k => isnothing(v) ? missing : v for (k, v) in s)
+            if runner.results === nothing
+                runner.results = DataFrame(s)
+            else
+                push!(runner.results, s, cols=:union)
             end
         end
+        @info "benchmark $solver_name [done]"
     end
 end
 
-
 function fit!(
     runner::BenchmarkRunner,
     instances::Vector{FileInstance}
 )::Nothing
-    for (solver_name, solver) in runner.solvers
-        fit!(solver, instances)
-    end
+    @python_call runner.py.fit([instance.py for instance in instances])
 end
 
-
 function write_csv!(
     runner::BenchmarkRunner,
     filename::AbstractString,
 )::Nothing
-    @info "Writing: $filename"
     CSV.write(filename, runner.results)
     return
 end
 
-
 export BenchmarkRunner,
     parallel_solve!,
     fit!,
diff --git a/src/utils/log.jl b/src/utils/log.jl
index 28d2f2c..f0f8342 100644
--- a/src/utils/log.jl
+++ b/src/utils/log.jl
@@ -44,16 +44,22 @@ function handle_message(logger::TimeLogger,
         color = :light_green
     end
 
+    flush(stdout)
+    flush(stderr)
+    Base.Libc.flush_cstdio()
     if level >= logger.screen_log_level
         printstyled(time_string, color=color)
         println(message)
     end
-    if logger.file != nothing && level >= logger.io_log_level
+    if logger.file !== nothing && level >= logger.io_log_level
        write(logger.file, time_string)
         write(logger.file, message)
         write(logger.file, "\n")
         flush(logger.file)
     end
+    flush(stdout)
+    flush(stderr)
+    Base.Libc.flush_cstdio()
 end
 
 function setup_logger()
@@ -63,4 +69,4 @@ function setup_logger()
     miplearn.setup_logger(initial_time)
 end
 
-export TimeLogger
\ No newline at end of file
+export TimeLogger
diff --git a/test/fixtures/knapsack.jl b/test/fixtures/knapsack.jl
index e4454e5..1872a9f 100644
--- a/test/fixtures/knapsack.jl
+++ b/test/fixtures/knapsack.jl
@@ -28,15 +28,15 @@ function build_knapsack_model()
     end
 
     # Should store ML information
-    @test model.ext[:miplearn][:variable_features]["x[1]"] == [1.0, 5.0]
-    @test model.ext[:miplearn][:variable_features]["x[2]"] == [2.0, 6.0]
-    @test model.ext[:miplearn][:variable_features]["x[3]"] == [3.0, 7.0]
-    @test model.ext[:miplearn][:variable_categories]["x[1]"] == "type-1"
-    @test model.ext[:miplearn][:variable_categories]["x[2]"] == "type-2"
-    @test model.ext[:miplearn][:variable_categories]["x[3]"] == "type-3"
-    @test model.ext[:miplearn][:constraint_features]["c1"] == [1.0, 2.0, 3.0]
-    @test model.ext[:miplearn][:constraint_categories]["c1"] == "c1"
-    @test model.ext[:miplearn][:instance_features] == [5.0]
+    @test model.ext[:miplearn]["variable_features"]["x[1]"] == [1.0, 5.0]
+    @test model.ext[:miplearn]["variable_features"]["x[2]"] == [2.0, 6.0]
+    @test model.ext[:miplearn]["variable_features"]["x[3]"] == [3.0, 7.0]
+    @test model.ext[:miplearn]["variable_categories"]["x[1]"] == "type-1"
+    @test model.ext[:miplearn]["variable_categories"]["x[2]"] == "type-2"
+    @test model.ext[:miplearn]["variable_categories"]["x[3]"] == "type-3"
+    @test model.ext[:miplearn]["constraint_features"]["c1"] == [1.0, 2.0, 3.0]
+    @test model.ext[:miplearn]["constraint_categories"]["c1"] == "c1"
+    @test model.ext[:miplearn]["instance_features"] == [5.0]
 
     return model
 end
diff --git a/test/instance/file_test.jl b/test/instance/file_test.jl
index 16948d4..4e63ba9 100644
--- a/test/instance/file_test.jl
+++ b/test/instance/file_test.jl
@@ -19,6 +19,6 @@ using Cbc
         solve!(solver, file_instance)
 
         loaded = load_instance(filename)
-        @test length(loaded.py.samples) == 1
+        @test length(loaded.samples) == 1
     end
 end
diff --git a/test/solvers/learning_test.jl b/test/solvers/learning_test.jl
index cdd5b48..e08ca0c 100644
--- a/test/solvers/learning_test.jl
+++ b/test/solvers/learning_test.jl
@@ -13,7 +13,7 @@ using MIPLearn
         instance = JuMPInstance(model)
         stats = solve!(solver, instance)
         @test stats["mip_lower_bound"] == 11.0
-        @test length(instance.py.samples) == 1
+        @test length(instance.samples) == 1
         fit!(solver, [instance])
         solve!(solver, instance)
     end
@@ -41,6 +41,6 @@ using MIPLearn
         solver = LearningSolver(Cbc.Optimizer)
         solve!(solver, instance, discard_output=true)
         loaded = load_instance(instance.filename)
-        @test length(loaded.py.samples) == 0
+        @test length(loaded.samples) == 0
     end
 end
diff --git a/test/utils/benchmark_test.jl b/test/utils/benchmark_test.jl
index 326b50a..c549391 100644
--- a/test/utils/benchmark_test.jl
+++ b/test/utils/benchmark_test.jl
@@ -8,17 +8,19 @@ using DataFrames
 
 @testset "BenchmarkRunner" begin
-    # Initialize instances and generate training data
+    @info "Building training data..."
     instances = [
         build_knapsack_file_instance(),
         build_knapsack_file_instance(),
     ]
 
-    parallel_solve!(
+    stats = parallel_solve!(
         LearningSolver(Cbc.Optimizer),
         instances,
     )
+    @test length(stats) == 2
+    @test stats[1] !== nothing
+    @test stats[2] !== nothing
 
-    # Fit and benchmark
     benchmark = BenchmarkRunner(
         solvers=Dict(
             "baseline" => LearningSolver(
@@ -34,13 +36,15 @@ using DataFrames
             ),
         ),
     )
+    @info "Fitting..."
     fit!(benchmark, instances)
-    parallel_solve!(benchmark, instances, n_trials=1)
 
-    # Write CSV
+    @info "Benchmarking..."
+    parallel_solve!(benchmark, instances, n_trials=2)
+
     csv_filename = tempname()
     write_csv!(benchmark, csv_filename)
     @test isfile(csv_filename)
     csv = DataFrame(CSV.File(csv_filename))
-    @test size(csv)[1] == 6
+    @test size(csv)[1] == 12
 end
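
For reference, a minimal usage sketch of the reworked Julia-side API touched by this patch — not part of the diff itself. It only uses constructs exercised in the tests above (JuMPInstance, FileInstance, LearningSolver, solve!, fit!, save, load_instance, parallel_solve!); the model builder build_model() is a hypothetical user-supplied function, and Cbc stands in for whichever optimizer is configured.

using Cbc
using MIPLearn

# Hypothetical helper: any function returning a JuMP model annotated with
# set_features!/set_category! works here.
model = build_model()

# In-memory workflow: wrap the JuMP model, solve, and retrain on the sample.
instance = JuMPInstance(model)
solver = LearningSolver(Cbc.Optimizer)
stats = solve!(solver, instance)
fit!(solver, [instance])

# File-backed workflow: persist the instance, then solve it from disk;
# FileInstance lazily loads the JLD2 file and writes samples back on flush.
filename = tempname()
save(filename, instance)
solve!(solver, FileInstance(filename))
loaded = load_instance(filename)   # collected samples are in loaded.samples

# Several file-backed instances can be solved across worker processes.
parallel_solve!(solver, [FileInstance(filename)])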