Reformat source code

2024-05-29 09:04:59 -05:00
parent e9deac94a5
commit 93e604817b
15 changed files with 106 additions and 107 deletions


@@ -9,7 +9,13 @@ using SparseArrays
using Statistics
using TimerOutputs
-function collect_gmi(mps_filename; optimizer, max_rounds=10, max_cuts_per_round=100, atol=1e-4)
+function collect_gmi(
+    mps_filename;
+    optimizer,
+    max_rounds = 10,
+    max_cuts_per_round = 100,
+    atol = 1e-4,
+)
@info mps_filename
reset_timer!()
@@ -98,12 +104,12 @@ function collect_gmi(mps_filename; optimizer, max_rounds=10, max_cuts_per_round=
sol_frac = get_x(model_s)
stats_time_select += @elapsed begin
selected_rows =
-    select_gmi_rows(data_s, basis, sol_frac, max_rows=max_cuts_per_round)
+    select_gmi_rows(data_s, basis, sol_frac, max_rows = max_cuts_per_round)
end
# Compute selected tableau rows
stats_time_tableau += @elapsed begin
-tableau = compute_tableau(data_s, basis, sol_frac, rows=selected_rows)
+tableau = compute_tableau(data_s, basis, sol_frac, rows = selected_rows)
# Assert tableau rows have been computed correctly
assert_eq(tableau.lhs * sol_frac, tableau.rhs)
@@ -180,10 +186,9 @@ function collect_gmi(mps_filename; optimizer, max_rounds=10, max_cuts_per_round=
)
end
-function select_gmi_rows(data, basis, x; max_rows=10, atol=1e-4)
+function select_gmi_rows(data, basis, x; max_rows = 10, atol = 1e-4)
candidate_rows = [
-    r for
-    r in 1:length(basis.var_basic) if (
+    r for r = 1:length(basis.var_basic) if (
(data.var_types[basis.var_basic[r]] != 'C') &&
(frac(x[basis.var_basic[r]]) > atol) &&
(frac2(x[basis.var_basic[r]]) > atol)
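Aside, for readers of this hunk rather than part of the diff itself: the filter above keeps only rows whose basic variable is an integer variable with a genuinely fractional value. A minimal sketch of the two helpers, assuming frac mirrors the frac2 definition that appears in the utility file further down:

    @inline frac(x::Float64) = x - floor(x)    # gap down to floor(x)
    @inline frac2(x::Float64) = ceil(x) - x    # gap up to ceil(x)

    # Both gaps must exceed atol, which rules out near-integer values
    # such as 2.99999 or 3.00001 that would yield numerically useless cuts.
    is_usably_fractional(x; atol = 1e-4) = frac(x) > atol && frac2(x) > atol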
@@ -204,7 +209,7 @@ function compute_gmi(data::ProblemData, tableau::Tableau)::ConstraintSet
lhs_J = Int[]
lhs_V = Float64[]
@timeit "Compute coefficients" begin
-        for k in 1:nnz(tableau.lhs)
+        for k = 1:nnz(tableau.lhs)
i::Int = tableau_I[k]
j::Int = tableau_J[k]
v::Float64 = 0.0
@@ -235,4 +240,5 @@ function compute_gmi(data::ProblemData, tableau::Tableau)::ConstraintSet
return ConstraintSet(; lhs, ub, lb)
end
-export compute_gmi, frac, select_gmi_rows, assert_cuts_off, assert_does_not_cut_off, collect_gmi
+export compute_gmi,
+    frac, select_gmi_rows, assert_cuts_off, assert_does_not_cut_off, collect_gmi
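For orientation on the compute_gmi loop in the hunk above: the coefficient computed per tableau entry is presumably the textbook Gomory mixed-integer formula. A hedged sketch of that standard formula (not necessarily the exact sign or normalization convention this file uses), scaled so the cut reads sum(coeff_j * x_j) >= 1:

    # f0: fractional part of the tableau row's right-hand side; a: one entry.
    function gmi_coeff(a::Float64, f0::Float64; integer::Bool)
        if integer
            f = a - floor(a)                        # integer columns use the fractional part
            return f <= f0 ? f / f0 : (1 - f) / (1 - f0)
        else
            return a >= 0 ? a / f0 : -a / (1 - f0)  # continuous columns use the raw value
        end
    end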


@@ -2,7 +2,7 @@
@inline frac2(x::Float64) = ceil(x) - x
-function assert_leq(a, b; atol=0.01)
+function assert_leq(a, b; atol = 0.01)
if !all(a .<= b .+ atol)
delta = a .- b
for i in eachindex(delta)
@@ -14,7 +14,7 @@ function assert_leq(a, b; atol=0.01)
end
end
-function assert_eq(a, b; atol=1e-4)
+function assert_eq(a, b; atol = 1e-4)
if !all(abs.(a .- b) .<= atol)
delta = abs.(a .- b)
for i in eachindex(delta)
@@ -26,7 +26,7 @@ function assert_eq(a, b; atol=1e-4)
end
end
-function assert_cuts_off(cuts::ConstraintSet, x::Vector{Float64}, tol=1e-6)
+function assert_cuts_off(cuts::ConstraintSet, x::Vector{Float64}, tol = 1e-6)
for i = 1:length(cuts.lb)
val = cuts.lhs[i, :]' * x
if (val <= cuts.ub[i] - tol) && (val >= cuts.lb[i] + tol)
@@ -35,7 +35,7 @@ function assert_cuts_off(cuts::ConstraintSet, x::Vector{Float64}, tol=1e-6)
end
end
-function assert_does_not_cut_off(cuts::ConstraintSet, x::Vector{Float64}; tol=1e-6)
+function assert_does_not_cut_off(cuts::ConstraintSet, x::Vector{Float64}; tol = 1e-6)
for i = 1:length(cuts.lb)
val = cuts.lhs[i, :]' * x
ub = cuts.ub[i]
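Reading note for the two assertions above: a cut "cuts off" a point when the point violates it, so assert_cuts_off errors on any row that x still satisfies strictly, and assert_does_not_cut_off checks the opposite. A tiny worked example with hypothetical numbers, assuming the keyword constructor ConstraintSet(; lhs, ub, lb) seen at the end of gmi.jl:

    using SparseArrays
    cuts = ConstraintSet(; lhs = sparse([1.0 1.0]), lb = [-Inf], ub = [1.0])
    assert_cuts_off(cuts, [0.7, 0.7])           # 0.7 + 0.7 = 1.4 > ub: cut off, passes
    assert_does_not_cut_off(cuts, [1.0, 0.0])   # 1.0 <= ub: still feasible, passes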


@@ -53,8 +53,14 @@ function __init_components__()
)
copy!(SelectTopSolutions, pyimport("miplearn.components.primal.mem").SelectTopSolutions)
copy!(MergeTopSolutions, pyimport("miplearn.components.primal.mem").MergeTopSolutions)
-copy!(MemorizingCutsComponent, pyimport("miplearn.components.cuts.mem").MemorizingCutsComponent)
-copy!(MemorizingLazyComponent, pyimport("miplearn.components.lazy.mem").MemorizingLazyComponent)
+copy!(
+    MemorizingCutsComponent,
+    pyimport("miplearn.components.cuts.mem").MemorizingCutsComponent,
+)
+copy!(
+    MemorizingLazyComponent,
+    pyimport("miplearn.components.lazy.mem").MemorizingLazyComponent,
+)
end
export MinProbabilityClassifier,


@@ -39,14 +39,14 @@ end
function PyObject(m::SparseMatrixCSC)
pyimport("scipy.sparse").csc_matrix(
(m.nzval, m.rowval .- 1, m.colptr .- 1),
-    shape=size(m),
+    shape = size(m),
).tocoo()
end
function write_jld2(
objs::Vector,
dirname::AbstractString;
-    prefix::AbstractString=""
+    prefix::AbstractString = "",
)::Vector{String}
mkpath(dirname)
filenames = [@sprintf("%s/%s%05d.jld2", dirname, prefix, i) for i = 1:length(objs)]
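Context for the index arithmetic in the PyObject conversion above: Julia's SparseMatrixCSC stores 1-based rowval and colptr, while scipy.sparse.csc_matrix expects the 0-based (data, indices, indptr) triple, hence the .- 1 shifts before .tocoo() converts to coordinate format. For example:

    using SparseArrays
    m = sparse([1.0 0.0; 0.0 2.0])
    m.rowval          # [1, 2]  (1-based row indices)
    m.rowval .- 1     # [0, 1]  (what scipy expects)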


@@ -13,7 +13,7 @@ function __init_problems_setcover__()
copy!(SetCoverGenerator, pyimport("miplearn.problems.setcover").SetCoverGenerator)
end
-function build_setcover_model_jump(data::Any; optimizer=HiGHS.Optimizer)
+function build_setcover_model_jump(data::Any; optimizer = HiGHS.Optimizer)
if data isa String
data = read_pkl_gz(data)
end


@@ -10,10 +10,13 @@ global MaxWeightStableSetGenerator = PyNULL()
function __init_problems_stab__()
copy!(MaxWeightStableSetData, pyimport("miplearn.problems.stab").MaxWeightStableSetData)
-copy!(MaxWeightStableSetGenerator, pyimport("miplearn.problems.stab").MaxWeightStableSetGenerator)
+copy!(
+    MaxWeightStableSetGenerator,
+    pyimport("miplearn.problems.stab").MaxWeightStableSetGenerator,
+)
end
-function build_stab_model_jump(data::Any; optimizer=HiGHS.Optimizer)
+function build_stab_model_jump(data::Any; optimizer = HiGHS.Optimizer)
nx = pyimport("networkx")
if data isa String
@@ -50,11 +53,7 @@ function build_stab_model_jump(data::Any; optimizer=HiGHS.Optimizer)
end
end
-return JumpModel(
-    model,
-    cuts_separate=cuts_separate,
-    cuts_enforce=cuts_enforce,
-)
+return JumpModel(model, cuts_separate = cuts_separate, cuts_enforce = cuts_enforce)
end
export MaxWeightStableSetData, MaxWeightStableSetGenerator, build_stab_model_jump


@@ -9,7 +9,10 @@ global TravelingSalesmanGenerator = PyNULL()
function __init_problems_tsp__()
copy!(TravelingSalesmanData, pyimport("miplearn.problems.tsp").TravelingSalesmanData)
-copy!(TravelingSalesmanGenerator, pyimport("miplearn.problems.tsp").TravelingSalesmanGenerator)
+copy!(
+    TravelingSalesmanGenerator,
+    pyimport("miplearn.problems.tsp").TravelingSalesmanGenerator,
+)
end
function build_tsp_model_jump(data::Any; optimizer)
@@ -19,17 +22,15 @@ function build_tsp_model_jump(data::Any; optimizer)
data = read_pkl_gz(data)
end
model = Model(optimizer)
-edges = [(i, j) for i in 1:data.n_cities for j in (i+1):data.n_cities]
+edges = [(i, j) for i = 1:data.n_cities for j = (i+1):data.n_cities]
x = @variable(model, x[edges], Bin)
-@objective(model, Min, sum(
-    x[(i, j)] * data.distances[i, j] for (i, j) in edges
-))
+@objective(model, Min, sum(x[(i, j)] * data.distances[i, j] for (i, j) in edges))
# Eq: Must choose two edges adjacent to each node
@constraint(
model,
eq_degree[i in 1:data.n_cities],
-    sum(x[(min(i, j), max(i, j))] for j in 1:data.n_cities if i != j) == 2
+    sum(x[(min(i, j), max(i, j))] for j = 1:data.n_cities if i != j) == 2
)
function lazy_separate(cb_data)
@@ -41,10 +42,8 @@ function build_tsp_model_jump(data::Any; optimizer)
for component in nx.connected_components(graph)
if length(component) < data.n_cities
cut_edges = [
-    [e[1], e[2]]
-    for e in edges
-    if (e[1] ∈ component && e[2] ∉ component) ||
+    [e[1], e[2]] for
+    e in edges if (e[1] ∈ component && e[2] ∉ component) ||
     (e[1] ∉ component && e[2] ∈ component)
]
push!(violations, cut_edges)
@@ -63,9 +62,9 @@ function build_tsp_model_jump(data::Any; optimizer)
return JumpModel(
model,
-    lazy_enforce=lazy_enforce,
-    lazy_separate=lazy_separate,
-    lp_optimizer=optimizer,
+    lazy_enforce = lazy_enforce,
+    lazy_separate = lazy_separate,
+    lp_optimizer = optimizer,
)
end
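For orientation, lazy_separate above is standard subtour elimination: build the support graph of the current solution with networkx, and every connected component that does not span all cities yields the set of selected edges crossing its boundary. A sketch of the matching constraint, under the assumption that lazy_enforce adds the usual cut (only the separation loop appears in this diff):

    # At least two chosen edges must cross the boundary of each proper component S:
    #     sum(x[e] for e crossing S) >= 2
    enforce_subtour(model, x, cut_edges) =
        @constraint(model, sum(x[(min(i, j), max(i, j))] for (i, j) in cut_edges) >= 2)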


@@ -18,7 +18,7 @@ Base.@kwdef mutable struct _JumpModelExtData
cuts_separate::Union{Function,Nothing} = nothing
lazy_enforce::Union{Function,Nothing} = nothing
lazy_separate::Union{Function,Nothing} = nothing
-    lp_optimizer
+    lp_optimizer::Any
end
function JuMP.copy_extension_data(
@@ -26,9 +26,7 @@ function JuMP.copy_extension_data(
new_model::AbstractModel,
::AbstractModel,
)
-new_model.ext[:miplearn] = _JumpModelExtData(
-    lp_optimizer=old_ext.lp_optimizer
-)
+new_model.ext[:miplearn] = _JumpModelExtData(lp_optimizer = old_ext.lp_optimizer)
end
# -----------------------------------------------------------------------------
@@ -297,7 +295,7 @@ end
function _fix_variables(model::JuMP.Model, var_names, var_values, stats)
vars = [variable_by_name(model, v) for v in var_names]
for (i, var) in enumerate(vars)
-    fix(var, var_values[i], force=true)
+    fix(var, var_values[i], force = true)
end
end
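A note on the force keyword above: JuMP's fix errors if the variable already has bounds, unless force = true, in which case the bounds are deleted and the variable is fixed. For instance:

    using JuMP
    model = Model()
    @variable(model, 0 <= y <= 10)
    fix(y, 3.0, force = true)   # drops both bounds, then sets y == 3.0
    is_fixed(y)                 # true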
@@ -392,19 +390,19 @@ function __init_solvers_jump__()
function __init__(
self,
inner;
-    cuts_enforce::Union{Function,Nothing}=nothing,
-    cuts_separate::Union{Function,Nothing}=nothing,
-    lazy_enforce::Union{Function,Nothing}=nothing,
-    lazy_separate::Union{Function,Nothing}=nothing,
-    lp_optimizer=HiGHS.Optimizer,
+    cuts_enforce::Union{Function,Nothing} = nothing,
+    cuts_separate::Union{Function,Nothing} = nothing,
+    lazy_enforce::Union{Function,Nothing} = nothing,
+    lazy_separate::Union{Function,Nothing} = nothing,
+    lp_optimizer = HiGHS.Optimizer,
)
self.inner = inner
self.inner.ext[:miplearn] = _JumpModelExtData(
-    cuts_enforce=cuts_enforce,
-    cuts_separate=cuts_separate,
-    lazy_enforce=lazy_enforce,
-    lazy_separate=lazy_separate,
-    lp_optimizer=lp_optimizer,
+    cuts_enforce = cuts_enforce,
+    cuts_separate = cuts_separate,
+    lazy_enforce = lazy_enforce,
+    lazy_separate = lazy_separate,
+    lp_optimizer = lp_optimizer,
)
end
@@ -414,7 +412,7 @@ function __init_solvers_jump__()
constrs_lhs,
constrs_sense,
constrs_rhs,
-    stats=nothing,
+    stats = nothing,
) = _add_constrs(
self.inner,
from_str_array(var_names),
@@ -430,14 +428,14 @@ function __init_solvers_jump__()
extract_after_mip(self, h5) = _extract_after_mip(self.inner, h5)
-fix_variables(self, var_names, var_values, stats=nothing) =
+fix_variables(self, var_names, var_values, stats = nothing) =
_fix_variables(self.inner, from_str_array(var_names), var_values, stats)
optimize(self) = _optimize(self.inner)
relax(self) = Class(_relax(self.inner))
-set_warm_starts(self, var_names, var_values, stats=nothing) =
+set_warm_starts(self, var_names, var_values, stats = nothing) =
_set_warm_starts(self.inner, from_str_array(var_names), var_values, stats)
write(self, filename) = _write(self.inner, filename)