Mirror of https://github.com/ANL-CEEESA/MIPLearn.jl.git (synced 2025-12-06 00:18:51 -06:00)

Commit: Reformat source code

The diff below applies mechanical formatting only: long signatures and calls are split one argument per line (or re-joined when they fit on one line), spaces are normalized around = in keyword arguments, and range iteration is written for i = a:b instead of for i in a:b. No behavior changes are visible in the hunks shown.
@@ -9,7 +9,13 @@ using SparseArrays
 using Statistics
 using TimerOutputs

-function collect_gmi(mps_filename; optimizer, max_rounds=10, max_cuts_per_round=100, atol=1e-4)
+function collect_gmi(
+    mps_filename;
+    optimizer,
+    max_rounds = 10,
+    max_cuts_per_round = 100,
+    atol = 1e-4,
+)
     @info mps_filename
     reset_timer!()
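
Note: the expanded signature above makes the keyword defaults explicit. A minimal usage sketch, assuming HiGHS as the optimizer and a hypothetical instance.mps file (neither is part of this commit):

    using HiGHS
    collect_gmi(
        "instance.mps";
        optimizer = HiGHS.Optimizer,
        max_rounds = 10,           # defaults written out for illustration
        max_cuts_per_round = 100,
        atol = 1e-4,
    )
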
@@ -182,8 +188,7 @@ end

 function select_gmi_rows(data, basis, x; max_rows = 10, atol = 1e-4)
     candidate_rows = [
-        r for
-        r in 1:length(basis.var_basic) if (
+        r for r = 1:length(basis.var_basic) if (
             (data.var_types[basis.var_basic[r]] != 'C') &&
             (frac(x[basis.var_basic[r]]) > atol) &&
             (frac2(x[basis.var_basic[r]]) > atol)
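
Note: frac and frac2 are defined elsewhere in this file, not in this diff. As an assumption stated for context only, they plausibly measure the distance of a value to the nearest integer from below and from above, so the filter keeps basic rows of integer variables whose LP value is strictly fractional:

    # Assumed definitions, for illustration only (not shown in this diff):
    frac(x) = x - floor(x)     # fractional part: distance to the integer below
    frac2(x) = ceil(x) - x     # complement: distance to the integer above
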
@@ -204,7 +209,7 @@ function compute_gmi(data::ProblemData, tableau::Tableau)::ConstraintSet
     lhs_J = Int[]
     lhs_V = Float64[]
     @timeit "Compute coefficients" begin
-        for k in 1:nnz(tableau.lhs)
+        for k = 1:nnz(tableau.lhs)
             i::Int = tableau_I[k]
             j::Int = tableau_J[k]
             v::Float64 = 0.0
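
Note: the loop walks the stored nonzeros of the sparse tableau through parallel index arrays; tableau_I and tableau_J are presumably produced by findnz earlier in the function, outside this hunk. A self-contained sketch of that SparseArrays idiom:

    using SparseArrays
    A = sparse([1, 2], [2, 3], [0.5, -1.0])   # 2x3 matrix with two stored entries
    I, J, V = findnz(A)                       # parallel row/column/value arrays
    for k = 1:nnz(A)
        @assert A[I[k], J[k]] == V[k]         # holds for every stored entry
    end
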
@@ -235,4 +240,5 @@ function compute_gmi(data::ProblemData, tableau::Tableau)::ConstraintSet
     return ConstraintSet(; lhs, ub, lb)
 end

-export compute_gmi, frac, select_gmi_rows, assert_cuts_off, assert_does_not_cut_off, collect_gmi
+export compute_gmi,
+    frac, select_gmi_rows, assert_cuts_off, assert_does_not_cut_off, collect_gmi
@@ -53,8 +53,14 @@ function __init_components__()
     )
     copy!(SelectTopSolutions, pyimport("miplearn.components.primal.mem").SelectTopSolutions)
     copy!(MergeTopSolutions, pyimport("miplearn.components.primal.mem").MergeTopSolutions)
-    copy!(MemorizingCutsComponent, pyimport("miplearn.components.cuts.mem").MemorizingCutsComponent)
-    copy!(MemorizingLazyComponent, pyimport("miplearn.components.lazy.mem").MemorizingLazyComponent)
+    copy!(
+        MemorizingCutsComponent,
+        pyimport("miplearn.components.cuts.mem").MemorizingCutsComponent,
+    )
+    copy!(
+        MemorizingLazyComponent,
+        pyimport("miplearn.components.lazy.mem").MemorizingLazyComponent,
+    )
 end

 export MinProbabilityClassifier,
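
Note: the copy!(X, pyimport(...).X) calls follow the standard PyCall idiom for precompiled packages: Python objects cannot be cached at compile time, so each global starts as an empty PyNULL() placeholder and is filled in at run time inside an __init__ function. A self-contained sketch of the same pattern:

    using PyCall

    const math = PyNULL()               # placeholder created at load time

    function __init__()
        copy!(math, pyimport("math"))   # resolved when Python is available
    end
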
@@ -46,7 +46,7 @@ end
 function write_jld2(
     objs::Vector,
     dirname::AbstractString;
-    prefix::AbstractString=""
+    prefix::AbstractString = "",
 )::Vector{String}
     mkpath(dirname)
     filenames = [@sprintf("%s/%s%05d.jld2", dirname, prefix, i) for i = 1:length(objs)]
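
Note: from the @sprintf pattern above, write_jld2 stores one object per file under dirname, with a five-digit zero-padded index. A hypothetical call (the objects and directory name are illustrative):

    filenames = write_jld2([obj1, obj2], "train"; prefix = "stab-")
    # filenames == ["train/stab-00001.jld2", "train/stab-00002.jld2"]
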
@@ -10,7 +10,10 @@ global MaxWeightStableSetGenerator = PyNULL()

 function __init_problems_stab__()
     copy!(MaxWeightStableSetData, pyimport("miplearn.problems.stab").MaxWeightStableSetData)
-    copy!(MaxWeightStableSetGenerator, pyimport("miplearn.problems.stab").MaxWeightStableSetGenerator)
+    copy!(
+        MaxWeightStableSetGenerator,
+        pyimport("miplearn.problems.stab").MaxWeightStableSetGenerator,
+    )
 end

 function build_stab_model_jump(data::Any; optimizer = HiGHS.Optimizer)
@@ -50,11 +53,7 @@ function build_stab_model_jump(data::Any; optimizer=HiGHS.Optimizer)
         end
     end

-    return JumpModel(
-        model,
-        cuts_separate=cuts_separate,
-        cuts_enforce=cuts_enforce,
-    )
+    return JumpModel(model, cuts_separate = cuts_separate, cuts_enforce = cuts_enforce)
 end

 export MaxWeightStableSetData, MaxWeightStableSetGenerator, build_stab_model_jump
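
Note: a hedged usage sketch of the exported builder. It assumes, by analogy with build_tsp_model_jump below and the test fixtures further down, that data may also be the path of a pickled MaxWeightStableSetData instance; the filename is illustrative:

    model = build_stab_model_jump("stab-n50-00000.pkl.gz")   # optimizer defaults to HiGHS.Optimizer
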
@@ -9,7 +9,10 @@ global TravelingSalesmanGenerator = PyNULL()

 function __init_problems_tsp__()
     copy!(TravelingSalesmanData, pyimport("miplearn.problems.tsp").TravelingSalesmanData)
-    copy!(TravelingSalesmanGenerator, pyimport("miplearn.problems.tsp").TravelingSalesmanGenerator)
+    copy!(
+        TravelingSalesmanGenerator,
+        pyimport("miplearn.problems.tsp").TravelingSalesmanGenerator,
+    )
 end

 function build_tsp_model_jump(data::Any; optimizer)
@@ -19,17 +22,15 @@ function build_tsp_model_jump(data::Any; optimizer)
         data = read_pkl_gz(data)
     end
     model = Model(optimizer)
-    edges = [(i, j) for i in 1:data.n_cities for j in (i+1):data.n_cities]
+    edges = [(i, j) for i = 1:data.n_cities for j = (i+1):data.n_cities]
     x = @variable(model, x[edges], Bin)
-    @objective(model, Min, sum(
-        x[(i, j)] * data.distances[i, j] for (i, j) in edges
-    ))
+    @objective(model, Min, sum(x[(i, j)] * data.distances[i, j] for (i, j) in edges))

     # Eq: Must choose two edges adjacent to each node
     @constraint(
         model,
         eq_degree[i in 1:data.n_cities],
-        sum(x[(min(i, j), max(i, j))] for j in 1:data.n_cities if i != j) == 2
+        sum(x[(min(i, j), max(i, j))] for j = 1:data.n_cities if i != j) == 2
     )

     function lazy_separate(cb_data)
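
Note: the edges comprehension enumerates each undirected city pair exactly once (i < j), and eq_degree then requires exactly two chosen edges per city. A quick REPL check of the enumeration for three cities:

    julia> [(i, j) for i = 1:3 for j = (i+1):3]
    3-element Vector{Tuple{Int64, Int64}}:
     (1, 2)
     (1, 3)
     (2, 3)
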
@@ -41,10 +42,8 @@ function build_tsp_model_jump(data::Any; optimizer)
         for component in nx.connected_components(graph)
             if length(component) < data.n_cities
                 cut_edges = [
-                    [e[1], e[2]]
-                    for e in edges
-                    if (e[1] ∈ component && e[2] ∉ component)
-                    ||
+                    [e[1], e[2]] for
+                    e in edges if (e[1] ∈ component && e[2] ∉ component) ||
                     (e[1] ∉ component && e[2] ∈ component)
                 ]
                 push!(violations, cut_edges)
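
Note: when a connected component of the support graph covers fewer than n_cities cities, cut_edges collects the edges crossing between the component and its complement. These are the support of the standard subtour-elimination inequality, which the surrounding callback (outside this hunk) presumably enforces in a form equivalent to:

    # sum(x[(min(i, j), max(i, j))] for (i, j) in cut_edges) >= 2
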
@@ -18,7 +18,7 @@ Base.@kwdef mutable struct _JumpModelExtData
     cuts_separate::Union{Function,Nothing} = nothing
     lazy_enforce::Union{Function,Nothing} = nothing
     lazy_separate::Union{Function,Nothing} = nothing
-    lp_optimizer
+    lp_optimizer::Any
 end

 function JuMP.copy_extension_data(
@@ -26,9 +26,7 @@ function JuMP.copy_extension_data(
     new_model::AbstractModel,
     ::AbstractModel,
 )
-    new_model.ext[:miplearn] = _JumpModelExtData(
-        lp_optimizer=old_ext.lp_optimizer
-    )
+    new_model.ext[:miplearn] = _JumpModelExtData(lp_optimizer = old_ext.lp_optimizer)
 end

 # -----------------------------------------------------------------------------
@@ -29,9 +29,7 @@ end
 function test_cuts()
     data_filenames = ["$BASEDIR/../fixtures/stab-n50-00000.pkl.gz"]
     clf = pyimport("sklearn.dummy").DummyClassifier()
-    extractor = H5FieldsExtractor(
-        instance_fields=["static_var_obj_coeffs"],
-    )
+    extractor = H5FieldsExtractor(instance_fields = ["static_var_obj_coeffs"])
     comp = MemorizingCutsComponent(clf = clf, extractor = extractor)
     solver = LearningSolver(components = [comp])
     solver.fit(data_filenames)
@@ -32,9 +32,7 @@ end
 function test_lazy()
     data_filenames = ["$BASEDIR/../fixtures/tsp-n20-00000.pkl.gz"]
     clf = pyimport("sklearn.dummy").DummyClassifier()
-    extractor = H5FieldsExtractor(
-        instance_fields=["static_var_obj_coeffs"],
-    )
+    extractor = H5FieldsExtractor(instance_fields = ["static_var_obj_coeffs"])
    comp = MemorizingLazyComponent(clf = clf, extractor = extractor)
     solver = LearningSolver(components = [comp])
     solver.fit(data_filenames)
@@ -11,14 +11,9 @@ function test_problems_tsp()

     data = TravelingSalesmanData(
         n_cities = 6,
-        distances=squareform(pdist([
-            [0.0, 0.0],
-            [1.0, 0.0],
-            [2.0, 0.0],
-            [3.0, 0.0],
-            [0.0, 1.0],
-            [3.0, 1.0],
-        ])),
+        distances = squareform(
+            pdist([[0.0, 0.0], [1.0, 0.0], [2.0, 0.0], [3.0, 0.0], [0.0, 1.0], [3.0, 1.0]]),
+        ),
     )
     model = build_tsp_model_jump(data, optimizer = GLPK.Optimizer)
     model.optimize()
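
Note: a quick sanity check of the fixture. The six cities sit on a 4 x 2 grid, so squareform(pdist(...)) yields a symmetric 6 x 6 matrix of Euclidean distances, for example:

    # distances[1, 2] == 1.0   # (0, 0) to (1, 0)
    # distances[1, 4] == 3.0   # (0, 0) to (3, 0)
    # distances[4, 6] == 1.0   # (3, 0) to (3, 1)
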