Remove Julia components

pull/3/head
Alinson S. Xavier 5 years ago
parent db3e27b039
commit 741af8506b

@@ -1,44 +1,23 @@
PYTHON := python3
PYTEST := pytest
PIP := pip3
JULIA := julia
PYTEST_ARGS := -W ignore::DeprecationWarning -vv -x --log-level=DEBUG
JULIA_ARGS := --color=yes --project=src/julia
JULIA_SYSIMAGE_ARGS := $(JULIA_ARGS) --sysimage build/sysimage.so
PYTHON := python3
PYTEST := pytest
PIP := pip3
PYTEST_ARGS := -W ignore::DeprecationWarning -vv -x --log-level=DEBUG
all: docs test
build/sysimage.so: src/julia/Manifest.toml src/julia/Project.toml
	mkdir -p build
	$(JULIA) $(JULIA_ARGS) src/julia/sysimage.jl
develop:
	cd src/python && $(PYTHON) setup.py develop
docs:
	mkdocs build
install: install-python
install-python:
install:
	cd src/python && $(PYTHON) setup.py install
install-julia:
	$(JULIA) $(JULIA_ARGS) src/julia/setup.jl `which $(PYTHON)`
uninstall:
	$(PIP) uninstall miplearn
test: test-python test-julia
test-python:
test:
	cd src/python && $(PYTEST) $(PYTEST_ARGS)
test-python-watch:
	cd src/python && pytest-watch -- $(PYTEST_ARGS)
test-julia: build/sysimage.so
	$(JULIA) $(JULIA_SYSIMAGE_ARGS) src/julia/test/runtests.jl
.PHONY: test test-python test-julia test-watch docs install
.PHONY: test test-watch docs install

@@ -1,360 +0,0 @@
# This file is machine-generated - editing it directly is not advised
[[Base64]]
uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f"
[[BenchmarkTools]]
deps = ["JSON", "Logging", "Printf", "Statistics", "UUIDs"]
git-tree-sha1 = "9e62e66db34540a0c919d72172cc2f642ac71260"
uuid = "6e4b80f9-dd63-53aa-95a3-0cdb28fa8baf"
version = "0.5.0"
[[Bzip2_jll]]
deps = ["Libdl", "Pkg"]
git-tree-sha1 = "03a44490020826950c68005cafb336e5ba08b7e8"
uuid = "6e34b625-4abd-537c-b88f-471c36dfa7a0"
version = "1.0.6+4"
[[CEnum]]
git-tree-sha1 = "215a9aa4a1f23fbd05b92769fdd62559488d70e9"
uuid = "fa961155-64e5-5f13-b03f-caf6b980ea82"
version = "0.4.1"
[[CPLEX]]
deps = ["Libdl", "LinearAlgebra", "MathOptInterface", "MathProgBase", "SparseArrays"]
git-tree-sha1 = "c3d7c4c3e4d4bd01c5ac89dee420be93ef7ef20b"
uuid = "a076750e-1247-5638-91d2-ce28b192dca0"
version = "0.6.6"
[[CPLEXW]]
deps = ["CEnum", "Libdl"]
git-tree-sha1 = "2259e91418573278d919d6a163634a34ff1adb61"
repo-rev = "master"
repo-url = "https://github.com/iSoron/CPLEXW.git"
uuid = "cfecb002-79c2-11e9-35be-cb59aa640f85"
version = "1.0.0"
[[Calculus]]
deps = ["LinearAlgebra"]
git-tree-sha1 = "f641eb0a4f00c343bbc32346e1217b86f3ce9dad"
uuid = "49dc2e85-a5d0-5ad3-a950-438e2897f1b9"
version = "0.5.1"
[[CodeTracking]]
deps = ["InteractiveUtils", "UUIDs"]
git-tree-sha1 = "ccc043a0df446cac279dca29d13e2827b40aceb5"
uuid = "da1fd8a2-8d9e-5ec2-8556-3022fb5608a2"
version = "0.5.12"
[[CodecBzip2]]
deps = ["Bzip2_jll", "Libdl", "TranscodingStreams"]
git-tree-sha1 = "2e62a725210ce3c3c2e1a3080190e7ca491f18d7"
uuid = "523fee87-0ab8-5b00-afb7-3ecf72e48cfd"
version = "0.7.2"
[[CodecZlib]]
deps = ["TranscodingStreams", "Zlib_jll"]
git-tree-sha1 = "ded953804d019afa9a3f98981d99b33e3db7b6da"
uuid = "944b1d66-785c-5afd-91f1-9de20f533193"
version = "0.7.0"
[[CommonSubexpressions]]
deps = ["MacroTools", "Test"]
git-tree-sha1 = "7b8a93dba8af7e3b42fecabf646260105ac373f7"
uuid = "bbf7d656-a473-5ed7-a52c-81e309532950"
version = "0.3.0"
[[CompilerSupportLibraries_jll]]
deps = ["Libdl", "Pkg"]
git-tree-sha1 = "7c4f882c41faa72118841185afc58a2eb00ef612"
uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae"
version = "0.3.3+0"
[[Conda]]
deps = ["JSON", "VersionParsing"]
git-tree-sha1 = "7a58bb32ce5d85f8bf7559aa7c2842f9aecf52fc"
uuid = "8f4d0f93-b110-5947-807f-2305c1781a2d"
version = "1.4.1"
[[DataStructures]]
deps = ["InteractiveUtils", "OrderedCollections"]
git-tree-sha1 = "88d48e133e6d3dd68183309877eac74393daa7eb"
uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
version = "0.17.20"
[[Dates]]
deps = ["Printf"]
uuid = "ade2ca70-3891-5945-98fb-dc099432e06a"
[[DiffResults]]
deps = ["StaticArrays"]
git-tree-sha1 = "da24935df8e0c6cf28de340b958f6aac88eaa0cc"
uuid = "163ba53b-c6d8-5494-b064-1a9d43ac40c5"
version = "1.0.2"
[[DiffRules]]
deps = ["NaNMath", "Random", "SpecialFunctions"]
git-tree-sha1 = "eb0c34204c8410888844ada5359ac8b96292cfd1"
uuid = "b552c78f-8df3-52c6-915a-8e097449b14b"
version = "1.0.1"
[[Distributed]]
deps = ["Random", "Serialization", "Sockets"]
uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b"
[[FileWatching]]
uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee"
[[ForwardDiff]]
deps = ["CommonSubexpressions", "DiffResults", "DiffRules", "NaNMath", "Random", "SpecialFunctions", "StaticArrays"]
git-tree-sha1 = "1d090099fb82223abc48f7ce176d3f7696ede36d"
uuid = "f6369f11-7733-5829-9624-2563aa707210"
version = "0.10.12"
[[Gurobi]]
deps = ["Libdl", "LinearAlgebra", "MathOptInterface", "MathProgBase", "SparseArrays"]
git-tree-sha1 = "f36a2fa62909675681aec582ccfc4a4a629406e4"
uuid = "2e9cd046-0924-5485-92f1-d5272153d98b"
version = "0.8.1"
[[HTTP]]
deps = ["Base64", "Dates", "IniFile", "MbedTLS", "Sockets"]
git-tree-sha1 = "2ac03263ce44be4222342bca1c51c36ce7566161"
uuid = "cd3eb016-35fb-5094-929b-558a96fad6f3"
version = "0.8.17"
[[IniFile]]
deps = ["Test"]
git-tree-sha1 = "098e4d2c533924c921f9f9847274f2ad89e018b8"
uuid = "83e8ac13-25f8-5344-8a64-a9f2b223428f"
version = "0.5.0"
[[InteractiveUtils]]
deps = ["Markdown"]
uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
[[JSON]]
deps = ["Dates", "Mmap", "Parsers", "Unicode"]
git-tree-sha1 = "b34d7cef7b337321e97d22242c3c2b91f476748e"
uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
version = "0.21.0"
[[JSON2]]
deps = ["Dates", "Parsers", "Test"]
git-tree-sha1 = "66397cc6c08922f98a28ab05a8d3002f9853b129"
uuid = "2535ab7d-5cd8-5a07-80ac-9b1792aadce3"
version = "0.3.2"
[[JSONSchema]]
deps = ["HTTP", "JSON", "ZipFile"]
git-tree-sha1 = "a9ecdbc90be216912a2e3e8a8e38dc4c93f0d065"
uuid = "7d188eb4-7ad8-530c-ae41-71a32a6d4692"
version = "0.3.2"
[[JuMP]]
deps = ["Calculus", "DataStructures", "ForwardDiff", "LinearAlgebra", "MathOptInterface", "MutableArithmetics", "NaNMath", "Random", "SparseArrays", "Statistics"]
git-tree-sha1 = "cbab42e2e912109d27046aa88f02a283a9abac7c"
uuid = "4076af6c-e467-56ae-b986-b466b2749572"
version = "0.21.3"
[[JuliaInterpreter]]
deps = ["CodeTracking", "InteractiveUtils", "Random", "UUIDs"]
git-tree-sha1 = "7b2a1b650cec61a7d8cd8ee9ee7a818b5764d502"
uuid = "aa1ae85d-cabe-5617-a682-6adf51b2e16a"
version = "0.7.26"
[[LibGit2]]
deps = ["Printf"]
uuid = "76f85450-5226-5b5a-8eaa-529ad045b433"
[[Libdl]]
uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb"
[[LinearAlgebra]]
deps = ["Libdl"]
uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
[[Logging]]
uuid = "56ddb016-857b-54e1-b83d-db4d58db5568"
[[LoweredCodeUtils]]
deps = ["JuliaInterpreter"]
git-tree-sha1 = "dbd9336b43c2d6fa492efa09ba3bb10fbdbeeb64"
uuid = "6f1432cf-f94c-5a45-995e-cdbf5db27b0b"
version = "0.4.9"
[[MacroTools]]
deps = ["Markdown", "Random"]
git-tree-sha1 = "f7d2e3f654af75f01ec49be82c231c382214223a"
uuid = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09"
version = "0.5.5"
[[Markdown]]
deps = ["Base64"]
uuid = "d6f4376e-aef5-505a-96c1-9c027394607a"
[[MathOptInterface]]
deps = ["BenchmarkTools", "CodecBzip2", "CodecZlib", "JSON", "JSONSchema", "LinearAlgebra", "MutableArithmetics", "OrderedCollections", "SparseArrays", "Test", "Unicode"]
git-tree-sha1 = "cd2049c055c7d192a235670d50faa375361624ba"
uuid = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
version = "0.9.14"
[[MathProgBase]]
deps = ["LinearAlgebra", "SparseArrays"]
git-tree-sha1 = "9abbe463a1e9fc507f12a69e7f29346c2cdc472c"
uuid = "fdba3010-5040-5b88-9595-932c9decdf73"
version = "0.7.8"
[[MbedTLS]]
deps = ["Dates", "MbedTLS_jll", "Random", "Sockets"]
git-tree-sha1 = "426a6978b03a97ceb7ead77775a1da066343ec6e"
uuid = "739be429-bea8-5141-9913-cc70e7f3736d"
version = "1.0.2"
[[MbedTLS_jll]]
deps = ["Libdl", "Pkg"]
git-tree-sha1 = "a0cb0d489819fa7ea5f9fa84c7e7eba19d8073af"
uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1"
version = "2.16.6+1"
[[Mmap]]
uuid = "a63ad114-7e13-5084-954f-fe012c677804"
[[MutableArithmetics]]
deps = ["LinearAlgebra", "SparseArrays", "Test"]
git-tree-sha1 = "6cf09794783b9de2e662c4e8b60d743021e338d0"
uuid = "d8a4904e-b15c-11e9-3269-09a3773c0cb0"
version = "0.2.10"
[[NaNMath]]
git-tree-sha1 = "c84c576296d0e2fbb3fc134d3e09086b3ea617cd"
uuid = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3"
version = "0.3.4"
[[OpenSpecFun_jll]]
deps = ["CompilerSupportLibraries_jll", "Libdl", "Pkg"]
git-tree-sha1 = "d51c416559217d974a1113522d5919235ae67a87"
uuid = "efe28fd5-8261-553b-a9e1-b2916fc3738e"
version = "0.5.3+3"
[[OrderedCollections]]
git-tree-sha1 = "293b70ac1780f9584c89268a6e2a560d938a7065"
uuid = "bac558e1-5e72-5ebc-8fee-abe8a469f55d"
version = "1.3.0"
[[PackageCompiler]]
deps = ["Libdl", "Pkg", "UUIDs"]
git-tree-sha1 = "98aa9c653e1dc3473bb5050caf8501293db9eee1"
uuid = "9b87118b-4619-50d2-8e1e-99f35a4d4d9d"
version = "1.2.1"
[[Parsers]]
deps = ["Dates", "Test"]
git-tree-sha1 = "8077624b3c450b15c087944363606a6ba12f925e"
uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0"
version = "1.0.10"
[[Pkg]]
deps = ["Dates", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "UUIDs"]
uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
[[Printf]]
deps = ["Unicode"]
uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7"
[[PyCall]]
deps = ["Conda", "Dates", "Libdl", "LinearAlgebra", "MacroTools", "Serialization", "VersionParsing"]
git-tree-sha1 = "3a3fdb9000d35958c9ba2323ca7c4958901f115d"
uuid = "438e738f-606a-5dbb-bf0a-cddfbfd45ab0"
version = "1.91.4"
[[REPL]]
deps = ["InteractiveUtils", "Markdown", "Sockets"]
uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb"
[[Random]]
deps = ["Serialization"]
uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
[[Revise]]
deps = ["CodeTracking", "Distributed", "FileWatching", "JuliaInterpreter", "LibGit2", "LoweredCodeUtils", "OrderedCollections", "Pkg", "REPL", "UUIDs", "Unicode"]
git-tree-sha1 = "0992d4643e27b2deb9f2e4ec7a56b7033813a027"
uuid = "295af30f-e4ad-537b-8983-00126c2a3abe"
version = "2.7.3"
[[SHA]]
uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce"
[[Serialization]]
uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b"
[[Sockets]]
uuid = "6462fe0b-24de-5631-8697-dd941f90decc"
[[SparseArrays]]
deps = ["LinearAlgebra", "Random"]
uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
[[SpecialFunctions]]
deps = ["OpenSpecFun_jll"]
git-tree-sha1 = "d8d8b8a9f4119829410ecd706da4cc8594a1e020"
uuid = "276daf66-3868-5448-9aa4-cd146d93841b"
version = "0.10.3"
[[StaticArrays]]
deps = ["LinearAlgebra", "Random", "Statistics"]
git-tree-sha1 = "016d1e1a00fabc556473b07161da3d39726ded35"
uuid = "90137ffa-7385-5640-81b9-e52037218182"
version = "0.12.4"
[[Statistics]]
deps = ["LinearAlgebra", "SparseArrays"]
uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
[[Test]]
deps = ["Distributed", "InteractiveUtils", "Logging", "Random"]
uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
[[TimerOutputs]]
deps = ["Printf"]
git-tree-sha1 = "f458ca23ff80e46a630922c555d838303e4b9603"
uuid = "a759f4b9-e2f1-59dc-863e-4aeb61b1ea8f"
version = "0.5.6"
[[TinyBnB]]
deps = ["CPLEXW", "Printf", "Random", "Revise", "Test"]
git-tree-sha1 = "b66df27e41e9b83a34e9f48e67876064d5b92a62"
repo-rev = "master"
repo-url = "https://github.com/iSoron/TinyBnB.jl.git"
uuid = "1b2a1171-e557-4eeb-a4d6-6c23d7e94fcd"
version = "1.1.0"
[[TranscodingStreams]]
deps = ["Random", "Test"]
git-tree-sha1 = "7c53c35547de1c5b9d46a4797cf6d8253807108c"
uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa"
version = "0.9.5"
[[UUIDs]]
deps = ["Random", "SHA"]
uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
[[Unicode]]
uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5"
[[VersionParsing]]
git-tree-sha1 = "80229be1f670524750d905f8fc8148e5a8c4537f"
uuid = "81def892-9a0e-5fdd-b105-ffc91e053289"
version = "1.2.0"
[[ZipFile]]
deps = ["Libdl", "Printf", "Zlib_jll"]
git-tree-sha1 = "254975fef2fc526583bb9b7c9420fe66ffe09f2f"
uuid = "a5390f91-8eb1-5f08-bee0-b1d1ffed6cea"
version = "0.9.2"
[[Zlib_jll]]
deps = ["Libdl", "Pkg"]
git-tree-sha1 = "fdd89e5ab270ea0f2a0174bd9093e557d06d4bfa"
uuid = "83775a58-1f1d-513f-b197-d71354ab007a"
version = "1.2.11+16"

@@ -1,24 +0,0 @@
name = "MIPLearn"
uuid = "2b1277c3-b477-4c49-a15e-7ba350325c68"
authors = ["Alinson S Xavier <git@axavier.org>"]
version = "0.1.0"
[deps]
CPLEX = "a076750e-1247-5638-91d2-ce28b192dca0"
CPLEXW = "cfecb002-79c2-11e9-35be-cb59aa640f85"
Gurobi = "2e9cd046-0924-5485-92f1-d5272153d98b"
JSON2 = "2535ab7d-5cd8-5a07-80ac-9b1792aadce3"
JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
Logging = "56ddb016-857b-54e1-b83d-db4d58db5568"
MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
PackageCompiler = "9b87118b-4619-50d2-8e1e-99f35a4d4d9d"
Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
PyCall = "438e738f-606a-5dbb-bf0a-cddfbfd45ab0"
Revise = "295af30f-e4ad-537b-8983-00126c2a3abe"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
TimerOutputs = "a759f4b9-e2f1-59dc-863e-4aeb61b1ea8f"
TinyBnB = "1b2a1171-e557-4eeb-a4d6-6c23d7e94fcd"
[compat]
CPLEX = "0.6"
JuMP = "0.21"

@@ -1,8 +0,0 @@
ENV["PYTHON"] = ARGS[1]
using Pkg
Pkg.instantiate()
Pkg.build("CPLEX")
Pkg.build("Gurobi")
Pkg.build("PyCall")

@@ -1,41 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
__precompile__(false)
module MIPLearn
using PyCall
miplearn = pyimport("miplearn")
Instance = miplearn.Instance
BenchmarkRunner = miplearn.BenchmarkRunner
macro pycall(expr)
quote
err_msg = nothing
result = nothing
try
result = $(esc(expr))
catch err
args = err.val.args[1]
if (err isa PyCall.PyError) && (args isa String) && startswith(args, "Julia")
err_msg = replace(args, r"Stacktrace.*" => "")
else
rethrow(err)
end
end
if err_msg != nothing
error(err_msg)
end
result
end
end
include("log.jl")
include("jump_solver.jl")
include("learning_solver.jl")
include("instance.jl")
export Instance, BenchmarkRunner
end # module

@@ -1,70 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
import Base.Threads.@threads
using TinyBnB, CPLEXW, Printf
instance_name = ARGS[1]
output_filename = ARGS[2]
node_limit = parse(Int, ARGS[3])
mip = open_mip(instance_name)
n_vars = CPXgetnumcols(mip.cplex_env[1], mip.cplex_lp[1])
pseudocost_count_up = [0 for i in 1:n_vars]
pseudocost_count_down = [0 for i in 1:n_vars]
pseudocost_sum_up = [0. for i in 1:n_vars]
pseudocost_sum_down = [0. for i in 1:n_vars]
function full_strong_branching_track(node::Node, progress::Progress)::TinyBnB.Variable
N = length(node.fractional_variables)
scores = Array{Float64}(undef, N)
rates_up = Array{Float64}(undef, N)
rates_down = Array{Float64}(undef, N)
@threads for v in 1:N
fix_vars!(node.mip, node.branch_variables, node.branch_values)
obj_up, obj_down = TinyBnB.probe(node.mip, node.fractional_variables[v])
unfix_vars!(node.mip, node.branch_variables)
delta_up = obj_up - node.obj
delta_down = obj_down - node.obj
frac_up = ceil(node.fractional_values[v]) - node.fractional_values[v]
frac_down = node.fractional_values[v] - floor(node.fractional_values[v])
rates_up[v] = delta_up / frac_up
rates_down[v] = delta_down / frac_down
scores[v] = delta_up * delta_down
end
max_score, max_offset = findmax(scores)
selected_var = node.fractional_variables[max_offset]
if abs(rates_up[max_offset]) < 1e6
pseudocost_count_up[selected_var.index] += 1
pseudocost_sum_up[selected_var.index] += rates_up[max_offset]
end
if abs(rates_down[max_offset]) < 1e6
pseudocost_count_down[selected_var.index] += 1
pseudocost_sum_down[selected_var.index] += rates_down[max_offset]
end
return selected_var
end
branch_and_bound(mip,
node_limit = node_limit,
branch_rule = full_strong_branching_track,
node_rule = best_bound,
print_interval = 100)
priority = [(pseudocost_count_up[v] == 0 || pseudocost_count_down[v] == 0) ? 0 :
(pseudocost_sum_up[v] / pseudocost_count_up[v]) *
(pseudocost_sum_down[v] / pseudocost_count_down[v])
for v in 1:n_vars];
open(output_filename, "w") do file
for var in mip.binary_variables
write(file, @sprintf("%s,%.0f\n", name(mip, var), priority[var.index]))
end
end

@@ -1,61 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using JSON2
import Base: dump
get_instance_features(instance) = [0.]
get_variable_features(instance, var, index) = [0.]
find_violated_lazy_constraints(instance, model) = []
build_lazy_constraint(instance, model, v) = nothing
dump(instance::PyCall.PyObject, filename) = @pycall instance.dump(filename)
load!(instance::PyCall.PyObject, filename) = @pycall instance.load(filename)
macro Instance(klass)
quote
@pydef mutable struct Wrapper <: Instance
function __init__(self, args...; kwargs...)
self.data = $(esc(klass))(args...; kwargs...)
end
function dump(self, filename)
prev_data = self.data
self.data = JSON2.write(prev_data)
Instance.dump(self, filename)
self.data = prev_data
end
function load(self, filename)
Instance.load(self, filename)
self.data = JSON2.read(self.data, $(esc(klass)))
end
to_model(self) =
$(esc(:to_model))(self.data)
get_instance_features(self) =
get_instance_features(self.data)
get_variable_features(self, var, index) =
get_variable_features(self.data, var, index)
function find_violated_lazy_constraints(self, model)
find_violated_lazy_constraints(self.data, model)
end
function build_lazy_constraint(self, model, v)
build_lazy_constraint(self.data, model, v)
end
end
end
end
export get_instance_features,
get_variable_features,
find_violated_lazy_constraints,
build_lazy_constraint,
dump,
load!,
@Instance

@@ -1,254 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using JuMP
using CPLEX
using MathOptInterface
const MOI = MathOptInterface
using TimerOutputs
mutable struct JuMPSolverData
basename_idx_to_var
var_to_basename_idx
optimizer
instance
model
bin_vars
solution::Union{Nothing,Dict{String,Dict{String,Float64}}}
time_limit::Union{Nothing, Float64}
end
function varname_split(varname::String)
m = match(r"([^[]*)\[(.*)\]", varname)
if m == nothing
return varname, ""
end
return m.captures[1], m.captures[2]
end
"""
    optimize_and_capture_output!(model; tee=false)
Optimizes a given JuMP model while capturing the solver log, then returns that log.
If tee=true, prints the solver log to the standard output as the optimization takes place.
"""
function optimize_and_capture_output!(model; tee::Bool=false)
original_stdout = stdout
rd, wr = redirect_stdout()
task = @async begin
log = ""
while true
line = String(readavailable(rd))
isopen(rd) || break
log *= String(line)
if tee
print(original_stdout, line)
flush(original_stdout)
end
end
return log
end
JuMP.optimize!(model)
sleep(1)
redirect_stdout(original_stdout)
close(rd)
return fetch(task)
end
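# --- Illustrative usage sketch (not part of the original file) ---
# Assuming `model` is a JuMP model with an optimizer already attached, the
# helper above runs optimize! while capturing the solver log, which can then
# be inspected or stored:
#
#     log = optimize_and_capture_output!(model, tee=true)
#     @info "Captured $(length(log)) characters of solver output"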
function solve(data::JuMPSolverData; tee::Bool=false)
instance, model = data.instance, data.model
if data.time_limit != nothing
JuMP.set_time_limit_sec(model, data.time_limit)
end
wallclock_time = 0
found_lazy = []
log = ""
while true
log *= optimize_and_capture_output!(model, tee=tee)
wallclock_time += JuMP.solve_time(model)
violations = instance.find_violated_lazy_constraints(model)
if length(violations) == 0
break
end
append!(found_lazy, violations)
for v in violations
instance.build_lazy_constraint(data.model, v)
end
end
update_solution!(data)
instance.found_violated_lazy_constraints = found_lazy
instance.found_violated_user_cuts = []
primal_bound = JuMP.objective_value(model)
dual_bound = JuMP.objective_bound(model)
if JuMP.objective_sense(model) == MOI.MIN_SENSE
sense = "min"
lower_bound = dual_bound
upper_bound = primal_bound
else
sense = "max"
lower_bound = primal_bound
upper_bound = dual_bound
end
return Dict("Lower bound" => lower_bound,
"Upper bound" => upper_bound,
"Sense" => sense,
"Wallclock time" => wallclock_time,
"Nodes" => 1,
"Log" => log,
"Warm start value" => nothing)
end
function solve_lp(data::JuMPSolverData; tee::Bool=false)
model, bin_vars = data.model, data.bin_vars
for var in bin_vars
JuMP.unset_binary(var)
JuMP.set_upper_bound(var, 1.0)
JuMP.set_lower_bound(var, 0.0)
end
log = optimize_and_capture_output!(model, tee=tee)
update_solution!(data)
obj_value = JuMP.objective_value(model)
for var in bin_vars
JuMP.set_binary(var)
end
return Dict("Optimal value" => obj_value,
"Log" => log)
end
function update_solution!(data::JuMPSolverData)
var_to_basename_idx, model = data.var_to_basename_idx, data.model
solution = Dict{String,Dict{String,Float64}}()
for var in JuMP.all_variables(model)
var in keys(var_to_basename_idx) || continue
basename, idx = var_to_basename_idx[var]
if !haskey(solution, basename)
solution[basename] = Dict{String,Float64}()
end
solution[basename][idx] = JuMP.value(var)
end
data.solution = solution
end
function get_variables(data::JuMPSolverData)
var_to_basename_idx, model = data.var_to_basename_idx, data.model
variables = Dict()
for var in JuMP.all_variables(model)
var in keys(var_to_basename_idx) || continue
basename, idx = var_to_basename_idx[var]
if !haskey(variables, basename)
variables[basename] = []
end
push!(variables[basename], idx)
end
return variables
end
function set_instance!(data::JuMPSolverData, instance, model)
data.instance = instance
data.model = model
data.var_to_basename_idx = Dict(var => varname_split(JuMP.name(var))
for var in JuMP.all_variables(model))
data.basename_idx_to_var = Dict(varname_split(JuMP.name(var)) => var
for var in JuMP.all_variables(model))
data.bin_vars = [var
for var in JuMP.all_variables(model)
if JuMP.is_binary(var)]
if data.optimizer != nothing
JuMP.set_optimizer(model, data.optimizer)
end
end
function fix!(data::JuMPSolverData, solution)
count = 0
for (basename, subsolution) in solution
for (idx, value) in subsolution
value != nothing || continue
var = data.basename_idx_to_var[basename, idx]
JuMP.fix(var, value, force=true)
count += 1
end
end
@info "Fixing $count variables"
end
function set_warm_start!(data::JuMPSolverData, solution)
count = 0
for (basename, subsolution) in solution
for (idx, value) in subsolution
value != nothing || continue
var = data.basename_idx_to_var[basename, idx]
JuMP.set_start_value(var, value)
count += 1
end
end
@info "Setting warm start values for $count variables"
end
@pydef mutable struct JuMPSolver <: miplearn.solvers.internal.InternalSolver
function __init__(self; optimizer)
self.data = JuMPSolverData(nothing, # basename_idx_to_var
nothing, # var_to_basename_idx
optimizer,
nothing, # instance
nothing, # model
nothing, # bin_vars
nothing, # solution
nothing, # time limit
)
end
set_warm_start(self, solution) =
set_warm_start!(self.data, solution)
fix(self, solution) =
fix!(self.data, solution)
set_instance(self, instance, model) =
set_instance!(self.data, instance, model)
solve(self; tee=false) =
solve(self.data, tee=tee)
solve_lp(self; tee=false) =
solve_lp(self.data, tee=tee)
get_solution(self) =
self.data.solution
get_variables(self) =
get_variables(self.data)
set_time_limit(self, time_limit) =
self.data.time_limit = time_limit
set_gap_tolerance(self, gap_tolerance) =
@warn "JuMPSolver: set_gap_tolerance not implemented"
set_node_limit(self) =
@warn "JuMPSolver: set_node_limit not implemented"
set_threads(self, threads) =
@warn "JuMPSolver: set_threads not implemented"
set_branching_priorities(self, priorities) =
@warn "JuMPSolver: set_branching_priorities not implemented"
add_constraint(self, constraint) = nothing
clear_warm_start(self) =
error("JuMPSolver.clear_warm_start should never be called")
end
export JuMPSolver, solve!, fit!, add!

@@ -1,28 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
struct LearningSolver
py::PyCall.PyObject
end
function LearningSolver(;
optimizer,
kwargs...,
)::LearningSolver
py = @pycall miplearn.LearningSolver(;
kwargs...,
solver=JuMPSolver(optimizer=optimizer))
return LearningSolver(py)
end
solve!(solver::LearningSolver, instance; kwargs...) =
@pycall solver.py.solve(instance; kwargs...)
fit!(solver::LearningSolver, instances; kwargs...) =
@pycall solver.py.fit(instances; kwargs...)
add!(solver::LearningSolver, component; kwargs...) =
@pycall solver.py.add(component; kwargs...)
export LearningSolver

@@ -1,62 +0,0 @@
import Logging: min_enabled_level, shouldlog, handle_message
using Base.CoreLogging, Logging, Printf
struct TimeLogger <: AbstractLogger
initial_time::Float64
file::Union{Nothing, IOStream}
screen_log_level
io_log_level
end
function TimeLogger(;
initial_time::Float64,
file::Union{Nothing, IOStream} = nothing,
screen_log_level = CoreLogging.Info,
io_log_level = CoreLogging.Info,
) :: TimeLogger
return TimeLogger(initial_time, file, screen_log_level, io_log_level)
end
min_enabled_level(logger::TimeLogger) = logger.io_log_level
shouldlog(logger::TimeLogger, level, _module, group, id) = true
function handle_message(logger::TimeLogger,
level,
message,
_module,
group,
id,
filepath,
line;
kwargs...)
elapsed_time = time() - logger.initial_time
time_string = @sprintf("[%12.3f] ", elapsed_time)
if level >= Logging.Error
color = :light_red
elseif level >= Logging.Warn
color = :light_yellow
else
color = :light_green
end
if level >= logger.screen_log_level
printstyled(time_string, color=color)
println(message)
end
if logger.file != nothing && level >= logger.io_log_level
write(logger.file, time_string)
write(logger.file, message)
write(logger.file, "\n")
flush(logger.file)
end
end
function setup_logger()
initial_time = time()
global_logger(TimeLogger(initial_time=initial_time))
miplearn = pyimport("miplearn")
miplearn.setup_logger(initial_time)
end
export TimeLogger

@@ -1,22 +0,0 @@
using PackageCompiler
using CPLEX
using CPLEXW
using Gurobi
using JuMP
using MathOptInterface
using PyCall
using TimerOutputs
using TinyBnB
pkg = [:CPLEX
:CPLEXW
:Gurobi
:JuMP
:MathOptInterface
:PyCall
:TimerOutputs
:TinyBnB]
@info "Building system image..."
create_sysimage(pkg, sysimage_path="build/sysimage.so")

@@ -1,65 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using Test
using MIPLearn
using CPLEX
using Gurobi
@testset "varname_split" begin
@test MIPLearn.varname_split("x[1]") == ("x", "1")
end
@testset "JuMPSolver" begin
for optimizer in [CPLEX.Optimizer, Gurobi.Optimizer]
instance = KnapsackInstance([23., 26., 20., 18.],
[505., 352., 458., 220.],
67.0)
model = instance.to_model()
solver = JuMPSolver(optimizer=optimizer)
solver.set_instance(instance, model)
solver.set_time_limit(30)
solver.set_warm_start(Dict("x" => Dict(
"1" => 1.0,
"2" => 0.0,
"3" => 0.0,
"4" => 1.0,
)))
stats = solver.solve()
@test stats["Lower bound"] == 1183.0
@test stats["Upper bound"] == 1183.0
@test stats["Sense"] == "max"
@test stats["Wallclock time"] > 0
@test length(stats["Log"]) > 100
solution = solver.get_solution()
@test solution["x"]["1"] == 1.0
@test solution["x"]["2"] == 0.0
@test solution["x"]["3"] == 1.0
@test solution["x"]["4"] == 1.0
stats = solver.solve_lp()
@test round(stats["Optimal value"], digits=3) == 1287.923
@test length(stats["Log"]) > 100
solution = solver.get_solution()
@test round(solution["x"]["1"], digits=3) == 1.000
@test round(solution["x"]["2"], digits=3) == 0.923
@test round(solution["x"]["3"], digits=3) == 1.000
@test round(solution["x"]["4"], digits=3) == 0.000
solver.fix(Dict("x" => Dict(
"1" => 1.0,
"2" => 0.0,
"3" => 0.0,
"4" => 1.0,
)))
stats = solver.solve()
@test stats["Lower bound"] == 725.0
@test stats["Upper bound"] == 725.0
end
end

@@ -1,34 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
import MIPLearn: get_instance_features,
                 get_variable_features,
                 find_violated_lazy_constraints
using JuMP
struct KnapsackData
weights
prices
capacity
end
function to_model(data::KnapsackData)
model = Model()
n = length(data.weights)
@variable(model, x[1:n], Bin)
@objective(model, Max, sum(x[i] * data.prices[i] for i in 1:n))
@constraint(model, sum(x[i] * data.weights[i] for i in 1:n) <= data.capacity)
return model
end
function get_instance_features(data::KnapsackData)
return [0.]
end
function get_variable_features(data::KnapsackData, var, index)
return [0.]
end
KnapsackInstance = @Instance(KnapsackData)

@@ -1,50 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using Test
using MIPLearn
using CPLEX
using Gurobi
@testset "Instance" begin
weights = [23., 26., 20., 18.]
prices = [505., 352., 458., 220.]
capacity = 67.0
instance = KnapsackInstance(weights, prices, capacity)
dump(instance, "tmp/instance.json.gz")
instance = KnapsackInstance([0.0], [0.0], 0.0)
load!(instance, "tmp/instance.json.gz")
@test instance.data.weights == weights
@test instance.data.prices == prices
@test instance.data.capacity == capacity
end
@testset "LearningSolver" begin
for optimizer in [CPLEX.Optimizer, Gurobi.Optimizer]
instance = KnapsackInstance([23., 26., 20., 18.],
[505., 352., 458., 220.],
67.0)
solver = LearningSolver(optimizer=optimizer,
mode="heuristic",
time_limit=90)
stats = solve!(solver, instance)
@test instance.solution["x"]["1"] == 1.0
@test instance.solution["x"]["2"] == 0.0
@test instance.solution["x"]["3"] == 1.0
@test instance.solution["x"]["4"] == 1.0
@test instance.lower_bound == 1183.0
@test instance.upper_bound == 1183.0
@test round(instance.lp_solution["x"]["1"], digits=3) == 1.000
@test round(instance.lp_solution["x"]["2"], digits=3) == 0.923
@test round(instance.lp_solution["x"]["3"], digits=3) == 1.000
@test round(instance.lp_solution["x"]["4"], digits=3) == 0.000
@test round(instance.lp_value, digits=3) == 1287.923
fit!(solver, [instance])
solve!(solver, instance)
end
end

@@ -1,14 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
using Test
using MIPLearn
MIPLearn.setup_logger()
@testset "MIPLearn" begin
include("knapsack.jl")
include("jump_solver_test.jl")
include("learning_solver_test.jl")
end

@@ -12,8 +12,6 @@ from .components.objective import ObjectiveValueComponent
from .components.lazy import LazyConstraintsComponent
from .components.cuts import UserCutsComponent
from .components.primal import PrimalSolutionComponent
from .components.branching import (BranchPriorityComponent,
                                   BranchPriorityExtractor)
from .classifiers.adaptive import AdaptiveClassifier
from .classifiers.threshold import MinPrecisionThreshold

@@ -1,125 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
import logging
import os
import sys
import subprocess
import tempfile
from copy import deepcopy
import numpy as np
from pyomo.core import Var
from pyomo.core.base.label import TextLabeler
from sklearn.neighbors import KNeighborsRegressor
from tqdm import tqdm
from .component import Component
from ..extractors import Extractor, VariableFeaturesExtractor
logger = logging.getLogger(__name__)
class BranchPriorityExtractor(Extractor):
    def extract(self, instances):
        result = {}
        for instance in tqdm(instances,
                             desc="Extract (branch)",
                             disable=len(instances) < 5 or (not sys.stdout.isatty()),
                             ):
            var_split = self.split_variables(instance)
            for (category, var_index_pairs) in var_split.items():
                if category not in result:
                    result[category] = []
                for (var_name, index) in var_index_pairs:
                    result[category] += [instance.branch_priorities[var_name][index]]
        for category in result:
            result[category] = np.array(result[category])
        return result


class BranchPriorityComponent(Component):
    def __init__(self,
                 node_limit=10_000,
                 regressor=KNeighborsRegressor(n_neighbors=1),
                 ):
        self.node_limit = node_limit
        self.regressors = {}
        self.regressor_prototype = regressor

    def before_solve(self, solver, instance, model):
        logger.info("Predicting branching priorities...")
        priorities = self.predict(instance)
        solver.internal_solver.set_branching_priorities(priorities)

    def after_solve(self, solver, instance, model, results):
        pass

    def fit(self, training_instances, n_jobs=1):
        for instance in tqdm(training_instances,
                             desc="Fit (branch)",
                             disable=not sys.stdout.isatty(),
                             ):
            if not hasattr(instance, "branch_priorities"):
                instance.branch_priorities = self.compute_priorities(instance)
        x, y = self.x(training_instances), self.y(training_instances)
        for category in x.keys():
            self.regressors[category] = deepcopy(self.regressor_prototype)
            self.regressors[category].fit(x[category], y[category])

    def x(self, instances):
        return VariableFeaturesExtractor().extract(instances)

    def y(self, instances):
        return BranchPriorityExtractor().extract(instances)

    def compute_priorities(self, instance, model=None):
        # Create LP file
        lp_file = tempfile.NamedTemporaryFile(suffix=".lp")
        if model is None:
            model = instance.to_model()
        model.write(lp_file.name)

        # Run Julia script
        src_dirname = os.path.dirname(os.path.realpath(__file__))
        julia_dirname = "%s/../../../julia" % src_dirname
        priority_file = tempfile.NamedTemporaryFile(mode="r")
        subprocess.run(["julia",
                        "--project=%s" % julia_dirname,
                        "%s/src/branching.jl" % julia_dirname,
                        lp_file.name,
                        priority_file.name,
                        str(self.node_limit)],
                       check=True)

        # Parse output
        tokens = [line.strip().split(",") for line in priority_file.readlines()]
        lp_varname_to_priority = {t[0]: int(t[1]) for t in tokens}

        # Map priorities back to Pyomo variables
        labeler = TextLabeler()
        symbol_map = list(model.solutions.symbol_map.values())[0]
        priorities = {}
        for var in model.component_objects(Var):
            priorities[var.name] = {}
            for index in var:
                category = instance.get_variable_category(var, index)
                if category is None:
                    continue
                lp_varname = symbol_map.getSymbol(var[index], labeler)
                var_priority = lp_varname_to_priority[lp_varname]
                priorities[var.name][index] = var_priority
        return priorities

    def predict(self, instance):
        priority = {}
        x_test = self.x([instance])
        var_split = Extractor.split_variables(instance)
        for category in self.regressors.keys():
            y_test = self.regressors[category].predict(x_test[category])
            for (i, (var, index)) in enumerate(var_split[category]):
                if var not in priority.keys():
                    priority[var] = {}
                priority[var][index] = y_test[i]
        return priority

@@ -1,45 +0,0 @@
# MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
from unittest.mock import Mock
import numpy as np
from miplearn import BranchPriorityComponent, BranchPriorityExtractor
from miplearn.classifiers import Regressor
from miplearn.tests import get_test_pyomo_instances
def test_branch_extract():
    instances, models = get_test_pyomo_instances()
    instances[0].branch_priorities = {"x": {0: 100, 1: 200, 2: 300, 3: 400}}
    instances[1].branch_priorities = {"x": {0: 150, 1: 250, 2: 350, 3: 450}}
    priorities = BranchPriorityExtractor().extract(instances)
    assert priorities["default"].tolist() == [100, 200, 300, 400, 150, 250, 350, 450]


def test_branch_calculate():
    instances, models = get_test_pyomo_instances()
    comp = BranchPriorityComponent()
    # If instances do not have branch_priority property, fit should compute them
    comp.fit(instances)
    assert instances[0].branch_priorities == {"x": {0: 5730, 1: 24878, 2: 0, 3: 0,}}
    # If instances already have branch_priority, fit should not modify them
    instances[0].branch_priorities = {"x": {0: 100, 1: 200, 2: 300, 3: 400}}
    comp.fit(instances)
    assert instances[0].branch_priorities == {"x": {0: 100, 1: 200, 2: 300, 3: 400}}


def test_branch_x_y_predict():
    instances, models = get_test_pyomo_instances()
    instances[0].branch_priorities = {"x": {0: 100, 1: 200, 2: 300, 3: 400}}
    instances[1].branch_priorities = {"x": {0: 150, 1: 250, 2: 350, 3: 450}}
    comp = BranchPriorityComponent()
    comp.regressors["default"] = Mock(spec=Regressor)
    comp.regressors["default"].predict = Mock(return_value=np.array([150., 100., 0., 0.]))
    x, y = comp.x(instances), comp.y(instances)
    assert x["default"].shape == (8, 5)
    assert y["default"].shape == (8,)
    pred = comp.predict(instances[0])
    assert pred == {"x": {0: 150., 1: 100., 2: 0., 3: 0.}}

@@ -6,7 +6,7 @@ import logging
import pickle
import tempfile
from miplearn import BranchPriorityComponent
from miplearn import LazyConstraintsComponent
from miplearn import LearningSolver
from . import _get_instance, _get_internal_solvers
@@ -61,7 +61,6 @@ def test_parallel_solve():
def test_add_components():
    solver = LearningSolver(components=[])
    solver.add(BranchPriorityComponent())
    solver.add(BranchPriorityComponent())
    solver.add(LazyConstraintsComponent())
    solver.add(LazyConstraintsComponent())
    assert len(solver.components) == 1
    assert "BranchPriorityComponent" in solver.components
