Created
January 19, 2014 00:14
-
-
Save lucasb-eyer/8498698 to your computer and use it in GitHub Desktop.
SVM.jl prediction performance
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Fitted linear SVM
* Non-zero weights: 100
* Accuracy: 0.963000
* Iterations: 100
* Converged: true
Computing Raw scores
--------------------
vectorized julia
elapsed time: 0.641106641 seconds (80048000 bytes allocated)
BLAS.gemv
elapsed time: 0.676560927 seconds (80048000 bytes allocated)
loop+BLAS.dot[]
elapsed time: 6.282269339 seconds (9520048000 bytes allocated)
loop+BLAS.dot slice
elapsed time: 18.940013219 seconds (8990688000 bytes allocated)
loop+BLAS.dot sub
elapsed time: 18.61981412 seconds (8750688000 bytes allocated)
loop+BLAS.dot ptr
elapsed time: 0.674728387 seconds (80048000 bytes allocated)
Predictions
-----------
predict
elapsed time: 1.772697464 seconds (80048000 bytes allocated)
pure julia
elapsed time: 0.783535007 seconds (160096000 bytes allocated)
BLAS.gemv
elapsed time: 0.759791132 seconds (160096000 bytes allocated)
loop+BLAS.dot
elapsed time: 0.668235577 seconds (80048000 bytes allocated)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
module TestSpeed
# Micro-benchmark comparing several ways of computing linear-SVM decision
# scores and class predictions: vectorized Julia, a direct BLAS.gemv call,
# and column-wise loops using dot() over copies, slice(), sub(), and raw
# pointers. Written for pre-1.0 Julia (srand, Array(T, n), vectorized sign,
# slice/sub) -- it will not run unmodified on modern Julia.

using Base.Test
using SVM

# Deterministic synthetic data: 100 features x 10000 examples.
# Columns are examples (the column-wise dot products below rely on this).
srand(1)
X = rand(100, 10000)
# Shift feature 10 down for the second half of the examples, presumably to
# make the two classes linearly separable -- TODO confirm.
X[10,5000:end] -= 1.1
# Labels: +1.0 for the first 4999 columns, -1.0 for the rest.
const Y = [i < 5000 ? 1.0 : -1.0 for i=1:10000]
# Fit once; every prediction variant below reuses the weight vector fit.w.
const model = svm(X, Y)
println(model)

# Now come the different ways of computing the predictions.
# "_raw" variants return real-valued decision scores; the non-"_raw"
# variants additionally apply sign() to get the -1/+1 class labels.

# Variant 1: vectorized Julia -- X' materializes a transposed copy here.
predict1_raw(fit::SVM.SVMFit, X) = X' * fit.w
predict1(fit::SVM.SVMFit, X) = sign(X' * fit.w)

# Variant 2: direct BLAS matrix-vector product with the transpose flag,
# avoiding the explicit X' copy.
predict2_raw(fit::SVM.SVMFit, X) = BLAS.gemv('T', X, fit.w)
predict2(fit::SVM.SVMFit, X) = sign(BLAS.gemv('T', X, fit.w))

# Probably need to wait for Julia's #3496 and friends for any of
# the non-pointer ones to be viable.

# Variant 3a: loop over columns; X[:,i] allocates a fresh copy of each
# column every iteration.
function predict3_raw_a(fit::SVM.SVMFit, X::Matrix{Float64})
    n, l = size(X)
    preds = Array(Float64, l)
    for i in 1:l
        preds[i] = dot(fit.w, X[:,i])
    end
    return preds
end

# Variant 3b: same loop, but using slice() to reference the column
# without copying its data.
function predict3_raw_b(fit::SVM.SVMFit, X::Matrix{Float64})
    n, l = size(X)
    preds = Array(Float64, l)
    for i in 1:l
        preds[i] = dot(fit.w, slice(X, :, i))
    end
    return preds
end

# Variant 3c: same loop, but with sub() views instead of slices.
function predict3_raw_c(fit::SVM.SVMFit, X::Matrix{Float64})
    n, l = size(X)
    preds = Array(Float64, l)
    for i in 1:l
        preds[i] = dot(fit.w, sub(X, :, i))
    end
    return preds
end

# Variant 3d: same loop, but handing BLAS a raw pointer offset to column i.
# Relies on X being a densely-stored column-major Matrix{Float64}.
function predict3_raw_d(fit::SVM.SVMFit, X::Matrix{Float64})
    n, l = size(X)
    preds = Array(Float64, l)
    for i in 1:l
        preds[i] = BLAS.dot(n, fit.w, 1, pointer(X) + (i-1)*n*sizeof(Float64), 1)
    end
    return preds
end

# Class prediction (sign of the score) via the pointer-based BLAS.dot loop.
function predict3(fit::SVM.SVMFit, X::Matrix{Float64})
    n, l = size(X)
    preds = Array(Float64, l)
    for i in 1:l
        # See above.
        preds[i] = sign(BLAS.dot(n, fit.w, 1, pointer(X) + (i-1)*n*sizeof(Float64), 1))
    end
    return preds
end

# Test for correctness first: every variant must agree with SVM.predict.
@test predict(model, X) == predict1(model, X)
@test predict(model, X) == predict2(model, X)
@test predict(model, X) == predict3(model, X)
@test predict(model, X) == sign(predict1_raw(model, X))
@test predict(model, X) == sign(predict2_raw(model, X))
@test predict(model, X) == sign(predict3_raw_a(model, X))
@test predict(model, X) == sign(predict3_raw_b(model, X))
@test predict(model, X) == sign(predict3_raw_c(model, X))
@test predict(model, X) == sign(predict3_raw_d(model, X))

# Always perform some warm-up before benchmarking.
const Nw=100   # warm-up iterations (triggers compilation before timing)
const N=1000   # timed iterations per variant

# Benchmark the raw-score variants.
println("Computing Raw scores")
println("--------------------")
println("vectorized julia")
for i=1:Nw predict1_raw(model, X) end
@time for i=1:N predict1_raw(model, X) end
println("BLAS.gemv")
for i=1:Nw predict2_raw(model, X) end
@time for i=1:N predict2_raw(model, X) end
println("loop+BLAS.dot[]")
for i=1:Nw predict3_raw_a(model, X) end
@time for i=1:N predict3_raw_a(model, X) end
println("loop+BLAS.dot slice")
for i=1:Nw predict3_raw_b(model, X) end
@time for i=1:N predict3_raw_b(model, X) end
println("loop+BLAS.dot sub")
for i=1:Nw predict3_raw_c(model, X) end
@time for i=1:N predict3_raw_c(model, X) end
println("loop+BLAS.dot ptr")
for i=1:Nw predict3_raw_d(model, X) end
@time for i=1:N predict3_raw_d(model, X) end

# Benchmark the full sign()-applying prediction variants, including the
# SVM package's own predict() as the baseline.
println("Predictions")
println("-----------")
println("predict")
for i=1:Nw predict(model, X) end
@time for i=1:N predict(model, X) end
println("pure julia")
for i=1:Nw predict1(model, X) end
@time for i=1:N predict1(model, X) end
println("BLAS.gemv")
for i=1:Nw predict2(model, X) end
@time for i=1:N predict2(model, X) end
println("loop+BLAS.dot")
for i=1:Nw predict3(model, X) end
@time for i=1:N predict3(model, X) end
end
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment