@melvincabatuan, created May 27, 2015 08:25
caffe classify.py example [Currently Downloading Model]
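The session below runs Caffe's bundled python/classify.py on examples/images/cat.jpg, writing predictions to the file foo. Network construction succeeds (the full deploy definition and per-layer setup are logged), but the run aborts while loading the pretrained weights: as the title notes, bvlc_reference_caffenet.caffemodel was still downloading, and the partial file cannot be parsed.

For context, here is a minimal sketch of what classify.py does under the hood with Caffe's Python API of this era; the paths assume the standard Caffe repository layout and a fully downloaded model:

    import numpy as np
    import caffe

    caffe.set_mode_cpu()

    # Deploy definition and pretrained weights (fetch the weights first with
    # scripts/download_model_binary.py models/bvlc_reference_caffenet).
    model_def = 'models/bvlc_reference_caffenet/deploy.prototxt'
    pretrained = 'models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel'
    mean = np.load('python/caffe/imagenet/ilsvrc_2012_mean.npy').mean(1).mean(1)

    classifier = caffe.Classifier(model_def, pretrained,
                                  mean=mean,
                                  channel_swap=(2, 1, 0),  # RGB -> BGR, as in training
                                  raw_scale=255,           # caffe.io loads images in [0, 1]
                                  image_dims=(256, 256))   # resize before cropping

    image = caffe.io.load_image('examples/images/cat.jpg')
    predictions = classifier.predict([image])  # (1, 1000) array of class probabilities
    print(predictions[0].argmax())
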
[root@cobalt caffe]# python python/classify.py examples/images/cat.jpg foo
CPU mode
WARNING: Logging before InitGoogleLogging() is written to STDERR
I0527 16:23:20.268012 16218 net.cpp:42] Initializing net from parameters:
name: "CaffeNet"
input: "data"
input_dim: 10
input_dim: 3
input_dim: 227
input_dim: 227
state {
phase: TEST
}
layer {
name: "conv1"
type: "Convolution"
bottom: "data"
top: "conv1"
convolution_param {
num_output: 96
kernel_size: 11
stride: 4
}
}
layer {
name: "relu1"
type: "ReLU"
bottom: "conv1"
top: "conv1"
}
layer {
name: "pool1"
type: "Pooling"
bottom: "conv1"
top: "pool1"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
}
}
layer {
name: "norm1"
type: "LRN"
bottom: "pool1"
top: "norm1"
lrn_param {
local_size: 5
alpha: 0.0001
beta: 0.75
}
}
layer {
name: "conv2"
type: "Convolution"
bottom: "norm1"
top: "conv2"
convolution_param {
num_output: 256
pad: 2
kernel_size: 5
group: 2
}
}
layer {
name: "relu2"
type: "ReLU"
bottom: "conv2"
top: "conv2"
}
layer {
name: "pool2"
type: "Pooling"
bottom: "conv2"
top: "pool2"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
}
}
layer {
name: "norm2"
type: "LRN"
bottom: "pool2"
top: "norm2"
lrn_param {
local_size: 5
alpha: 0.0001
beta: 0.75
}
}
layer {
name: "conv3"
type: "Convolution"
bottom: "norm2"
top: "conv3"
convolution_param {
num_output: 384
pad: 1
kernel_size: 3
}
}
layer {
name: "relu3"
type: "ReLU"
bottom: "conv3"
top: "conv3"
}
layer {
name: "conv4"
type: "Convolution"
bottom: "conv3"
top: "conv4"
convolution_param {
num_output: 384
pad: 1
kernel_size: 3
group: 2
}
}
layer {
name: "relu4"
type: "ReLU"
bottom: "conv4"
top: "conv4"
}
layer {
name: "conv5"
type: "Convolution"
bottom: "conv4"
top: "conv5"
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
group: 2
}
}
layer {
name: "relu5"
type: "ReLU"
bottom: "conv5"
top: "conv5"
}
layer {
name: "pool5"
type: "Pooling"
bottom: "conv5"
top: "pool5"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
}
}
layer {
name: "fc6"
type: "InnerProduct"
bottom: "pool5"
top: "fc6"
inner_product_param {
num_output: 4096
}
}
layer {
name: "relu6"
type: "ReLU"
bottom: "fc6"
top: "fc6"
}
layer {
name: "drop6"
type: "Dropout"
bottom: "fc6"
top: "fc6"
dropout_param {
dropout_ratio: 0.5
}
}
layer {
name: "fc7"
type: "InnerProduct"
bottom: "fc6"
top: "fc7"
inner_product_param {
num_output: 4096
}
}
layer {
name: "relu7"
type: "ReLU"
bottom: "fc7"
top: "fc7"
}
layer {
name: "drop7"
type: "Dropout"
bottom: "fc7"
top: "fc7"
dropout_param {
dropout_ratio: 0.5
}
}
layer {
name: "fc8"
type: "InnerProduct"
bottom: "fc7"
top: "fc8"
inner_product_param {
num_output: 1000
}
}
layer {
name: "prob"
type: "Softmax"
bottom: "fc8"
top: "prob"
}
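The dump above is the CaffeNet deploy definition: an AlexNet-style network (five convolutions with ReLU, max pooling, and LRN, followed by three fully connected layers) over a 10 x 3 x 227 x 227 input batch. The per-layer top shapes logged next follow from the usual output-size arithmetic; a small check, assuming Caffe's convention of flooring for convolution and ceiling for pooling:

    import math

    def conv_out(in_size, kernel, stride=1, pad=0):
        return (in_size + 2 * pad - kernel) // stride + 1

    def pool_out(in_size, kernel, stride=1, pad=0):
        return int(math.ceil((in_size + 2 * pad - kernel) / float(stride))) + 1

    assert conv_out(227, kernel=11, stride=4) == 55  # conv1 -> 10 96 55 55
    assert pool_out(55, kernel=3, stride=2) == 27    # pool1 -> 10 96 27 27
    assert conv_out(27, kernel=5, pad=2) == 27       # conv2 -> 10 256 27 27
    assert pool_out(27, kernel=3, stride=2) == 13    # pool2 -> 10 256 13 13
    assert pool_out(13, kernel=3, stride=2) == 6     # pool5 -> 10 256 6 6
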
I0527 16:23:20.268777 16218 net.cpp:370] Input 0 -> data
I0527 16:23:20.268811 16218 layer_factory.hpp:74] Creating layer conv1
I0527 16:23:20.268827 16218 net.cpp:90] Creating Layer conv1
I0527 16:23:20.268836 16218 net.cpp:410] conv1 <- data
I0527 16:23:20.268846 16218 net.cpp:368] conv1 -> conv1
I0527 16:23:20.268857 16218 net.cpp:120] Setting up conv1
I0527 16:23:20.268934 16218 net.cpp:127] Top shape: 10 96 55 55 (2904000)
I0527 16:23:20.268950 16218 layer_factory.hpp:74] Creating layer relu1
I0527 16:23:20.268962 16218 net.cpp:90] Creating Layer relu1
I0527 16:23:20.268970 16218 net.cpp:410] relu1 <- conv1
I0527 16:23:20.268977 16218 net.cpp:357] relu1 -> conv1 (in-place)
I0527 16:23:20.268986 16218 net.cpp:120] Setting up relu1
I0527 16:23:20.268996 16218 net.cpp:127] Top shape: 10 96 55 55 (2904000)
I0527 16:23:20.269002 16218 layer_factory.hpp:74] Creating layer pool1
I0527 16:23:20.269011 16218 net.cpp:90] Creating Layer pool1
I0527 16:23:20.269018 16218 net.cpp:410] pool1 <- conv1
I0527 16:23:20.269026 16218 net.cpp:368] pool1 -> pool1
I0527 16:23:20.269034 16218 net.cpp:120] Setting up pool1
I0527 16:23:20.269049 16218 net.cpp:127] Top shape: 10 96 27 27 (699840)
I0527 16:23:20.269057 16218 layer_factory.hpp:74] Creating layer norm1
I0527 16:23:20.269067 16218 net.cpp:90] Creating Layer norm1
I0527 16:23:20.269073 16218 net.cpp:410] norm1 <- pool1
I0527 16:23:20.269081 16218 net.cpp:368] norm1 -> norm1
I0527 16:23:20.269090 16218 net.cpp:120] Setting up norm1
I0527 16:23:20.269101 16218 net.cpp:127] Top shape: 10 96 27 27 (699840)
I0527 16:23:20.269109 16218 layer_factory.hpp:74] Creating layer conv2
I0527 16:23:20.269117 16218 net.cpp:90] Creating Layer conv2
I0527 16:23:20.269124 16218 net.cpp:410] conv2 <- norm1
I0527 16:23:20.269132 16218 net.cpp:368] conv2 -> conv2
I0527 16:23:20.269140 16218 net.cpp:120] Setting up conv2
I0527 16:23:20.269745 16218 net.cpp:127] Top shape: 10 256 27 27 (1866240)
I0527 16:23:20.269758 16218 layer_factory.hpp:74] Creating layer relu2
I0527 16:23:20.269767 16218 net.cpp:90] Creating Layer relu2
I0527 16:23:20.269774 16218 net.cpp:410] relu2 <- conv2
I0527 16:23:20.269783 16218 net.cpp:357] relu2 -> conv2 (in-place)
I0527 16:23:20.269790 16218 net.cpp:120] Setting up relu2
I0527 16:23:20.269798 16218 net.cpp:127] Top shape: 10 256 27 27 (1866240)
I0527 16:23:20.269805 16218 layer_factory.hpp:74] Creating layer pool2
I0527 16:23:20.269814 16218 net.cpp:90] Creating Layer pool2
I0527 16:23:20.269819 16218 net.cpp:410] pool2 <- conv2
I0527 16:23:20.269827 16218 net.cpp:368] pool2 -> pool2
I0527 16:23:20.269836 16218 net.cpp:120] Setting up pool2
I0527 16:23:20.269846 16218 net.cpp:127] Top shape: 10 256 13 13 (432640)
I0527 16:23:20.269853 16218 layer_factory.hpp:74] Creating layer norm2
I0527 16:23:20.269861 16218 net.cpp:90] Creating Layer norm2
I0527 16:23:20.269867 16218 net.cpp:410] norm2 <- pool2
I0527 16:23:20.269877 16218 net.cpp:368] norm2 -> norm2
I0527 16:23:20.269886 16218 net.cpp:120] Setting up norm2
I0527 16:23:20.269894 16218 net.cpp:127] Top shape: 10 256 13 13 (432640)
I0527 16:23:20.269901 16218 layer_factory.hpp:74] Creating layer conv3
I0527 16:23:20.269909 16218 net.cpp:90] Creating Layer conv3
I0527 16:23:20.269917 16218 net.cpp:410] conv3 <- norm2
I0527 16:23:20.269924 16218 net.cpp:368] conv3 -> conv3
I0527 16:23:20.269935 16218 net.cpp:120] Setting up conv3
I0527 16:23:20.271605 16218 net.cpp:127] Top shape: 10 384 13 13 (648960)
I0527 16:23:20.271630 16218 layer_factory.hpp:74] Creating layer relu3
I0527 16:23:20.271644 16218 net.cpp:90] Creating Layer relu3
I0527 16:23:20.271656 16218 net.cpp:410] relu3 <- conv3
I0527 16:23:20.271670 16218 net.cpp:357] relu3 -> conv3 (in-place)
I0527 16:23:20.271684 16218 net.cpp:120] Setting up relu3
I0527 16:23:20.271697 16218 net.cpp:127] Top shape: 10 384 13 13 (648960)
I0527 16:23:20.271708 16218 layer_factory.hpp:74] Creating layer conv4
I0527 16:23:20.271720 16218 net.cpp:90] Creating Layer conv4
I0527 16:23:20.271731 16218 net.cpp:410] conv4 <- conv3
I0527 16:23:20.271744 16218 net.cpp:368] conv4 -> conv4
I0527 16:23:20.271756 16218 net.cpp:120] Setting up conv4
I0527 16:23:20.273030 16218 net.cpp:127] Top shape: 10 384 13 13 (648960)
I0527 16:23:20.273044 16218 layer_factory.hpp:74] Creating layer relu4
I0527 16:23:20.273053 16218 net.cpp:90] Creating Layer relu4
I0527 16:23:20.273061 16218 net.cpp:410] relu4 <- conv4
I0527 16:23:20.273068 16218 net.cpp:357] relu4 -> conv4 (in-place)
I0527 16:23:20.273077 16218 net.cpp:120] Setting up relu4
I0527 16:23:20.273085 16218 net.cpp:127] Top shape: 10 384 13 13 (648960)
I0527 16:23:20.273092 16218 layer_factory.hpp:74] Creating layer conv5
I0527 16:23:20.273100 16218 net.cpp:90] Creating Layer conv5
I0527 16:23:20.273108 16218 net.cpp:410] conv5 <- conv4
I0527 16:23:20.273116 16218 net.cpp:368] conv5 -> conv5
I0527 16:23:20.273125 16218 net.cpp:120] Setting up conv5
I0527 16:23:20.273898 16218 net.cpp:127] Top shape: 10 256 13 13 (432640)
I0527 16:23:20.273913 16218 layer_factory.hpp:74] Creating layer relu5
I0527 16:23:20.273922 16218 net.cpp:90] Creating Layer relu5
I0527 16:23:20.273929 16218 net.cpp:410] relu5 <- conv5
I0527 16:23:20.273937 16218 net.cpp:357] relu5 -> conv5 (in-place)
I0527 16:23:20.273946 16218 net.cpp:120] Setting up relu5
I0527 16:23:20.273953 16218 net.cpp:127] Top shape: 10 256 13 13 (432640)
I0527 16:23:20.273960 16218 layer_factory.hpp:74] Creating layer pool5
I0527 16:23:20.273969 16218 net.cpp:90] Creating Layer pool5
I0527 16:23:20.273975 16218 net.cpp:410] pool5 <- conv5
I0527 16:23:20.273983 16218 net.cpp:368] pool5 -> pool5
I0527 16:23:20.273993 16218 net.cpp:120] Setting up pool5
I0527 16:23:20.274003 16218 net.cpp:127] Top shape: 10 256 6 6 (92160)
I0527 16:23:20.274010 16218 layer_factory.hpp:74] Creating layer fc6
I0527 16:23:20.274019 16218 net.cpp:90] Creating Layer fc6
I0527 16:23:20.274026 16218 net.cpp:410] fc6 <- pool5
I0527 16:23:20.274035 16218 net.cpp:368] fc6 -> fc6
I0527 16:23:20.274045 16218 net.cpp:120] Setting up fc6
I0527 16:23:20.358563 16218 net.cpp:127] Top shape: 10 4096 (40960)
I0527 16:23:20.358619 16218 layer_factory.hpp:74] Creating layer relu6
I0527 16:23:20.358649 16218 net.cpp:90] Creating Layer relu6
I0527 16:23:20.358659 16218 net.cpp:410] relu6 <- fc6
I0527 16:23:20.358672 16218 net.cpp:357] relu6 -> fc6 (in-place)
I0527 16:23:20.358685 16218 net.cpp:120] Setting up relu6
I0527 16:23:20.358695 16218 net.cpp:127] Top shape: 10 4096 (40960)
I0527 16:23:20.358705 16218 layer_factory.hpp:74] Creating layer drop6
I0527 16:23:20.358716 16218 net.cpp:90] Creating Layer drop6
I0527 16:23:20.358741 16218 net.cpp:410] drop6 <- fc6
I0527 16:23:20.358752 16218 net.cpp:357] drop6 -> fc6 (in-place)
I0527 16:23:20.358762 16218 net.cpp:120] Setting up drop6
I0527 16:23:20.358775 16218 net.cpp:127] Top shape: 10 4096 (40960)
I0527 16:23:20.358784 16218 layer_factory.hpp:74] Creating layer fc7
I0527 16:23:20.358796 16218 net.cpp:90] Creating Layer fc7
I0527 16:23:20.358814 16218 net.cpp:410] fc7 <- fc6
I0527 16:23:20.358824 16218 net.cpp:368] fc7 -> fc7
I0527 16:23:20.358835 16218 net.cpp:120] Setting up fc7
I0527 16:23:20.393887 16218 net.cpp:127] Top shape: 10 4096 (40960)
I0527 16:23:20.393940 16218 layer_factory.hpp:74] Creating layer relu7
I0527 16:23:20.393959 16218 net.cpp:90] Creating Layer relu7
I0527 16:23:20.393970 16218 net.cpp:410] relu7 <- fc7
I0527 16:23:20.393981 16218 net.cpp:357] relu7 -> fc7 (in-place)
I0527 16:23:20.393992 16218 net.cpp:120] Setting up relu7
I0527 16:23:20.394002 16218 net.cpp:127] Top shape: 10 4096 (40960)
I0527 16:23:20.394009 16218 layer_factory.hpp:74] Creating layer drop7
I0527 16:23:20.394037 16218 net.cpp:90] Creating Layer drop7
I0527 16:23:20.394044 16218 net.cpp:410] drop7 <- fc7
I0527 16:23:20.394054 16218 net.cpp:357] drop7 -> fc7 (in-place)
I0527 16:23:20.394063 16218 net.cpp:120] Setting up drop7
I0527 16:23:20.394073 16218 net.cpp:127] Top shape: 10 4096 (40960)
I0527 16:23:20.394080 16218 layer_factory.hpp:74] Creating layer fc8
I0527 16:23:20.394093 16218 net.cpp:90] Creating Layer fc8
I0527 16:23:20.394101 16218 net.cpp:410] fc8 <- fc7
I0527 16:23:20.394112 16218 net.cpp:368] fc8 -> fc8
I0527 16:23:20.394122 16218 net.cpp:120] Setting up fc8
I0527 16:23:20.406705 16218 net.cpp:127] Top shape: 10 1000 (10000)
I0527 16:23:20.406754 16218 layer_factory.hpp:74] Creating layer prob
I0527 16:23:20.406771 16218 net.cpp:90] Creating Layer prob
I0527 16:23:20.406781 16218 net.cpp:410] prob <- fc8
I0527 16:23:20.406793 16218 net.cpp:368] prob -> prob
I0527 16:23:20.406806 16218 net.cpp:120] Setting up prob
I0527 16:23:20.406826 16218 net.cpp:127] Top shape: 10 1000 (10000)
I0527 16:23:20.406833 16218 net.cpp:194] prob does not need backward computation.
I0527 16:23:20.406841 16218 net.cpp:194] fc8 does not need backward computation.
I0527 16:23:20.406849 16218 net.cpp:194] drop7 does not need backward computation.
I0527 16:23:20.406857 16218 net.cpp:194] relu7 does not need backward computation.
I0527 16:23:20.406862 16218 net.cpp:194] fc7 does not need backward computation.
I0527 16:23:20.406888 16218 net.cpp:194] drop6 does not need backward computation.
I0527 16:23:20.406896 16218 net.cpp:194] relu6 does not need backward computation.
I0527 16:23:20.406905 16218 net.cpp:194] fc6 does not need backward computation.
I0527 16:23:20.406913 16218 net.cpp:194] pool5 does not need backward computation.
I0527 16:23:20.406922 16218 net.cpp:194] relu5 does not need backward computation.
I0527 16:23:20.406930 16218 net.cpp:194] conv5 does not need backward computation.
I0527 16:23:20.406937 16218 net.cpp:194] relu4 does not need backward computation.
I0527 16:23:20.406945 16218 net.cpp:194] conv4 does not need backward computation.
I0527 16:23:20.406955 16218 net.cpp:194] relu3 does not need backward computation.
I0527 16:23:20.406961 16218 net.cpp:194] conv3 does not need backward computation.
I0527 16:23:20.406970 16218 net.cpp:194] norm2 does not need backward computation.
I0527 16:23:20.406978 16218 net.cpp:194] pool2 does not need backward computation.
I0527 16:23:20.406985 16218 net.cpp:194] relu2 does not need backward computation.
I0527 16:23:20.406992 16218 net.cpp:194] conv2 does not need backward computation.
I0527 16:23:20.406999 16218 net.cpp:194] norm1 does not need backward computation.
I0527 16:23:20.407006 16218 net.cpp:194] pool1 does not need backward computation.
I0527 16:23:20.407014 16218 net.cpp:194] relu1 does not need backward computation.
I0527 16:23:20.407021 16218 net.cpp:194] conv1 does not need backward computation.
I0527 16:23:20.407027 16218 net.cpp:235] This network produces output prob
I0527 16:23:20.407043 16218 net.cpp:482] Collecting Learning Rate and Weight Decay.
I0527 16:23:20.407054 16218 net.cpp:247] Network initialization done.
I0527 16:23:20.407063 16218 net.cpp:248] Memory required for data: 62497920
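The memory figure is consistent with the shapes above: 62,497,920 bytes is 4 bytes per float32 times 15,624,480 elements, which is the input blob plus every top blob printed during setup. A quick check using the counts in parentheses above:

    top_counts = [
        2904000, 2904000,                # conv1, relu1 (in-place tops are counted too)
        699840, 699840,                  # pool1, norm1
        1866240, 1866240,                # conv2, relu2
        432640, 432640,                  # pool2, norm2
        648960, 648960, 648960, 648960,  # conv3, relu3, conv4, relu4
        432640, 432640,                  # conv5, relu5
        92160,                           # pool5
        40960, 40960, 40960,             # fc6, relu6, drop6
        40960, 40960, 40960,             # fc7, relu7, drop7
        10000, 10000,                    # fc8, prob
    ]
    data_count = 10 * 3 * 227 * 227      # the input blob: 1,545,870 elements
    assert 4 * (data_count + sum(top_counts)) == 62497920

Initialization is done at this point; the crash below happens in the next step, loading the pretrained weights.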
F0527 16:23:20.412070 16218 upgrade_proto.cpp:935] Check failed: ReadProtoFromBinaryFile(param_file, param) Failed to parse NetParameter file: python/../models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel
*** Check failure stack trace: ***
Aborted
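The failure is in weight loading, not in the network definition: ReadProtoFromBinaryFile cannot parse bvlc_reference_caffenet.caffemodel because the file on disk is incomplete (it was still downloading when this was run, per the title). The likely fix is simply to let the download finish, e.g. with Caffe's helper script, and re-run:

    python scripts/download_model_binary.py models/bvlc_reference_caffenet

A quick way to sanity-check the weights file before re-running classify.py is to parse it directly with the generated protobuf bindings (note this reads the whole file, roughly 230 MB, into memory):

    from caffe.proto import caffe_pb2

    net_param = caffe_pb2.NetParameter()
    path = 'models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel'
    with open(path, 'rb') as f:
        net_param.ParseFromString(f.read())  # raises DecodeError if truncated
    # Older snapshots use the deprecated V1 'layers' field, newer ones 'layer'.
    print('parsed %d layers' % max(len(net_param.layer), len(net_param.layers)))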