@amiltonwong / 76d5ba61e69f2e89a896 — created July 24, 2015
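A Caffe training log for the CIFAR-10 "quick" example: a first run of 4,000 iterations at base_lr 0.001, then a second run that resumes from the iteration-4000 snapshot and fine-tunes for 1,000 more iterations at base_lr 0.0001. Assuming the stock Caffe examples layout, a log like this is what examples/cifar10/train_quick.sh produces; that script (exact paths may differ per build) runs roughly "caffe train --solver=examples/cifar10/cifar10_quick_solver.prototxt" followed by "caffe train --solver=examples/cifar10/cifar10_quick_solver_lr1.prototxt --snapshot=examples/cifar10/cifar10_quick_iter_4000.solverstate".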
I0724 15:20:24.137687 26781 caffe.cpp:113] Use GPU with device ID 0
I0724 15:20:24.351411 26781 caffe.cpp:121] Starting Optimization
I0724 15:20:24.351496 26781 solver.cpp:32] Initializing solver from parameters:
test_iter: 100
test_interval: 500
base_lr: 0.001
display: 100
max_iter: 4000
lr_policy: "fixed"
momentum: 0.9
weight_decay: 0.004
snapshot: 4000
snapshot_prefix: "examples/cifar10/cifar10_quick"
solver_mode: GPU
net: "examples/cifar10/cifar10_quick_train_test.prototxt"
I0724 15:20:24.351536 26781 solver.cpp:70] Creating training net from net file: examples/cifar10/cifar10_quick_train_test.prototxt
I0724 15:20:24.351855 26781 net.cpp:257] The NetState phase (0) differed from the phase (1) specified by a rule in layer cifar
I0724 15:20:24.351873 26781 net.cpp:257] The NetState phase (0) differed from the phase (1) specified by a rule in layer accuracy
I0724 15:20:24.351958 26781 net.cpp:42] Initializing net from parameters:
name: "CIFAR10_quick"
state {
phase: TRAIN
}
layer {
name: "cifar"
type: "Data"
top: "data"
top: "label"
include {
phase: TRAIN
}
transform_param {
mean_file: "examples/cifar10/mean.binaryproto"
}
data_param {
source: "examples/cifar10/cifar10_train_lmdb"
batch_size: 100
backend: LMDB
}
}
layer {
name: "conv1"
type: "Convolution"
bottom: "data"
top: "conv1"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 32
pad: 2
kernel_size: 5
stride: 1
weight_filler {
type: "gaussian"
std: 0.0001
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "pool1"
type: "Pooling"
bottom: "conv1"
top: "pool1"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
}
}
layer {
name: "relu1"
type: "ReLU"
bottom: "pool1"
top: "pool1"
}
layer {
name: "conv2"
type: "Convolution"
bottom: "pool1"
top: "conv2"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 32
pad: 2
kernel_size: 5
stride: 1
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "relu2"
type: "ReLU"
bottom: "conv2"
top: "conv2"
}
layer {
name: "pool2"
type: "Pooling"
bottom: "conv2"
top: "pool2"
pooling_param {
pool: AVE
kernel_size: 3
stride: 2
}
}
layer {
name: "conv3"
type: "Convolution"
bottom: "pool2"
top: "conv3"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 64
pad: 2
kernel_size: 5
stride: 1
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "relu3"
type: "ReLU"
bottom: "conv3"
top: "conv3"
}
layer {
name: "pool3"
type: "Pooling"
bottom: "conv3"
top: "pool3"
pooling_param {
pool: AVE
kernel_size: 3
stride: 2
}
}
layer {
name: "ip1"
type: "InnerProduct"
bottom: "pool3"
top: "ip1"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
inner_product_param {
num_output: 64
weight_filler {
type: "gaussian"
std: 0.1
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "ip2"
type: "InnerProduct"
bottom: "ip1"
top: "ip2"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
inner_product_param {
num_output: 10
weight_filler {
type: "gaussian"
std: 0.1
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "loss"
type: "SoftmaxWithLoss"
bottom: "ip2"
bottom: "label"
top: "loss"
}
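Every convolution above uses pad 2 / kernel 5 / stride 1, which preserves spatial size, and every pooling layer uses kernel 3 / stride 2 with Caffe's round-up output formula, which halves it. A small Python sketch of that arithmetic (my own illustration, not part of the log) reproduces the "Top shape" lines printed during setup below:

import math

def conv_out(i, pad=2, k=5, s=1):
    # Caffe convolution: floor((i + 2*pad - k) / s) + 1
    return (i + 2 * pad - k) // s + 1

def pool_out(i, k=3, s=2):
    # Caffe pooling rounds up: ceil((i - k) / s) + 1
    return int(math.ceil((i - k) / float(s))) + 1

h = 32                       # CIFAR-10 images are 3x32x32
h = pool_out(conv_out(h))    # conv1 -> 32, pool1 -> 16
h = pool_out(conv_out(h))    # conv2 -> 16, pool2 -> 8
h = pool_out(conv_out(h))    # conv3 -> 8,  pool3 -> 4
print(h)                     # 4, so ip1 sees 64 * 4 * 4 = 1024 inputs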
I0724 15:20:24.352023 26781 layer_factory.hpp:74] Creating layer cifar
I0724 15:20:24.352041 26781 net.cpp:84] Creating Layer cifar
I0724 15:20:24.352046 26781 net.cpp:338] cifar -> data
I0724 15:20:24.352074 26781 net.cpp:338] cifar -> label
I0724 15:20:24.352087 26781 net.cpp:113] Setting up cifar
I0724 15:20:24.352169 26781 db.cpp:34] Opened lmdb examples/cifar10/cifar10_train_lmdb
I0724 15:20:24.352215 26781 data_layer.cpp:67] output data size: 100,3,32,32
I0724 15:20:24.352226 26781 data_transformer.cpp:22] Loading mean file from: examples/cifar10/mean.binaryproto
I0724 15:20:24.352715 26781 net.cpp:120] Top shape: 100 3 32 32 (307200)
I0724 15:20:24.352723 26781 net.cpp:120] Top shape: 100 (100)
I0724 15:20:24.352730 26781 layer_factory.hpp:74] Creating layer conv1
I0724 15:20:24.352751 26781 net.cpp:84] Creating Layer conv1
I0724 15:20:24.352780 26781 net.cpp:380] conv1 <- data
I0724 15:20:24.352792 26781 net.cpp:338] conv1 -> conv1
I0724 15:20:24.352802 26781 net.cpp:113] Setting up conv1
I0724 15:20:24.415385 26781 net.cpp:120] Top shape: 100 32 32 32 (3276800)
I0724 15:20:24.415421 26781 layer_factory.hpp:74] Creating layer pool1
I0724 15:20:24.415436 26781 net.cpp:84] Creating Layer pool1
I0724 15:20:24.415441 26781 net.cpp:380] pool1 <- conv1
I0724 15:20:24.415451 26781 net.cpp:338] pool1 -> pool1
I0724 15:20:24.415459 26781 net.cpp:113] Setting up pool1
I0724 15:20:24.415549 26781 net.cpp:120] Top shape: 100 32 16 16 (819200)
I0724 15:20:24.415565 26781 layer_factory.hpp:74] Creating layer relu1
I0724 15:20:24.415580 26781 net.cpp:84] Creating Layer relu1
I0724 15:20:24.415585 26781 net.cpp:380] relu1 <- pool1
I0724 15:20:24.415590 26781 net.cpp:327] relu1 -> pool1 (in-place)
I0724 15:20:24.415594 26781 net.cpp:113] Setting up relu1
I0724 15:20:24.415668 26781 net.cpp:120] Top shape: 100 32 16 16 (819200)
I0724 15:20:24.415675 26781 layer_factory.hpp:74] Creating layer conv2
I0724 15:20:24.415702 26781 net.cpp:84] Creating Layer conv2
I0724 15:20:24.415706 26781 net.cpp:380] conv2 <- pool1
I0724 15:20:24.415711 26781 net.cpp:338] conv2 -> conv2
I0724 15:20:24.415719 26781 net.cpp:113] Setting up conv2
I0724 15:20:24.416695 26781 net.cpp:120] Top shape: 100 32 16 16 (819200)
I0724 15:20:24.416708 26781 layer_factory.hpp:74] Creating layer relu2
I0724 15:20:24.416713 26781 net.cpp:84] Creating Layer relu2
I0724 15:20:24.416718 26781 net.cpp:380] relu2 <- conv2
I0724 15:20:24.416723 26781 net.cpp:327] relu2 -> conv2 (in-place)
I0724 15:20:24.416728 26781 net.cpp:113] Setting up relu2
I0724 15:20:24.416787 26781 net.cpp:120] Top shape: 100 32 16 16 (819200)
I0724 15:20:24.416795 26781 layer_factory.hpp:74] Creating layer pool2
I0724 15:20:24.416810 26781 net.cpp:84] Creating Layer pool2
I0724 15:20:24.416813 26781 net.cpp:380] pool2 <- conv2
I0724 15:20:24.416818 26781 net.cpp:338] pool2 -> pool2
I0724 15:20:24.416832 26781 net.cpp:113] Setting up pool2
I0724 15:20:24.416990 26781 net.cpp:120] Top shape: 100 32 8 8 (204800)
I0724 15:20:24.416997 26781 layer_factory.hpp:74] Creating layer conv3
I0724 15:20:24.417004 26781 net.cpp:84] Creating Layer conv3
I0724 15:20:24.417007 26781 net.cpp:380] conv3 <- pool2
I0724 15:20:24.417013 26781 net.cpp:338] conv3 -> conv3
I0724 15:20:24.417019 26781 net.cpp:113] Setting up conv3
I0724 15:20:24.418752 26781 net.cpp:120] Top shape: 100 64 8 8 (409600)
I0724 15:20:24.418766 26781 layer_factory.hpp:74] Creating layer relu3
I0724 15:20:24.418772 26781 net.cpp:84] Creating Layer relu3
I0724 15:20:24.418776 26781 net.cpp:380] relu3 <- conv3
I0724 15:20:24.418781 26781 net.cpp:327] relu3 -> conv3 (in-place)
I0724 15:20:24.418787 26781 net.cpp:113] Setting up relu3
I0724 15:20:24.418829 26781 net.cpp:120] Top shape: 100 64 8 8 (409600)
I0724 15:20:24.418836 26781 layer_factory.hpp:74] Creating layer pool3
I0724 15:20:24.418841 26781 net.cpp:84] Creating Layer pool3
I0724 15:20:24.418845 26781 net.cpp:380] pool3 <- conv3
I0724 15:20:24.418850 26781 net.cpp:338] pool3 -> pool3
I0724 15:20:24.418855 26781 net.cpp:113] Setting up pool3
I0724 15:20:24.418905 26781 net.cpp:120] Top shape: 100 64 4 4 (102400)
I0724 15:20:24.418912 26781 layer_factory.hpp:74] Creating layer ip1
I0724 15:20:24.418921 26781 net.cpp:84] Creating Layer ip1
I0724 15:20:24.418926 26781 net.cpp:380] ip1 <- pool3
I0724 15:20:24.418932 26781 net.cpp:338] ip1 -> ip1
I0724 15:20:24.418941 26781 net.cpp:113] Setting up ip1
I0724 15:20:24.420871 26781 net.cpp:120] Top shape: 100 64 (6400)
I0724 15:20:24.420881 26781 layer_factory.hpp:74] Creating layer ip2
I0724 15:20:24.420888 26781 net.cpp:84] Creating Layer ip2
I0724 15:20:24.420892 26781 net.cpp:380] ip2 <- ip1
I0724 15:20:24.420913 26781 net.cpp:338] ip2 -> ip2
I0724 15:20:24.420920 26781 net.cpp:113] Setting up ip2
I0724 15:20:24.420950 26781 net.cpp:120] Top shape: 100 10 (1000)
I0724 15:20:24.420959 26781 layer_factory.hpp:74] Creating layer loss
I0724 15:20:24.420967 26781 net.cpp:84] Creating Layer loss
I0724 15:20:24.420970 26781 net.cpp:380] loss <- ip2
I0724 15:20:24.420975 26781 net.cpp:380] loss <- label
I0724 15:20:24.420982 26781 net.cpp:338] loss -> loss
I0724 15:20:24.420989 26781 net.cpp:113] Setting up loss
I0724 15:20:24.420994 26781 layer_factory.hpp:74] Creating layer loss
I0724 15:20:24.421064 26781 net.cpp:120] Top shape: (1)
I0724 15:20:24.421072 26781 net.cpp:122] with loss weight 1
I0724 15:20:24.421088 26781 net.cpp:167] loss needs backward computation.
I0724 15:20:24.421092 26781 net.cpp:167] ip2 needs backward computation.
I0724 15:20:24.421097 26781 net.cpp:167] ip1 needs backward computation.
I0724 15:20:24.421099 26781 net.cpp:167] pool3 needs backward computation.
I0724 15:20:24.421103 26781 net.cpp:167] relu3 needs backward computation.
I0724 15:20:24.421105 26781 net.cpp:167] conv3 needs backward computation.
I0724 15:20:24.421110 26781 net.cpp:167] pool2 needs backward computation.
I0724 15:20:24.421113 26781 net.cpp:167] relu2 needs backward computation.
I0724 15:20:24.421116 26781 net.cpp:167] conv2 needs backward computation.
I0724 15:20:24.421119 26781 net.cpp:167] relu1 needs backward computation.
I0724 15:20:24.421123 26781 net.cpp:167] pool1 needs backward computation.
I0724 15:20:24.421126 26781 net.cpp:167] conv1 needs backward computation.
I0724 15:20:24.421129 26781 net.cpp:169] cifar does not need backward computation.
I0724 15:20:24.421133 26781 net.cpp:205] This network produces output loss
I0724 15:20:24.421142 26781 net.cpp:447] Collecting Learning Rate and Weight Decay.
I0724 15:20:24.421150 26781 net.cpp:217] Network initialization done.
I0724 15:20:24.421154 26781 net.cpp:218] Memory required for data: 31978804
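That memory figure is exact bookkeeping over the top blobs listed above: their element counts (307200 + 100 + 3276800 + ... + 1000 + 1, in-place ReLU tops included) sum to 7,994,701, and at 4 bytes per float32 that is 7,994,701 × 4 = 31,978,804 bytes, roughly 30.5 MB.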
I0724 15:20:24.421432 26781 solver.cpp:154] Creating test net (#0) specified by net file: examples/cifar10/cifar10_quick_train_test.prototxt
I0724 15:20:24.421455 26781 net.cpp:257] The NetState phase (1) differed from the phase (0) specified by a rule in layer cifar
I0724 15:20:24.421550 26781 net.cpp:42] Initializing net from parameters:
name: "CIFAR10_quick"
state {
phase: TEST
}
layer {
name: "cifar"
type: "Data"
top: "data"
top: "label"
include {
phase: TEST
}
transform_param {
mean_file: "examples/cifar10/mean.binaryproto"
}
data_param {
source: "examples/cifar10/cifar10_test_lmdb"
batch_size: 100
backend: LMDB
}
}
layer {
name: "conv1"
type: "Convolution"
bottom: "data"
top: "conv1"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 32
pad: 2
kernel_size: 5
stride: 1
weight_filler {
type: "gaussian"
std: 0.0001
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "pool1"
type: "Pooling"
bottom: "conv1"
top: "pool1"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
}
}
layer {
name: "relu1"
type: "ReLU"
bottom: "pool1"
top: "pool1"
}
layer {
name: "conv2"
type: "Convolution"
bottom: "pool1"
top: "conv2"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 32
pad: 2
kernel_size: 5
stride: 1
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "relu2"
type: "ReLU"
bottom: "conv2"
top: "conv2"
}
layer {
name: "pool2"
type: "Pooling"
bottom: "conv2"
top: "pool2"
pooling_param {
pool: AVE
kernel_size: 3
stride: 2
}
}
layer {
name: "conv3"
type: "Convolution"
bottom: "pool2"
top: "conv3"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 64
pad: 2
kernel_size: 5
stride: 1
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "relu3"
type: "ReLU"
bottom: "conv3"
top: "conv3"
}
layer {
name: "pool3"
type: "Pooling"
bottom: "conv3"
top: "pool3"
pooling_param {
pool: AVE
kernel_size: 3
stride: 2
}
}
layer {
name: "ip1"
type: "InnerProduct"
bottom: "pool3"
top: "ip1"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
inner_product_param {
num_output: 64
weight_filler {
type: "gaussian"
std: 0.1
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "ip2"
type: "InnerProduct"
bottom: "ip1"
top: "ip2"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
inner_product_param {
num_output: 10
weight_filler {
type: "gaussian"
std: 0.1
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "accuracy"
type: "Accuracy"
bottom: "ip2"
bottom: "label"
top: "accuracy"
include {
phase: TEST
}
}
layer {
name: "loss"
type: "SoftmaxWithLoss"
bottom: "ip2"
bottom: "label"
top: "loss"
}
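Because ip2 feeds both accuracy and loss in this phase, and label does as well, Caffe inserts Split layers (ip2_ip2_0_split, label_cifar_1_split) automatically during setup; the extra layers in the instantiation log below are generated, not declared in the prototxt.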
I0724 15:20:24.421622 26781 layer_factory.hpp:74] Creating layer cifar
I0724 15:20:24.421632 26781 net.cpp:84] Creating Layer cifar
I0724 15:20:24.421636 26781 net.cpp:338] cifar -> data
I0724 15:20:24.421644 26781 net.cpp:338] cifar -> label
I0724 15:20:24.421650 26781 net.cpp:113] Setting up cifar
I0724 15:20:24.421689 26781 db.cpp:34] Opened lmdb examples/cifar10/cifar10_test_lmdb
I0724 15:20:24.421721 26781 data_layer.cpp:67] output data size: 100,3,32,32
I0724 15:20:24.421728 26781 data_transformer.cpp:22] Loading mean file from: examples/cifar10/mean.binaryproto
I0724 15:20:24.422838 26781 net.cpp:120] Top shape: 100 3 32 32 (307200)
I0724 15:20:24.422873 26781 net.cpp:120] Top shape: 100 (100)
I0724 15:20:24.422884 26781 layer_factory.hpp:74] Creating layer label_cifar_1_split
I0724 15:20:24.422909 26781 net.cpp:84] Creating Layer label_cifar_1_split
I0724 15:20:24.422924 26781 net.cpp:380] label_cifar_1_split <- label
I0724 15:20:24.422937 26781 net.cpp:338] label_cifar_1_split -> label_cifar_1_split_0
I0724 15:20:24.422950 26781 net.cpp:338] label_cifar_1_split -> label_cifar_1_split_1
I0724 15:20:24.422962 26781 net.cpp:113] Setting up label_cifar_1_split
I0724 15:20:24.423040 26781 net.cpp:120] Top shape: 100 (100)
I0724 15:20:24.423063 26781 net.cpp:120] Top shape: 100 (100)
I0724 15:20:24.423074 26781 layer_factory.hpp:74] Creating layer conv1
I0724 15:20:24.423099 26781 net.cpp:84] Creating Layer conv1
I0724 15:20:24.423111 26781 net.cpp:380] conv1 <- data
I0724 15:20:24.423126 26781 net.cpp:338] conv1 -> conv1
I0724 15:20:24.423151 26781 net.cpp:113] Setting up conv1
I0724 15:20:24.424074 26781 net.cpp:120] Top shape: 100 32 32 32 (3276800)
I0724 15:20:24.424105 26781 layer_factory.hpp:74] Creating layer pool1
I0724 15:20:24.424116 26781 net.cpp:84] Creating Layer pool1
I0724 15:20:24.424123 26781 net.cpp:380] pool1 <- conv1
I0724 15:20:24.424186 26781 net.cpp:338] pool1 -> pool1
I0724 15:20:24.424202 26781 net.cpp:113] Setting up pool1
I0724 15:20:24.424496 26781 net.cpp:120] Top shape: 100 32 16 16 (819200)
I0724 15:20:24.424515 26781 layer_factory.hpp:74] Creating layer relu1
I0724 15:20:24.424533 26781 net.cpp:84] Creating Layer relu1
I0724 15:20:24.424541 26781 net.cpp:380] relu1 <- pool1
I0724 15:20:24.424551 26781 net.cpp:327] relu1 -> pool1 (in-place)
I0724 15:20:24.424561 26781 net.cpp:113] Setting up relu1
I0724 15:20:24.424649 26781 net.cpp:120] Top shape: 100 32 16 16 (819200)
I0724 15:20:24.424661 26781 layer_factory.hpp:74] Creating layer conv2
I0724 15:20:24.424679 26781 net.cpp:84] Creating Layer conv2
I0724 15:20:24.424686 26781 net.cpp:380] conv2 <- pool1
I0724 15:20:24.424700 26781 net.cpp:338] conv2 -> conv2
I0724 15:20:24.424715 26781 net.cpp:113] Setting up conv2
I0724 15:20:24.426895 26781 net.cpp:120] Top shape: 100 32 16 16 (819200)
I0724 15:20:24.426990 26781 layer_factory.hpp:74] Creating layer relu2
I0724 15:20:24.427023 26781 net.cpp:84] Creating Layer relu2
I0724 15:20:24.427033 26781 net.cpp:380] relu2 <- conv2
I0724 15:20:24.427085 26781 net.cpp:327] relu2 -> conv2 (in-place)
I0724 15:20:24.427103 26781 net.cpp:113] Setting up relu2
I0724 15:20:24.427176 26781 net.cpp:120] Top shape: 100 32 16 16 (819200)
I0724 15:20:24.427186 26781 layer_factory.hpp:74] Creating layer pool2
I0724 15:20:24.427203 26781 net.cpp:84] Creating Layer pool2
I0724 15:20:24.427208 26781 net.cpp:380] pool2 <- conv2
I0724 15:20:24.427213 26781 net.cpp:338] pool2 -> pool2
I0724 15:20:24.427232 26781 net.cpp:113] Setting up pool2
I0724 15:20:24.427284 26781 net.cpp:120] Top shape: 100 32 8 8 (204800)
I0724 15:20:24.427291 26781 layer_factory.hpp:74] Creating layer conv3
I0724 15:20:24.427302 26781 net.cpp:84] Creating Layer conv3
I0724 15:20:24.427307 26781 net.cpp:380] conv3 <- pool2
I0724 15:20:24.427314 26781 net.cpp:338] conv3 -> conv3
I0724 15:20:24.427325 26781 net.cpp:113] Setting up conv3
I0724 15:20:24.429347 26781 net.cpp:120] Top shape: 100 64 8 8 (409600)
I0724 15:20:24.429378 26781 layer_factory.hpp:74] Creating layer relu3
I0724 15:20:24.429389 26781 net.cpp:84] Creating Layer relu3
I0724 15:20:24.429394 26781 net.cpp:380] relu3 <- conv3
I0724 15:20:24.429404 26781 net.cpp:327] relu3 -> conv3 (in-place)
I0724 15:20:24.429414 26781 net.cpp:113] Setting up relu3
I0724 15:20:24.429577 26781 net.cpp:120] Top shape: 100 64 8 8 (409600)
I0724 15:20:24.429586 26781 layer_factory.hpp:74] Creating layer pool3
I0724 15:20:24.429594 26781 net.cpp:84] Creating Layer pool3
I0724 15:20:24.429599 26781 net.cpp:380] pool3 <- conv3
I0724 15:20:24.429605 26781 net.cpp:338] pool3 -> pool3
I0724 15:20:24.429612 26781 net.cpp:113] Setting up pool3
I0724 15:20:24.429666 26781 net.cpp:120] Top shape: 100 64 4 4 (102400)
I0724 15:20:24.429673 26781 layer_factory.hpp:74] Creating layer ip1
I0724 15:20:24.429682 26781 net.cpp:84] Creating Layer ip1
I0724 15:20:24.429685 26781 net.cpp:380] ip1 <- pool3
I0724 15:20:24.429692 26781 net.cpp:338] ip1 -> ip1
I0724 15:20:24.429702 26781 net.cpp:113] Setting up ip1
I0724 15:20:24.431738 26781 net.cpp:120] Top shape: 100 64 (6400)
I0724 15:20:24.431764 26781 layer_factory.hpp:74] Creating layer ip2
I0724 15:20:24.431777 26781 net.cpp:84] Creating Layer ip2
I0724 15:20:24.431784 26781 net.cpp:380] ip2 <- ip1
I0724 15:20:24.431797 26781 net.cpp:338] ip2 -> ip2
I0724 15:20:24.431829 26781 net.cpp:113] Setting up ip2
I0724 15:20:24.431875 26781 net.cpp:120] Top shape: 100 10 (1000)
I0724 15:20:24.431890 26781 layer_factory.hpp:74] Creating layer ip2_ip2_0_split
I0724 15:20:24.431906 26781 net.cpp:84] Creating Layer ip2_ip2_0_split
I0724 15:20:24.431916 26781 net.cpp:380] ip2_ip2_0_split <- ip2
I0724 15:20:24.431927 26781 net.cpp:338] ip2_ip2_0_split -> ip2_ip2_0_split_0
I0724 15:20:24.431941 26781 net.cpp:338] ip2_ip2_0_split -> ip2_ip2_0_split_1
I0724 15:20:24.431947 26781 net.cpp:113] Setting up ip2_ip2_0_split
I0724 15:20:24.431957 26781 net.cpp:120] Top shape: 100 10 (1000)
I0724 15:20:24.431962 26781 net.cpp:120] Top shape: 100 10 (1000)
I0724 15:20:24.431967 26781 layer_factory.hpp:74] Creating layer accuracy
I0724 15:20:24.431978 26781 net.cpp:84] Creating Layer accuracy
I0724 15:20:24.431982 26781 net.cpp:380] accuracy <- ip2_ip2_0_split_0
I0724 15:20:24.431988 26781 net.cpp:380] accuracy <- label_cifar_1_split_0
I0724 15:20:24.431993 26781 net.cpp:338] accuracy -> accuracy
I0724 15:20:24.432003 26781 net.cpp:113] Setting up accuracy
I0724 15:20:24.432018 26781 net.cpp:120] Top shape: (1)
I0724 15:20:24.432023 26781 layer_factory.hpp:74] Creating layer loss
I0724 15:20:24.432030 26781 net.cpp:84] Creating Layer loss
I0724 15:20:24.432035 26781 net.cpp:380] loss <- ip2_ip2_0_split_1
I0724 15:20:24.432040 26781 net.cpp:380] loss <- label_cifar_1_split_1
I0724 15:20:24.432046 26781 net.cpp:338] loss -> loss
I0724 15:20:24.432054 26781 net.cpp:113] Setting up loss
I0724 15:20:24.432060 26781 layer_factory.hpp:74] Creating layer loss
I0724 15:20:24.432152 26781 net.cpp:120] Top shape: (1)
I0724 15:20:24.432159 26781 net.cpp:122] with loss weight 1
I0724 15:20:24.432173 26781 net.cpp:167] loss needs backward computation.
I0724 15:20:24.432188 26781 net.cpp:169] accuracy does not need backward computation.
I0724 15:20:24.432193 26781 net.cpp:167] ip2_ip2_0_split needs backward computation.
I0724 15:20:24.432200 26781 net.cpp:167] ip2 needs backward computation.
I0724 15:20:24.432206 26781 net.cpp:167] ip1 needs backward computation.
I0724 15:20:24.432214 26781 net.cpp:167] pool3 needs backward computation.
I0724 15:20:24.432224 26781 net.cpp:167] relu3 needs backward computation.
I0724 15:20:24.432230 26781 net.cpp:167] conv3 needs backward computation.
I0724 15:20:24.432235 26781 net.cpp:167] pool2 needs backward computation.
I0724 15:20:24.432240 26781 net.cpp:167] relu2 needs backward computation.
I0724 15:20:24.432250 26781 net.cpp:167] conv2 needs backward computation.
I0724 15:20:24.432255 26781 net.cpp:167] relu1 needs backward computation.
I0724 15:20:24.432368 26781 net.cpp:167] pool1 needs backward computation.
I0724 15:20:24.432376 26781 net.cpp:167] conv1 needs backward computation.
I0724 15:20:24.432386 26781 net.cpp:169] label_cifar_1_split does not need backward computation.
I0724 15:20:24.432395 26781 net.cpp:169] cifar does not need backward computation.
I0724 15:20:24.432417 26781 net.cpp:205] This network produces output accuracy
I0724 15:20:24.432425 26781 net.cpp:205] This network produces output loss
I0724 15:20:24.432443 26781 net.cpp:447] Collecting Learning Rate and Weight Decay.
I0724 15:20:24.432459 26781 net.cpp:217] Network initialization done.
I0724 15:20:24.432468 26781 net.cpp:218] Memory required for data: 31987608
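The test net needs 8,804 bytes more than the training net's 31,978,804: the two label split tops (100 + 100), the two ip2 split tops (1,000 + 1,000) and the accuracy scalar add 2,201 floats, and 2,201 × 4 = 8,804.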
I0724 15:20:24.432510 26781 solver.cpp:42] Solver scaffolding done.
I0724 15:20:24.432535 26781 solver.cpp:222] Solving CIFAR10_quick
I0724 15:20:24.432540 26781 solver.cpp:223] Learning Rate Policy: fixed
I0724 15:20:24.432546 26781 solver.cpp:266] Iteration 0, Testing net (#0)
I0724 15:20:24.876049 26781 solver.cpp:315] Test net output #0: accuracy = 0.1409
I0724 15:20:24.876102 26781 solver.cpp:315] Test net output #1: loss = 2.30229 (* 1 = 2.30229 loss)
I0724 15:20:24.882349 26781 solver.cpp:189] Iteration 0, loss = 2.30285
I0724 15:20:24.882375 26781 solver.cpp:204] Train net output #0: loss = 2.30285 (* 1 = 2.30285 loss)
I0724 15:20:24.882385 26781 solver.cpp:464] Iteration 0, lr = 0.001
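These iteration-0 numbers are a good sanity check: with ten classes, an untrained softmax should give about 0.1 accuracy and a loss of ln(10) ≈ 2.3026, and the log shows 0.1409 and 2.30229/2.30285, right where a freshly initialized net should sit.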
I0724 15:20:26.019618 26781 solver.cpp:189] Iteration 100, loss = 1.71262
I0724 15:20:26.019659 26781 solver.cpp:204] Train net output #0: loss = 1.71262 (* 1 = 1.71262 loss)
I0724 15:20:26.019667 26781 solver.cpp:464] Iteration 100, lr = 0.001
I0724 15:20:27.161996 26781 solver.cpp:189] Iteration 200, loss = 1.61527
I0724 15:20:27.162050 26781 solver.cpp:204] Train net output #0: loss = 1.61527 (* 1 = 1.61527 loss)
I0724 15:20:27.162058 26781 solver.cpp:464] Iteration 200, lr = 0.001
I0724 15:20:28.303331 26781 solver.cpp:189] Iteration 300, loss = 1.32747
I0724 15:20:28.303442 26781 solver.cpp:204] Train net output #0: loss = 1.32747 (* 1 = 1.32747 loss)
I0724 15:20:28.303455 26781 solver.cpp:464] Iteration 300, lr = 0.001
I0724 15:20:29.442226 26781 solver.cpp:189] Iteration 400, loss = 1.27856
I0724 15:20:29.442315 26781 solver.cpp:204] Train net output #0: loss = 1.27856 (* 1 = 1.27856 loss)
I0724 15:20:29.442324 26781 solver.cpp:464] Iteration 400, lr = 0.001
I0724 15:20:30.591828 26781 solver.cpp:266] Iteration 500, Testing net (#0)
I0724 15:20:30.981670 26781 solver.cpp:315] Test net output #0: accuracy = 0.5677
I0724 15:20:30.981708 26781 solver.cpp:315] Test net output #1: loss = 1.23664 (* 1 = 1.23664 loss)
I0724 15:20:30.985673 26781 solver.cpp:189] Iteration 500, loss = 1.21874
I0724 15:20:30.985690 26781 solver.cpp:204] Train net output #0: loss = 1.21874 (* 1 = 1.21874 loss)
I0724 15:20:30.985697 26781 solver.cpp:464] Iteration 500, lr = 0.001
I0724 15:20:32.136443 26781 solver.cpp:189] Iteration 600, loss = 1.37051
I0724 15:20:32.136487 26781 solver.cpp:204] Train net output #0: loss = 1.37051 (* 1 = 1.37051 loss)
I0724 15:20:32.136493 26781 solver.cpp:464] Iteration 600, lr = 0.001
I0724 15:20:33.293810 26781 solver.cpp:189] Iteration 700, loss = 1.10339
I0724 15:20:33.294037 26781 solver.cpp:204] Train net output #0: loss = 1.10339 (* 1 = 1.10339 loss)
I0724 15:20:33.294054 26781 solver.cpp:464] Iteration 700, lr = 0.001
I0724 15:20:34.435643 26781 solver.cpp:189] Iteration 800, loss = 0.98746
I0724 15:20:34.435688 26781 solver.cpp:204] Train net output #0: loss = 0.98746 (* 1 = 0.98746 loss)
I0724 15:20:34.435695 26781 solver.cpp:464] Iteration 800, lr = 0.001
I0724 15:20:35.587226 26781 solver.cpp:189] Iteration 900, loss = 1.03267
I0724 15:20:35.587270 26781 solver.cpp:204] Train net output #0: loss = 1.03267 (* 1 = 1.03267 loss)
I0724 15:20:35.587277 26781 solver.cpp:464] Iteration 900, lr = 0.001
I0724 15:20:36.719394 26781 solver.cpp:266] Iteration 1000, Testing net (#0)
I0724 15:20:37.105604 26781 solver.cpp:315] Test net output #0: accuracy = 0.6296
I0724 15:20:37.105654 26781 solver.cpp:315] Test net output #1: loss = 1.0718 (* 1 = 1.0718 loss)
I0724 15:20:37.110713 26781 solver.cpp:189] Iteration 1000, loss = 1.02915
I0724 15:20:37.110757 26781 solver.cpp:204] Train net output #0: loss = 1.02915 (* 1 = 1.02915 loss)
I0724 15:20:37.110765 26781 solver.cpp:464] Iteration 1000, lr = 0.001
I0724 15:20:38.247428 26781 solver.cpp:189] Iteration 1100, loss = 0.954241
I0724 15:20:38.247462 26781 solver.cpp:204] Train net output #0: loss = 0.954241 (* 1 = 0.954241 loss)
I0724 15:20:38.247468 26781 solver.cpp:464] Iteration 1100, lr = 0.001
I0724 15:20:39.385828 26781 solver.cpp:189] Iteration 1200, loss = 0.907719
I0724 15:20:39.385861 26781 solver.cpp:204] Train net output #0: loss = 0.907719 (* 1 = 0.907719 loss)
I0724 15:20:39.385869 26781 solver.cpp:464] Iteration 1200, lr = 0.001
I0724 15:20:40.525348 26781 solver.cpp:189] Iteration 1300, loss = 0.839362
I0724 15:20:40.525449 26781 solver.cpp:204] Train net output #0: loss = 0.839362 (* 1 = 0.839362 loss)
I0724 15:20:40.525457 26781 solver.cpp:464] Iteration 1300, lr = 0.001
I0724 15:20:41.679199 26781 solver.cpp:189] Iteration 1400, loss = 0.838737
I0724 15:20:41.679234 26781 solver.cpp:204] Train net output #0: loss = 0.838737 (* 1 = 0.838737 loss)
I0724 15:20:41.679240 26781 solver.cpp:464] Iteration 1400, lr = 0.001
I0724 15:20:42.811251 26781 solver.cpp:266] Iteration 1500, Testing net (#0)
I0724 15:20:43.193924 26781 solver.cpp:315] Test net output #0: accuracy = 0.663
I0724 15:20:43.194023 26781 solver.cpp:315] Test net output #1: loss = 0.981802 (* 1 = 0.981802 loss)
I0724 15:20:43.198870 26781 solver.cpp:189] Iteration 1500, loss = 0.907493
I0724 15:20:43.198968 26781 solver.cpp:204] Train net output #0: loss = 0.907493 (* 1 = 0.907493 loss)
I0724 15:20:43.198981 26781 solver.cpp:464] Iteration 1500, lr = 0.001
I0724 15:20:44.336916 26781 solver.cpp:189] Iteration 1600, loss = 0.904637
I0724 15:20:44.336961 26781 solver.cpp:204] Train net output #0: loss = 0.904637 (* 1 = 0.904637 loss)
I0724 15:20:44.336968 26781 solver.cpp:464] Iteration 1600, lr = 0.001
I0724 15:20:45.474933 26781 solver.cpp:189] Iteration 1700, loss = 0.809851
I0724 15:20:45.474968 26781 solver.cpp:204] Train net output #0: loss = 0.809851 (* 1 = 0.809851 loss)
I0724 15:20:45.474977 26781 solver.cpp:464] Iteration 1700, lr = 0.001
I0724 15:20:46.617063 26781 solver.cpp:189] Iteration 1800, loss = 0.770444
I0724 15:20:46.617099 26781 solver.cpp:204] Train net output #0: loss = 0.770444 (* 1 = 0.770444 loss)
I0724 15:20:46.617105 26781 solver.cpp:464] Iteration 1800, lr = 0.001
I0724 15:20:47.766064 26781 solver.cpp:189] Iteration 1900, loss = 0.786517
I0724 15:20:47.766163 26781 solver.cpp:204] Train net output #0: loss = 0.786517 (* 1 = 0.786517 loss)
I0724 15:20:47.766177 26781 solver.cpp:464] Iteration 1900, lr = 0.001
I0724 15:20:48.903381 26781 solver.cpp:266] Iteration 2000, Testing net (#0)
I0724 15:20:49.290642 26781 solver.cpp:315] Test net output #0: accuracy = 0.68
I0724 15:20:49.290671 26781 solver.cpp:315] Test net output #1: loss = 0.91725 (* 1 = 0.91725 loss)
I0724 15:20:49.294699 26781 solver.cpp:189] Iteration 2000, loss = 0.77896
I0724 15:20:49.294729 26781 solver.cpp:204] Train net output #0: loss = 0.77896 (* 1 = 0.77896 loss)
I0724 15:20:49.294736 26781 solver.cpp:464] Iteration 2000, lr = 0.001
I0724 15:20:50.434298 26781 solver.cpp:189] Iteration 2100, loss = 0.845264
I0724 15:20:50.434342 26781 solver.cpp:204] Train net output #0: loss = 0.845264 (* 1 = 0.845264 loss)
I0724 15:20:50.434350 26781 solver.cpp:464] Iteration 2100, lr = 0.001
I0724 15:20:51.573161 26781 solver.cpp:189] Iteration 2200, loss = 0.752443
I0724 15:20:51.573199 26781 solver.cpp:204] Train net output #0: loss = 0.752443 (* 1 = 0.752443 loss)
I0724 15:20:51.573205 26781 solver.cpp:464] Iteration 2200, lr = 0.001
I0724 15:20:52.711212 26781 solver.cpp:189] Iteration 2300, loss = 0.664528
I0724 15:20:52.711246 26781 solver.cpp:204] Train net output #0: loss = 0.664528 (* 1 = 0.664528 loss)
I0724 15:20:52.711251 26781 solver.cpp:464] Iteration 2300, lr = 0.001
I0724 15:20:53.851538 26781 solver.cpp:189] Iteration 2400, loss = 0.700644
I0724 15:20:53.851573 26781 solver.cpp:204] Train net output #0: loss = 0.700644 (* 1 = 0.700644 loss)
I0724 15:20:53.851589 26781 solver.cpp:464] Iteration 2400, lr = 0.001
I0724 15:20:54.979451 26781 solver.cpp:266] Iteration 2500, Testing net (#0)
I0724 15:20:55.364724 26781 solver.cpp:315] Test net output #0: accuracy = 0.6927
I0724 15:20:55.364758 26781 solver.cpp:315] Test net output #1: loss = 0.891418 (* 1 = 0.891418 loss)
I0724 15:20:55.368755 26781 solver.cpp:189] Iteration 2500, loss = 0.723417
I0724 15:20:55.368772 26781 solver.cpp:204] Train net output #0: loss = 0.723417 (* 1 = 0.723417 loss)
I0724 15:20:55.368779 26781 solver.cpp:464] Iteration 2500, lr = 0.001
I0724 15:20:56.509495 26781 solver.cpp:189] Iteration 2600, loss = 0.731871
I0724 15:20:56.509529 26781 solver.cpp:204] Train net output #0: loss = 0.731871 (* 1 = 0.731871 loss)
I0724 15:20:56.509537 26781 solver.cpp:464] Iteration 2600, lr = 0.001
I0724 15:20:57.650527 26781 solver.cpp:189] Iteration 2700, loss = 0.700396
I0724 15:20:57.650605 26781 solver.cpp:204] Train net output #0: loss = 0.700396 (* 1 = 0.700396 loss)
I0724 15:20:57.650614 26781 solver.cpp:464] Iteration 2700, lr = 0.001
I0724 15:20:58.790457 26781 solver.cpp:189] Iteration 2800, loss = 0.623085
I0724 15:20:58.790501 26781 solver.cpp:204] Train net output #0: loss = 0.623085 (* 1 = 0.623085 loss)
I0724 15:20:58.790508 26781 solver.cpp:464] Iteration 2800, lr = 0.001
I0724 15:20:59.931192 26781 solver.cpp:189] Iteration 2900, loss = 0.705873
I0724 15:20:59.931236 26781 solver.cpp:204] Train net output #0: loss = 0.705873 (* 1 = 0.705873 loss)
I0724 15:20:59.931242 26781 solver.cpp:464] Iteration 2900, lr = 0.001
I0724 15:21:01.062194 26781 solver.cpp:266] Iteration 3000, Testing net (#0)
I0724 15:21:01.447918 26781 solver.cpp:315] Test net output #0: accuracy = 0.6994
I0724 15:21:01.448011 26781 solver.cpp:315] Test net output #1: loss = 0.880686 (* 1 = 0.880686 loss)
I0724 15:21:01.453021 26781 solver.cpp:189] Iteration 3000, loss = 0.691863
I0724 15:21:01.453069 26781 solver.cpp:204] Train net output #0: loss = 0.691863 (* 1 = 0.691863 loss)
I0724 15:21:01.453078 26781 solver.cpp:464] Iteration 3000, lr = 0.001
I0724 15:21:02.595587 26781 solver.cpp:189] Iteration 3100, loss = 0.692409
I0724 15:21:02.595631 26781 solver.cpp:204] Train net output #0: loss = 0.692409 (* 1 = 0.692409 loss)
I0724 15:21:02.595638 26781 solver.cpp:464] Iteration 3100, lr = 0.001
I0724 15:21:03.735291 26781 solver.cpp:189] Iteration 3200, loss = 0.689493
I0724 15:21:03.735324 26781 solver.cpp:204] Train net output #0: loss = 0.689493 (* 1 = 0.689493 loss)
I0724 15:21:03.735332 26781 solver.cpp:464] Iteration 3200, lr = 0.001
I0724 15:21:04.873982 26781 solver.cpp:189] Iteration 3300, loss = 0.581388
I0724 15:21:04.874025 26781 solver.cpp:204] Train net output #0: loss = 0.581388 (* 1 = 0.581388 loss)
I0724 15:21:04.874033 26781 solver.cpp:464] Iteration 3300, lr = 0.001
I0724 15:21:06.017020 26781 solver.cpp:189] Iteration 3400, loss = 0.67786
I0724 15:21:06.017074 26781 solver.cpp:204] Train net output #0: loss = 0.67786 (* 1 = 0.67786 loss)
I0724 15:21:06.017083 26781 solver.cpp:464] Iteration 3400, lr = 0.001
I0724 15:21:07.144655 26781 solver.cpp:266] Iteration 3500, Testing net (#0)
I0724 15:21:07.530313 26781 solver.cpp:315] Test net output #0: accuracy = 0.7069
I0724 15:21:07.530344 26781 solver.cpp:315] Test net output #1: loss = 0.869211 (* 1 = 0.869211 loss)
I0724 15:21:07.534332 26781 solver.cpp:189] Iteration 3500, loss = 0.660259
I0724 15:21:07.534349 26781 solver.cpp:204] Train net output #0: loss = 0.660259 (* 1 = 0.660259 loss)
I0724 15:21:07.534356 26781 solver.cpp:464] Iteration 3500, lr = 0.001
I0724 15:21:08.672732 26781 solver.cpp:189] Iteration 3600, loss = 0.667279
I0724 15:21:08.672802 26781 solver.cpp:204] Train net output #0: loss = 0.667279 (* 1 = 0.667279 loss)
I0724 15:21:08.672811 26781 solver.cpp:464] Iteration 3600, lr = 0.001
I0724 15:21:09.810492 26781 solver.cpp:189] Iteration 3700, loss = 0.671383
I0724 15:21:09.810528 26781 solver.cpp:204] Train net output #0: loss = 0.671383 (* 1 = 0.671383 loss)
I0724 15:21:09.810534 26781 solver.cpp:464] Iteration 3700, lr = 0.001
I0724 15:21:10.951683 26781 solver.cpp:189] Iteration 3800, loss = 0.565697
I0724 15:21:10.951776 26781 solver.cpp:204] Train net output #0: loss = 0.565697 (* 1 = 0.565697 loss)
I0724 15:21:10.951787 26781 solver.cpp:464] Iteration 3800, lr = 0.001
I0724 15:21:12.089732 26781 solver.cpp:189] Iteration 3900, loss = 0.670047
I0724 15:21:12.089776 26781 solver.cpp:204] Train net output #0: loss = 0.670047 (* 1 = 0.670047 loss)
I0724 15:21:12.089784 26781 solver.cpp:464] Iteration 3900, lr = 0.001
I0724 15:21:13.224203 26781 solver.cpp:334] Snapshotting to examples/cifar10/cifar10_quick_iter_4000.caffemodel
I0724 15:21:13.227545 26781 solver.cpp:342] Snapshotting solver state to examples/cifar10/cifar10_quick_iter_4000.solverstate
I0724 15:21:13.233026 26781 solver.cpp:248] Iteration 4000, loss = 0.604699
I0724 15:21:13.233064 26781 solver.cpp:266] Iteration 4000, Testing net (#0)
I0724 15:21:13.614272 26781 solver.cpp:315] Test net output #0: accuracy = 0.7148
I0724 15:21:13.614313 26781 solver.cpp:315] Test net output #1: loss = 0.843019 (* 1 = 0.843019 loss)
I0724 15:21:13.614320 26781 solver.cpp:253] Optimization Done.
I0724 15:21:13.614323 26781 caffe.cpp:134] Optimization Done.
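The first process (PID 26781) exits here after 4,000 iterations at 71.48% test accuracy. A second process (PID 31785) then takes over with the same net but a new solver: base_lr drops tenfold to 0.0001 and max_iter extends to 5000, the fine-tuning stage of the quick recipe.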
I0724 15:21:13.689227 31785 caffe.cpp:113] Use GPU with device ID 0
I0724 15:21:13.846545 31785 caffe.cpp:121] Starting Optimization
I0724 15:21:13.846644 31785 solver.cpp:32] Initializing solver from parameters:
test_iter: 100
test_interval: 500
base_lr: 0.0001
display: 100
max_iter: 5000
lr_policy: "fixed"
momentum: 0.9
weight_decay: 0.004
snapshot: 5000
snapshot_prefix: "examples/cifar10/cifar10_quick"
solver_mode: GPU
net: "examples/cifar10/cifar10_quick_train_test.prototxt"
I0724 15:21:13.846679 31785 solver.cpp:70] Creating training net from net file: examples/cifar10/cifar10_quick_train_test.prototxt
I0724 15:21:13.846997 31785 net.cpp:257] The NetState phase (0) differed from the phase (1) specified by a rule in layer cifar
I0724 15:21:13.847019 31785 net.cpp:257] The NetState phase (0) differed from the phase (1) specified by a rule in layer accuracy
I0724 15:21:13.847129 31785 net.cpp:42] Initializing net from parameters:
name: "CIFAR10_quick"
state {
phase: TRAIN
}
layer {
name: "cifar"
type: "Data"
top: "data"
top: "label"
include {
phase: TRAIN
}
transform_param {
mean_file: "examples/cifar10/mean.binaryproto"
}
data_param {
source: "examples/cifar10/cifar10_train_lmdb"
batch_size: 100
backend: LMDB
}
}
layer {
name: "conv1"
type: "Convolution"
bottom: "data"
top: "conv1"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 32
pad: 2
kernel_size: 5
stride: 1
weight_filler {
type: "gaussian"
std: 0.0001
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "pool1"
type: "Pooling"
bottom: "conv1"
top: "pool1"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
}
}
layer {
name: "relu1"
type: "ReLU"
bottom: "pool1"
top: "pool1"
}
layer {
name: "conv2"
type: "Convolution"
bottom: "pool1"
top: "conv2"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 32
pad: 2
kernel_size: 5
stride: 1
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "relu2"
type: "ReLU"
bottom: "conv2"
top: "conv2"
}
layer {
name: "pool2"
type: "Pooling"
bottom: "conv2"
top: "pool2"
pooling_param {
pool: AVE
kernel_size: 3
stride: 2
}
}
layer {
name: "conv3"
type: "Convolution"
bottom: "pool2"
top: "conv3"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 64
pad: 2
kernel_size: 5
stride: 1
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "relu3"
type: "ReLU"
bottom: "conv3"
top: "conv3"
}
layer {
name: "pool3"
type: "Pooling"
bottom: "conv3"
top: "pool3"
pooling_param {
pool: AVE
kernel_size: 3
stride: 2
}
}
layer {
name: "ip1"
type: "InnerProduct"
bottom: "pool3"
top: "ip1"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
inner_product_param {
num_output: 64
weight_filler {
type: "gaussian"
std: 0.1
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "ip2"
type: "InnerProduct"
bottom: "ip1"
top: "ip2"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
inner_product_param {
num_output: 10
weight_filler {
type: "gaussian"
std: 0.1
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "loss"
type: "SoftmaxWithLoss"
bottom: "ip2"
bottom: "label"
top: "loss"
}
I0724 15:21:13.847203 31785 layer_factory.hpp:74] Creating layer cifar
I0724 15:21:13.847221 31785 net.cpp:84] Creating Layer cifar
I0724 15:21:13.847228 31785 net.cpp:338] cifar -> data
I0724 15:21:13.847255 31785 net.cpp:338] cifar -> label
I0724 15:21:13.847265 31785 net.cpp:113] Setting up cifar
I0724 15:21:13.847321 31785 db.cpp:34] Opened lmdb examples/cifar10/cifar10_train_lmdb
I0724 15:21:13.847365 31785 data_layer.cpp:67] output data size: 100,3,32,32
I0724 15:21:13.847374 31785 data_transformer.cpp:22] Loading mean file from: examples/cifar10/mean.binaryproto
I0724 15:21:13.847839 31785 net.cpp:120] Top shape: 100 3 32 32 (307200)
I0724 15:21:13.847847 31785 net.cpp:120] Top shape: 100 (100)
I0724 15:21:13.847852 31785 layer_factory.hpp:74] Creating layer conv1
I0724 15:21:13.847863 31785 net.cpp:84] Creating Layer conv1
I0724 15:21:13.847868 31785 net.cpp:380] conv1 <- data
I0724 15:21:13.847877 31785 net.cpp:338] conv1 -> conv1
I0724 15:21:13.847887 31785 net.cpp:113] Setting up conv1
I0724 15:21:13.894073 31785 net.cpp:120] Top shape: 100 32 32 32 (3276800)
I0724 15:21:13.894109 31785 layer_factory.hpp:74] Creating layer pool1
I0724 15:21:13.894125 31785 net.cpp:84] Creating Layer pool1
I0724 15:21:13.894130 31785 net.cpp:380] pool1 <- conv1
I0724 15:21:13.894148 31785 net.cpp:338] pool1 -> pool1
I0724 15:21:13.894158 31785 net.cpp:113] Setting up pool1
I0724 15:21:13.894232 31785 net.cpp:120] Top shape: 100 32 16 16 (819200)
I0724 15:21:13.894240 31785 layer_factory.hpp:74] Creating layer relu1
I0724 15:21:13.894246 31785 net.cpp:84] Creating Layer relu1
I0724 15:21:13.894250 31785 net.cpp:380] relu1 <- pool1
I0724 15:21:13.894264 31785 net.cpp:327] relu1 -> pool1 (in-place)
I0724 15:21:13.894270 31785 net.cpp:113] Setting up relu1
I0724 15:21:13.894337 31785 net.cpp:120] Top shape: 100 32 16 16 (819200)
I0724 15:21:13.894345 31785 layer_factory.hpp:74] Creating layer conv2
I0724 15:21:13.894364 31785 net.cpp:84] Creating Layer conv2
I0724 15:21:13.894368 31785 net.cpp:380] conv2 <- pool1
I0724 15:21:13.894373 31785 net.cpp:338] conv2 -> conv2
I0724 15:21:13.894381 31785 net.cpp:113] Setting up conv2
I0724 15:21:13.895388 31785 net.cpp:120] Top shape: 100 32 16 16 (819200)
I0724 15:21:13.895411 31785 layer_factory.hpp:74] Creating layer relu2
I0724 15:21:13.895416 31785 net.cpp:84] Creating Layer relu2
I0724 15:21:13.895421 31785 net.cpp:380] relu2 <- conv2
I0724 15:21:13.895426 31785 net.cpp:327] relu2 -> conv2 (in-place)
I0724 15:21:13.895432 31785 net.cpp:113] Setting up relu2
I0724 15:21:13.895480 31785 net.cpp:120] Top shape: 100 32 16 16 (819200)
I0724 15:21:13.895488 31785 layer_factory.hpp:74] Creating layer pool2
I0724 15:21:13.895493 31785 net.cpp:84] Creating Layer pool2
I0724 15:21:13.895496 31785 net.cpp:380] pool2 <- conv2
I0724 15:21:13.895503 31785 net.cpp:338] pool2 -> pool2
I0724 15:21:13.895509 31785 net.cpp:113] Setting up pool2
I0724 15:21:13.895668 31785 net.cpp:120] Top shape: 100 32 8 8 (204800)
I0724 15:21:13.895675 31785 layer_factory.hpp:74] Creating layer conv3
I0724 15:21:13.895683 31785 net.cpp:84] Creating Layer conv3
I0724 15:21:13.895685 31785 net.cpp:380] conv3 <- pool2
I0724 15:21:13.895691 31785 net.cpp:338] conv3 -> conv3
I0724 15:21:13.895697 31785 net.cpp:113] Setting up conv3
I0724 15:21:13.897600 31785 net.cpp:120] Top shape: 100 64 8 8 (409600)
I0724 15:21:13.897620 31785 layer_factory.hpp:74] Creating layer relu3
I0724 15:21:13.897629 31785 net.cpp:84] Creating Layer relu3
I0724 15:21:13.897632 31785 net.cpp:380] relu3 <- conv3
I0724 15:21:13.897639 31785 net.cpp:327] relu3 -> conv3 (in-place)
I0724 15:21:13.897644 31785 net.cpp:113] Setting up relu3
I0724 15:21:13.897716 31785 net.cpp:120] Top shape: 100 64 8 8 (409600)
I0724 15:21:13.897722 31785 layer_factory.hpp:74] Creating layer pool3
I0724 15:21:13.897732 31785 net.cpp:84] Creating Layer pool3
I0724 15:21:13.897737 31785 net.cpp:380] pool3 <- conv3
I0724 15:21:13.897744 31785 net.cpp:338] pool3 -> pool3
I0724 15:21:13.897752 31785 net.cpp:113] Setting up pool3
I0724 15:21:13.897816 31785 net.cpp:120] Top shape: 100 64 4 4 (102400)
I0724 15:21:13.897825 31785 layer_factory.hpp:74] Creating layer ip1
I0724 15:21:13.897855 31785 net.cpp:84] Creating Layer ip1
I0724 15:21:13.897858 31785 net.cpp:380] ip1 <- pool3
I0724 15:21:13.897866 31785 net.cpp:338] ip1 -> ip1
I0724 15:21:13.897881 31785 net.cpp:113] Setting up ip1
I0724 15:21:13.899966 31785 net.cpp:120] Top shape: 100 64 (6400)
I0724 15:21:13.900015 31785 layer_factory.hpp:74] Creating layer ip2
I0724 15:21:13.900027 31785 net.cpp:84] Creating Layer ip2
I0724 15:21:13.900032 31785 net.cpp:380] ip2 <- ip1
I0724 15:21:13.900070 31785 net.cpp:338] ip2 -> ip2
I0724 15:21:13.900082 31785 net.cpp:113] Setting up ip2
I0724 15:21:13.900115 31785 net.cpp:120] Top shape: 100 10 (1000)
I0724 15:21:13.900127 31785 layer_factory.hpp:74] Creating layer loss
I0724 15:21:13.900138 31785 net.cpp:84] Creating Layer loss
I0724 15:21:13.900142 31785 net.cpp:380] loss <- ip2
I0724 15:21:13.900146 31785 net.cpp:380] loss <- label
I0724 15:21:13.900156 31785 net.cpp:338] loss -> loss
I0724 15:21:13.900164 31785 net.cpp:113] Setting up loss
I0724 15:21:13.900171 31785 layer_factory.hpp:74] Creating layer loss
I0724 15:21:13.900398 31785 net.cpp:120] Top shape: (1)
I0724 15:21:13.900408 31785 net.cpp:122] with loss weight 1
I0724 15:21:13.900472 31785 net.cpp:167] loss needs backward computation.
I0724 15:21:13.900480 31785 net.cpp:167] ip2 needs backward computation.
I0724 15:21:13.900482 31785 net.cpp:167] ip1 needs backward computation.
I0724 15:21:13.900487 31785 net.cpp:167] pool3 needs backward computation.
I0724 15:21:13.900491 31785 net.cpp:167] relu3 needs backward computation.
I0724 15:21:13.900495 31785 net.cpp:167] conv3 needs backward computation.
I0724 15:21:13.900502 31785 net.cpp:167] pool2 needs backward computation.
I0724 15:21:13.900507 31785 net.cpp:167] relu2 needs backward computation.
I0724 15:21:13.900511 31785 net.cpp:167] conv2 needs backward computation.
I0724 15:21:13.900514 31785 net.cpp:167] relu1 needs backward computation.
I0724 15:21:13.900518 31785 net.cpp:167] pool1 needs backward computation.
I0724 15:21:13.900523 31785 net.cpp:167] conv1 needs backward computation.
I0724 15:21:13.900527 31785 net.cpp:169] cifar does not need backward computation.
I0724 15:21:13.900532 31785 net.cpp:205] This network produces output loss
I0724 15:21:13.900547 31785 net.cpp:447] Collecting Learning Rate and Weight Decay.
I0724 15:21:13.900558 31785 net.cpp:217] Network initialization done.
I0724 15:21:13.900563 31785 net.cpp:218] Memory required for data: 31978804
I0724 15:21:13.901029 31785 solver.cpp:154] Creating test net (#0) specified by net file: examples/cifar10/cifar10_quick_train_test.prototxt
I0724 15:21:13.901087 31785 net.cpp:257] The NetState phase (1) differed from the phase (0) specified by a rule in layer cifar
I0724 15:21:13.901259 31785 net.cpp:42] Initializing net from parameters:
name: "CIFAR10_quick"
state {
phase: TEST
}
layer {
name: "cifar"
type: "Data"
top: "data"
top: "label"
include {
phase: TEST
}
transform_param {
mean_file: "examples/cifar10/mean.binaryproto"
}
data_param {
source: "examples/cifar10/cifar10_test_lmdb"
batch_size: 100
backend: LMDB
}
}
layer {
name: "conv1"
type: "Convolution"
bottom: "data"
top: "conv1"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 32
pad: 2
kernel_size: 5
stride: 1
weight_filler {
type: "gaussian"
std: 0.0001
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "pool1"
type: "Pooling"
bottom: "conv1"
top: "pool1"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
}
}
layer {
name: "relu1"
type: "ReLU"
bottom: "pool1"
top: "pool1"
}
layer {
name: "conv2"
type: "Convolution"
bottom: "pool1"
top: "conv2"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 32
pad: 2
kernel_size: 5
stride: 1
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "relu2"
type: "ReLU"
bottom: "conv2"
top: "conv2"
}
layer {
name: "pool2"
type: "Pooling"
bottom: "conv2"
top: "pool2"
pooling_param {
pool: AVE
kernel_size: 3
stride: 2
}
}
layer {
name: "conv3"
type: "Convolution"
bottom: "pool2"
top: "conv3"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 64
pad: 2
kernel_size: 5
stride: 1
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "relu3"
type: "ReLU"
bottom: "conv3"
top: "conv3"
}
layer {
name: "pool3"
type: "Pooling"
bottom: "conv3"
top: "pool3"
pooling_param {
pool: AVE
kernel_size: 3
stride: 2
}
}
layer {
name: "ip1"
type: "InnerProduct"
bottom: "pool3"
top: "ip1"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
inner_product_param {
num_output: 64
weight_filler {
type: "gaussian"
std: 0.1
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "ip2"
type: "InnerProduct"
bottom: "ip1"
top: "ip2"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
inner_product_param {
num_output: 10
weight_filler {
type: "gaussian"
std: 0.1
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "accuracy"
type: "Accuracy"
bottom: "ip2"
bottom: "label"
top: "accuracy"
include {
phase: TEST
}
}
layer {
name: "loss"
type: "SoftmaxWithLoss"
bottom: "ip2"
bottom: "label"
top: "loss"
}
I0724 15:21:13.901396 31785 layer_factory.hpp:74] Creating layer cifar
I0724 15:21:13.901414 31785 net.cpp:84] Creating Layer cifar
I0724 15:21:13.901424 31785 net.cpp:338] cifar -> data
I0724 15:21:13.901450 31785 net.cpp:338] cifar -> label
I0724 15:21:13.901465 31785 net.cpp:113] Setting up cifar
I0724 15:21:13.901571 31785 db.cpp:34] Opened lmdb examples/cifar10/cifar10_test_lmdb
I0724 15:21:13.901623 31785 data_layer.cpp:67] output data size: 100,3,32,32
I0724 15:21:13.901631 31785 data_transformer.cpp:22] Loading mean file from: examples/cifar10/mean.binaryproto
I0724 15:21:13.902997 31785 net.cpp:120] Top shape: 100 3 32 32 (307200)
I0724 15:21:13.903058 31785 net.cpp:120] Top shape: 100 (100)
I0724 15:21:13.903071 31785 layer_factory.hpp:74] Creating layer label_cifar_1_split
I0724 15:21:13.903090 31785 net.cpp:84] Creating Layer label_cifar_1_split
I0724 15:21:13.903095 31785 net.cpp:380] label_cifar_1_split <- label
I0724 15:21:13.903103 31785 net.cpp:338] label_cifar_1_split -> label_cifar_1_split_0
I0724 15:21:13.903116 31785 net.cpp:338] label_cifar_1_split -> label_cifar_1_split_1
I0724 15:21:13.903126 31785 net.cpp:113] Setting up label_cifar_1_split
I0724 15:21:13.903142 31785 net.cpp:120] Top shape: 100 (100)
I0724 15:21:13.903172 31785 net.cpp:120] Top shape: 100 (100)
I0724 15:21:13.903178 31785 layer_factory.hpp:74] Creating layer conv1
I0724 15:21:13.903192 31785 net.cpp:84] Creating Layer conv1
I0724 15:21:13.903197 31785 net.cpp:380] conv1 <- data
I0724 15:21:13.903234 31785 net.cpp:338] conv1 -> conv1
I0724 15:21:13.903277 31785 net.cpp:113] Setting up conv1
I0724 15:21:13.903806 31785 net.cpp:120] Top shape: 100 32 32 32 (3276800)
I0724 15:21:13.903820 31785 layer_factory.hpp:74] Creating layer pool1
I0724 15:21:13.903828 31785 net.cpp:84] Creating Layer pool1
I0724 15:21:13.903832 31785 net.cpp:380] pool1 <- conv1
I0724 15:21:13.903837 31785 net.cpp:338] pool1 -> pool1
I0724 15:21:13.903843 31785 net.cpp:113] Setting up pool1
I0724 15:21:13.904022 31785 net.cpp:120] Top shape: 100 32 16 16 (819200)
I0724 15:21:13.904031 31785 layer_factory.hpp:74] Creating layer relu1
I0724 15:21:13.904039 31785 net.cpp:84] Creating Layer relu1
I0724 15:21:13.904043 31785 net.cpp:380] relu1 <- pool1
I0724 15:21:13.904057 31785 net.cpp:327] relu1 -> pool1 (in-place)
I0724 15:21:13.904065 31785 net.cpp:113] Setting up relu1
I0724 15:21:13.904126 31785 net.cpp:120] Top shape: 100 32 16 16 (819200)
I0724 15:21:13.904139 31785 layer_factory.hpp:74] Creating layer conv2
I0724 15:21:13.904153 31785 net.cpp:84] Creating Layer conv2
I0724 15:21:13.904160 31785 net.cpp:380] conv2 <- pool1
I0724 15:21:13.904170 31785 net.cpp:338] conv2 -> conv2
I0724 15:21:13.904177 31785 net.cpp:113] Setting up conv2
I0724 15:21:13.905239 31785 net.cpp:120] Top shape: 100 32 16 16 (819200)
I0724 15:21:13.905256 31785 layer_factory.hpp:74] Creating layer relu2
I0724 15:21:13.905262 31785 net.cpp:84] Creating Layer relu2
I0724 15:21:13.905266 31785 net.cpp:380] relu2 <- conv2
I0724 15:21:13.905283 31785 net.cpp:327] relu2 -> conv2 (in-place)
I0724 15:21:13.905289 31785 net.cpp:113] Setting up relu2
I0724 15:21:13.905339 31785 net.cpp:120] Top shape: 100 32 16 16 (819200)
I0724 15:21:13.905345 31785 layer_factory.hpp:74] Creating layer pool2
I0724 15:21:13.905352 31785 net.cpp:84] Creating Layer pool2
I0724 15:21:13.905356 31785 net.cpp:380] pool2 <- conv2
I0724 15:21:13.905361 31785 net.cpp:338] pool2 -> pool2
I0724 15:21:13.905366 31785 net.cpp:113] Setting up pool2
I0724 15:21:13.905414 31785 net.cpp:120] Top shape: 100 32 8 8 (204800)
I0724 15:21:13.905421 31785 layer_factory.hpp:74] Creating layer conv3
I0724 15:21:13.905428 31785 net.cpp:84] Creating Layer conv3
I0724 15:21:13.905432 31785 net.cpp:380] conv3 <- pool2
I0724 15:21:13.905437 31785 net.cpp:338] conv3 -> conv3
I0724 15:21:13.905444 31785 net.cpp:113] Setting up conv3
I0724 15:21:13.907307 31785 net.cpp:120] Top shape: 100 64 8 8 (409600)
I0724 15:21:13.907331 31785 layer_factory.hpp:74] Creating layer relu3
I0724 15:21:13.907338 31785 net.cpp:84] Creating Layer relu3
I0724 15:21:13.907342 31785 net.cpp:380] relu3 <- conv3
I0724 15:21:13.907349 31785 net.cpp:327] relu3 -> conv3 (in-place)
I0724 15:21:13.907354 31785 net.cpp:113] Setting up relu3
I0724 15:21:13.907536 31785 net.cpp:120] Top shape: 100 64 8 8 (409600)
I0724 15:21:13.907544 31785 layer_factory.hpp:74] Creating layer pool3
I0724 15:21:13.907551 31785 net.cpp:84] Creating Layer pool3
I0724 15:21:13.907554 31785 net.cpp:380] pool3 <- conv3
I0724 15:21:13.907559 31785 net.cpp:338] pool3 -> pool3
I0724 15:21:13.907565 31785 net.cpp:113] Setting up pool3
I0724 15:21:13.907613 31785 net.cpp:120] Top shape: 100 64 4 4 (102400)
I0724 15:21:13.907620 31785 layer_factory.hpp:74] Creating layer ip1
I0724 15:21:13.907629 31785 net.cpp:84] Creating Layer ip1
I0724 15:21:13.907631 31785 net.cpp:380] ip1 <- pool3
I0724 15:21:13.907637 31785 net.cpp:338] ip1 -> ip1
I0724 15:21:13.907644 31785 net.cpp:113] Setting up ip1
I0724 15:21:13.909636 31785 net.cpp:120] Top shape: 100 64 (6400)
I0724 15:21:13.909647 31785 layer_factory.hpp:74] Creating layer ip2
I0724 15:21:13.909656 31785 net.cpp:84] Creating Layer ip2
I0724 15:21:13.909659 31785 net.cpp:380] ip2 <- ip1
I0724 15:21:13.909665 31785 net.cpp:338] ip2 -> ip2
I0724 15:21:13.909672 31785 net.cpp:113] Setting up ip2
I0724 15:21:13.909701 31785 net.cpp:120] Top shape: 100 10 (1000)
I0724 15:21:13.909711 31785 layer_factory.hpp:74] Creating layer ip2_ip2_0_split
I0724 15:21:13.909718 31785 net.cpp:84] Creating Layer ip2_ip2_0_split
I0724 15:21:13.909723 31785 net.cpp:380] ip2_ip2_0_split <- ip2
I0724 15:21:13.909726 31785 net.cpp:338] ip2_ip2_0_split -> ip2_ip2_0_split_0
I0724 15:21:13.909732 31785 net.cpp:338] ip2_ip2_0_split -> ip2_ip2_0_split_1
I0724 15:21:13.909739 31785 net.cpp:113] Setting up ip2_ip2_0_split
I0724 15:21:13.909745 31785 net.cpp:120] Top shape: 100 10 (1000)
I0724 15:21:13.909749 31785 net.cpp:120] Top shape: 100 10 (1000)
I0724 15:21:13.909754 31785 layer_factory.hpp:74] Creating layer accuracy
I0724 15:21:13.909762 31785 net.cpp:84] Creating Layer accuracy
I0724 15:21:13.909766 31785 net.cpp:380] accuracy <- ip2_ip2_0_split_0
I0724 15:21:13.909770 31785 net.cpp:380] accuracy <- label_cifar_1_split_0
I0724 15:21:13.909775 31785 net.cpp:338] accuracy -> accuracy
I0724 15:21:13.909781 31785 net.cpp:113] Setting up accuracy
I0724 15:21:13.909790 31785 net.cpp:120] Top shape: (1)
I0724 15:21:13.909792 31785 layer_factory.hpp:74] Creating layer loss
I0724 15:21:13.909798 31785 net.cpp:84] Creating Layer loss
I0724 15:21:13.909801 31785 net.cpp:380] loss <- ip2_ip2_0_split_1
I0724 15:21:13.909806 31785 net.cpp:380] loss <- label_cifar_1_split_1
I0724 15:21:13.909812 31785 net.cpp:338] loss -> loss
I0724 15:21:13.909817 31785 net.cpp:113] Setting up loss
I0724 15:21:13.909822 31785 layer_factory.hpp:74] Creating layer loss
I0724 15:21:13.909888 31785 net.cpp:120] Top shape: (1)
I0724 15:21:13.909894 31785 net.cpp:122] with loss weight 1
I0724 15:21:13.909905 31785 net.cpp:167] loss needs backward computation.
I0724 15:21:13.909920 31785 net.cpp:169] accuracy does not need backward computation.
I0724 15:21:13.909924 31785 net.cpp:167] ip2_ip2_0_split needs backward computation.
I0724 15:21:13.909929 31785 net.cpp:167] ip2 needs backward computation.
I0724 15:21:13.909931 31785 net.cpp:167] ip1 needs backward computation.
I0724 15:21:13.909934 31785 net.cpp:167] pool3 needs backward computation.
I0724 15:21:13.909937 31785 net.cpp:167] relu3 needs backward computation.
I0724 15:21:13.909941 31785 net.cpp:167] conv3 needs backward computation.
I0724 15:21:13.909945 31785 net.cpp:167] pool2 needs backward computation.
I0724 15:21:13.909947 31785 net.cpp:167] relu2 needs backward computation.
I0724 15:21:13.909951 31785 net.cpp:167] conv2 needs backward computation.
I0724 15:21:13.909955 31785 net.cpp:167] relu1 needs backward computation.
I0724 15:21:13.909957 31785 net.cpp:167] pool1 needs backward computation.
I0724 15:21:13.909961 31785 net.cpp:167] conv1 needs backward computation.
I0724 15:21:13.909965 31785 net.cpp:169] label_cifar_1_split does not need backward computation.
I0724 15:21:13.909981 31785 net.cpp:169] cifar does not need backward computation.
I0724 15:21:13.909988 31785 net.cpp:205] This network produces output accuracy
I0724 15:21:13.909996 31785 net.cpp:205] This network produces output loss
I0724 15:21:13.910007 31785 net.cpp:447] Collecting Learning Rate and Weight Decay.
I0724 15:21:13.910013 31785 net.cpp:217] Network initialization done.
I0724 15:21:13.910017 31785 net.cpp:218] Memory required for data: 31987608
I0724 15:21:13.910058 31785 solver.cpp:42] Solver scaffolding done.
I0724 15:21:13.910080 31785 caffe.cpp:126] Resuming from examples/cifar10/cifar10_quick_iter_4000.solverstate
I0724 15:21:13.910086 31785 solver.cpp:222] Solving CIFAR10_quick
I0724 15:21:13.910090 31785 solver.cpp:223] Learning Rate Policy: fixed
I0724 15:21:13.910094 31785 solver.cpp:226] Restoring previous solver status from examples/cifar10/cifar10_quick_iter_4000.solverstate
I0724 15:21:13.911710 31785 solver.cpp:564] SGDSolver: restoring history
I0724 15:21:13.911932 31785 solver.cpp:266] Iteration 4000, Testing net (#0)
I0724 15:21:14.293946 31785 solver.cpp:315] Test net output #0: accuracy = 0.7148
I0724 15:21:14.294061 31785 solver.cpp:315] Test net output #1: loss = 0.843019 (* 1 = 0.843019 loss)
I0724 15:21:14.300997 31785 solver.cpp:189] Iteration 4000, loss = 0.604699
I0724 15:21:14.301045 31785 solver.cpp:204] Train net output #0: loss = 0.604699 (* 1 = 0.604699 loss)
I0724 15:21:14.301059 31785 solver.cpp:464] Iteration 4000, lr = 0.0001
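The restored solver reproduces the pre-snapshot state exactly: the iteration-4000 test output (accuracy = 0.7148, loss = 0.843019) and training loss (0.604699) match the first run to the last digit, confirming the .solverstate restore; only the learning rate, read from the new solver file, has changed to 0.0001.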
I0724 15:21:15.444993 31785 solver.cpp:189] Iteration 4100, loss = 0.560168
I0724 15:21:15.445029 31785 solver.cpp:204] Train net output #0: loss = 0.560168 (* 1 = 0.560168 loss)
I0724 15:21:15.445036 31785 solver.cpp:464] Iteration 4100, lr = 0.0001
I0724 15:21:16.581609 31785 solver.cpp:189] Iteration 4200, loss = 0.583019
I0724 15:21:16.581660 31785 solver.cpp:204] Train net output #0: loss = 0.583019 (* 1 = 0.583019 loss)
I0724 15:21:16.581668 31785 solver.cpp:464] Iteration 4200, lr = 0.0001
I0724 15:21:17.721596 31785 solver.cpp:189] Iteration 4300, loss = 0.458659
I0724 15:21:17.721722 31785 solver.cpp:204] Train net output #0: loss = 0.458659 (* 1 = 0.458659 loss)
I0724 15:21:17.721736 31785 solver.cpp:464] Iteration 4300, lr = 0.0001
I0724 15:21:18.860602 31785 solver.cpp:189] Iteration 4400, loss = 0.462953
I0724 15:21:18.860636 31785 solver.cpp:204] Train net output #0: loss = 0.462953 (* 1 = 0.462953 loss)
I0724 15:21:18.860642 31785 solver.cpp:464] Iteration 4400, lr = 0.0001
I0724 15:21:19.993888 31785 solver.cpp:266] Iteration 4500, Testing net (#0)
I0724 15:21:20.385629 31785 solver.cpp:315] Test net output #0: accuracy = 0.7502
I0724 15:21:20.385663 31785 solver.cpp:315] Test net output #1: loss = 0.748229 (* 1 = 0.748229 loss)
I0724 15:21:20.389665 31785 solver.cpp:189] Iteration 4500, loss = 0.497331
I0724 15:21:20.389683 31785 solver.cpp:204] Train net output #0: loss = 0.497331 (* 1 = 0.497331 loss)
I0724 15:21:20.389716 31785 solver.cpp:464] Iteration 4500, lr = 0.0001
I0724 15:21:21.528735 31785 solver.cpp:189] Iteration 4600, loss = 0.476239
I0724 15:21:21.528861 31785 solver.cpp:204] Train net output #0: loss = 0.476239 (* 1 = 0.476239 loss)
I0724 15:21:21.528877 31785 solver.cpp:464] Iteration 4600, lr = 0.0001
I0724 15:21:22.665988 31785 solver.cpp:189] Iteration 4700, loss = 0.567064
I0724 15:21:22.666031 31785 solver.cpp:204] Train net output #0: loss = 0.567064 (* 1 = 0.567064 loss)
I0724 15:21:22.666038 31785 solver.cpp:464] Iteration 4700, lr = 0.0001
I0724 15:21:23.802747 31785 solver.cpp:189] Iteration 4800, loss = 0.439045
I0724 15:21:23.802781 31785 solver.cpp:204] Train net output #0: loss = 0.439045 (* 1 = 0.439045 loss)
I0724 15:21:23.802788 31785 solver.cpp:464] Iteration 4800, lr = 0.0001
I0724 15:21:24.940335 31785 solver.cpp:189] Iteration 4900, loss = 0.445887
I0724 15:21:24.940371 31785 solver.cpp:204] Train net output #0: loss = 0.445887 (* 1 = 0.445887 loss)
I0724 15:21:24.940377 31785 solver.cpp:464] Iteration 4900, lr = 0.0001
I0724 15:21:26.072255 31785 solver.cpp:334] Snapshotting to examples/cifar10/cifar10_quick_iter_5000.caffemodel
I0724 15:21:26.075196 31785 solver.cpp:342] Snapshotting solver state to examples/cifar10/cifar10_quick_iter_5000.solverstate
I0724 15:21:26.081387 31785 solver.cpp:248] Iteration 5000, loss = 0.478067
I0724 15:21:26.081419 31785 solver.cpp:266] Iteration 5000, Testing net (#0)
I0724 15:21:26.460409 31785 solver.cpp:315] Test net output #0: accuracy = 0.7519
I0724 15:21:26.460441 31785 solver.cpp:315] Test net output #1: loss = 0.743144 (* 1 = 0.743144 loss)
I0724 15:21:26.460448 31785 solver.cpp:253] Optimization Done.
I0724 15:21:26.460451 31785 caffe.cpp:134] Optimization Done.
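Net result: the learning-rate drop buys almost four more points of test accuracy, from 0.7148 at iteration 4000 to 0.7519 at iteration 5000, in line with the roughly 75% that Caffe's CIFAR-10 tutorial quotes for this model.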