I0208 11:35:30.288038 9036 caffe.cpp:217] Using GPUs 0
I0208 11:35:30.426815 9036 caffe.cpp:222] GPU 0: NVIDIA Tegra X1
I0208 11:35:33.318043 9036 solver.cpp:63] Initializing solver from parameters:
train_net: "finetune_train.prototxt"
test_net: "finetune_val.prototxt"
test_iter: 30
test_interval: 1000
base_lr: 0.0001
display: 20
max_iter: 100000
lr_policy: "step"
gamma: 0.1
momentum: 0.9
weight_decay: 0.0001
stepsize: 20000
snapshot: 10000
snapshot_prefix: "finetune_train"
device_id: 0
train_state {
  level: 0
  stage: ""
}
I0208 11:35:33.318629 9036 solver.cpp:96] Creating training net from train_net file: finetune_train.prototxt
I0208 11:35:33.319537 9036 upgrade_proto.cpp:44] Attempting to upgrade input file specified using deprecated transformation parameters: finetune_train.prototxt
I0208 11:35:33.319586 9036 upgrade_proto.cpp:47] Successfully upgraded file specified using deprecated data transformation parameters.
W0208 11:35:33.319610 9036 upgrade_proto.cpp:49] Note that future Caffe releases will only support transform_param messages for transformation fields.
I0208 11:35:33.319631 9036 upgrade_proto.cpp:53] Attempting to upgrade input file specified using deprecated V1LayerParameter: finetune_train.prototxt
I0208 11:35:33.320008 9036 upgrade_proto.cpp:61] Successfully upgraded file specified using deprecated V1LayerParameter
I0208 11:35:33.320314 9036 net.cpp:58] Initializing net from parameters:
name: "CaffeNet"
state {
  phase: TRAIN
  level: 0
  stage: ""
}
layer {
  name: "data"
  type: "Data"
  top: "data"
  top: "label"
  transform_param {
    mirror: true
    crop_size: 227
    mean_file: "imagenet_mean.binaryproto"
  }
  data_param {
    source: "train_leveldb"
    batch_size: 128
    backend: LMDB
  }
}
layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  param {
    lr_mult: 0.1
    decay_mult: 1
  }
  param {
    lr_mult: 0.2
    decay_mult: 0
  }
  convolution_param {
    num_output: 96
    kernel_size: 11
    stride: 4
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu1"
  type: "ReLU"
  bottom: "conv1"
  top: "conv1"
}
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "conv1"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layer {
  name: "norm1"
  type: "LRN"
  bottom: "pool1"
  top: "norm1"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
layer {
  name: "conv2"
  type: "Convolution"
  bottom: "norm1"
  top: "conv2"
  param {
    lr_mult: 0.1
    decay_mult: 1
  }
  param {
    lr_mult: 0.2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 2
    kernel_size: 5
    group: 2
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 1
    }
  }
}
layer {
  name: "relu2"
  type: "ReLU"
  bottom: "conv2"
  top: "conv2"
}
layer {
  name: "pool2"
  type: "Pooling"
  bottom: "conv2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layer {
  name: "norm2"
  type: "LRN"
  bottom: "pool2"
  top: "norm2"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
layer {
  name: "conv3"
  type: "Convolution"
  bottom: "norm2"
  top: "conv3"
  param {
    lr_mult: 0.1
    decay_mult: 1
  }
  param {
    lr_mult: 0.2
    decay_mult: 0
  }
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "relu3"
  type: "ReLU"
  bottom: "conv3"
  top: "conv3"
}
layer {
  name: "conv4"
  type: "Convolution"
  bottom: "conv3"
  top: "conv4"
  param {
    lr_mult: 0.1
    decay_mult: 1
  }
  param {
    lr_mult: 0.2
    decay_mult: 0
  }
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
    group: 2
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 1
    }
  }
}
layer {
  name: "relu4"
  type: "ReLU"
  bottom: "conv4"
  top: "conv4"
}
layer {
  name: "conv5"
  type: "Convolution"
  bottom: "conv4"
  top: "conv5"
  param {
    lr_mult: 0.1
    decay_mult: 1
  }
  param {
    lr_mult: 0.2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    group: 2
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 1
    }
  }
}
layer {
  name: "relu5"
  type: "ReLU"
  bottom: "conv5"
  top: "conv5"
}
layer {
  name: "pool5"
  type: "Pooling"
  bottom: "conv5"
  top: "pool5"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layer {
  name: "fc6"
  type: "InnerProduct"
  bottom: "pool5"
  top: "fc6"
  param {
    lr_mult: 0.1
    decay_mult: 1
  }
  param {
    lr_mult: 0.2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 4096
    weight_filler {
      type: "gaussian"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 1
    }
  }
}
layer {
  name: "relu6"
  type: "ReLU"
  bottom: "fc6"
  top: "fc6"
}
layer {
  name: "drop6"
  type: "Dropout"
  bottom: "fc6"
  top: "fc6"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "fc7"
  type: "InnerProduct"
  bottom: "fc6"
  top: "fc7"
  param {
    lr_mult: 0.1
    decay_mult: 1
  }
  param {
    lr_mult: 0.2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 4096
    weight_filler {
      type: "gaussian"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 1
    }
  }
}
layer {
  name: "relu7"
  type: "ReLU"
  bottom: "fc7"
  top: "fc7"
}
layer {
  name: "drop7"
  type: "Dropout"
  bottom: "fc7"
  top: "fc7"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "fc8_pascal"
  type: "InnerProduct"
  bottom: "fc7"
  top: "fc8_pascal"
  param {
    lr_mult: 10
    decay_mult: 1
  }
  param {
    lr_mult: 20
    decay_mult: 0
  }
  inner_product_param {
    num_output: 2
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
}
layer {
  name: "loss"
  type: "SoftmaxWithLoss"
  bottom: "fc8_pascal"
  bottom: "label"
}
I0208 11:35:33.321244 9036 layer_factory.hpp:77] Creating layer data
I0208 11:35:33.322032 9036 net.cpp:100] Creating Layer data
I0208 11:35:33.322089 9036 net.cpp:408] data -> data
I0208 11:35:33.322216 9036 net.cpp:408] data -> label
I0208 11:35:33.322319 9036 data_transformer.cpp:27] Loading mean file from: imagenet_mean.binaryproto
I0208 11:35:33.335590 9042 db_lmdb.cpp:35] Opened lmdb train_leveldb
I0208 11:35:33.413972 9036 data_layer.cpp:41] output data size: 128,3,227,227
I0208 11:35:33.947464 9036 net.cpp:150] Setting up data
I0208 11:35:33.947540 9036 net.cpp:157] Top shape: 128 3 227 227 (19787136)
I0208 11:35:33.947608 9036 net.cpp:157] Top shape: 128 (128)
I0208 11:35:33.947634 9036 net.cpp:165] Memory required for data: 79149056
I0208 11:35:33.947669 9036 layer_factory.hpp:77] Creating layer conv1
I0208 11:35:33.947768 9036 net.cpp:100] Creating Layer conv1
I0208 11:35:33.947842 9036 net.cpp:434] conv1 <- data
I0208 11:35:33.947899 9036 net.cpp:408] conv1 -> conv1
I0208 11:35:40.263168 9036 net.cpp:150] Setting up conv1
I0208 11:35:40.263258 9036 net.cpp:157] Top shape: 128 96 55 55 (37171200)
I0208 11:35:40.263294 9036 net.cpp:165] Memory required for data: 227833856
I0208 11:35:40.263368 9036 layer_factory.hpp:77] Creating layer relu1
I0208 11:35:40.263406 9036 net.cpp:100] Creating Layer relu1
I0208 11:35:40.263437 9036 net.cpp:434] relu1 <- conv1
I0208 11:35:40.263464 9036 net.cpp:395] relu1 -> conv1 (in-place)
I0208 11:35:40.267197 9036 net.cpp:150] Setting up relu1
I0208 11:35:40.267256 9036 net.cpp:157] Top shape: 128 96 55 55 (37171200)
I0208 11:35:40.267287 9036 net.cpp:165] Memory required for data: 376518656
I0208 11:35:40.267308 9036 layer_factory.hpp:77] Creating layer pool1
I0208 11:35:40.267347 9036 net.cpp:100] Creating Layer pool1
I0208 11:35:40.267371 9036 net.cpp:434] pool1 <- conv1
I0208 11:35:40.267421 9036 net.cpp:408] pool1 -> pool1
I0208 11:35:40.267596 9036 net.cpp:150] Setting up pool1
I0208 11:35:40.267623 9036 net.cpp:157] Top shape: 128 96 27 27 (8957952)
I0208 11:35:40.267647 9036 net.cpp:165] Memory required for data: 412350464
I0208 11:35:40.267664 9036 layer_factory.hpp:77] Creating layer norm1
I0208 11:35:40.267704 9036 net.cpp:100] Creating Layer norm1
I0208 11:35:40.267724 9036 net.cpp:434] norm1 <- pool1
I0208 11:35:40.267746 9036 net.cpp:408] norm1 -> norm1
I0208 11:35:40.272389 9036 net.cpp:150] Setting up norm1
I0208 11:35:40.272447 9036 net.cpp:157] Top shape: 128 96 27 27 (8957952)
I0208 11:35:40.272476 9036 net.cpp:165] Memory required for data: 448182272
I0208 11:35:40.272497 9036 layer_factory.hpp:77] Creating layer conv2
I0208 11:35:40.272557 9036 net.cpp:100] Creating Layer conv2
I0208 11:35:40.272579 9036 net.cpp:434] conv2 <- norm1
I0208 11:35:40.272611 9036 net.cpp:408] conv2 -> conv2
I0208 11:35:44.283610 9036 net.cpp:150] Setting up conv2
I0208 11:35:44.304832 9036 net.cpp:157] Top shape: 128 256 27 27 (23887872)
I0208 11:35:44.305240 9036 net.cpp:165] Memory required for data: 543733760
I0208 11:35:44.309028 9036 layer_factory.hpp:77] Creating layer relu2
I0208 11:35:44.325594 9036 net.cpp:100] Creating Layer relu2
I0208 11:35:44.325816 9036 net.cpp:434] relu2 <- conv2
I0208 11:35:44.326498 9036 net.cpp:395] relu2 -> conv2 (in-place)
I0208 11:35:44.507019 9036 net.cpp:150] Setting up relu2
I0208 11:35:44.507427 9036 net.cpp:157] Top shape: 128 256 27 27 (23887872)
I0208 11:35:44.507485 9036 net.cpp:165] Memory required for data: 639285248
I0208 11:35:44.507526 9036 layer_factory.hpp:77] Creating layer pool2
I0208 11:35:44.507771 9036 net.cpp:100] Creating Layer pool2
I0208 11:35:44.507833 9036 net.cpp:434] pool2 <- conv2
I0208 11:35:44.507918 9036 net.cpp:408] pool2 -> pool2
I0208 11:35:44.508889 9036 net.cpp:150] Setting up pool2
I0208 11:35:44.508961 9036 net.cpp:157] Top shape: 128 256 13 13 (5537792)
I0208 11:35:44.509027 9036 net.cpp:165] Memory required for data: 661436416
I0208 11:35:44.509080 9036 layer_factory.hpp:77] Creating layer norm2
I0208 11:35:44.509166 9036 net.cpp:100] Creating Layer norm2
I0208 11:35:44.509222 9036 net.cpp:434] norm2 <- pool2
I0208 11:35:44.509287 9036 net.cpp:408] norm2 -> norm2
I0208 11:35:44.514129 9036 net.cpp:150] Setting up norm2
I0208 11:35:44.514218 9036 net.cpp:157] Top shape: 128 256 13 13 (5537792)
I0208 11:35:44.514272 9036 net.cpp:165] Memory required for data: 683587584
I0208 11:35:44.514314 9036 layer_factory.hpp:77] Creating layer conv3
I0208 11:35:44.514560 9036 net.cpp:100] Creating Layer conv3
I0208 11:35:44.514611 9036 net.cpp:434] conv3 <- norm2
I0208 11:35:44.514669 9036 net.cpp:408] conv3 -> conv3
Killed
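
The run dies during conv3 setup with a bare "Killed", which is the shell reporting a SIGKILL, typically from the kernel OOM killer rather than a Caffe error. By that point the log already shows roughly 684 MB just for intermediate top blobs at batch_size 128, before weights, cuDNN workspaces, and the solver's momentum history, on a Tegra X1 whose 4 GB of RAM is shared between CPU and GPU. A minimal sketch of the usual first mitigation, assuming memory exhaustion is the cause: shrink the data layer's batch size. The batch_size: 32 below is an illustrative value, not taken from the log.

layer {
  name: "data"
  type: "Data"
  top: "data"
  top: "label"
  transform_param {
    mirror: true
    crop_size: 227
    mean_file: "imagenet_mean.binaryproto"
  }
  data_param {
    source: "train_leveldb"
    batch_size: 32   # illustrative; was 128 in the log above
    backend: LMDB
  }
}

If the effective batch of 128 matters for training stability, Caffe's iter_size solver parameter (e.g. iter_size: 4 with batch_size: 32) accumulates gradients over several forward/backward passes before each update, trading speed for the lower peak memory.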