chibi@2204:~/caffe$ cd python; python3 classify.py --raw_scale 255 ../101_ObjectCategories/panda/image_0002.jpg ../result.npy; cd ..
/usr/lib/python3/dist-packages/scipy/__init__.py:146: UserWarning: A NumPy version >=1.17.3 and <1.25.0 is required for this version of SciPy (detected version 1.26.4)
  warnings.warn(f"A NumPy version >={np_minversion} and <{np_maxversion}"
WARNING: Logging before InitGoogleLogging() is written to STDERR
I0405 06:46:33.699044 38686 gpu_memory.cpp:82] GPUMemory::Manager initialized
CPU mode
W0405 06:46:33.859503 38686 _caffe.cpp:172] DEPRECATION WARNING - deprecated use of Python interface
W0405 06:46:33.859767 38686 _caffe.cpp:173] Use this instead (with the named "weights" parameter):
W0405 06:46:33.859786 38686 _caffe.cpp:175] Net('/home/chibi/caffe/python/../models/bvlc_reference_caffenet/deploy.prototxt', 1, weights='/home/chibi/caffe/python/../models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel')
I0405 06:46:33.860098 38686 net.cpp:86] Initializing net from parameters:
name: "CaffeNet"
state { phase: TEST level: 0 }
layer { name: "data" type: "Input" top: "data" input_param { shape { dim: 10 dim: 3 dim: 227 dim: 227 } } }
layer { name: "conv1" type: "Convolution" bottom: "data" top: "conv1" convolution_param { num_output: 96 kernel_size: 11 stride: 4 } }
layer { name: "relu1" type: "ReLU" bottom: "conv1" top: "conv1" }
layer { name: "pool1" type: "Pooling" bottom: "conv1" top: "pool1" pooling_param { pool: MAX kernel_size: 3 stride: 2 } }
layer { name: "norm1" type: "LRN" bottom: "pool1" top: "norm1" lrn_param { local_size: 5 alpha: 0.0001 beta: 0.75 } }
layer { name: "conv2" type: "Convolution" bottom: "norm1" top: "conv2" convolution_param { num_output: 256 pad: 2 kernel_size: 5 group: 2 } }
layer { name: "relu2" type: "ReLU" bottom: "conv2" top: "conv2" }
layer { name: "pool2" type: "Pooling" bottom: "conv2" top: "pool2" pooling_param { pool: MAX kernel_size: 3 stride: 2 } }
layer { name: "norm2" type: "LRN" bottom: "pool2" top: "norm2" lrn_param { local_size: 5 alpha: 0.0001 beta: 0.75 } }
layer { name: "conv3" type: "Convolution" bottom: "norm2" top: "conv3" convolution_param { num_output: 384 pad: 1 kernel_size: 3 } }
layer { name: "relu3" type: "ReLU" bottom: "conv3" top: "conv3" }
layer { name: "conv4" type: "Convolution" bottom: "conv3" top: "conv4" convolution_param { num_output: 384 pad: 1 kernel_size: 3 group: 2 } }
layer { name: "relu4" type: "ReLU" bottom: "conv4" top: "conv4" }
layer { name: "conv5" type: "Convolution" bottom: "conv4" top: "conv5" convolution_param { num_output: 256 pad: 1 kernel_size: 3 group: 2 } }
layer { name: "relu5" type: "ReLU" bottom: "conv5" top: "conv5" }
layer { name: "pool5" type: "Pooling" bottom: "conv5" top: "pool5" pooling_param { pool: MAX kernel_size: 3 stride: 2 } }
layer { name: "fc6" type: "InnerProduct" bottom: "pool5" top: "fc6" inner_product_param { num_output: 4096 } }
layer { name: "relu6" type: "ReLU" bottom: "fc6" top: "fc6" }
layer { name: "drop6" type: "Dropout" bottom: "fc6" top: "fc6" dropout_param { dropout_ratio: 0.5 } }
layer { name: "fc7" type: "InnerProduct" bottom: "fc6" top: "fc7" inner_product_param { num_output: 4096 } }
layer { name: "relu7" type: "ReLU" bottom: "fc7" top: "fc7" }
layer { name: "drop7" type: "Dropout" bottom: "fc7" top: "fc7" dropout_param { dropout_ratio: 0.5 } }
layer { name: "fc8" type: "InnerProduct" bottom: "fc7" top: "fc8" inner_product_param { num_output: 1000 } }
layer { name: "prob" type: "Softmax" bottom: "fc8" top: "prob" }
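For reference, the classify.py invocation above is roughly equivalent to the following pycaffe snippet. This is a minimal sketch: the preprocessing values (ImageNet mean pixel, 256x256 resize, BGR channel order) are assumptions that mirror classify.py's usual defaults, not something printed in the log.

import numpy as np
import caffe

caffe.set_mode_cpu()  # the log above reports "CPU mode"

model_def = 'models/bvlc_reference_caffenet/deploy.prototxt'
weights = 'models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel'

# caffe.Classifier wraps Net with classify.py-style preprocessing:
# raw_scale=255 rescales the [0,1] image loaded by caffe.io to [0,255],
# channel_swap reorders RGB -> BGR to match the reference CaffeNet model,
# and the mean pixel here is the commonly used ImageNet BGR mean (assumption).
net = caffe.Classifier(model_def, weights,
                       image_dims=(256, 256),
                       mean=np.array([104.0, 117.0, 123.0]),
                       raw_scale=255,
                       channel_swap=(2, 1, 0))

img = caffe.io.load_image('101_ObjectCategories/panda/image_0002.jpg')
probs = net.predict([img])   # shape (1, 1000): softmax output of layer 'prob'
np.save('result.npy', probs)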
I0405 06:46:33.860342 38686 net.cpp:116] Using FLOAT as default forward math type
I0405 06:46:33.860352 38686 net.cpp:122] Using FLOAT as default backward math type
I0405 06:46:33.860360 38686 layer_factory.hpp:172] Creating layer 'data' of type 'Input'
I0405 06:46:33.860368 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I0405 06:46:33.860389 38686 net.cpp:205] Created Layer data (0)
I0405 06:46:33.860402 38686 net.cpp:547] data -> data
I0405 06:46:33.860432 38686 net.cpp:265] Setting up data
I0405 06:46:33.860440 38686 net.cpp:272] TEST Top shape for layer 0 'data' 10 3 227 227 (1545870)
I0405 06:46:33.860457 38686 layer_factory.hpp:172] Creating layer 'conv1' of type 'Convolution'
I0405 06:46:33.860464 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I0405 06:46:33.860508 38686 net.cpp:205] Created Layer conv1 (1)
I0405 06:46:33.860517 38686 net.cpp:577] conv1 <- data
I0405 06:46:33.860525 38686 net.cpp:547] conv1 -> conv1
I0405 06:46:33.860890 38686 net.cpp:265] Setting up conv1
I0405 06:46:33.860899 38686 net.cpp:272] TEST Top shape for layer 1 'conv1' 10 96 55 55 (2904000)
I0405 06:46:33.860918 38686 layer_factory.hpp:172] Creating layer 'relu1' of type 'ReLU'
I0405 06:46:33.860934 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I0405 06:46:33.860941 38686 net.cpp:205] Created Layer relu1 (2)
I0405 06:46:33.860948 38686 net.cpp:577] relu1 <- conv1
I0405 06:46:33.860954 38686 net.cpp:532] relu1 -> conv1 (in-place)
I0405 06:46:33.860961 38686 net.cpp:265] Setting up relu1
I0405 06:46:33.860967 38686 net.cpp:272] TEST Top shape for layer 2 'relu1' 10 96 55 55 (2904000)
I0405 06:46:33.860975 38686 layer_factory.hpp:172] Creating layer 'pool1' of type 'Pooling'
I0405 06:46:33.860981 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I0405 06:46:33.860992 38686 net.cpp:205] Created Layer pool1 (3)
I0405 06:46:33.860999 38686 net.cpp:577] pool1 <- conv1
I0405 06:46:33.861006 38686 net.cpp:547] pool1 -> pool1
I0405 06:46:33.861021 38686 net.cpp:265] Setting up pool1
I0405 06:46:33.861028 38686 net.cpp:272] TEST Top shape for layer 3 'pool1' 10 96 27 27 (699840)
I0405 06:46:33.861037 38686 layer_factory.hpp:172] Creating layer 'norm1' of type 'LRN'
I0405 06:46:33.861042 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I0405 06:46:33.861054 38686 net.cpp:205] Created Layer norm1 (4)
I0405 06:46:33.861061 38686 net.cpp:577] norm1 <- pool1
I0405 06:46:33.861068 38686 net.cpp:547] norm1 -> norm1
I0405 06:46:33.861078 38686 net.cpp:265] Setting up norm1
I0405 06:46:33.861084 38686 net.cpp:272] TEST Top shape for layer 4 'norm1' 10 96 27 27 (699840)
I0405 06:46:33.861091 38686 layer_factory.hpp:172] Creating layer 'conv2' of type 'Convolution'
I0405 06:46:33.861099 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I0405 06:46:33.861114 38686 net.cpp:205] Created Layer conv2 (5)
I0405 06:46:33.861120 38686 net.cpp:577] conv2 <- norm1
I0405 06:46:33.861127 38686 net.cpp:547] conv2 -> conv2
I0405 06:46:33.863842 38686 net.cpp:265] Setting up conv2
I0405 06:46:33.863864 38686 net.cpp:272] TEST Top shape for layer 5 'conv2' 10 256 27 27 (1866240)
I0405 06:46:33.863884 38686 layer_factory.hpp:172] Creating layer 'relu2' of type 'ReLU'
I0405 06:46:33.863891 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
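The top shapes printed above follow from the usual convolution/pooling size arithmetic; a quick sketch using Caffe's conventions (floor for convolution, ceil for pooling) reproduces the 227 -> 55 -> 27 -> 13 progression seen in the log:

import math

def conv_out(size, kernel, stride=1, pad=0):
    # Caffe convolution output size: floor((size + 2*pad - kernel) / stride) + 1
    return (size + 2 * pad - kernel) // stride + 1

def pool_out(size, kernel, stride=1, pad=0):
    # Caffe pooling rounds up, so a partially covered last window still counts
    return int(math.ceil((size + 2 * pad - kernel) / stride)) + 1

assert conv_out(227, kernel=11, stride=4) == 55       # conv1: 10 96 55 55
assert pool_out(55, kernel=3, stride=2) == 27         # pool1: 10 96 27 27
assert conv_out(27, kernel=5, stride=1, pad=2) == 27  # conv2 keeps 27x27
assert pool_out(27, kernel=3, stride=2) == 13         # pool2: 10 256 13 13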
I0405 06:46:33.863901 38686 net.cpp:205] Created Layer relu2 (6)
I0405 06:46:33.863909 38686 net.cpp:577] relu2 <- conv2
I0405 06:46:33.863914 38686 net.cpp:532] relu2 -> conv2 (in-place)
I0405 06:46:33.863929 38686 net.cpp:265] Setting up relu2
I0405 06:46:33.863935 38686 net.cpp:272] TEST Top shape for layer 6 'relu2' 10 256 27 27 (1866240)
I0405 06:46:33.863943 38686 layer_factory.hpp:172] Creating layer 'pool2' of type 'Pooling'
I0405 06:46:33.863950 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I0405 06:46:33.863958 38686 net.cpp:205] Created Layer pool2 (7)
I0405 06:46:33.863965 38686 net.cpp:577] pool2 <- conv2
I0405 06:46:33.863972 38686 net.cpp:547] pool2 -> pool2
I0405 06:46:33.863982 38686 net.cpp:265] Setting up pool2
I0405 06:46:33.863989 38686 net.cpp:272] TEST Top shape for layer 7 'pool2' 10 256 13 13 (432640)
I0405 06:46:33.863997 38686 layer_factory.hpp:172] Creating layer 'norm2' of type 'LRN'
I0405 06:46:33.864003 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I0405 06:46:33.864019 38686 net.cpp:205] Created Layer norm2 (8)
I0405 06:46:33.864027 38686 net.cpp:577] norm2 <- pool2
I0405 06:46:33.864033 38686 net.cpp:547] norm2 -> norm2
I0405 06:46:33.864068 38686 net.cpp:265] Setting up norm2
I0405 06:46:33.864077 38686 net.cpp:272] TEST Top shape for layer 8 'norm2' 10 256 13 13 (432640)
I0405 06:46:33.864085 38686 layer_factory.hpp:172] Creating layer 'conv3' of type 'Convolution'
I0405 06:46:33.864092 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I0405 06:46:33.864106 38686 net.cpp:205] Created Layer conv3 (9)
I0405 06:46:33.864113 38686 net.cpp:577] conv3 <- norm2
I0405 06:46:33.864120 38686 net.cpp:547] conv3 -> conv3
I0405 06:46:33.871959 38686 net.cpp:265] Setting up conv3
I0405 06:46:33.871984 38686 net.cpp:272] TEST Top shape for layer 9 'conv3' 10 384 13 13 (648960)
I0405 06:46:33.872004 38686 layer_factory.hpp:172] Creating layer 'relu3' of type 'ReLU'
I0405 06:46:33.872011 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I0405 06:46:33.872021 38686 net.cpp:205] Created Layer relu3 (10)
I0405 06:46:33.872028 38686 net.cpp:577] relu3 <- conv3
I0405 06:46:33.872037 38686 net.cpp:532] relu3 -> conv3 (in-place)
I0405 06:46:33.872046 38686 net.cpp:265] Setting up relu3
I0405 06:46:33.872051 38686 net.cpp:272] TEST Top shape for layer 10 'relu3' 10 384 13 13 (648960)
I0405 06:46:33.872058 38686 layer_factory.hpp:172] Creating layer 'conv4' of type 'Convolution'
I0405 06:46:33.872066 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I0405 06:46:33.872076 38686 net.cpp:205] Created Layer conv4 (11)
I0405 06:46:33.872083 38686 net.cpp:577] conv4 <- conv3
I0405 06:46:33.872090 38686 net.cpp:547] conv4 -> conv4
I0405 06:46:33.878000 38686 net.cpp:265] Setting up conv4
I0405 06:46:33.878021 38686 net.cpp:272] TEST Top shape for layer 11 'conv4' 10 384 13 13 (648960)
I0405 06:46:33.878037 38686 layer_factory.hpp:172] Creating layer 'relu4' of type 'ReLU'
I0405 06:46:33.878046 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I0405 06:46:33.878057 38686 net.cpp:205] Created Layer relu4 (12)
I0405 06:46:33.878064 38686 net.cpp:577] relu4 <- conv4
I0405 06:46:33.878072 38686 net.cpp:532] relu4 -> conv4 (in-place)
I0405 06:46:33.878082 38686 net.cpp:265] Setting up relu4
I0405 06:46:33.878087 38686 net.cpp:272] TEST Top shape for layer 12 'relu4' 10 384 13 13 (648960)
I0405 06:46:33.878095 38686 layer_factory.hpp:172] Creating layer 'conv5' of type 'Convolution'
I0405 06:46:33.878101 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I0405 06:46:33.878118 38686 net.cpp:205] Created Layer conv5 (13)
I0405 06:46:33.878124 38686 net.cpp:577] conv5 <- conv4
I0405 06:46:33.878131 38686 net.cpp:547] conv5 -> conv5
I0405 06:46:33.882161 38686 net.cpp:265] Setting up conv5
I0405 06:46:33.882182 38686 net.cpp:272] TEST Top shape for layer 13 'conv5' 10 256 13 13 (432640)
I0405 06:46:33.882203 38686 layer_factory.hpp:172] Creating layer 'relu5' of type 'ReLU'
I0405 06:46:33.882211 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I0405 06:46:33.882229 38686 net.cpp:205] Created Layer relu5 (14)
I0405 06:46:33.882237 38686 net.cpp:577] relu5 <- conv5
I0405 06:46:33.882246 38686 net.cpp:532] relu5 -> conv5 (in-place)
I0405 06:46:33.882253 38686 net.cpp:265] Setting up relu5
I0405 06:46:33.882261 38686 net.cpp:272] TEST Top shape for layer 14 'relu5' 10 256 13 13 (432640)
I0405 06:46:33.882268 38686 layer_factory.hpp:172] Creating layer 'pool5' of type 'Pooling'
I0405 06:46:33.882274 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I0405 06:46:33.882283 38686 net.cpp:205] Created Layer pool5 (15)
I0405 06:46:33.882290 38686 net.cpp:577] pool5 <- conv5
I0405 06:46:33.882297 38686 net.cpp:547] pool5 -> pool5
I0405 06:46:33.882308 38686 net.cpp:265] Setting up pool5
I0405 06:46:33.882313 38686 net.cpp:272] TEST Top shape for layer 15 'pool5' 10 256 6 6 (92160)
I0405 06:46:33.882321 38686 layer_factory.hpp:172] Creating layer 'fc6' of type 'InnerProduct'
I0405 06:46:33.882328 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I0405 06:46:33.882385 38686 net.cpp:205] Created Layer fc6 (16)
I0405 06:46:33.882393 38686 net.cpp:577] fc6 <- pool5
I0405 06:46:33.882402 38686 net.cpp:547] fc6 -> fc6
I0405 06:46:34.212502 38686 net.cpp:265] Setting up fc6
I0405 06:46:34.212528 38686 net.cpp:272] TEST Top shape for layer 16 'fc6' 10 4096 (40960)
I0405 06:46:34.212548 38686 layer_factory.hpp:172] Creating layer 'relu6' of type 'ReLU'
I0405 06:46:34.212555 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I0405 06:46:34.212567 38686 net.cpp:205] Created Layer relu6 (17)
I0405 06:46:34.212575 38686 net.cpp:577] relu6 <- fc6
I0405 06:46:34.212584 38686 net.cpp:532] relu6 -> fc6 (in-place)
I0405 06:46:34.212594 38686 net.cpp:265] Setting up relu6
I0405 06:46:34.212600 38686 net.cpp:272] TEST Top shape for layer 17 'relu6' 10 4096 (40960)
I0405 06:46:34.212607 38686 layer_factory.hpp:172] Creating layer 'drop6' of type 'Dropout'
I0405 06:46:34.212615 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I0405 06:46:34.212623 38686 net.cpp:205] Created Layer drop6 (18)
I0405 06:46:34.212630 38686 net.cpp:577] drop6 <- fc6
I0405 06:46:34.212637 38686 net.cpp:532] drop6 -> fc6 (in-place)
I0405 06:46:34.212653 38686 net.cpp:265] Setting up drop6
I0405 06:46:34.212661 38686 net.cpp:272] TEST Top shape for layer 18 'drop6' 10 4096 (40960)
I0405 06:46:34.212667 38686 layer_factory.hpp:172] Creating layer 'fc7' of type 'InnerProduct'
I0405 06:46:34.212674 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
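fc6 collapses the 256x6x6 pool5 output (9216 values per image) into a 4096-dimensional vector, which is why its top shape is simply "10 4096". As an InnerProduct layer it is just an affine map; a small numpy sketch (shapes only, random weights, not the trained blobs) makes that concrete:

import numpy as np

batch = 10
pool5 = np.random.rand(batch, 256, 6, 6).astype(np.float32)  # TEST top shape of pool5

# InnerProduct flattens everything after the batch axis, then applies y = x W^T + b
x = pool5.reshape(batch, -1)                        # (10, 9216)
W = np.random.rand(4096, 9216).astype(np.float32)   # fc6 weight blob shape
b = np.random.rand(4096).astype(np.float32)         # fc6 bias blob shape
fc6 = x @ W.T + b                                    # (10, 4096), matching the logged shape
print(fc6.shape)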
I0405 06:46:34.212683 38686 net.cpp:205] Created Layer fc7 (19)
I0405 06:46:34.212690 38686 net.cpp:577] fc7 <- fc6
I0405 06:46:34.212697 38686 net.cpp:547] fc7 -> fc7
I0405 06:46:34.358752 38686 net.cpp:265] Setting up fc7
I0405 06:46:34.358776 38686 net.cpp:272] TEST Top shape for layer 19 'fc7' 10 4096 (40960)
I0405 06:46:34.358793 38686 layer_factory.hpp:172] Creating layer 'relu7' of type 'ReLU'
I0405 06:46:34.358801 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I0405 06:46:34.358816 38686 net.cpp:205] Created Layer relu7 (20)
I0405 06:46:34.358824 38686 net.cpp:577] relu7 <- fc7
I0405 06:46:34.358832 38686 net.cpp:532] relu7 -> fc7 (in-place)
I0405 06:46:34.358841 38686 net.cpp:265] Setting up relu7
I0405 06:46:34.358848 38686 net.cpp:272] TEST Top shape for layer 20 'relu7' 10 4096 (40960)
I0405 06:46:34.358856 38686 layer_factory.hpp:172] Creating layer 'drop7' of type 'Dropout'
I0405 06:46:34.358863 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I0405 06:46:34.358872 38686 net.cpp:205] Created Layer drop7 (21)
I0405 06:46:34.358879 38686 net.cpp:577] drop7 <- fc7
I0405 06:46:34.358886 38686 net.cpp:532] drop7 -> fc7 (in-place)
I0405 06:46:34.358894 38686 net.cpp:265] Setting up drop7
I0405 06:46:34.358901 38686 net.cpp:272] TEST Top shape for layer 21 'drop7' 10 4096 (40960)
I0405 06:46:34.358907 38686 layer_factory.hpp:172] Creating layer 'fc8' of type 'InnerProduct'
I0405 06:46:34.358914 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I0405 06:46:34.358927 38686 net.cpp:205] Created Layer fc8 (22)
I0405 06:46:34.358934 38686 net.cpp:577] fc8 <- fc7
I0405 06:46:34.358942 38686 net.cpp:547] fc8 -> fc8
I0405 06:46:34.394675 38686 net.cpp:265] Setting up fc8
I0405 06:46:34.394701 38686 net.cpp:272] TEST Top shape for layer 22 'fc8' 10 1000 (10000)
I0405 06:46:34.394718 38686 layer_factory.hpp:172] Creating layer 'prob' of type 'Softmax'
I0405 06:46:34.394726 38686 layer_factory.hpp:184] Layer's types are Ftype:FLOAT Btype:FLOAT Fmath:FLOAT Bmath:FLOAT
I0405 06:46:34.394747 38686 net.cpp:205] Created Layer prob (23)
I0405 06:46:34.394754 38686 net.cpp:577] prob <- fc8
I0405 06:46:34.394763 38686 net.cpp:547] prob -> prob
I0405 06:46:34.394791 38686 net.cpp:265] Setting up prob
I0405 06:46:34.394798 38686 net.cpp:272] TEST Top shape for layer 23 'prob' 10 1000 (10000)
I0405 06:46:34.394838 38686 net.cpp:343] prob does not need backward computation.
I0405 06:46:34.394845 38686 net.cpp:343] fc8 does not need backward computation.
I0405 06:46:34.394852 38686 net.cpp:343] drop7 does not need backward computation.
I0405 06:46:34.394858 38686 net.cpp:343] relu7 does not need backward computation.
I0405 06:46:34.394866 38686 net.cpp:343] fc7 does not need backward computation.
I0405 06:46:34.394871 38686 net.cpp:343] drop6 does not need backward computation.
I0405 06:46:34.394877 38686 net.cpp:343] relu6 does not need backward computation.
I0405 06:46:34.394884 38686 net.cpp:343] fc6 does not need backward computation.
I0405 06:46:34.394889 38686 net.cpp:343] pool5 does not need backward computation.
I0405 06:46:34.394896 38686 net.cpp:343] relu5 does not need backward computation.
I0405 06:46:34.394901 38686 net.cpp:343] conv5 does not need backward computation.
I0405 06:46:34.394913 38686 net.cpp:343] relu4 does not need backward computation.
I0405 06:46:34.394919 38686 net.cpp:343] conv4 does not need backward computation.
I0405 06:46:34.394933 38686 net.cpp:343] relu3 does not need backward computation.
I0405 06:46:34.394940 38686 net.cpp:343] conv3 does not need backward computation.
I0405 06:46:34.394948 38686 net.cpp:343] norm2 does not need backward computation.
I0405 06:46:34.394954 38686 net.cpp:343] pool2 does not need backward computation.
I0405 06:46:34.394960 38686 net.cpp:343] relu2 does not need backward computation.
I0405 06:46:34.394968 38686 net.cpp:343] conv2 does not need backward computation.
I0405 06:46:34.394974 38686 net.cpp:343] norm1 does not need backward computation.
I0405 06:46:34.394981 38686 net.cpp:343] pool1 does not need backward computation.
I0405 06:46:34.394987 38686 net.cpp:343] relu1 does not need backward computation.
I0405 06:46:34.394994 38686 net.cpp:343] conv1 does not need backward computation.
I0405 06:46:34.395002 38686 net.cpp:343] data does not need backward computation.
I0405 06:46:34.395008 38686 net.cpp:385] This network produces output prob
I0405 06:46:34.395030 38686 net.cpp:408] Top memory (TEST) required for data: 68681400 diff: 68681400
I0405 06:46:34.395037 38686 net.cpp:411] Bottom memory (TEST) required for data: 68641400 diff: 68641400
I0405 06:46:34.395043 38686 net.cpp:414] Shared (in-place) memory (TEST) by data: 26658560 diff: 26658560
I0405 06:46:34.395049 38686 net.cpp:417] Parameters memory (TEST) required for data: 243860896 diff: 42272
I0405 06:46:34.395056 38686 net.cpp:420] Parameters shared memory (TEST) by data: 0 diff: 0
I0405 06:46:34.395061 38686 net.cpp:426] Network initialization done.
I0405 06:46:34.769229 38686 upgrade_proto.cpp:43] Attempting to upgrade input file specified using deprecated transformation parameters: /home/chibi/caffe/python/../models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel
I0405 06:46:34.769259 38686 upgrade_proto.cpp:46] Successfully upgraded file specified using deprecated data transformation parameters.
W0405 06:46:34.769268 38686 upgrade_proto.cpp:48] Note that future Caffe releases will only support transform_param messages for transformation fields.
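The "Parameters memory (TEST) required ... 243860896" figure is simply the number of learnable weights and biases times 4 bytes per FLOAT element. Recomputing it from the layer definitions in the prototxt above reproduces the logged value exactly (a sketch; note that group: 2 halves the input channels seen by conv2, conv4 and conv5):

def conv_params(out_ch, in_ch, k, group=1):
    # weights: out_ch x (in_ch/group) x k x k, plus one bias per output channel
    return out_ch * (in_ch // group) * k * k + out_ch

def fc_params(out_dim, in_dim):
    return out_dim * in_dim + out_dim

total = (conv_params(96, 3, 11)               # conv1
         + conv_params(256, 96, 5, group=2)   # conv2
         + conv_params(384, 256, 3)           # conv3
         + conv_params(384, 384, 3, group=2)  # conv4
         + conv_params(256, 384, 3, group=2)  # conv5
         + fc_params(4096, 256 * 6 * 6)       # fc6
         + fc_params(4096, 4096)              # fc7
         + fc_params(1000, 4096))             # fc8

print(total)      # 60965224 parameters
print(total * 4)  # 243860896 bytes, matching the net.cpp:417 log line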
I0405 06:46:34.769299 38686 upgrade_proto.cpp:52] Attempting to upgrade input file specified using deprecated V1LayerParameter: /home/chibi/caffe/python/../models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel
I0405 06:46:35.104784 38686 upgrade_proto.cpp:60] Successfully upgraded file specified using deprecated V1LayerParameter
I0405 06:46:35.116675 38686 net.cpp:1143] Copying source layer data Type:Data #blobs=0
I0405 06:46:35.116699 38686 net.cpp:1143] Copying source layer conv1 Type:Convolution #blobs=2
I0405 06:46:35.116963 38686 net.cpp:1143] Copying source layer relu1 Type:ReLU #blobs=0
I0405 06:46:35.116972 38686 net.cpp:1143] Copying source layer pool1 Type:Pooling #blobs=0
I0405 06:46:35.116979 38686 net.cpp:1143] Copying source layer norm1 Type:LRN #blobs=0
I0405 06:46:35.116986 38686 net.cpp:1143] Copying source layer conv2 Type:Convolution #blobs=2
I0405 06:46:35.119262 38686 net.cpp:1143] Copying source layer relu2 Type:ReLU #blobs=0
I0405 06:46:35.119280 38686 net.cpp:1143] Copying source layer pool2 Type:Pooling #blobs=0
I0405 06:46:35.119287 38686 net.cpp:1143] Copying source layer norm2 Type:LRN #blobs=0
I0405 06:46:35.119293 38686 net.cpp:1143] Copying source layer conv3 Type:Convolution #blobs=2
I0405 06:46:35.125663 38686 net.cpp:1143] Copying source layer relu3 Type:ReLU #blobs=0
I0405 06:46:35.125684 38686 net.cpp:1143] Copying source layer conv4 Type:Convolution #blobs=2
I0405 06:46:35.130473 38686 net.cpp:1143] Copying source layer relu4 Type:ReLU #blobs=0
I0405 06:46:35.130491 38686 net.cpp:1143] Copying source layer conv5 Type:Convolution #blobs=2
I0405 06:46:35.133704 38686 net.cpp:1143] Copying source layer relu5 Type:ReLU #blobs=0
I0405 06:46:35.133723 38686 net.cpp:1143] Copying source layer pool5 Type:Pooling #blobs=0
I0405 06:46:35.133729 38686 net.cpp:1143] Copying source layer fc6 Type:InnerProduct #blobs=2
I0405 06:46:35.403841 38686 net.cpp:1143] Copying source layer relu6 Type:ReLU #blobs=0
I0405 06:46:35.403980 38686 net.cpp:1143] Copying source layer drop6 Type:Dropout #blobs=0
I0405 06:46:35.404040 38686 net.cpp:1143] Copying source layer fc7 Type:InnerProduct #blobs=2
I0405 06:46:35.524750 38686 net.cpp:1143] Copying source layer relu7 Type:ReLU #blobs=0
I0405 06:46:35.524860 38686 net.cpp:1143] Copying source layer drop7 Type:Dropout #blobs=0
I0405 06:46:35.524925 38686 net.cpp:1143] Copying source layer fc8 Type:InnerProduct #blobs=2
I0405 06:46:35.554312 38686 net.cpp:1135] Ignoring source layer loss
(10, 3, 227, 227)
Loading file: ../101_ObjectCategories/panda/image_0002.jpg
Classifying 1 inputs.
Done in 1.06 s.
Saving results into ../result.npy
chibi@2204:~/caffe$ python3 show_result_py3.py data/ilsvrc12/synset_words.txt result.npy
#1 | giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca | 99.9%
#2 | Old English sheepdog, bobtail | 0.0%
#3 | kuvasz | 0.0%
#4 | Sealyham terrier, Sealyham | 0.0%
#5 | teddy, teddy bear | 0.0%
chibi@2204:~/caffe$
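The source of show_result_py3.py is not listed here, but producing the table above only requires pairing the saved probability vector with the ILSVRC-2012 synset labels. A minimal sketch of such a script (the exact file it reproduces is an assumption, as is the output formatting):

import sys
import numpy as np

labels_file, results_file = sys.argv[1], sys.argv[2]  # synset_words.txt, result.npy

# synset_words.txt lines look like "n02510455 giant panda, panda, panda bear, ..."
with open(labels_file) as f:
    labels = [line.strip().split(' ', 1)[1] for line in f]

probs = np.load(results_file)[0]   # classify.py saved an array of shape (1, 1000)
top5 = probs.argsort()[::-1][:5]   # indices of the five largest probabilities

for rank, idx in enumerate(top5, start=1):
    print(f'#{rank} | {labels[idx]} | {probs[idx] * 100:.1f}%')

Run it as in the transcript, e.g. python3 show_result_py3.py data/ilsvrc12/synset_words.txt result.npy, to print a top-5 table like the one above.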