From: Yangqing Jia
Date: Tue, 19 Nov 2013 00:06:33 +0000 (-0800)
Subject: imagenet deploy prototxt: instead of having a data layer, this network proto takes...
X-Git-Tag: submit/tizen/20180823.020014~856^2~1
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=91254090bd348398bffb7672a5d4e23ecd09e23c;p=platform%2Fupstream%2Fcaffeonacl.git

imagenet deploy prototxt: instead of having a data layer, this network
proto takes an external blob called "data". The shape is hardcoded to
match the ImageNet Python wrapper at python/caffe/imagenet/wrapper.py;
change the input_dim values if you intend to use a different input shape.
---

diff --git a/examples/imagenet_deploy.prototxt b/examples/imagenet_deploy.prototxt
new file mode 100644
index 0000000..6257914
--- /dev/null
+++ b/examples/imagenet_deploy.prototxt
@@ -0,0 +1,355 @@
+input: "data"
+input_dim: 10
+input_dim: 3
+input_dim: 227
+input_dim: 227
+layers {
+  layer {
+    name: "conv1"
+    type: "conv"
+    num_output: 96
+    kernelsize: 11
+    stride: 4
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.
+    }
+    blobs_lr: 1.
+    blobs_lr: 2.
+    weight_decay: 1.
+    weight_decay: 0.
+  }
+  bottom: "data"
+  top: "conv1"
+}
+layers {
+  layer {
+    name: "relu1"
+    type: "relu"
+  }
+  bottom: "conv1"
+  top: "conv1"
+}
+layers {
+  layer {
+    name: "pool1"
+    type: "pool"
+    pool: MAX
+    kernelsize: 3
+    stride: 2
+  }
+  bottom: "conv1"
+  top: "pool1"
+}
+layers {
+  layer {
+    name: "norm1"
+    type: "lrn"
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+  bottom: "pool1"
+  top: "norm1"
+}
+layers {
+  layer {
+    name: "pad2"
+    type: "padding"
+    pad: 2
+  }
+  bottom: "norm1"
+  top: "pad2"
+}
+layers {
+  layer {
+    name: "conv2"
+    type: "conv"
+    num_output: 256
+    group: 2
+    kernelsize: 5
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 1.
+    }
+    blobs_lr: 1.
+    blobs_lr: 2.
+    weight_decay: 1.
+    weight_decay: 0.
+  }
+  bottom: "pad2"
+  top: "conv2"
+}
+layers {
+  layer {
+    name: "relu2"
+    type: "relu"
+  }
+  bottom: "conv2"
+  top: "conv2"
+}
+layers {
+  layer {
+    name: "pool2"
+    type: "pool"
+    pool: MAX
+    kernelsize: 3
+    stride: 2
+  }
+  bottom: "conv2"
+  top: "pool2"
+}
+layers {
+  layer {
+    name: "norm2"
+    type: "lrn"
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+  bottom: "pool2"
+  top: "norm2"
+}
+layers {
+  layer {
+    name: "pad3"
+    type: "padding"
+    pad: 1
+  }
+  bottom: "norm2"
+  top: "pad3"
+}
+layers {
+  layer {
+    name: "conv3"
+    type: "conv"
+    num_output: 384
+    kernelsize: 3
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 0.
+    }
+    blobs_lr: 1.
+    blobs_lr: 2.
+    weight_decay: 1.
+    weight_decay: 0.
+  }
+  bottom: "pad3"
+  top: "conv3"
+}
+layers {
+  layer {
+    name: "relu3"
+    type: "relu"
+  }
+  bottom: "conv3"
+  top: "conv3"
+}
+layers {
+  layer {
+    name: "pad4"
+    type: "padding"
+    pad: 1
+  }
+  bottom: "conv3"
+  top: "pad4"
+}
+layers {
+  layer {
+    name: "conv4"
+    type: "conv"
+    num_output: 384
+    group: 2
+    kernelsize: 3
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 1.
+    }
+    blobs_lr: 1.
+    blobs_lr: 2.
+    weight_decay: 1.
+    weight_decay: 0.
+  }
+  bottom: "pad4"
+  top: "conv4"
+}
+layers {
+  layer {
+    name: "relu4"
+    type: "relu"
+  }
+  bottom: "conv4"
+  top: "conv4"
+}
+layers {
+  layer {
+    name: "pad5"
+    type: "padding"
+    pad: 1
+  }
+  bottom: "conv4"
+  top: "pad5"
+}
+layers {
+  layer {
+    name: "conv5"
+    type: "conv"
+    num_output: 256
+    group: 2
+    kernelsize: 3
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 1.
+    }
+    blobs_lr: 1.
+    blobs_lr: 2.
+    weight_decay: 1.
+    weight_decay: 0.
+  }
+  bottom: "pad5"
+  top: "conv5"
+}
+layers {
+  layer {
+    name: "relu5"
+    type: "relu"
+  }
+  bottom: "conv5"
+  top: "conv5"
+}
+layers {
+  layer {
+    name: "pool5"
+    type: "pool"
+    kernelsize: 3
+    pool: MAX
+    stride: 2
+  }
+  bottom: "conv5"
+  top: "pool5"
+}
+layers {
+  layer {
+    name: "fc6"
+    type: "innerproduct"
+    num_output: 4096
+    weight_filler {
+      type: "gaussian"
+      std: 0.005
+    }
+    bias_filler {
+      type: "constant"
+      value: 1.
+    }
+    blobs_lr: 1.
+    blobs_lr: 2.
+    weight_decay: 1.
+    weight_decay: 0.
+  }
+  bottom: "pool5"
+  top: "fc6"
+}
+layers {
+  layer {
+    name: "relu6"
+    type: "relu"
+  }
+  bottom: "fc6"
+  top: "fc6"
+}
+layers {
+  layer {
+    name: "drop6"
+    type: "dropout"
+    dropout_ratio: 0.5
+  }
+  bottom: "fc6"
+  top: "fc6"
+}
+layers {
+  layer {
+    name: "fc7"
+    type: "innerproduct"
+    num_output: 4096
+    weight_filler {
+      type: "gaussian"
+      std: 0.005
+    }
+    bias_filler {
+      type: "constant"
+      value: 1.
+    }
+    blobs_lr: 1.
+    blobs_lr: 2.
+    weight_decay: 1.
+    weight_decay: 0.
+  }
+  bottom: "fc6"
+  top: "fc7"
+}
+layers {
+  layer {
+    name: "relu7"
+    type: "relu"
+  }
+  bottom: "fc7"
+  top: "fc7"
+}
+layers {
+  layer {
+    name: "drop7"
+    type: "dropout"
+    dropout_ratio: 0.5
+  }
+  bottom: "fc7"
+  top: "fc7"
+}
+layers {
+  layer {
+    name: "fc8"
+    type: "innerproduct"
+    num_output: 1000
+    weight_filler {
+      type: "gaussian"
+      std: 0.01
+    }
+    bias_filler {
+      type: "constant"
+      value: 0
+    }
+    blobs_lr: 1.
+    blobs_lr: 2.
+    weight_decay: 1.
+    weight_decay: 0.
+  }
+  bottom: "fc7"
+  top: "fc8"
+}
+layers {
+  layer {
+    name: "prob"
+    type: "softmax"
+  }
+  bottom: "fc8"
+  top: "prob"
+}
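
For context, a minimal sketch of how a deploy proto like this is driven:
since there is no data layer, the caller allocates the "data" blob itself
and runs a forward pass. The calls below follow the later caffe.Net Python
interface rather than the 2013-era wrapper.py (which used a lower-level
interface), so treat the exact API and both file paths as assumptions,
not as part of this commit.

import numpy as np
import caffe  # assumes a pycaffe build exposing the later caffe.Net API

MODEL_FILE = 'examples/imagenet_deploy.prototxt'
PRETRAINED = 'caffe_reference_imagenet_model'  # hypothetical weights path

net = caffe.Net(MODEL_FILE, PRETRAINED, caffe.TEST)

# No data layer in the deploy proto: the caller fills the "data" blob
# directly. The shape must match the hardcoded input_dim values,
# i.e. a batch of 10 images of 3 x 227 x 227.
batch = np.random.rand(10, 3, 227, 227).astype(np.float32)
net.blobs['data'].data[...] = batch

out = net.forward()
probs = out['prob'].reshape(10, -1)  # softmax over 1000 classes per image
print(probs.argmax(axis=1))          # top-1 class index for each image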
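
The hardcoded 227 x 227 also fixes the geometry of every layer downstream.
A quick sanity check with the standard output-size formula, mirroring the
proto's explicit padding layers (plain Python, nothing Caffe-specific;
Caffe's pooling rounds up, but every division here is exact, so integer
division gives the same result):

def out_size(size, kernel, stride=1):
    # Output spatial size of a conv or max-pooling window.
    return (size - kernel) // stride + 1

s = 227                     # input_dim
s = out_size(s, 11, 4)      # conv1        -> 55
s = out_size(s, 3, 2)       # pool1        -> 27
s = out_size(s + 2 * 2, 5)  # pad2 + conv2 -> 27
s = out_size(s, 3, 2)       # pool2        -> 13
s = out_size(s + 2 * 1, 3)  # pad3 + conv3 -> 13
s = out_size(s + 2 * 1, 3)  # pad4 + conv4 -> 13
s = out_size(s + 2 * 1, 3)  # pad5 + conv5 -> 13
s = out_size(s, 3, 2)       # pool5        -> 6
print(256 * s * s)          # 9216 inputs into fc6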