--- /dev/null
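+# Solver for fine-tuning the ImageNet-pretrained CaffeNet on PASCAL VOC window proposals.
+# lr_policy "step" drops base_lr (0.001) by gamma (0.1) every stepsize (20,000) iterations,
+# and a snapshot is written every 10,000 iterations with the prefix below.
+# A plausible launch, assuming this file is saved as pascal_finetune_solver.prototxt and the
+# finetune_net tool plus a pretrained ImageNet snapshot are available (names are illustrative):
+#   finetune_net.bin pascal_finetune_solver.prototxt <pretrained_imagenet_model>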
+train_net: "pascal_finetune_train.prototxt"
+test_net: "pascal_finetune_val.prototxt"
+test_iter: 100
+test_interval: 1000
+base_lr: 0.001
+lr_policy: "step"
+gamma: 0.1
+stepsize: 20000
+display: 20
+max_iter: 100000
+momentum: 0.9
+weight_decay: 0.0005
+snapshot: 10000
+snapshot_prefix: "pascal_finetune_train"
--- /dev/null
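+# Training net for PASCAL VOC fine-tuning. The window_data layer reads region-proposal
+# windows from window_file_2007_trainval.txt: roughly det_fg_fraction of each batch is drawn
+# from windows with overlap >= det_fg_threshold (foreground), the rest from windows with
+# overlap below det_bg_threshold (background); each window is padded with det_context_pad
+# pixels of context and warped to cropsize x cropsize before mean subtraction.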
+name: "CaffeNet"
+layers {
+ layer {
+ name: "data"
+ type: "window_data"
+ source: "window_file_2007_trainval.txt"
+ meanfile: "../../data/ilsvrc12/imagenet_mean.binaryproto"
+ batchsize: 128
+ cropsize: 227
+ mirror: true
+ det_context_pad: 16
+ det_crop_mode: "warp"
+ det_fg_threshold: 0.5
+ det_bg_threshold: 0.5
+ det_fg_fraction: 0.25
+ }
+ top: "data"
+ top: "label"
+}
+layers {
+ layer {
+ name: "conv1"
+ type: "conv"
+ num_output: 96
+ kernelsize: 11
+ stride: 4
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ value: 0.
+ }
+ blobs_lr: 1.
+ blobs_lr: 2.
+ weight_decay: 1.
+ weight_decay: 0.
+ }
+ bottom: "data"
+ top: "conv1"
+}
+layers {
+ layer {
+ name: "relu1"
+ type: "relu"
+ }
+ bottom: "conv1"
+ top: "conv1"
+}
+layers {
+ layer {
+ name: "pool1"
+ type: "pool"
+ pool: MAX
+ kernelsize: 3
+ stride: 2
+ }
+ bottom: "conv1"
+ top: "pool1"
+}
+layers {
+ layer {
+ name: "norm1"
+ type: "lrn"
+ local_size: 5
+ alpha: 0.0001
+ beta: 0.75
+ }
+ bottom: "pool1"
+ top: "norm1"
+}
+layers {
+ layer {
+ name: "pad2"
+ type: "padding"
+ pad: 2
+ }
+ bottom: "norm1"
+ top: "pad2"
+}
+layers {
+ layer {
+ name: "conv2"
+ type: "conv"
+ num_output: 256
+ group: 2
+ kernelsize: 5
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ value: 1.
+ }
+ blobs_lr: 1.
+ blobs_lr: 2.
+ weight_decay: 1.
+ weight_decay: 0.
+ }
+ bottom: "pad2"
+ top: "conv2"
+}
+layers {
+ layer {
+ name: "relu2"
+ type: "relu"
+ }
+ bottom: "conv2"
+ top: "conv2"
+}
+layers {
+ layer {
+ name: "pool2"
+ type: "pool"
+ pool: MAX
+ kernelsize: 3
+ stride: 2
+ }
+ bottom: "conv2"
+ top: "pool2"
+}
+layers {
+ layer {
+ name: "norm2"
+ type: "lrn"
+ local_size: 5
+ alpha: 0.0001
+ beta: 0.75
+ }
+ bottom: "pool2"
+ top: "norm2"
+}
+layers {
+ layer {
+ name: "pad3"
+ type: "padding"
+ pad: 1
+ }
+ bottom: "norm2"
+ top: "pad3"
+}
+layers {
+ layer {
+ name: "conv3"
+ type: "conv"
+ num_output: 384
+ kernelsize: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ value: 0.
+ }
+ blobs_lr: 1.
+ blobs_lr: 2.
+ weight_decay: 1.
+ weight_decay: 0.
+ }
+ bottom: "pad3"
+ top: "conv3"
+}
+layers {
+ layer {
+ name: "relu3"
+ type: "relu"
+ }
+ bottom: "conv3"
+ top: "conv3"
+}
+layers {
+ layer {
+ name: "pad4"
+ type: "padding"
+ pad: 1
+ }
+ bottom: "conv3"
+ top: "pad4"
+}
+layers {
+ layer {
+ name: "conv4"
+ type: "conv"
+ num_output: 384
+ group: 2
+ kernelsize: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ value: 1.
+ }
+ blobs_lr: 1.
+ blobs_lr: 2.
+ weight_decay: 1.
+ weight_decay: 0.
+ }
+ bottom: "pad4"
+ top: "conv4"
+}
+layers {
+ layer {
+ name: "relu4"
+ type: "relu"
+ }
+ bottom: "conv4"
+ top: "conv4"
+}
+layers {
+ layer {
+ name: "pad5"
+ type: "padding"
+ pad: 1
+ }
+ bottom: "conv4"
+ top: "pad5"
+}
+layers {
+ layer {
+ name: "conv5"
+ type: "conv"
+ num_output: 256
+ group: 2
+ kernelsize: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ value: 1.
+ }
+ blobs_lr: 1.
+ blobs_lr: 2.
+ weight_decay: 1.
+ weight_decay: 0.
+ }
+ bottom: "pad5"
+ top: "conv5"
+}
+layers {
+ layer {
+ name: "relu5"
+ type: "relu"
+ }
+ bottom: "conv5"
+ top: "conv5"
+}
+layers {
+ layer {
+ name: "pool5"
+ type: "pool"
+ kernelsize: 3
+ pool: MAX
+ stride: 2
+ }
+ bottom: "conv5"
+ top: "pool5"
+}
+layers {
+ layer {
+ name: "fc6"
+ type: "innerproduct"
+ num_output: 4096
+ weight_filler {
+ type: "gaussian"
+ std: 0.005
+ }
+ bias_filler {
+ type: "constant"
+ value: 1.
+ }
+ blobs_lr: 1.
+ blobs_lr: 2.
+ weight_decay: 1.
+ weight_decay: 0.
+ }
+ bottom: "pool5"
+ top: "fc6"
+}
+layers {
+ layer {
+ name: "relu6"
+ type: "relu"
+ }
+ bottom: "fc6"
+ top: "fc6"
+}
+layers {
+ layer {
+ name: "drop6"
+ type: "dropout"
+ dropout_ratio: 0.5
+ }
+ bottom: "fc6"
+ top: "fc6"
+}
+layers {
+ layer {
+ name: "fc7"
+ type: "innerproduct"
+ num_output: 4096
+ weight_filler {
+ type: "gaussian"
+ std: 0.005
+ }
+ bias_filler {
+ type: "constant"
+ value: 1.
+ }
+ blobs_lr: 1.
+ blobs_lr: 2.
+ weight_decay: 1.
+ weight_decay: 0.
+ }
+ bottom: "fc6"
+ top: "fc7"
+}
+layers {
+ layer {
+ name: "relu7"
+ type: "relu"
+ }
+ bottom: "fc7"
+ top: "fc7"
+}
+layers {
+ layer {
+ name: "drop7"
+ type: "dropout"
+ dropout_ratio: 0.5
+ }
+ bottom: "fc7"
+ top: "fc7"
+}
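+# fc8_pascal is the only layer trained from scratch: 21 outputs (20 PASCAL VOC classes plus
+# background), with 10x/20x learning-rate multipliers so the new classifier adapts faster
+# than the pretrained layers below it.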
+layers {
+ layer {
+ name: "fc8_pascal"
+ type: "innerproduct"
+ num_output: 21
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ value: 0
+ }
+ blobs_lr: 10.
+ blobs_lr: 20.
+ weight_decay: 1.
+ weight_decay: 0.
+ }
+ bottom: "fc7"
+ top: "fc8_pascal"
+}
+layers {
+ layer {
+ name: "loss"
+ type: "softmax_loss"
+ }
+ bottom: "fc8_pascal"
+ bottom: "label"
+}
--- /dev/null
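+# Validation net: identical to the training net except that the window file points at the
+# VOC 2007 test split and the softmax loss is replaced by softmax probabilities followed by
+# an accuracy layer at the end.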
+name: "CaffeNet"
+layers {
+ layer {
+ name: "data"
+ type: "window_data"
+ source: "window_file_2007_test.txt"
+ meanfile: "../../data/ilsvrc12/imagenet_mean.binaryproto"
+ batchsize: 128
+ cropsize: 227
+ mirror: true
+ det_context_pad: 16
+ det_crop_mode: "warp"
+ det_fg_threshold: 0.5
+ det_bg_threshold: 0.5
+ det_fg_fraction: 0.25
+ }
+ top: "data"
+ top: "label"
+}
+layers {
+ layer {
+ name: "conv1"
+ type: "conv"
+ num_output: 96
+ kernelsize: 11
+ stride: 4
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ value: 0.
+ }
+ blobs_lr: 1.
+ blobs_lr: 2.
+ weight_decay: 1.
+ weight_decay: 0.
+ }
+ bottom: "data"
+ top: "conv1"
+}
+layers {
+ layer {
+ name: "relu1"
+ type: "relu"
+ }
+ bottom: "conv1"
+ top: "conv1"
+}
+layers {
+ layer {
+ name: "pool1"
+ type: "pool"
+ pool: MAX
+ kernelsize: 3
+ stride: 2
+ }
+ bottom: "conv1"
+ top: "pool1"
+}
+layers {
+ layer {
+ name: "norm1"
+ type: "lrn"
+ local_size: 5
+ alpha: 0.0001
+ beta: 0.75
+ }
+ bottom: "pool1"
+ top: "norm1"
+}
+layers {
+ layer {
+ name: "pad2"
+ type: "padding"
+ pad: 2
+ }
+ bottom: "norm1"
+ top: "pad2"
+}
+layers {
+ layer {
+ name: "conv2"
+ type: "conv"
+ num_output: 256
+ group: 2
+ kernelsize: 5
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ value: 1.
+ }
+ blobs_lr: 1.
+ blobs_lr: 2.
+ weight_decay: 1.
+ weight_decay: 0.
+ }
+ bottom: "pad2"
+ top: "conv2"
+}
+layers {
+ layer {
+ name: "relu2"
+ type: "relu"
+ }
+ bottom: "conv2"
+ top: "conv2"
+}
+layers {
+ layer {
+ name: "pool2"
+ type: "pool"
+ pool: MAX
+ kernelsize: 3
+ stride: 2
+ }
+ bottom: "conv2"
+ top: "pool2"
+}
+layers {
+ layer {
+ name: "norm2"
+ type: "lrn"
+ local_size: 5
+ alpha: 0.0001
+ beta: 0.75
+ }
+ bottom: "pool2"
+ top: "norm2"
+}
+layers {
+ layer {
+ name: "pad3"
+ type: "padding"
+ pad: 1
+ }
+ bottom: "norm2"
+ top: "pad3"
+}
+layers {
+ layer {
+ name: "conv3"
+ type: "conv"
+ num_output: 384
+ kernelsize: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ value: 0.
+ }
+ blobs_lr: 1.
+ blobs_lr: 2.
+ weight_decay: 1.
+ weight_decay: 0.
+ }
+ bottom: "pad3"
+ top: "conv3"
+}
+layers {
+ layer {
+ name: "relu3"
+ type: "relu"
+ }
+ bottom: "conv3"
+ top: "conv3"
+}
+layers {
+ layer {
+ name: "pad4"
+ type: "padding"
+ pad: 1
+ }
+ bottom: "conv3"
+ top: "pad4"
+}
+layers {
+ layer {
+ name: "conv4"
+ type: "conv"
+ num_output: 384
+ group: 2
+ kernelsize: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ value: 1.
+ }
+ blobs_lr: 1.
+ blobs_lr: 2.
+ weight_decay: 1.
+ weight_decay: 0.
+ }
+ bottom: "pad4"
+ top: "conv4"
+}
+layers {
+ layer {
+ name: "relu4"
+ type: "relu"
+ }
+ bottom: "conv4"
+ top: "conv4"
+}
+layers {
+ layer {
+ name: "pad5"
+ type: "padding"
+ pad: 1
+ }
+ bottom: "conv4"
+ top: "pad5"
+}
+layers {
+ layer {
+ name: "conv5"
+ type: "conv"
+ num_output: 256
+ group: 2
+ kernelsize: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ value: 1.
+ }
+ blobs_lr: 1.
+ blobs_lr: 2.
+ weight_decay: 1.
+ weight_decay: 0.
+ }
+ bottom: "pad5"
+ top: "conv5"
+}
+layers {
+ layer {
+ name: "relu5"
+ type: "relu"
+ }
+ bottom: "conv5"
+ top: "conv5"
+}
+layers {
+ layer {
+ name: "pool5"
+ type: "pool"
+ kernelsize: 3
+ pool: MAX
+ stride: 2
+ }
+ bottom: "conv5"
+ top: "pool5"
+}
+layers {
+ layer {
+ name: "fc6"
+ type: "innerproduct"
+ num_output: 4096
+ weight_filler {
+ type: "gaussian"
+ std: 0.005
+ }
+ bias_filler {
+ type: "constant"
+ value: 1.
+ }
+ blobs_lr: 1.
+ blobs_lr: 2.
+ weight_decay: 1.
+ weight_decay: 0.
+ }
+ bottom: "pool5"
+ top: "fc6"
+}
+layers {
+ layer {
+ name: "relu6"
+ type: "relu"
+ }
+ bottom: "fc6"
+ top: "fc6"
+}
+layers {
+ layer {
+ name: "drop6"
+ type: "dropout"
+ dropout_ratio: 0.5
+ }
+ bottom: "fc6"
+ top: "fc6"
+}
+layers {
+ layer {
+ name: "fc7"
+ type: "innerproduct"
+ num_output: 4096
+ weight_filler {
+ type: "gaussian"
+ std: 0.005
+ }
+ bias_filler {
+ type: "constant"
+ value: 1.
+ }
+ blobs_lr: 1.
+ blobs_lr: 2.
+ weight_decay: 1.
+ weight_decay: 0.
+ }
+ bottom: "fc6"
+ top: "fc7"
+}
+layers {
+ layer {
+ name: "relu7"
+ type: "relu"
+ }
+ bottom: "fc7"
+ top: "fc7"
+}
+layers {
+ layer {
+ name: "drop7"
+ type: "dropout"
+ dropout_ratio: 0.5
+ }
+ bottom: "fc7"
+ top: "fc7"
+}
+layers {
+ layer {
+ name: "fc8_pascal"
+ type: "innerproduct"
+ num_output: 21
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ value: 0
+ }
+ blobs_lr: 10.
+ blobs_lr: 20.
+ weight_decay: 1.
+ weight_decay: 0.
+ }
+ bottom: "fc7"
+ top: "fc8_pascal"
+}
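+# At test time the 21-way fc8_pascal scores are converted to probabilities and compared
+# against the window labels, so accuracy is reported at every test_interval evaluation.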
+layers {
+ layer {
+ name: "prob"
+ type: "softmax"
+ }
+ bottom: "fc8_pascal"
+ top: "prob"
+}
+layers {
+ layer {
+ name: "accuracy"
+ type: "accuracy"
+ }
+ bottom: "prob"
+ bottom: "label"
+ top: "accuracy"
+}
+++ /dev/null
-name: "CaffeNet"
-layers {
- layer {
- name: "data"
- type: "window_data"
- source: "/work5/rbg/convnet-selective-search/selective-search-data/window_file_2007_trainval.txt"
- meanfile: "/home/eecs/rbg/working/caffe-rbg/data/ilsvrc2012_mean.binaryproto"
- batchsize: 128
- cropsize: 227
- mirror: true
- det_context_pad: 16
- det_crop_mode: "warp"
- det_fg_threshold: 0.5
- det_bg_threshold: 0.5
- det_fg_fraction: 0.25
- }
- top: "data"
- top: "label"
-}
-layers {
- layer {
- name: "conv1"
- type: "conv"
- num_output: 96
- kernelsize: 11
- stride: 4
- weight_filler {
- type: "gaussian"
- std: 0.01
- }
- bias_filler {
- type: "constant"
- value: 0.
- }
- blobs_lr: 1.
- blobs_lr: 2.
- weight_decay: 1.
- weight_decay: 0.
- }
- bottom: "data"
- top: "conv1"
-}
-layers {
- layer {
- name: "relu1"
- type: "relu"
- }
- bottom: "conv1"
- top: "conv1"
-}
-layers {
- layer {
- name: "pool1"
- type: "pool"
- pool: MAX
- kernelsize: 3
- stride: 2
- }
- bottom: "conv1"
- top: "pool1"
-}
-layers {
- layer {
- name: "norm1"
- type: "lrn"
- local_size: 5
- alpha: 0.0001
- beta: 0.75
- }
- bottom: "pool1"
- top: "norm1"
-}
-layers {
- layer {
- name: "pad2"
- type: "padding"
- pad: 2
- }
- bottom: "norm1"
- top: "pad2"
-}
-layers {
- layer {
- name: "conv2"
- type: "conv"
- num_output: 256
- group: 2
- kernelsize: 5
- weight_filler {
- type: "gaussian"
- std: 0.01
- }
- bias_filler {
- type: "constant"
- value: 1.
- }
- blobs_lr: 1.
- blobs_lr: 2.
- weight_decay: 1.
- weight_decay: 0.
- }
- bottom: "pad2"
- top: "conv2"
-}
-layers {
- layer {
- name: "relu2"
- type: "relu"
- }
- bottom: "conv2"
- top: "conv2"
-}
-layers {
- layer {
- name: "pool2"
- type: "pool"
- pool: MAX
- kernelsize: 3
- stride: 2
- }
- bottom: "conv2"
- top: "pool2"
-}
-layers {
- layer {
- name: "norm2"
- type: "lrn"
- local_size: 5
- alpha: 0.0001
- beta: 0.75
- }
- bottom: "pool2"
- top: "norm2"
-}
-layers {
- layer {
- name: "pad3"
- type: "padding"
- pad: 1
- }
- bottom: "norm2"
- top: "pad3"
-}
-layers {
- layer {
- name: "conv3"
- type: "conv"
- num_output: 384
- kernelsize: 3
- weight_filler {
- type: "gaussian"
- std: 0.01
- }
- bias_filler {
- type: "constant"
- value: 0.
- }
- blobs_lr: 1.
- blobs_lr: 2.
- weight_decay: 1.
- weight_decay: 0.
- }
- bottom: "pad3"
- top: "conv3"
-}
-layers {
- layer {
- name: "relu3"
- type: "relu"
- }
- bottom: "conv3"
- top: "conv3"
-}
-layers {
- layer {
- name: "pad4"
- type: "padding"
- pad: 1
- }
- bottom: "conv3"
- top: "pad4"
-}
-layers {
- layer {
- name: "conv4"
- type: "conv"
- num_output: 384
- group: 2
- kernelsize: 3
- weight_filler {
- type: "gaussian"
- std: 0.01
- }
- bias_filler {
- type: "constant"
- value: 1.
- }
- blobs_lr: 1.
- blobs_lr: 2.
- weight_decay: 1.
- weight_decay: 0.
- }
- bottom: "pad4"
- top: "conv4"
-}
-layers {
- layer {
- name: "relu4"
- type: "relu"
- }
- bottom: "conv4"
- top: "conv4"
-}
-layers {
- layer {
- name: "pad5"
- type: "padding"
- pad: 1
- }
- bottom: "conv4"
- top: "pad5"
-}
-layers {
- layer {
- name: "conv5"
- type: "conv"
- num_output: 256
- group: 2
- kernelsize: 3
- weight_filler {
- type: "gaussian"
- std: 0.01
- }
- bias_filler {
- type: "constant"
- value: 1.
- }
- blobs_lr: 1.
- blobs_lr: 2.
- weight_decay: 1.
- weight_decay: 0.
- }
- bottom: "pad5"
- top: "conv5"
-}
-layers {
- layer {
- name: "relu5"
- type: "relu"
- }
- bottom: "conv5"
- top: "conv5"
-}
-layers {
- layer {
- name: "pool5"
- type: "pool"
- kernelsize: 3
- pool: MAX
- stride: 2
- }
- bottom: "conv5"
- top: "pool5"
-}
-layers {
- layer {
- name: "fc6"
- type: "innerproduct"
- num_output: 4096
- weight_filler {
- type: "gaussian"
- std: 0.005
- }
- bias_filler {
- type: "constant"
- value: 1.
- }
- blobs_lr: 1.
- blobs_lr: 2.
- weight_decay: 1.
- weight_decay: 0.
- }
- bottom: "pool5"
- top: "fc6"
-}
-layers {
- layer {
- name: "relu6"
- type: "relu"
- }
- bottom: "fc6"
- top: "fc6"
-}
-layers {
- layer {
- name: "drop6"
- type: "dropout"
- dropout_ratio: 0.5
- }
- bottom: "fc6"
- top: "fc6"
-}
-layers {
- layer {
- name: "fc7"
- type: "innerproduct"
- num_output: 4096
- weight_filler {
- type: "gaussian"
- std: 0.005
- }
- bias_filler {
- type: "constant"
- value: 1.
- }
- blobs_lr: 1.
- blobs_lr: 2.
- weight_decay: 1.
- weight_decay: 0.
- }
- bottom: "fc6"
- top: "fc7"
-}
-layers {
- layer {
- name: "relu7"
- type: "relu"
- }
- bottom: "fc7"
- top: "fc7"
-}
-layers {
- layer {
- name: "drop7"
- type: "dropout"
- dropout_ratio: 0.5
- }
- bottom: "fc7"
- top: "fc7"
-}
-layers {
- layer {
- name: "fc8_pascal"
- type: "innerproduct"
- num_output: 21
- weight_filler {
- type: "gaussian"
- std: 0.01
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- blobs_lr: 10.
- blobs_lr: 20.
- weight_decay: 1.
- weight_decay: 0.
- }
- bottom: "fc7"
- top: "fc8_pascal"
-}
-layers {
- layer {
- name: "loss"
- type: "softmax_loss"
- }
- bottom: "fc8_pascal"
- bottom: "label"
-}
+++ /dev/null
-train_net: "examples/pascal_finetune.prototxt"
-test_net: "examples/pascal_finetune_val.prototxt"
-test_iter: 100
-test_interval: 1000
-base_lr: 0.001
-lr_policy: "step"
-gamma: 0.1
-stepsize: 20000
-display: 20
-max_iter: 100000
-momentum: 0.9
-weight_decay: 0.0005
-snapshot: 10000
-snapshot_prefix: "./snapshots/pascal_finetune_train"
+++ /dev/null
-name: "CaffeNet"
-layers {
- layer {
- name: "data"
- type: "window_data"
- source: "/work5/rbg/convnet-selective-search/selective-search-data/window_file_2007_test.txt"
- meanfile: "/home/eecs/rbg/working/caffe-rbg/data/ilsvrc2012_mean.binaryproto"
- batchsize: 128
- cropsize: 227
- mirror: true
- det_context_pad: 16
- det_crop_mode: "warp"
- det_fg_threshold: 0.5
- det_bg_threshold: 0.5
- det_fg_fraction: 0.25
- }
- top: "data"
- top: "label"
-}
-layers {
- layer {
- name: "conv1"
- type: "conv"
- num_output: 96
- kernelsize: 11
- stride: 4
- weight_filler {
- type: "gaussian"
- std: 0.01
- }
- bias_filler {
- type: "constant"
- value: 0.
- }
- blobs_lr: 1.
- blobs_lr: 2.
- weight_decay: 1.
- weight_decay: 0.
- }
- bottom: "data"
- top: "conv1"
-}
-layers {
- layer {
- name: "relu1"
- type: "relu"
- }
- bottom: "conv1"
- top: "conv1"
-}
-layers {
- layer {
- name: "pool1"
- type: "pool"
- pool: MAX
- kernelsize: 3
- stride: 2
- }
- bottom: "conv1"
- top: "pool1"
-}
-layers {
- layer {
- name: "norm1"
- type: "lrn"
- local_size: 5
- alpha: 0.0001
- beta: 0.75
- }
- bottom: "pool1"
- top: "norm1"
-}
-layers {
- layer {
- name: "pad2"
- type: "padding"
- pad: 2
- }
- bottom: "norm1"
- top: "pad2"
-}
-layers {
- layer {
- name: "conv2"
- type: "conv"
- num_output: 256
- group: 2
- kernelsize: 5
- weight_filler {
- type: "gaussian"
- std: 0.01
- }
- bias_filler {
- type: "constant"
- value: 1.
- }
- blobs_lr: 1.
- blobs_lr: 2.
- weight_decay: 1.
- weight_decay: 0.
- }
- bottom: "pad2"
- top: "conv2"
-}
-layers {
- layer {
- name: "relu2"
- type: "relu"
- }
- bottom: "conv2"
- top: "conv2"
-}
-layers {
- layer {
- name: "pool2"
- type: "pool"
- pool: MAX
- kernelsize: 3
- stride: 2
- }
- bottom: "conv2"
- top: "pool2"
-}
-layers {
- layer {
- name: "norm2"
- type: "lrn"
- local_size: 5
- alpha: 0.0001
- beta: 0.75
- }
- bottom: "pool2"
- top: "norm2"
-}
-layers {
- layer {
- name: "pad3"
- type: "padding"
- pad: 1
- }
- bottom: "norm2"
- top: "pad3"
-}
-layers {
- layer {
- name: "conv3"
- type: "conv"
- num_output: 384
- kernelsize: 3
- weight_filler {
- type: "gaussian"
- std: 0.01
- }
- bias_filler {
- type: "constant"
- value: 0.
- }
- blobs_lr: 1.
- blobs_lr: 2.
- weight_decay: 1.
- weight_decay: 0.
- }
- bottom: "pad3"
- top: "conv3"
-}
-layers {
- layer {
- name: "relu3"
- type: "relu"
- }
- bottom: "conv3"
- top: "conv3"
-}
-layers {
- layer {
- name: "pad4"
- type: "padding"
- pad: 1
- }
- bottom: "conv3"
- top: "pad4"
-}
-layers {
- layer {
- name: "conv4"
- type: "conv"
- num_output: 384
- group: 2
- kernelsize: 3
- weight_filler {
- type: "gaussian"
- std: 0.01
- }
- bias_filler {
- type: "constant"
- value: 1.
- }
- blobs_lr: 1.
- blobs_lr: 2.
- weight_decay: 1.
- weight_decay: 0.
- }
- bottom: "pad4"
- top: "conv4"
-}
-layers {
- layer {
- name: "relu4"
- type: "relu"
- }
- bottom: "conv4"
- top: "conv4"
-}
-layers {
- layer {
- name: "pad5"
- type: "padding"
- pad: 1
- }
- bottom: "conv4"
- top: "pad5"
-}
-layers {
- layer {
- name: "conv5"
- type: "conv"
- num_output: 256
- group: 2
- kernelsize: 3
- weight_filler {
- type: "gaussian"
- std: 0.01
- }
- bias_filler {
- type: "constant"
- value: 1.
- }
- blobs_lr: 1.
- blobs_lr: 2.
- weight_decay: 1.
- weight_decay: 0.
- }
- bottom: "pad5"
- top: "conv5"
-}
-layers {
- layer {
- name: "relu5"
- type: "relu"
- }
- bottom: "conv5"
- top: "conv5"
-}
-layers {
- layer {
- name: "pool5"
- type: "pool"
- kernelsize: 3
- pool: MAX
- stride: 2
- }
- bottom: "conv5"
- top: "pool5"
-}
-layers {
- layer {
- name: "fc6"
- type: "innerproduct"
- num_output: 4096
- weight_filler {
- type: "gaussian"
- std: 0.005
- }
- bias_filler {
- type: "constant"
- value: 1.
- }
- blobs_lr: 1.
- blobs_lr: 2.
- weight_decay: 1.
- weight_decay: 0.
- }
- bottom: "pool5"
- top: "fc6"
-}
-layers {
- layer {
- name: "relu6"
- type: "relu"
- }
- bottom: "fc6"
- top: "fc6"
-}
-layers {
- layer {
- name: "drop6"
- type: "dropout"
- dropout_ratio: 0.5
- }
- bottom: "fc6"
- top: "fc6"
-}
-layers {
- layer {
- name: "fc7"
- type: "innerproduct"
- num_output: 4096
- weight_filler {
- type: "gaussian"
- std: 0.005
- }
- bias_filler {
- type: "constant"
- value: 1.
- }
- blobs_lr: 1.
- blobs_lr: 2.
- weight_decay: 1.
- weight_decay: 0.
- }
- bottom: "fc6"
- top: "fc7"
-}
-layers {
- layer {
- name: "relu7"
- type: "relu"
- }
- bottom: "fc7"
- top: "fc7"
-}
-layers {
- layer {
- name: "drop7"
- type: "dropout"
- dropout_ratio: 0.5
- }
- bottom: "fc7"
- top: "fc7"
-}
-layers {
- layer {
- name: "fc8_pascal"
- type: "innerproduct"
- num_output: 21
- weight_filler {
- type: "gaussian"
- std: 0.01
- }
- bias_filler {
- type: "constant"
- value: 0
- }
- blobs_lr: 10.
- blobs_lr: 20.
- weight_decay: 1.
- weight_decay: 0.
- }
- bottom: "fc7"
- top: "fc8_pascal"
-}
-layers {
- layer {
- name: "prob"
- type: "softmax"
- }
- bottom: "fc8_pascal"
- top: "prob"
-}
-layers {
- layer {
- name: "accuracy"
- type: "accuracy"
- }
- bottom: "prob"
- bottom: "label"
- top: "accuracy"
-}