Blob<Dtype> data_mean_;
};
+ // This function is used to create a pthread that prefetches the data.
+ template <typename Dtype>
+ void* ImagesLayerPrefetch(void* layer_pointer);
+
+ // A data-providing layer that reads a list of (image filename, label)
+ // pairs (lines_) and supplies image data and labels to the net. A
+ // background pthread (thread_) runs ImagesLayerPrefetch to fill
+ // prefetch_data_/prefetch_label_ while the net consumes the current
+ // batch.
+ template <typename Dtype>
+ class ImagesLayer : public Layer<Dtype> {
+ // The function used to perform prefetching.
+ friend void* ImagesLayerPrefetch<Dtype>(void* layer_pointer);
+
+ public:
+ explicit ImagesLayer(const LayerParameter& param)
+ : Layer<Dtype>(param) {}
+ virtual ~ImagesLayer();
+ virtual void SetUp(const vector<Blob<Dtype>*>& bottom,
+ vector<Blob<Dtype>*>* top);
+
+ protected:
+ virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+ vector<Blob<Dtype>*>* top);
+ virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+ vector<Blob<Dtype>*>* top);
+ // NOTE(review): as a data source this layer presumably has no gradient
+ // to propagate; the Backward implementations are not visible here.
+ virtual Dtype Backward_cpu(const vector<Blob<Dtype>*>& top,
+ const bool propagate_down, vector<Blob<Dtype>*>* bottom);
+ virtual Dtype Backward_gpu(const vector<Blob<Dtype>*>& top,
+ const bool propagate_down, vector<Blob<Dtype>*>* bottom);
+
+ // The (filename, label) pairs read from the source list, and the
+ // index of the next line to consume.
+ vector<std::pair<std::string, int> > lines_;
+ int lines_id_;
+ // Per-datum dimensions (channels/height/width) and datum_size_,
+ // presumably their product — TODO confirm against the .cpp.
+ int datum_channels_;
+ int datum_height_;
+ int datum_width_;
+ int datum_size_;
+ // Handle of the prefetch thread and the blobs it fills.
+ pthread_t thread_;
+ shared_ptr<Blob<Dtype> > prefetch_data_;
+ shared_ptr<Blob<Dtype> > prefetch_label_;
+ // Data mean; presumably subtracted during prefetch — implementation
+ // not visible here.
+ Blob<Dtype> data_mean_;
+ };
+
+// A data-providing layer that reads data and label blobs out of a list
+// of HDF5 files (hdf_filenames_), advancing current_file_/current_row_
+// as batches are consumed.
template <typename Dtype>
+class HDF5DataLayer : public Layer<Dtype> {
+ public:
+ explicit HDF5DataLayer(const LayerParameter& param)
+ : Layer<Dtype>(param) {}
+ virtual ~HDF5DataLayer();
+ virtual void SetUp(const vector<Blob<Dtype>*>& bottom,
+ vector<Blob<Dtype>*>* top);
+
+ protected:
+ virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+ vector<Blob<Dtype>*>* top);
+ virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
+ vector<Blob<Dtype>*>* top);
+ // NOTE(review): as a data source this layer presumably has no gradient
+ // to propagate; the Backward implementations are not visible here.
+ virtual Dtype Backward_cpu(const vector<Blob<Dtype>*>& top,
+ const bool propagate_down, vector<Blob<Dtype>*>* bottom);
+ virtual Dtype Backward_gpu(const vector<Blob<Dtype>*>& top,
+ const bool propagate_down, vector<Blob<Dtype>*>* bottom);
+ // Loads data_blob_/label_blob_ from a single HDF5 file — TODO confirm
+ // against the .cpp; implementation not visible here.
+ virtual void load_hdf5_file_data(const char* filename);
+
+ // The list of HDF5 files to read, and the current read position
+ // (file index and row within it; hsize_t is the HDF5 size type).
+ std::vector<std::string> hdf_filenames_;
+ unsigned int num_files_;
+ unsigned int current_file_;
+ hsize_t current_row_;
+
+ // Blobs holding the loaded data and labels (presumably the contents of
+ // the current file — see load_hdf5_file_data).
+ Blob<Dtype> data_blob_;
+ Blob<Dtype> label_blob_;
+};
+
+
+template <typename Dtype>
class SoftmaxLayer : public Layer<Dtype> {
public:
explicit SoftmaxLayer(const LayerParameter& param)
return new BNLLLayer<Dtype>(param);
} else if (type == "conv") {
return new ConvolutionLayer<Dtype>(param);
+ } else if (type == "concat") {
+ return new ConcatLayer<Dtype>(param);
} else if (type == "data") {
return new DataLayer<Dtype>(param);
+ } else if (type == "hdf5_data") {
+ return new HDF5DataLayer<Dtype>(param);
+ } else if (type == "images") {
+ return new ImagesLayer<Dtype>(param);
} else if (type == "dropout") {
return new DropoutLayer<Dtype>(param);
} else if (type == "euclidean_loss") {
// be larger than the number of keys in the leveldb.
optional uint32 rand_skip = 53 [ default = 0 ];
- // Concat Layer need to specify the dimension along the concat will happen,
- // the other dimensions must be the same for all the bottom blobs
- // By default it will concatenate blobs along channels dimension
+ // For the Reshape Layer one needs to specify the new dimensions.
- optional int32 new_num = 60 [default = 0];
- optional int32 new_channels = 61 [default = 0];
- optional int32 new_height = 62 [default = 0];
- optional int32 new_width = 63 [default = 0];
-
- // Used by ImageLayer to shuffle the list of files at every epoch it will also
- // resize images if new_height or new_width are not zero
- optional bool shuffle_images = 64 [default = false];
-
++ optional int32 new_num = 60 [ default = 0 ];
++ optional int32 new_channels = 61 [ default = 0 ];
++ optional int32 new_height = 62 [ default = 0 ];
++ optional int32 new_width = 63 [ default = 0 ];
++
++ // Used by ImagesLayer to shuffle the list of files at every epoch.
++ // It will also resize images if new_height or new_width is not zero.
++ optional bool shuffle_images = 64 [ default = false ];
++
++ // ConcatLayer needs the dimension along which to concatenate, and
++ // the other dimensions must be the same for all the bottom blobs.
++ // By default it will concatenate blobs along the channels dimension.
+ optional uint32 concat_dim = 65 [ default = 1 ];
}
message LayerConnection {