From 298a27cb57639085d00d6a0797540bf04314662e Mon Sep 17 00:00:00 2001
From: Jonathan L Long
Date: Thu, 3 Apr 2014 19:27:21 -0700
Subject: [PATCH] add MemoryDataLayer for reading input from contiguous blocks
 of memory

---
 include/caffe/vision_layers.hpp        | 27 ++++++++++++++++++
 src/caffe/layer_factory.cpp            |  2 ++
 src/caffe/layers/memory_data_layer.cpp | 51 ++++++++++++++++++++++++++++++++++
 src/caffe/proto/caffe.proto            | 14 ++++++++--
 4 files changed, 92 insertions(+), 2 deletions(-)
 create mode 100644 src/caffe/layers/memory_data_layer.cpp

diff --git a/include/caffe/vision_layers.hpp b/include/caffe/vision_layers.hpp
index 817bce9..8fb8fdb 100644
--- a/include/caffe/vision_layers.hpp
+++ b/include/caffe/vision_layers.hpp
@@ -625,6 +625,33 @@ class LRNLayer : public Layer<Dtype> {
 };
 
 template <typename Dtype>
+class MemoryDataLayer : public Layer<Dtype> {
+ public:
+  explicit MemoryDataLayer(const LayerParameter& param)
+      : Layer<Dtype>(param) {}
+  virtual void SetUp(const vector<Blob<Dtype>*>& bottom,
+      vector<Blob<Dtype>*>* top);
+  // Reset should accept const pointers, but can't, because the memory
+  // will be given to Blob, which is mutable
+  void Reset(Dtype* data, Dtype* label, int n);
+
+ protected:
+  virtual Dtype Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      vector<Blob<Dtype>*>* top);
+  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
+      const bool propagate_down, vector<Blob<Dtype>*>* bottom) { return; }
+  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
+      const bool propagate_down, vector<Blob<Dtype>*>* bottom) { return; }
+
+  Dtype* data_;
+  Dtype* labels_;
+  int datum_size_;
+  int batch_size_;
+  int n_;
+  int pos_;
+};
+
+template <typename Dtype>
 class MultinomialLogisticLossLayer : public Layer<Dtype> {
  public:
   explicit MultinomialLogisticLossLayer(const LayerParameter& param)
diff --git a/src/caffe/layer_factory.cpp b/src/caffe/layer_factory.cpp
index d586924..2991c81 100644
--- a/src/caffe/layer_factory.cpp
+++ b/src/caffe/layer_factory.cpp
@@ -56,6 +56,8 @@ Layer<Dtype>* GetLayer(const LayerParameter& param) {
     return new InnerProductLayer<Dtype>(param);
   case LayerParameter_LayerType_LRN:
     return new LRNLayer<Dtype>(param);
+  case LayerParameter_LayerType_MEMORY_DATA:
+    return new MemoryDataLayer<Dtype>(param);
   case LayerParameter_LayerType_MULTINOMIAL_LOGISTIC_LOSS:
     return new MultinomialLogisticLossLayer<Dtype>(param);
   case LayerParameter_LayerType_POOLING:
diff --git a/src/caffe/layers/memory_data_layer.cpp b/src/caffe/layers/memory_data_layer.cpp
new file mode 100644
index 0000000..7a1f3ff
--- /dev/null
+++ b/src/caffe/layers/memory_data_layer.cpp
@@ -0,0 +1,51 @@
+// Copyright 2014 BVLC and contributors.
+
+#include <vector>
+
+#include "caffe/layer.hpp"
+#include "caffe/vision_layers.hpp"
+
+namespace caffe {
+
+template <typename Dtype>
+void MemoryDataLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
+      vector<Blob<Dtype>*>* top) {
+  CHECK_EQ(bottom.size(), 0) << "Memory Data Layer takes no blobs as input.";
+  CHECK_EQ(top->size(), 2) << "Memory Data Layer takes two blobs as output.";
+  batch_size_ = this->layer_param_.memory_data_param().batch_size();
+  int channels = this->layer_param_.memory_data_param().channels();
+  int height = this->layer_param_.memory_data_param().height();
+  int width = this->layer_param_.memory_data_param().width();
+  datum_size_ = channels * height * width;
+  CHECK_GT(batch_size_ * datum_size_, 0) << "batch_size, channels, height,"
+    " and width must be specified and positive in memory_data_param";
+  (*top)[0]->Reshape(batch_size_, channels, height, width);
+  (*top)[1]->Reshape(batch_size_, 1, 1, 1);
+  data_ = NULL;
+  labels_ = NULL;
+}
+
+template <typename Dtype>
+void MemoryDataLayer<Dtype>::Reset(Dtype* data, Dtype* labels, int n) {
+  CHECK(data);
+  CHECK(labels);
+  CHECK_EQ(n % batch_size_, 0) << "n must be a multiple of batch size";
+  data_ = data;
+  labels_ = labels;
+  n_ = n;
+  pos_ = 0;
+}
+
+template <typename Dtype>
+Dtype MemoryDataLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+      vector<Blob<Dtype>*>* top) {
+  CHECK(data_) << "MemoryDataLayer needs to be initialized by calling Reset";
+  (*top)[0]->set_cpu_data(data_ + pos_ * datum_size_);
+  (*top)[1]->set_cpu_data(labels_ + pos_);
+  pos_ = (pos_ + batch_size_) % n_;
+  return Dtype(0.);
+}
+
+INSTANTIATE_CLASS(MemoryDataLayer);
+
+}  // namespace caffe
diff --git a/src/caffe/proto/caffe.proto b/src/caffe/proto/caffe.proto
index e04e42c..9fba817 100644
--- a/src/caffe/proto/caffe.proto
+++ b/src/caffe/proto/caffe.proto
@@ -99,7 +99,7 @@ message SolverState {
 // Update the next available ID when you add a new LayerParameter field.
 //
-// LayerParameter next available ID: 22 (last added: power_param)
+// LayerParameter next available ID: 23 (last added: memory_data_param)
 message LayerParameter {
   repeated string bottom = 2; // the name of the bottom blobs
   repeated string top = 3; // the name of the top blobs
@@ -110,7 +110,7 @@ message LayerParameter {
   // line above the enum. Update the next available ID when you add a new
   // LayerType.
   //
-  // LayerType next available ID: 29 (last added: HINGE_LOSS)
+  // LayerType next available ID: 30 (last added: MEMORY_DATA)
   enum LayerType {
     // "NONE" layer type is 0th enum element so that we don't cause confusion
     // by defaulting to an existent LayerType (instead, should usually error if
@@ -133,6 +133,7 @@ message LayerParameter {
     INFOGAIN_LOSS = 13;
     INNER_PRODUCT = 14;
     LRN = 15;
+    MEMORY_DATA = 29;
     MULTINOMIAL_LOGISTIC_LOSS = 16;
     POOLING = 17;
     POWER = 26;
@@ -166,6 +167,7 @@ message LayerParameter {
   optional InfogainLossParameter infogain_loss_param = 16;
   optional InnerProductParameter inner_product_param = 17;
   optional LRNParameter lrn_param = 18;
+  optional MemoryDataParameter memory_data_param = 22;
   optional PoolingParameter pooling_param = 19;
   optional PowerParameter power_param = 21;
   optional WindowDataParameter window_data_param = 20;
@@ -289,6 +291,14 @@ message LRNParameter {
   optional NormRegion norm_region = 4 [default = ACROSS_CHANNELS];
 }
 
+// Message that stores parameters used by MemoryDataLayer
+message MemoryDataParameter {
+  optional uint32 batch_size = 1;
+  optional uint32 channels = 2;
+  optional uint32 height = 3;
+  optional uint32 width = 4;
+}
+
 // Message that stores parameters used by PoolingLayer
 message PoolingParameter {
   enum PoolMethod {
-- 
2.7.4
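
With the patch applied, a net definition can select the layer through the new
MEMORY_DATA type and configure it with the new memory_data_param message. A
minimal sketch, assuming the flat layers-style prototxt of this proto version;
the layer name, top names, and shape values below are illustrative, not taken
from the patch:

layers {
  name: "data"
  type: MEMORY_DATA
  top: "data"
  top: "label"
  memory_data_param {
    batch_size: 32
    channels: 3
    height: 28
    width: 28
  }
}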
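
The intended call sequence is SetUp once, Reset with the caller's contiguous
arrays, then one forward pass per batch. Below is a standalone sketch using
only what the patch defines, plus Layer's public Forward wrapper and CPU mode;
the shapes and values are illustrative:

#include <vector>

#include "caffe/blob.hpp"
#include "caffe/common.hpp"
#include "caffe/proto/caffe.pb.h"
#include "caffe/vision_layers.hpp"

using namespace caffe;

int main() {
  Caffe::set_mode(Caffe::CPU);

  // Illustrative shape: batches of 2 items, each 3x4x4.
  LayerParameter param;
  param.mutable_memory_data_param()->set_batch_size(2);
  param.mutable_memory_data_param()->set_channels(3);
  param.mutable_memory_data_param()->set_height(4);
  param.mutable_memory_data_param()->set_width(4);
  MemoryDataLayer<float> layer(param);

  // Data layers take no bottom blobs; the two tops are data and label.
  Blob<float> data_blob, label_blob;
  std::vector<Blob<float>*> bottom;
  std::vector<Blob<float>*> top;
  top.push_back(&data_blob);
  top.push_back(&label_blob);
  layer.SetUp(bottom, &top);

  // 4 items = 2 batches; Reset checks that n is a multiple of batch_size.
  std::vector<float> data(4 * 3 * 4 * 4, 0.0f);
  std::vector<float> labels(4, 0.0f);
  layer.Reset(&data[0], &labels[0], 4);

  // Each forward pass points the top blobs directly at the caller's memory
  // (no copy) and advances pos_ by batch_size_ modulo n, wrapping around.
  layer.Forward(bottom, &top);  // items 0-1
  layer.Forward(bottom, &top);  // items 2-3
  layer.Forward(bottom, &top);  // wraps: items 0-1 again
  return 0;
}

Note the design choice visible in Forward_cpu: because the tops are rebound
with set_cpu_data rather than filled by copying, the caller's arrays must stay
valid (and unmodified, despite the non-const pointers) for as long as the
layer is in use.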