From 2aea6bb3ce084f8be09183380941040352702dd9 Mon Sep 17 00:00:00 2001
From: Jeff Donahue
Date: Fri, 21 Mar 2014 15:14:08 -0700
Subject: [PATCH] some naming standardization: ImagesLayer -> ImageDataLayer
 (like other data layers), and load_hdf5_file_data -> LoadHDF5FileData

---
 docs/feature_extraction.md                          |  4 ++--
 include/caffe/vision_layers.hpp                     | 18 ++++++------------
 src/caffe/layer_factory.cpp                         |  2 +-
 src/caffe/layers/hdf5_data_layer.cpp                |  6 +++---
 src/caffe/layers/hdf5_data_layer.cu                 |  2 +-
 .../{images_layer.cpp => image_data_layer.cpp}      | 18 +++++++++---------
 .../{images_layer.cu => image_data_layer.cu}        |  0
 ..._images_layer.cpp => test_image_data_layer.cpp}  | 23 ++++++++++++----------
 8 files changed, 35 insertions(+), 38 deletions(-)
 rename src/caffe/layers/{images_layer.cpp => image_data_layer.cpp} (94%)
 rename src/caffe/layers/{images_layer.cu => image_data_layer.cu} (100%)
 rename src/caffe/test/{test_images_layer.cpp => test_image_data_layer.cpp} (89%)

diff --git a/docs/feature_extraction.md b/docs/feature_extraction.md
index 13639fb..fa23e9c 100644
--- a/docs/feature_extraction.md
+++ b/docs/feature_extraction.md
@@ -22,7 +22,7 @@ We're going to use the images that ship with caffe.
 
     find `pwd`/examples/images -type f -exec echo {} \; > examples/_temp/temp.txt
 
-The `ImagesLayer` we'll use expects labels after each filenames, so let's add a 0 to the end of each line
+The `ImageDataLayer` we'll use expects labels after each filenames, so let's add a 0 to the end of each line
 
     sed "s/$/ 0/" examples/_temp/temp.txt > examples/_temp/file_list.txt
 
@@ -37,7 +37,7 @@ Download the mean image of the ILSVRC dataset.
 We will use `data/ilsvrc212/imagenet_mean.binaryproto` in the network definition prototxt.
 
 Let's copy and modify the network definition.
-We'll be using the `ImagesLayer`, which will load and resize images for us.
+We'll be using the `ImageDataLayer`, which will load and resize images for us.
 
     cp examples/feature_extraction/imagenet_val.prototxt examples/_temp
 
diff --git a/include/caffe/vision_layers.hpp b/include/caffe/vision_layers.hpp
index 079537e..870bb22 100644
--- a/include/caffe/vision_layers.hpp
+++ b/include/caffe/vision_layers.hpp
@@ -255,8 +255,6 @@ class EuclideanLossLayer : public Layer<Dtype> {
       vector<Blob<Dtype>*>* top);
 
  protected:
-  // The loss layer will do nothing during forward - all computation are
-  // carried out in the backward pass.
   virtual Dtype Forward_cpu(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
   // virtual Dtype Forward_gpu(const vector<Blob<Dtype>*>& bottom,
@@ -308,7 +306,7 @@ class HDF5DataLayer : public Layer<Dtype> {
       const bool propagate_down, vector<Blob<Dtype>*>* bottom);
   virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
       const bool propagate_down, vector<Blob<Dtype>*>* bottom);
-  virtual void load_hdf5_file_data(const char* filename);
+  virtual void LoadHDF5FileData(const char* filename);
 
   std::vector<std::string> hdf_filenames_;
   unsigned int num_files_;
@@ -346,17 +344,17 @@ class Im2colLayer : public Layer<Dtype> {
 
 // This function is used to create a pthread that prefetches the data.
 template <typename Dtype>
-void* ImagesLayerPrefetch(void* layer_pointer);
+void* ImageDataLayerPrefetch(void* layer_pointer);
 
 template <typename Dtype>
-class ImagesLayer : public Layer<Dtype> {
+class ImageDataLayer : public Layer<Dtype> {
   // The function used to perform prefetching.
-  friend void* ImagesLayerPrefetch<Dtype>(void* layer_pointer);
+  friend void* ImageDataLayerPrefetch<Dtype>(void* layer_pointer);
 
  public:
-  explicit ImagesLayer(const LayerParameter& param)
+  explicit ImageDataLayer(const LayerParameter& param)
       : Layer<Dtype>(param) {}
-  virtual ~ImagesLayer();
+  virtual ~ImageDataLayer();
   virtual void SetUp(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
@@ -391,8 +389,6 @@ class InfogainLossLayer : public Layer<Dtype> {
       vector<Blob<Dtype>*>* top);
 
  protected:
-  // The loss layer will do nothing during forward - all computation are
-  // carried out in the backward pass.
   virtual Dtype Forward_cpu(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
   // virtual Dtype Forward_gpu(const vector<Blob<Dtype>*>& bottom,
@@ -495,8 +491,6 @@ class MultinomialLogisticLossLayer : public Layer<Dtype> {
       vector<Blob<Dtype>*>* top);
 
  protected:
-  // The loss layer will do nothing during forward - all computation are
-  // carried out in the backward pass.
   virtual Dtype Forward_cpu(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
   // virtual Dtype Forward_gpu(const vector<Blob<Dtype>*>& bottom,
diff --git a/src/caffe/layer_factory.cpp b/src/caffe/layer_factory.cpp
index f86e12e..f3e52a6 100644
--- a/src/caffe/layer_factory.cpp
+++ b/src/caffe/layer_factory.cpp
@@ -43,7 +43,7 @@ Layer<Dtype>* GetLayer(const LayerParameter& param) {
   case LayerParameter_LayerType_HDF5_OUTPUT:
     return new HDF5OutputLayer<Dtype>(param);
   case LayerParameter_LayerType_IMAGE_DATA:
-    return new ImagesLayer<Dtype>(param);
+    return new ImageDataLayer<Dtype>(param);
   case LayerParameter_LayerType_IM2COL:
     return new Im2colLayer<Dtype>(param);
   case LayerParameter_LayerType_INFOGAIN_LOSS:
diff --git a/src/caffe/layers/hdf5_data_layer.cpp b/src/caffe/layers/hdf5_data_layer.cpp
index 035eea8..cff4f7c 100644
--- a/src/caffe/layers/hdf5_data_layer.cpp
+++ b/src/caffe/layers/hdf5_data_layer.cpp
@@ -26,7 +26,7 @@ HDF5DataLayer<Dtype>::~HDF5DataLayer() { }
 
 // Load data and label from HDF5 filename into the class property blobs.
 template <typename Dtype>
-void HDF5DataLayer<Dtype>::load_hdf5_file_data(const char* filename) {
+void HDF5DataLayer<Dtype>::LoadHDF5FileData(const char* filename) {
   LOG(INFO) << "Loading HDF5 file" << filename;
   hid_t file_id = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT);
   if (file_id < 0) {
@@ -72,7 +72,7 @@ void HDF5DataLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
   LOG(INFO) << "Number of files: " << num_files_;
 
   // Load the first HDF5 file and initialize the line counter.
-  load_hdf5_file_data(hdf_filenames_[current_file_].c_str());
+  LoadHDF5FileData(hdf_filenames_[current_file_].c_str());
   current_row_ = 0;
 
   // Reshape blobs.
@@ -101,7 +101,7 @@ Dtype HDF5DataLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
         current_file_ = 0;
         LOG(INFO) << "looping around to first file";
       }
-      load_hdf5_file_data(hdf_filenames_[current_file_].c_str());
+      LoadHDF5FileData(hdf_filenames_[current_file_].c_str());
     }
     current_row_ = 0;
   }
diff --git a/src/caffe/layers/hdf5_data_layer.cu b/src/caffe/layers/hdf5_data_layer.cu
index e8d09c0..9c5bb5a 100644
--- a/src/caffe/layers/hdf5_data_layer.cu
+++ b/src/caffe/layers/hdf5_data_layer.cu
@@ -36,7 +36,7 @@ Dtype HDF5DataLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
         LOG(INFO) << "looping around to first file";
       }
 
-      load_hdf5_file_data(hdf_filenames_[current_file_].c_str());
+      LoadHDF5FileData(hdf_filenames_[current_file_].c_str());
     }
     current_row_ = 0;
   }
diff --git a/src/caffe/layers/images_layer.cpp b/src/caffe/layers/image_data_layer.cpp
similarity index 94%
rename from src/caffe/layers/images_layer.cpp
rename to src/caffe/layers/image_data_layer.cpp
index 63f79ca..8c23cc4 100644
--- a/src/caffe/layers/images_layer.cpp
+++ b/src/caffe/layers/image_data_layer.cpp
@@ -19,10 +19,10 @@ using std::pair;
 namespace caffe {
 
 template <typename Dtype>
-void* ImagesLayerPrefetch(void* layer_pointer) {
+void* ImageDataLayerPrefetch(void* layer_pointer) {
   CHECK(layer_pointer);
-  ImagesLayer<Dtype>* layer =
-      reinterpret_cast<ImagesLayer<Dtype>*>(layer_pointer);
+  ImageDataLayer<Dtype>* layer =
+      reinterpret_cast<ImageDataLayer<Dtype>*>(layer_pointer);
   CHECK(layer);
   Datum datum;
   CHECK(layer->prefetch_data_);
@@ -133,13 +133,13 @@
 }
 
 template <typename Dtype>
-ImagesLayer<Dtype>::~ImagesLayer() {
+ImageDataLayer<Dtype>::~ImageDataLayer() {
   // Finally, join the thread
   CHECK(!pthread_join(thread_, NULL)) << "Pthread joining failed.";
 }
 
 template <typename Dtype>
-void ImagesLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
+void ImageDataLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top) {
   CHECK_EQ(bottom.size(), 0) << "Input Layer takes no input blobs.";
   CHECK_EQ(top->size(), 2) << "Input Layer takes two blobs as output.";
@@ -228,13 +228,13 @@ void ImagesLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
   prefetch_label_->mutable_cpu_data();
   data_mean_.cpu_data();
   DLOG(INFO) << "Initializing prefetch";
-  CHECK(!pthread_create(&thread_, NULL, ImagesLayerPrefetch<Dtype>,
+  CHECK(!pthread_create(&thread_, NULL, ImageDataLayerPrefetch<Dtype>,
      reinterpret_cast<void*>(this))) << "Pthread execution failed.";
   DLOG(INFO) << "Prefetch initialized.";
 }
 
 template <typename Dtype>
-Dtype ImagesLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
+Dtype ImageDataLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top) {
   // First, join the thread
   CHECK(!pthread_join(thread_, NULL)) << "Pthread joining failed.";
@@ -244,11 +244,11 @@ Dtype ImagesLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
   memcpy((*top)[1]->mutable_cpu_data(), prefetch_label_->cpu_data(),
       sizeof(Dtype) * prefetch_label_->count());
   // Start a new prefetch thread
-  CHECK(!pthread_create(&thread_, NULL, ImagesLayerPrefetch<Dtype>,
+  CHECK(!pthread_create(&thread_, NULL, ImageDataLayerPrefetch<Dtype>,
      reinterpret_cast<void*>(this))) << "Pthread execution failed.";
   return Dtype(0.);
 }
 
-INSTANTIATE_CLASS(ImagesLayer);
+INSTANTIATE_CLASS(ImageDataLayer);
 
 }  // namespace caffe
diff --git a/src/caffe/layers/images_layer.cu b/src/caffe/layers/image_data_layer.cu
similarity index 100%
rename from src/caffe/layers/images_layer.cu
rename to src/caffe/layers/image_data_layer.cu
diff --git a/src/caffe/test/test_images_layer.cpp b/src/caffe/test/test_image_data_layer.cpp
similarity index 89%
rename from src/caffe/test/test_images_layer.cpp
rename to src/caffe/test/test_image_data_layer.cpp
index 0cd1001..9a6271c 100644
--- a/src/caffe/test/test_images_layer.cpp
+++ b/src/caffe/test/test_image_data_layer.cpp
@@ -22,9 +22,9 @@ namespace caffe {
 extern cudaDeviceProp CAFFE_TEST_CUDA_PROP;
 
 template <typename Dtype>
-class ImagesLayerTest : public ::testing::Test {
+class ImageDataLayerTest : public ::testing::Test {
  protected:
-  ImagesLayerTest()
+  ImageDataLayerTest()
       : blob_top_data_(new Blob<Dtype>()),
         blob_top_label_(new Blob<Dtype>()),
         filename(NULL) {}
@@ -41,7 +41,10 @@ class ImagesLayerTest : public ::testing::Test {
     outfile.close();
   }
 
-  virtual ~ImagesLayerTest() { delete blob_top_data_; delete blob_top_label_; }
+  virtual ~ImageDataLayerTest() {
+    delete blob_top_data_;
+    delete blob_top_label_;
+  }
 
   char* filename;
   Blob<Dtype>* const blob_top_data_;
@@ -51,15 +54,15 @@
 };
 
 typedef ::testing::Types<float, double> Dtypes;
-TYPED_TEST_CASE(ImagesLayerTest, Dtypes);
+TYPED_TEST_CASE(ImageDataLayerTest, Dtypes);
 
-TYPED_TEST(ImagesLayerTest, TestRead) {
+TYPED_TEST(ImageDataLayerTest, TestRead) {
   LayerParameter param;
   ImageDataParameter* image_data_param = param.mutable_image_data_param();
   image_data_param->set_batch_size(5);
   image_data_param->set_source(this->filename);
   image_data_param->set_shuffle(false);
-  ImagesLayer<TypeParam> layer(param);
+  ImageDataLayer<TypeParam> layer(param);
   layer.SetUp(this->blob_bottom_vec_, &this->blob_top_vec_);
   EXPECT_EQ(this->blob_top_data_->num(), 5);
   EXPECT_EQ(this->blob_top_data_->channels(), 3);
@@ -78,7 +81,7 @@ TYPED_TEST(ImagesLayerTest, TestRead) {
   }
 }
 
-TYPED_TEST(ImagesLayerTest, TestResize) {
+TYPED_TEST(ImageDataLayerTest, TestResize) {
   LayerParameter param;
   ImageDataParameter* image_data_param = param.mutable_image_data_param();
   image_data_param->set_batch_size(5);
@@ -86,7 +89,7 @@ TYPED_TEST(ImagesLayerTest, TestResize) {
   image_data_param->set_new_height(256);
   image_data_param->set_new_width(256);
   image_data_param->set_shuffle(false);
-  ImagesLayer<TypeParam> layer(param);
+  ImageDataLayer<TypeParam> layer(param);
   layer.SetUp(this->blob_bottom_vec_, &this->blob_top_vec_);
   EXPECT_EQ(this->blob_top_data_->num(), 5);
   EXPECT_EQ(this->blob_top_data_->channels(), 3);
@@ -105,13 +108,13 @@ TYPED_TEST(ImagesLayerTest, TestResize) {
   }
 }
 
-TYPED_TEST(ImagesLayerTest, TestShuffle) {
+TYPED_TEST(ImageDataLayerTest, TestShuffle) {
   LayerParameter param;
   ImageDataParameter* image_data_param = param.mutable_image_data_param();
   image_data_param->set_batch_size(5);
   image_data_param->set_source(this->filename);
   image_data_param->set_shuffle(true);
-  ImagesLayer<TypeParam> layer(param);
+  ImageDataLayer<TypeParam> layer(param);
   layer.SetUp(this->blob_bottom_vec_, &this->blob_top_vec_);
   EXPECT_EQ(this->blob_top_data_->num(), 5);
   EXPECT_EQ(this->blob_top_data_->channels(), 3);
-- 
2.7.4
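
For orientation only, and not part of the commit above: a minimal C++ sketch of how the renamed layer is driven, mirroring the TestRead setup in the test diff. The source path, the float instantiation, and the main() wrapper are assumptions for illustration; the calls themselves (mutable_image_data_param, set_batch_size, set_source, set_shuffle, SetUp) are the ones exercised by the test.

// Illustrative sketch only -- not part of the patch.
#include <vector>

#include "caffe/blob.hpp"
#include "caffe/proto/caffe.pb.h"
#include "caffe/vision_layers.hpp"

int main() {
  caffe::LayerParameter param;
  caffe::ImageDataParameter* image_data_param = param.mutable_image_data_param();
  image_data_param->set_batch_size(5);
  // Assumed path: a file list of "<image path> <label>" lines, as prepared in
  // docs/feature_extraction.md above.
  image_data_param->set_source("examples/_temp/file_list.txt");
  image_data_param->set_shuffle(false);

  caffe::ImageDataLayer<float> layer(param);  // formerly caffe::ImagesLayer<float>
  std::vector<caffe::Blob<float>*> bottom;    // the layer takes no bottom blobs
  caffe::Blob<float> data, label;
  std::vector<caffe::Blob<float>*> top;
  top.push_back(&data);
  top.push_back(&label);
  layer.SetUp(bottom, &top);  // reads the list and prefetches the first batch
  return 0;
}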