From: Sergey Karayev
Date: Mon, 17 Mar 2014 23:07:39 +0000 (-0700)
Subject: Lint errors fixed, except still using stream.
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=e6055c167ca354e88690c491ec877d76477982ff;p=platform%2Fupstream%2Fcaffe.git

Lint errors fixed, except still using stream.
---

diff --git a/include/caffe/util/io.hpp b/include/caffe/util/io.hpp
index 1dcdb7e..7bf7897 100644
--- a/include/caffe/util/io.hpp
+++ b/include/caffe/util/io.hpp
@@ -53,12 +53,12 @@ inline bool ReadImageToDatum(const string& filename, const int label,
 template <typename Dtype>
 void hdf5_load_nd_dataset_helper(
     hid_t file_id, const char* dataset_name_, int min_dim, int max_dim,
-    Blob<Dtype>& blob);
+    Blob<Dtype>* blob);

 template <typename Dtype>
 void hdf5_load_nd_dataset(
     hid_t file_id, const char* dataset_name_, int min_dim, int max_dim,
-    Blob<Dtype>& blob);
+    Blob<Dtype>* blob);

 }  // namespace caffe

diff --git a/include/caffe/vision_layers.hpp b/include/caffe/vision_layers.hpp
index 8ab1afb..ad57d8b 100644
--- a/include/caffe/vision_layers.hpp
+++ b/include/caffe/vision_layers.hpp
@@ -4,6 +4,7 @@
 #define CAFFE_VISION_LAYERS_HPP_

 #include <string>
+#include <vector>

 #include "leveldb/db.h"
 #include "pthread.h"
@@ -389,7 +390,6 @@ class HDF5DataLayer : public Layer<Dtype> {
       vector<Blob<Dtype>*>* top);

  protected:
-
   virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
   virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
@@ -398,7 +398,6 @@
       const bool propagate_down, vector<Blob<Dtype>*>* bottom);
   virtual Dtype Backward_gpu(const vector<Blob<Dtype>*>& top,
       const bool propagate_down, vector<Blob<Dtype>*>* bottom);
-
   virtual void load_hdf5_file_data(const char* filename);

   std::vector<std::string> hdf_filenames_;
diff --git a/src/caffe/layers/hdf5_data_layer.cpp b/src/caffe/layers/hdf5_data_layer.cpp
index 98873cb..60e25d0 100644
--- a/src/caffe/layers/hdf5_data_layer.cpp
+++ b/src/caffe/layers/hdf5_data_layer.cpp
@@ -1,3 +1,4 @@
+// Copyright 2014 BVLC.
 /*
 Contributors:
 - Sergey Karayev, 2014.
@@ -8,11 +9,11 @@ TODO:
 - can be smarter about the memcpy call instead of doing it row-by-row
   :: use util functions caffe_copy, and Blob->offset()
   :: don't forget to update hdf5_daa_layer.cu accordingly
+- add ability to shuffle filenames if flag is set
 */
 #include <stdint.h>
 #include <string>
 #include <vector>
-#include <iostream>
 #include <fstream>

 #include "hdf5.h"
@@ -22,8 +23,6 @@ TODO:
 #include "caffe/util/io.hpp"
 #include "caffe/vision_layers.hpp"

-using std::string;
-
 namespace caffe {

 template <typename Dtype>
@@ -42,12 +41,12 @@ void HDF5DataLayer<Dtype>::load_hdf5_file_data(const char* filename) {
   const int MIN_DATA_DIM = 2;
   const int MAX_DATA_DIM = 4;
   hdf5_load_nd_dataset(
-    file_id, "data", MIN_DATA_DIM, MAX_DATA_DIM, data_blob_);
+    file_id, "data", MIN_DATA_DIM, MAX_DATA_DIM, &data_blob_);

   const int MIN_LABEL_DIM = 1;
   const int MAX_LABEL_DIM = 2;
   hdf5_load_nd_dataset(
-    file_id, "label", MIN_LABEL_DIM, MAX_LABEL_DIM, label_blob_);
+    file_id, "label", MIN_LABEL_DIM, MAX_LABEL_DIM, &label_blob_);

   herr_t status = H5Fclose(file_id);
   CHECK_EQ(data_blob_.num(), label_blob_.num());
@@ -65,7 +64,7 @@ void HDF5DataLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
   hdf_filenames_.clear();
   std::ifstream myfile(this->layer_param_.source().c_str());
   if (myfile.is_open()) {
-    string line = "";
+    std::string line;
     while (myfile >> line) {
       hdf_filenames_.push_back(line);
     }
diff --git a/src/caffe/test/test_hdf5data_layer.cpp b/src/caffe/test/test_hdf5data_layer.cpp
index 59aee0c..0b0b97e 100644
--- a/src/caffe/test/test_hdf5data_layer.cpp
+++ b/src/caffe/test/test_hdf5data_layer.cpp
@@ -64,9 +64,9 @@ TYPED_TEST(HDF5DataLayerTest, TestRead) {
   int num_cols = 8;
   int height = 5;
   int width = 5;
-  HDF5DataLayer<TypeParam> layer(param);

   // Test that the layer setup got the correct parameters.
+  HDF5DataLayer<TypeParam> layer(param);
   layer.SetUp(this->blob_bottom_vec_, &this->blob_top_vec_);
   EXPECT_EQ(this->blob_top_data_->num(), batchsize);
   EXPECT_EQ(this->blob_top_data_->channels(), num_cols);
@@ -78,16 +78,18 @@ TYPED_TEST(HDF5DataLayerTest, TestRead) {
   EXPECT_EQ(this->blob_top_label_->height(), 1);
   EXPECT_EQ(this->blob_top_label_->width(), 1);

-  for (int t=0; t<2; ++t) {
+  for (int t = 0; t < 2; ++t) {
+    // TODO: make this a TypedTest instead of this silly loop.
     if (t == 0) {
       Caffe::set_mode(Caffe::CPU);
     } else {
       Caffe::set_mode(Caffe::GPU);
     }
+    layer.SetUp(this->blob_bottom_vec_, &this->blob_top_vec_);

-    // Go through the data 100 times (50 batches).
+    // Go through the data 10 times (5 batches).
     const int data_size = num_cols * height * width;
-    for (int iter = 0; iter < 100; ++iter) {
+    for (int iter = 0; iter < 5; ++iter) {
       layer.Forward(this->blob_bottom_vec_, &this->blob_top_vec_);

       // On even iterations, we're reading the first half of the data.
@@ -109,11 +111,15 @@ TYPED_TEST(HDF5DataLayerTest, TestRead) {
       for (int j = 0; j < num_cols; ++j) {
         for (int h = 0; h < height; ++h) {
           for (int w = 0; w < width; ++w) {
-            int idx = i * num_cols * height * width + j * height * width + h * width + w;
+            int idx = (
+              i * num_cols * height * width +
+              j * height * width +
+              h * width + w);
             EXPECT_EQ(
               file_offset + data_offset + idx,
               this->blob_top_data_->cpu_data()[idx])
-              << "debug: i " << i << " j " << j << " iter " << iter;
+              << "debug: i " << i << " j " << j
+              << " iter " << iter << " t " << t;
           }
         }
       }
diff --git a/src/caffe/util/io.cpp b/src/caffe/util/io.cpp
index 72ceb8d..3ac69f9 100644
--- a/src/caffe/util/io.cpp
+++ b/src/caffe/util/io.cpp
@@ -103,7 +103,8 @@ bool ReadImageToDatum(const string& filename, const int label,
 // Verifies format of data stored in HDF5 file and reshapes blob accordingly.
 template <typename Dtype>
 void hdf5_load_nd_dataset_helper(
-    hid_t file_id, const char* dataset_name_, int min_dim, int max_dim, Blob<Dtype>& blob) {
+    hid_t file_id, const char* dataset_name_, int min_dim, int max_dim,
+    Blob<Dtype>* blob) {
   // Verify that the number of dimensions is in the accepted range.
   herr_t status;
   int ndims;
@@ -118,28 +119,27 @@ void hdf5_load_nd_dataset_helper(
     file_id, dataset_name_, dims.data(), &class_, NULL);
   CHECK_EQ(class_, H5T_FLOAT) << "Expected float or double data";

-  blob.Reshape(
+  blob->Reshape(
     dims[0],
     (dims.size() > 1) ? dims[1] : 1,
     (dims.size() > 2) ? dims[2] : 1,
-    (dims.size() > 3) ? dims[3] : 1
-  );
+    (dims.size() > 3) ? dims[3] : 1);
 }

 template <>
 void hdf5_load_nd_dataset<float>(hid_t file_id, const char* dataset_name_,
-    int min_dim, int max_dim, Blob<float>& blob) {
+    int min_dim, int max_dim, Blob<float>* blob) {
   hdf5_load_nd_dataset_helper(file_id, dataset_name_, min_dim, max_dim, blob);
   herr_t status = H5LTread_dataset_float(
-    file_id, dataset_name_, blob.mutable_cpu_data());
+    file_id, dataset_name_, blob->mutable_cpu_data());
 }

 template <>
 void hdf5_load_nd_dataset<double>(hid_t file_id, const char* dataset_name_,
-    int min_dim, int max_dim, Blob<double>& blob) {
+    int min_dim, int max_dim, Blob<double>* blob) {
   hdf5_load_nd_dataset_helper(file_id, dataset_name_, min_dim, max_dim, blob);
   herr_t status = H5LTread_dataset_double(
-    file_id, dataset_name_, blob.mutable_cpu_data());
+    file_id, dataset_name_, blob->mutable_cpu_data());
 }

 }  // namespace caffe
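
Note, not part of the patch: the io.hpp/io.cpp hunks above switch the output
argument of hdf5_load_nd_dataset from a non-const reference to a pointer, which
is the convention cpplint flags (runtime/references) and this commit adopts.
A minimal caller-side sketch of the new signature follows; the file path
"sample.h5" and the function load_example() are hypothetical and only
illustrate the call shape, not code from the patch.

    #include "hdf5.h"

    #include "caffe/blob.hpp"
    #include "caffe/util/io.hpp"

    // Hypothetical example: read the "data" dataset of a (made-up) HDF5 file
    // into a Blob, passing the Blob by pointer as the patched API requires.
    void load_example() {
      hid_t file_id = H5Fopen("sample.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
      caffe::Blob<float> data_blob;
      // min_dim = 2 and max_dim = 4 mirror the bounds used in
      // HDF5DataLayer::load_hdf5_file_data above.
      caffe::hdf5_load_nd_dataset<float>(
          file_id, "data", 2, 4, &data_blob);  // was: data_blob, by reference
      H5Fclose(file_id);
    }

With a pointer parameter the mutation is visible at the call site (&data_blob),
which is what the lint rule is after.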