removed needs_reshape_ and ChangeBatchSize is now set_batch_size
author	manuele <manuele.tamburrano@gmail.com>
Mon, 12 Jan 2015 11:22:49 +0000 (12:22 +0100)
committer	Evan Shelhamer <shelhamer@imaginarynumber.net>
Sat, 7 Feb 2015 04:55:31 +0000 (20:55 -0800)
include/caffe/data_layers.hpp
src/caffe/layers/memory_data_layer.cpp
src/caffe/test/test_memory_data_layer.cpp

index e8c6eec..1542af7 100644 (file)
@@ -266,7 +266,7 @@ class MemoryDataLayer : public BaseDataLayer<Dtype> {
   // Reset should accept const pointers, but can't, because the memory
   //  will be given to Blob, which is mutable
   void Reset(Dtype* data, Dtype* label, int n);
-  void ChangeBatchSize(int new_size);
+  void set_batch_size(int new_size);
 
   int batch_size() { return batch_size_; }
   int channels() { return channels_; }
@@ -285,7 +285,6 @@ class MemoryDataLayer : public BaseDataLayer<Dtype> {
   Blob<Dtype> added_data_;
   Blob<Dtype> added_label_;
   bool has_new_data_;
-  bool needs_reshape_;
 };
 
 /**
index 73b9f49..1669db3 100644 (file)
@@ -23,7 +23,6 @@ void MemoryDataLayer<Dtype>::DataLayerSetUp(const vector<Blob<Dtype>*>& bottom,
   added_label_.Reshape(batch_size_, 1, 1, 1);
   data_ = NULL;
   labels_ = NULL;
-  needs_reshape_ = false;
   added_data_.cpu_data();
   added_label_.cpu_data();
 }
@@ -89,25 +88,21 @@ void MemoryDataLayer<Dtype>::Reset(Dtype* data, Dtype* labels, int n) {
 }
 
 template <typename Dtype>
-void MemoryDataLayer<Dtype>::ChangeBatchSize(int new_size) {
+void MemoryDataLayer<Dtype>::set_batch_size(int new_size) {
   CHECK(!has_new_data_) <<
       "Can't change batch_size before all data haven't been consumed"
       << " by the upper layers";
   batch_size_ = new_size;
   added_data_.Reshape(batch_size_, channels_, height_, width_);
   added_label_.Reshape(batch_size_, 1, 1, 1);
-  needs_reshape_ = true;
 }
 
 template <typename Dtype>
 void MemoryDataLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
       const vector<Blob<Dtype>*>& top) {
   CHECK(data_) << "MemoryDataLayer needs to be initalized by calling Reset";
-  if (needs_reshape_) {
-    top[0]->Reshape(batch_size_, channels_, height_, width_);
-    top[1]->Reshape(batch_size_, 1, 1, 1);
-    needs_reshape_ = false;
-  }
+  top[0]->Reshape(batch_size_, channels_, height_, width_);
+  top[1]->Reshape(batch_size_, 1, 1, 1);
   top[0]->set_cpu_data(data_ + pos_ * size_);
   top[1]->set_cpu_data(labels_ + pos_);
   pos_ = (pos_ + batch_size_) % n_;
index d4d5710..6a7bf04 100644 (file)
@@ -211,7 +211,7 @@ TYPED_TEST(MemoryDataLayerTest, AddMatVectorDefaultTransform) {
   }
 }
 
-TYPED_TEST(MemoryDataLayerTest, TestChangeBatchSize) {
+TYPED_TEST(MemoryDataLayerTest, TestSetBatchSize) {
   typedef typename TypeParam::Dtype Dtype;
   LayerParameter param;
   MemoryDataParameter* memory_data_param = param.mutable_memory_data_param();
@@ -255,7 +255,7 @@ TYPED_TEST(MemoryDataLayerTest, TestChangeBatchSize) {
   }
   // and then add new data with different batch_size
   int new_batch_size = 16;
-  layer.ChangeBatchSize(new_batch_size);
+  layer.set_batch_size(new_batch_size);
   mat_vector.clear();
   mat_vector.resize(new_batch_size * num_iter);
   label_vector.clear();