Blob: add scale_{data,diff} methods and tests
author Jeff Donahue <jeff.donahue@gmail.com>
Tue, 7 Oct 2014 06:45:43 +0000 (23:45 -0700)
committer Jeff Donahue <jeff.donahue@gmail.com>
Sat, 14 Feb 2015 01:28:10 +0000 (17:28 -0800)
include/caffe/blob.hpp
src/caffe/blob.cpp
src/caffe/test/test_blob.cpp

index d425e38..42e4420 100644 (file)
@@ -111,6 +111,11 @@ class Blob {
   /// @brief Compute the sum of squares (L2 norm squared) of the diff.
   Dtype sumsq_diff() const;
 
+  /// @brief Scale the blob data by a constant factor.
+  void scale_data(Dtype scale_factor);
+  /// @brief Scale the blob diff by a constant factor.
+  void scale_diff(Dtype scale_factor);
+
   /**
    * @brief Set the data_ shared_ptr to point to the SyncedMemory holding the
    *        data_ of Blob other -- useful in Layer%s which simply perform a copy
index 3b72ce0..fbc1361 100644 (file)
@@ -279,6 +279,72 @@ Dtype Blob<Dtype>::sumsq_diff() const {
   return sumsq;
 }
 
+template <> void Blob<unsigned int>::scale_data(unsigned int scale_factor) {
+  NOT_IMPLEMENTED;
+}
+
+template <> void Blob<int>::scale_data(int scale_factor) {
+  NOT_IMPLEMENTED;
+}
+
+template <typename Dtype>
+void Blob<Dtype>::scale_data(Dtype scale_factor) {
+  Dtype* data;
+  if (!data_) { return; }
+  switch (data_->head()) {
+  case SyncedMemory::HEAD_AT_CPU:
+    data = mutable_cpu_data();
+    caffe_scal(count_, scale_factor, data);
+    return;
+  case SyncedMemory::HEAD_AT_GPU:
+  case SyncedMemory::SYNCED:
+#ifndef CPU_ONLY
+    data = mutable_gpu_data();
+    caffe_gpu_scal(count_, scale_factor, data);
+    return;
+#else
+    NO_GPU;
+#endif
+  case SyncedMemory::UNINITIALIZED:
+    return;
+  default:
+    LOG(FATAL) << "Unknown SyncedMemory head state: " << data_->head();
+  }
+}
+
+template <> void Blob<unsigned int>::scale_diff(unsigned int scale_factor) {
+  NOT_IMPLEMENTED;
+}
+
+template <> void Blob<int>::scale_diff(int scale_factor) {
+  NOT_IMPLEMENTED;
+}
+
+template <typename Dtype>
+void Blob<Dtype>::scale_diff(Dtype scale_factor) {
+  Dtype* diff;
+  if (!diff_) { return; }
+  switch (diff_->head()) {
+  case SyncedMemory::HEAD_AT_CPU:
+    diff = mutable_cpu_diff();
+    caffe_scal(count_, scale_factor, diff);
+    return;
+  case SyncedMemory::HEAD_AT_GPU:
+  case SyncedMemory::SYNCED:
+#ifndef CPU_ONLY
+    diff = mutable_gpu_diff();
+    caffe_gpu_scal(count_, scale_factor, diff);
+    return;
+#else
+    NO_GPU;
+#endif
+  case SyncedMemory::UNINITIALIZED:
+    return;
+  default:
+    LOG(FATAL) << "Unknown SyncedMemory head state: " << diff_->head();
+  }
+}
+
 template <typename Dtype>
 void Blob<Dtype>::CopyFrom(const Blob& source, bool copy_diff, bool reshape) {
   if (num_ != source.num() || channels_ != source.channels() ||
index c619ad1..84d84e8 100644 (file)
@@ -120,7 +120,7 @@ TYPED_TEST(BlobMathTest, TestSumOfSquares) {
 TYPED_TEST(BlobMathTest, TestAsum) {
   typedef typename TypeParam::Dtype Dtype;
 
-  // Uninitialized Blob should have sum of squares == 0.
+  // Uninitialized Blob should have asum == 0.
   EXPECT_EQ(0, this->blob_->asum_data());
   EXPECT_EQ(0, this->blob_->asum_diff());
   FillerParameter filler_param;
@@ -167,4 +167,62 @@ TYPED_TEST(BlobMathTest, TestAsum) {
   EXPECT_FLOAT_EQ(expected_asum * kDiffScaleFactor, this->blob_->asum_diff());
 }
 
+TYPED_TEST(BlobMathTest, TestScaleData) {
+  typedef typename TypeParam::Dtype Dtype;
+
+  EXPECT_EQ(0, this->blob_->asum_data());
+  EXPECT_EQ(0, this->blob_->asum_diff());
+  FillerParameter filler_param;
+  filler_param.set_min(-3);
+  filler_param.set_max(3);
+  UniformFiller<Dtype> filler(filler_param);
+  filler.Fill(this->blob_);
+  const Dtype asum_before_scale = this->blob_->asum_data();
+  // Do a mutable access on the current device,
+  // so that the asum computation is done on that device.
+  // (Otherwise, this would only check the CPU asum implementation.)
+  switch (TypeParam::device) {
+  case Caffe::CPU:
+    this->blob_->mutable_cpu_data();
+    break;
+  case Caffe::GPU:
+    this->blob_->mutable_gpu_data();
+    break;
+  default:
+    LOG(FATAL) << "Unknown device: " << TypeParam::device;
+  }
+  const Dtype kDataScaleFactor = 3;
+  this->blob_->scale_data(kDataScaleFactor);
+  EXPECT_FLOAT_EQ(asum_before_scale * kDataScaleFactor,
+                  this->blob_->asum_data());
+  EXPECT_EQ(0, this->blob_->asum_diff());
+
+  // Check scale_diff too.
+  const Dtype kDataToDiffScaleFactor = 7;
+  const Dtype* data = this->blob_->cpu_data();
+  caffe_cpu_scale(this->blob_->count(), kDataToDiffScaleFactor, data,
+                  this->blob_->mutable_cpu_diff());
+  EXPECT_FLOAT_EQ(asum_before_scale * kDataScaleFactor,
+                  this->blob_->asum_data());
+  const Dtype diff_asum_before_scale = this->blob_->asum_diff();
+  EXPECT_FLOAT_EQ(asum_before_scale * kDataScaleFactor * kDataToDiffScaleFactor,
+                  diff_asum_before_scale);
+  switch (TypeParam::device) {
+  case Caffe::CPU:
+    this->blob_->mutable_cpu_diff();
+    break;
+  case Caffe::GPU:
+    this->blob_->mutable_gpu_diff();
+    break;
+  default:
+    LOG(FATAL) << "Unknown device: " << TypeParam::device;
+  }
+  const Dtype kDiffScaleFactor = 3;
+  this->blob_->scale_diff(kDiffScaleFactor);
+  EXPECT_FLOAT_EQ(asum_before_scale * kDataScaleFactor,
+                  this->blob_->asum_data());
+  EXPECT_FLOAT_EQ(diff_asum_before_scale * kDiffScaleFactor,
+                  this->blob_->asum_diff());
+}
+
 }  // namespace caffe