exhaustive gradient check mode
author    Yangqing Jia <jiayq84@gmail.com>
          Thu, 19 Sep 2013 21:53:51 +0000 (14:53 -0700)
committer Yangqing Jia <jiayq84@gmail.com>
          Thu, 19 Sep 2013 21:53:51 +0000 (14:53 -0700)
src/caffeine/test/test_gradient_check_util.cpp
src/caffeine/test/test_gradient_check_util.hpp
src/caffeine/test/test_im2col_layer.cpp
src/caffeine/test/test_innerproduct_layer.cpp
src/caffeine/test/test_neuron_layer.cpp
src/caffeine/test/test_padding_layer.cpp

src/caffeine/test/test_gradient_check_util.cpp
index aa540f9..2bea22b 100644
@@ -9,10 +9,9 @@ using std::max;
 namespace caffeine {
 
 template <typename Dtype>
-void GradientChecker<Dtype>::CheckGradient(Layer<Dtype>& layer,
+void GradientChecker<Dtype>::CheckGradientSingle(Layer<Dtype>& layer,
     vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>& top,
-    int check_bottom) {
-  layer.SetUp(bottom, &top);
+    int check_bottom, int top_id, int top_data_id) {
   // First, figure out what blobs we need to check against.
   vector<Blob<Dtype>*> blobs_to_check;
   for (int i = 0; i < layer.params().size(); ++i) {
@@ -26,7 +25,7 @@ void GradientChecker<Dtype>::CheckGradient(Layer<Dtype>& layer,
     CHECK(check_bottom < bottom.size());
     blobs_to_check.push_back(bottom[check_bottom]);
   }
-  // go through the blobs
+  // go through the bottom and parameter blobs
   //LOG(ERROR) << "Checking " << blobs_to_check.size() << " blobs.";
   for (int blobid = 0; blobid < blobs_to_check.size(); ++blobid) {
     Blob<Dtype>* current_blob = blobs_to_check[blobid];
@@ -37,7 +36,7 @@ void GradientChecker<Dtype>::CheckGradient(Layer<Dtype>& layer,
       // First, obtain the original data
       Caffeine::set_random_seed(seed_);
       layer.Forward(bottom, &top);
-      Dtype computed_objective = GetObjAndGradient(top);
+      Dtype computed_objective = GetObjAndGradient(top, top_id, top_data_id);
       // Get any additional loss from the layer
       computed_objective += layer.Backward(top, true, &bottom);
       Dtype computed_gradient = current_blob->cpu_diff()[feat_id];
@@ -45,13 +44,13 @@ void GradientChecker<Dtype>::CheckGradient(Layer<Dtype>& layer,
       current_blob->mutable_cpu_data()[feat_id] += stepsize_;
       Caffeine::set_random_seed(seed_);
       layer.Forward(bottom, &top);
-      Dtype positive_objective = GetObjAndGradient(top);
+      Dtype positive_objective = GetObjAndGradient(top, top_id, top_data_id);
       positive_objective += layer.Backward(top, true, &bottom);
       // compute score by subtracting stepsize
       current_blob->mutable_cpu_data()[feat_id] -= stepsize_ * 2;
       Caffeine::set_random_seed(seed_);
       layer.Forward(bottom, &top);
-      Dtype negative_objective = GetObjAndGradient(top);
+      Dtype negative_objective = GetObjAndGradient(top, top_id, top_data_id);
       negative_objective += layer.Backward(top, true, &bottom);
       // Recover stepsize
       current_blob->mutable_cpu_data()[feat_id] += stepsize_;
@@ -76,20 +75,47 @@ void GradientChecker<Dtype>::CheckGradient(Layer<Dtype>& layer,
 }
 
 template <typename Dtype>
-Dtype GradientChecker<Dtype>::GetObjAndGradient(vector<Blob<Dtype>*>& top) {
-  Dtype loss = 0;
+void GradientChecker<Dtype>::CheckGradientExhaustive(Layer<Dtype>& layer,
+    vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>& top, int check_bottom) {
+  layer.SetUp(bottom, &top);
+  //LOG(ERROR) << "Exhaustive Mode.";
   for (int i = 0; i < top.size(); ++i) {
-    Blob<Dtype>* top_blob = top[i];
-    const Dtype* top_blob_data = top_blob->cpu_data();
-    Dtype* top_blob_diff = top_blob->mutable_cpu_diff();
-    int count = top_blob->count();
-    for (int j = 0; j < count; ++j) {
-      loss += top_blob_data[j] * top_blob_data[j];
+    //LOG(ERROR) << "Exhaustive: blob " << i << " size " << top[i]->count();
+    for (int j = 0; j < top[i]->count(); ++j) {
+      //LOG(ERROR) << "Exhaustive: blob " << i << " data " << j;
+      CheckGradientSingle(layer, bottom, top, check_bottom, i, j);
+    }
+  }
+}
+
+template <typename Dtype>
+Dtype GradientChecker<Dtype>::GetObjAndGradient(vector<Blob<Dtype>*>& top,
+    int top_id, int top_data_id) {
+  Dtype loss = 0;
+  if (top_id < 0) {
+    // the loss will be half of the sum of squares of all outputs
+    for (int i = 0; i < top.size(); ++i) {
+      Blob<Dtype>* top_blob = top[i];
+      const Dtype* top_blob_data = top_blob->cpu_data();
+      Dtype* top_blob_diff = top_blob->mutable_cpu_diff();
+      int count = top_blob->count();
+      for (int j = 0; j < count; ++j) {
+        loss += top_blob_data[j] * top_blob_data[j];
+      }
+      // set the diff: simply the data.
+      memcpy(top_blob_diff, top_blob_data, sizeof(Dtype) * top_blob->count());
+    }
+    loss /= 2.;
+  } else {
+    // the loss will be the top_data_id-th element in the top_id-th blob.
+    for (int i = 0; i < top.size(); ++i) {
+      Blob<Dtype>* top_blob = top[i];
+      Dtype* top_blob_diff = top_blob->mutable_cpu_diff();
+      memset(top_blob_diff, 0, sizeof(Dtype) * top_blob->count());
     }
-    // set the diff: simply the data.
-    memcpy(top_blob_diff, top_blob_data, sizeof(Dtype) * count);
+    loss = top[top_id]->cpu_data()[top_data_id];
+    top[top_id]->mutable_cpu_diff()[top_data_id] = 1.;
   }
-  loss /= 2.;
   return loss;
 }
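
Note: GetObjAndGradient now supports two objectives. With top_id < 0 it keeps the old aggregate loss L = 0.5 * sum_j y_j^2, whose gradient with respect to each output is simply y_j (hence the memcpy of data into diff). With a specific (top_id, top_data_id) the loss is that single output element; every diff is zeroed and that one entry is set to 1. CheckGradientSingle then compares the analytic gradient from Backward against a central finite difference, reseeding the RNG before each Forward so stochastic layers see identical randomness across all three passes. A minimal sketch of the estimate it computes (NumericalGradient, Evaluate, and h are illustrative names, not symbols from this patch):

// Sketch only: the central-difference estimate behind CheckGradientSingle.
template <typename Dtype, typename Fn>
Dtype NumericalGradient(Fn Evaluate, Dtype* x, Dtype h) {
  const Dtype original = *x;
  *x = original + h;
  const Dtype positive = Evaluate();  // objective at x + h
  *x = original - h;
  const Dtype negative = Evaluate();  // objective at x - h
  *x = original;                      // restore the feature value
  return (positive - negative) / (2 * h);
}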
 
src/caffeine/test/test_gradient_check_util.hpp
index 848be15..cb55766 100644
@@ -22,9 +22,20 @@ class GradientChecker {
   // Note that after the gradient check, we do not guarantee that the data
   // stored in the layer parameters and the blobs is unchanged.
   void CheckGradient(Layer<Dtype>& layer, vector<Blob<Dtype>*>& bottom,
-    vector<Blob<Dtype>*>& top, int check_bottom = -1);
+      vector<Blob<Dtype>*>& top, int check_bottom = -1) {
+      layer.SetUp(bottom, &top);
+      CheckGradientSingle(layer, bottom, top, check_bottom, -1, -1);
+  }
+  void CheckGradientExhaustive(Layer<Dtype>& layer,
+      vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>& top,
+      int check_bottom = -1);
+
+  void CheckGradientSingle(Layer<Dtype>& layer, vector<Blob<Dtype>*>& bottom,
+      vector<Blob<Dtype>*>& top, int check_bottom, int top_id,
+      int top_data_id);
  protected:
-  Dtype GetObjAndGradient(vector<Blob<Dtype>*>& top);
+  Dtype GetObjAndGradient(vector<Blob<Dtype>*>& top, int top_id = -1,
+      int top_data_id = -1);
   Dtype stepsize_;
   Dtype threshold_;
   unsigned int seed_;
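
Note: in the header, CheckGradient becomes a thin inline wrapper that runs SetUp once and delegates to CheckGradientSingle with top_id = top_data_id = -1 (the aggregate objective), while CheckGradientExhaustive performs one single-element check per output value, so its cost grows with the total top count. A usage sketch (layer and blob names are illustrative; the constructor arguments follow the tests below):

GradientChecker<float> checker(1e-2, 1e-3);  // stepsize, threshold
// one pass against the aggregate 0.5 * sum(y^2) objective:
checker.CheckGradient(layer, bottom_vec, top_vec);
// one central-difference check per top element; much slower, but it
// catches per-element errors the summed objective can average away:
checker.CheckGradientExhaustive(layer, bottom_vec, top_vec);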
src/caffeine/test/test_im2col_layer.cpp
index da5632d..d96d011 100644
@@ -70,7 +70,7 @@ TYPED_TEST(Im2colLayerTest, TestCPUGradient) {
   Caffeine::set_mode(Caffeine::CPU);
   Im2colLayer<TypeParam> layer(layer_param);
   GradientChecker<TypeParam> checker(1e-2, 1e-2);
-  checker.CheckGradient(layer, this->blob_bottom_vec_, this->blob_top_vec_);
+  checker.CheckGradientExhaustive(layer, this->blob_bottom_vec_, this->blob_top_vec_);
 }
 
 }
src/caffeine/test/test_innerproduct_layer.cpp
index eb5e341..9c883db 100644
@@ -99,7 +99,7 @@ TYPED_TEST(InnerProductLayerTest, TestCPUGradient) {
   layer_param.mutable_bias_filler()->set_max(2);
   InnerProductLayer<TypeParam> layer(layer_param);
   GradientChecker<TypeParam> checker(1e-2, 1e-2);
-  checker.CheckGradient(layer, this->blob_bottom_vec_, this->blob_top_vec_);
+  checker.CheckGradientExhaustive(layer, this->blob_bottom_vec_, this->blob_top_vec_);
 }
 
 TYPED_TEST(InnerProductLayerTest, TestGPUGradient) {
src/caffeine/test/test_neuron_layer.cpp
index a634a3a..5a28b00 100644
@@ -57,7 +57,7 @@ TYPED_TEST(NeuronLayerTest, TestReLUGradientCPU) {
   Caffeine::set_mode(Caffeine::CPU);
   ReLULayer<TypeParam> layer(layer_param);
   GradientChecker<TypeParam> checker(1e-2, 1e-3, 1701, 0., 0.01);
-  checker.CheckGradient(layer, this->blob_bottom_vec_, this->blob_top_vec_);
+  checker.CheckGradientExhaustive(layer, this->blob_bottom_vec_, this->blob_top_vec_);
 }
 
 
@@ -82,7 +82,7 @@ TYPED_TEST(NeuronLayerTest, TestReLUGradientGPU) {
   Caffeine::set_mode(Caffeine::GPU);
   ReLULayer<TypeParam> layer(layer_param);
   GradientChecker<TypeParam> checker(1e-2, 1e-3, 1701, 0., 0.01);
-  checker.CheckGradient(layer, this->blob_bottom_vec_, this->blob_top_vec_);
+  checker.CheckGradientExhaustive(layer, this->blob_bottom_vec_, this->blob_top_vec_);
 }
 
 
@@ -110,7 +110,7 @@ TYPED_TEST(NeuronLayerTest, TestDropoutGradientCPU) {
   Caffeine::set_mode(Caffeine::CPU);
   DropoutLayer<TypeParam> layer(layer_param);
   GradientChecker<TypeParam> checker(1e-2, 1e-3);
-  checker.CheckGradient(layer, this->blob_bottom_vec_, this->blob_top_vec_);
+  checker.CheckGradientExhaustive(layer, this->blob_bottom_vec_, this->blob_top_vec_);
 }
 
 
@@ -158,7 +158,7 @@ TYPED_TEST(NeuronLayerTest, TestDropoutGradientGPU) {
     Caffeine::set_mode(Caffeine::GPU);
     DropoutLayer<TypeParam> layer(layer_param);
     GradientChecker<TypeParam> checker(1e-2, 1e-3);
-    checker.CheckGradient(layer, this->blob_bottom_vec_, this->blob_top_vec_);
+    checker.CheckGradientExhaustive(layer, this->blob_bottom_vec_, this->blob_top_vec_);
   } else {
     LOG(ERROR) << "Skipping test to spare my laptop.";
   }
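
Note: the dropout tests can run in exhaustive mode only because the checker calls Caffeine::set_random_seed(seed_) before every Forward, so all evaluations in a central difference draw the same mask. A sketch of the idea (seed, stepsize, and i are illustrative here):

// With a fixed seed the dropout mask m is identical on every call, so the
// layer computes a deterministic function of its input and the finite
// difference is well defined.
Caffeine::set_random_seed(seed);
layer.Forward(bottom, &top);                   // f(x) under mask m
bottom[0]->mutable_cpu_data()[i] += stepsize;  // perturb one input element
Caffeine::set_random_seed(seed);
layer.Forward(bottom, &top);                   // f(x + stepsize), same mask m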
src/caffeine/test/test_padding_layer.cpp
index fb95fde..03bf61f 100644
@@ -65,7 +65,7 @@ TYPED_TEST(PaddingLayerTest, TestCPUGrad) {
   Caffeine::set_mode(Caffeine::CPU);
   PaddingLayer<TypeParam> layer(layer_param);
   GradientChecker<TypeParam> checker(1e-2, 1e-3);
-  checker.CheckGradient(layer, this->blob_bottom_vec_, this->blob_top_vec_);
+  checker.CheckGradientExhaustive(layer, this->blob_bottom_vec_, this->blob_top_vec_);
 }
 
 TYPED_TEST(PaddingLayerTest, TestGPU) {
@@ -102,7 +102,7 @@ TYPED_TEST(PaddingLayerTest, TestGPUGrad) {
     Caffeine::set_mode(Caffeine::GPU);
     PaddingLayer<TypeParam> layer(layer_param);
     GradientChecker<TypeParam> checker(1e-2, 1e-3);
-    checker.CheckGradient(layer, this->blob_bottom_vec_, this->blob_top_vec_);
+    checker.CheckGradientExhaustive(layer, this->blob_bottom_vec_, this->blob_top_vec_);
   } else {
     LOG(ERROR) << "Skipping test (gpu version too low).";
   }