eltwise gradient checker
author    Jeff Donahue <jeff.donahue@gmail.com>
Fri, 25 Apr 2014 03:24:19 +0000 (20:24 -0700)
committer Jeff Donahue <jeff.donahue@gmail.com>
Fri, 25 Apr 2014 17:33:08 +0000 (10:33 -0700)
src/caffe/test/test_eltwise_product_layer.cpp
src/caffe/test/test_flatten_layer.cpp
src/caffe/test/test_gradient_check_util.hpp
src/caffe/test/test_neuron_layer.cpp
src/caffe/test/test_power_layer.cpp
src/caffe/test/test_split_layer.cpp
src/caffe/test/test_tanh_layer.cpp

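Adds GradientChecker::CheckGradientEltwise, a gradient check for layers that compute each top element from the corresponding bottom element alone, and switches the tests in the files above from CheckGradientExhaustive to it. Instead of finite-differencing every (top element, bottom element) pair, the eltwise mode finite-differences only the diagonal entries and verifies that the analytically computed off-diagonal gradients are zero.
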
diff --git a/src/caffe/test/test_eltwise_product_layer.cpp b/src/caffe/test/test_eltwise_product_layer.cpp
index 8255a57..86d6fdc 100644
@@ -102,7 +102,7 @@ TYPED_TEST(EltwiseProductLayerTest, TestCPUGradient) {
   LayerParameter layer_param;
   EltwiseProductLayer<TypeParam> layer(layer_param);
   GradientChecker<TypeParam> checker(1e-2, 1e-3);
-  checker.CheckGradientExhaustive(&layer, &(this->blob_bottom_vec_),
+  checker.CheckGradientEltwise(&layer, &(this->blob_bottom_vec_),
       &(this->blob_top_vec_));
 }
 
@@ -111,7 +111,7 @@ TYPED_TEST(EltwiseProductLayerTest, TestGPUGradient) {
   LayerParameter layer_param;
   EltwiseProductLayer<TypeParam> layer(layer_param);
   GradientChecker<TypeParam> checker(1e-2, 1e-2);
-  checker.CheckGradientExhaustive(&layer, &(this->blob_bottom_vec_),
+  checker.CheckGradientEltwise(&layer, &(this->blob_bottom_vec_),
       &(this->blob_top_vec_));
 }
 
diff --git a/src/caffe/test/test_flatten_layer.cpp b/src/caffe/test/test_flatten_layer.cpp
index 139488b..52c567b 100644
@@ -84,7 +84,7 @@ TYPED_TEST(FlattenLayerTest, TestCPUGradient) {
   Caffe::set_mode(Caffe::CPU);
   FlattenLayer<TypeParam> layer(layer_param);
   GradientChecker<TypeParam> checker(1e-2, 1e-2);
-  checker.CheckGradientExhaustive(&layer, &(this->blob_bottom_vec_),
+  checker.CheckGradientEltwise(&layer, &(this->blob_bottom_vec_),
       &(this->blob_top_vec_));
 }
 
@@ -93,7 +93,7 @@ TYPED_TEST(FlattenLayerTest, TestGPUGradient) {
   Caffe::set_mode(Caffe::GPU);
   FlattenLayer<TypeParam> layer(layer_param);
   GradientChecker<TypeParam> checker(1e-2, 1e-2);
-  checker.CheckGradientExhaustive(&layer, &(this->blob_bottom_vec_),
+  checker.CheckGradientEltwise(&layer, &(this->blob_bottom_vec_),
       &(this->blob_top_vec_));
 }
 
diff --git a/src/caffe/test/test_gradient_check_util.hpp b/src/caffe/test/test_gradient_check_util.hpp
index a1c66a1..da54a96 100644
@@ -40,9 +40,15 @@ class GradientChecker {
       vector<Blob<Dtype>*>* bottom, vector<Blob<Dtype>*>* top,
       int check_bottom = -1);
 
+  // CheckGradientEltwise can be used to test layers that perform element-wise
+  // computation only (e.g., neuron layers) -- where (d y_i) / (d x_j) = 0 when
+  // i != j.
+  void CheckGradientEltwise(Layer<Dtype>* layer,
+      vector<Blob<Dtype>*>* bottom, vector<Blob<Dtype>*>* top);
+
   void CheckGradientSingle(Layer<Dtype>* layer, vector<Blob<Dtype>*>* bottom,
       vector<Blob<Dtype>*>* top, int check_bottom, int top_id,
-      int top_data_id);
+      int top_data_id, bool element_wise = false);
 
   // Checks the gradient of a network. This network should not have any data
   // layers or loss layers, since the function does not explicitly deal with
@@ -65,7 +71,16 @@ class GradientChecker {
 template <typename Dtype>
 void GradientChecker<Dtype>::CheckGradientSingle(Layer<Dtype>* layer,
     vector<Blob<Dtype>*>* bottom, vector<Blob<Dtype>*>* top,
-    int check_bottom, int top_id, int top_data_id) {
+    int check_bottom, int top_id, int top_data_id, bool element_wise) {
+  if (element_wise) {
+    CHECK_EQ(0, layer->blobs().size());
+    CHECK_LE(0, top_id);
+    CHECK_LE(0, top_data_id);
+    const int top_count = (*top)[top_id]->count();
+    for (int blob_id = 0; blob_id < bottom->size(); ++blob_id) {
+      CHECK_EQ(top_count, (*bottom)[blob_id]->count());
+    }
+  }
   // First, figure out what blobs we need to check against.
   vector<Blob<Dtype>*> blobs_to_check;
   for (int i = 0; i < layer->blobs().size(); ++i) {
@@ -87,7 +102,8 @@ void GradientChecker<Dtype>::CheckGradientSingle(Layer<Dtype>* layer,
   computed_objective += GetObjAndGradient(top, top_id, top_data_id);
   layer->Backward(*top, true, bottom);
   // Store computed gradients for all checked blobs
-  vector<shared_ptr<Blob<Dtype> > > computed_gradient_blobs(blobs_to_check.size());
+  vector<shared_ptr<Blob<Dtype> > >
+      computed_gradient_blobs(blobs_to_check.size());
   for (int blob_id = 0; blob_id < blobs_to_check.size(); ++blob_id) {
     Blob<Dtype>* current_blob = blobs_to_check[blob_id];
     computed_gradient_blobs[blob_id].reset(new Blob<Dtype>());
@@ -108,20 +124,29 @@ void GradientChecker<Dtype>::CheckGradientSingle(Layer<Dtype>* layer,
     // LOG(ERROR) << "Blob " << blob_id << ": checking "
     //     << current_blob->count() << " parameters.";
     for (int feat_id = 0; feat_id < current_blob->count(); ++feat_id) {
-      // Compute loss with stepsize_ added to input.
-      current_blob->mutable_cpu_data()[feat_id] += stepsize_;
-      Caffe::set_random_seed(seed_);
-      Dtype positive_objective = layer->Forward(*bottom, top);
-      positive_objective += GetObjAndGradient(top, top_id, top_data_id);
-      // Compute loss with stepsize_ subtracted from input.
-      current_blob->mutable_cpu_data()[feat_id] -= stepsize_ * 2;
-      Caffe::set_random_seed(seed_);
-      Dtype negative_objective = layer->Forward(*bottom, top);
-      negative_objective += GetObjAndGradient(top, top_id, top_data_id);
-      // Recover original input value.
-      current_blob->mutable_cpu_data()[feat_id] += stepsize_;
-      Dtype estimated_gradient = (positive_objective - negative_objective) /
-          stepsize_ / 2.;
+      // For an element-wise layer, we only need finite differencing to
+      // compute the derivative of (*top)[top_id][top_data_id] w.r.t.
+      // (*bottom)[blob_id][i] when i == top_data_id.  For any other
+      // i != top_data_id, we know the derivative is 0 by definition, and we
+      // simply check that it is.
+      Dtype estimated_gradient = 0;
+      if (!element_wise || (feat_id == top_data_id)) {
+        // Do finite differencing.
+        // Compute loss with stepsize_ added to input.
+        current_blob->mutable_cpu_data()[feat_id] += stepsize_;
+        Caffe::set_random_seed(seed_);
+        Dtype positive_objective = layer->Forward(*bottom, top);
+        positive_objective += GetObjAndGradient(top, top_id, top_data_id);
+        // Compute loss with stepsize_ subtracted from input.
+        current_blob->mutable_cpu_data()[feat_id] -= stepsize_ * 2;
+        Caffe::set_random_seed(seed_);
+        Dtype negative_objective = layer->Forward(*bottom, top);
+        negative_objective += GetObjAndGradient(top, top_id, top_data_id);
+        // Recover original input value.
+        current_blob->mutable_cpu_data()[feat_id] += stepsize_;
+        estimated_gradient = (positive_objective - negative_objective) /
+            stepsize_ / 2.;
+      }
       Dtype computed_gradient = computed_gradients[feat_id];
       Dtype feature = current_blob->cpu_data()[feat_id];
       // LOG(ERROR) << "debug: " << current_blob->cpu_data()[feat_id] << " "
@@ -158,6 +183,20 @@ void GradientChecker<Dtype>::CheckGradientExhaustive(Layer<Dtype>* layer,
 }
 
 template <typename Dtype>
+void GradientChecker<Dtype>::CheckGradientEltwise(Layer<Dtype>* layer,
+    vector<Blob<Dtype>*>* bottom, vector<Blob<Dtype>*>* top) {
+  layer->SetUp(*bottom, top);
+  CHECK_GT(top->size(), 0) << "Eltwise mode requires at least one top blob.";
+  const int check_bottom = -1;
+  const bool element_wise = true;
+  for (int i = 0; i < top->size(); ++i) {
+    for (int j = 0; j < (*top)[i]->count(); ++j) {
+      CheckGradientSingle(layer, bottom, top, check_bottom, i, j, element_wise);
+    }
+  }
+}
+
+template <typename Dtype>
 void GradientChecker<Dtype>::CheckGradientNet(
     const Net<Dtype>& net, const vector<Blob<Dtype>*>& input) {
   const vector<shared_ptr<Layer<Dtype> > >& layers = net.layers();
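
Why the eltwise mode is cheaper: CheckGradientExhaustive runs two extra forward passes for every (top element, input element) pair, i.e. O(N^2) forwards for N-element blobs, whereas the eltwise mode finite-differences only the matching input element and compares every other analytic gradient against an estimated gradient of exactly 0, i.e. O(N) forwards. Below is a minimal, self-contained sketch of that diagonal-only check in plain C++ (not Caffe code; the names are illustrative), using tanh as the element-wise op:

#include <cassert>
#include <cmath>
#include <vector>

// For an element-wise op y_i = f(x_i), (d y_i) / (d x_j) = 0 whenever
// i != j, so a numerical check only needs central differences on the
// diagonal; off-diagonal analytic gradients are simply compared to 0.
template <typename F>
double central_difference(F f, double x, double h = 1e-2) {
  return (f(x + h) - f(x - h)) / (2. * h);
}

int main() {
  auto f  = [](double x) { return std::tanh(x); };
  auto df = [](double x) { double t = std::tanh(x); return 1. - t * t; };
  std::vector<double> xs = {-1.5, -0.3, 0.0, 0.7, 2.0};
  for (double x : xs) {
    // The diagonal entry is checked numerically; everything else is zero
    // by construction and needs no forward passes at all.
    assert(std::fabs(df(x) - central_difference(f, x)) < 1e-3);
  }
  return 0;
}

Note that the element_wise path CHECKs that the layer has no parameter blobs and that every bottom blob has the same count as the top blob, so layers with learned weights must still use CheckGradientExhaustive.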
diff --git a/src/caffe/test/test_neuron_layer.cpp b/src/caffe/test/test_neuron_layer.cpp
index cd73375..9c852a1 100644
@@ -61,7 +61,7 @@ TYPED_TEST(NeuronLayerTest, TestReLUGradientCPU) {
   Caffe::set_mode(Caffe::CPU);
   ReLULayer<TypeParam> layer(layer_param);
   GradientChecker<TypeParam> checker(1e-2, 1e-3, 1701, 0., 0.01);
-  checker.CheckGradientExhaustive(&layer, &(this->blob_bottom_vec_),
+  checker.CheckGradientEltwise(&layer, &(this->blob_bottom_vec_),
       &(this->blob_top_vec_));
 }
 
@@ -87,7 +87,7 @@ TYPED_TEST(NeuronLayerTest, TestReLUGradientGPU) {
   Caffe::set_mode(Caffe::GPU);
   ReLULayer<TypeParam> layer(layer_param);
   GradientChecker<TypeParam> checker(1e-2, 1e-3, 1701, 0., 0.01);
-  checker.CheckGradientExhaustive(&layer, &(this->blob_bottom_vec_),
+  checker.CheckGradientEltwise(&layer, &(this->blob_bottom_vec_),
       &(this->blob_top_vec_));
 }
 
@@ -115,7 +115,7 @@ TYPED_TEST(NeuronLayerTest, TestSigmoidGradientCPU) {
   Caffe::set_mode(Caffe::CPU);
   SigmoidLayer<TypeParam> layer(layer_param);
   GradientChecker<TypeParam> checker(1e-2, 1e-3, 1701, 0., 0.01);
-  checker.CheckGradientExhaustive(&layer, &(this->blob_bottom_vec_),
+  checker.CheckGradientEltwise(&layer, &(this->blob_bottom_vec_),
       &(this->blob_top_vec_));
 }
 
@@ -142,7 +142,7 @@ TYPED_TEST(NeuronLayerTest, TestSigmoidGradientGPU) {
   Caffe::set_mode(Caffe::GPU);
   SigmoidLayer<TypeParam> layer(layer_param);
   GradientChecker<TypeParam> checker(1e-2, 1e-3, 1701, 0., 0.01);
-  checker.CheckGradientExhaustive(&layer, &(this->blob_bottom_vec_),
+  checker.CheckGradientEltwise(&layer, &(this->blob_bottom_vec_),
       &(this->blob_top_vec_));
 }
 
@@ -172,7 +172,7 @@ TYPED_TEST(NeuronLayerTest, TestDropoutGradientCPU) {
   Caffe::set_mode(Caffe::CPU);
   DropoutLayer<TypeParam> layer(layer_param);
   GradientChecker<TypeParam> checker(1e-2, 1e-3);
-  checker.CheckGradientExhaustive(&layer, &(this->blob_bottom_vec_),
+  checker.CheckGradientEltwise(&layer, &(this->blob_bottom_vec_),
       &(this->blob_top_vec_));
 }
 
@@ -271,7 +271,7 @@ TYPED_TEST(NeuronLayerTest, TestBNLLGradientCPU) {
   Caffe::set_mode(Caffe::CPU);
   BNLLLayer<TypeParam> layer(layer_param);
   GradientChecker<TypeParam> checker(1e-2, 1e-3);
-  checker.CheckGradientExhaustive(&layer, &(this->blob_bottom_vec_),
+  checker.CheckGradientEltwise(&layer, &(this->blob_bottom_vec_),
       &(this->blob_top_vec_));
 }
 
@@ -297,7 +297,7 @@ TYPED_TEST(NeuronLayerTest, TestBNLLGradientGPU) {
   Caffe::set_mode(Caffe::GPU);
   BNLLLayer<TypeParam> layer(layer_param);
   GradientChecker<TypeParam> checker(1e-2, 1e-3);
-  checker.CheckGradientExhaustive(&layer, &(this->blob_bottom_vec_),
+  checker.CheckGradientEltwise(&layer, &(this->blob_bottom_vec_),
       &(this->blob_top_vec_));
 }
 
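(For reference, the five-argument checker constructions in these neuron-layer tests pass, in order, the finite-difference stepsize, the relative error threshold, the random seed, and a kink point plus kink range; gradients at inputs within the kink range of the kink are skipped, since e.g. ReLU is non-differentiable at 0. This reading of the parameters follows the GradientChecker constructor in test_gradient_check_util.hpp, which is not shown in this diff.)
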
diff --git a/src/caffe/test/test_power_layer.cpp b/src/caffe/test/test_power_layer.cpp
index 2101a41..4fab8af 100644
@@ -79,7 +79,7 @@ class PowerLayerTest : public ::testing::Test {
       }
     }
     GradientChecker<Dtype> checker(1e-2, 1e-2, 1701, 0., 0.01);
-    checker.CheckGradientExhaustive(&layer, &(this->blob_bottom_vec_),
+    checker.CheckGradientEltwise(&layer, &(this->blob_bottom_vec_),
         &(this->blob_top_vec_));
   }
 
diff --git a/src/caffe/test/test_split_layer.cpp b/src/caffe/test/test_split_layer.cpp
index 06f0f3f..327bcf9 100644
@@ -121,7 +121,7 @@ TYPED_TEST(SplitLayerTest, TestCPUGradient) {
   Caffe::set_mode(Caffe::CPU);
   SplitLayer<TypeParam> layer(layer_param);
   GradientChecker<TypeParam> checker(1e-2, 1e-2);
-  checker.CheckGradientExhaustive(&layer, &(this->blob_bottom_vec_),
+  checker.CheckGradientEltwise(&layer, &(this->blob_bottom_vec_),
       &(this->blob_top_vec_));
 }
 
@@ -130,7 +130,7 @@ TYPED_TEST(SplitLayerTest, TestGPUGradient) {
   Caffe::set_mode(Caffe::GPU);
   SplitLayer<TypeParam> layer(layer_param);
   GradientChecker<TypeParam> checker(1e-2, 1e-2);
-  checker.CheckGradientExhaustive(&layer, &(this->blob_bottom_vec_),
+  checker.CheckGradientEltwise(&layer, &(this->blob_bottom_vec_),
       &(this->blob_top_vec_));
 }
 
@@ -140,7 +140,7 @@ TYPED_TEST(SplitLayerTest, TestCPUGradientInPlace) {
   SplitLayer<TypeParam> layer(layer_param);
   GradientChecker<TypeParam> checker(1e-2, 1e-2);
   this->blob_top_vec_[0] = this->blob_bottom_vec_[0];
-  checker.CheckGradientExhaustive(&layer, &(this->blob_bottom_vec_),
+  checker.CheckGradientEltwise(&layer, &(this->blob_bottom_vec_),
       &(this->blob_top_vec_));
 }
 
@@ -150,7 +150,7 @@ TYPED_TEST(SplitLayerTest, TestGPUGradientInPlace) {
   SplitLayer<TypeParam> layer(layer_param);
   GradientChecker<TypeParam> checker(1e-2, 1e-2);
   this->blob_top_vec_[0] = this->blob_bottom_vec_[0];
-  checker.CheckGradientExhaustive(&layer, &(this->blob_bottom_vec_),
+  checker.CheckGradientEltwise(&layer, &(this->blob_bottom_vec_),
       &(this->blob_top_vec_));
 }
 
diff --git a/src/caffe/test/test_tanh_layer.cpp b/src/caffe/test/test_tanh_layer.cpp
index 82cb96c..9c9f8a7 100644
@@ -70,7 +70,7 @@ TYPED_TEST(TanHLayerTest, TestGradientCPU) {
   Caffe::set_mode(Caffe::CPU);
   TanHLayer<TypeParam> layer(layer_param);
   GradientChecker<TypeParam> checker(1e-2, 1e-3);
-  checker.CheckGradientExhaustive(&layer, &(this->blob_bottom_vec_),
+  checker.CheckGradientEltwise(&layer, &(this->blob_bottom_vec_),
       &(this->blob_top_vec_));
 }
 
@@ -102,7 +102,7 @@ TYPED_TEST(TanHLayerTest, TestGradientGPU) {
   Caffe::set_mode(Caffe::GPU);
   TanHLayer<TypeParam> layer(layer_param);
   GradientChecker<TypeParam> checker(1e-2, 1e-3);
-  checker.CheckGradientExhaustive(&layer, &(this->blob_bottom_vec_),
+  checker.CheckGradientEltwise(&layer, &(this->blob_bottom_vec_),
       &(this->blob_top_vec_));
 }