Test that DropoutLayer obeys dropout_ratio.
author     Jeff Donahue <jeff.donahue@gmail.com>
Sat, 2 Aug 2014 02:10:16 +0000 (19:10 -0700)
committer  Jeff Donahue <jeff.donahue@gmail.com>
Sat, 2 Aug 2014 02:18:35 +0000 (19:18 -0700)
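
The new TestDropoutForward helper treats each element of the bottom blob as a
Bernoulli trial: it counts how many outputs were kept, forms the empirical
dropout ratio, and only fails if that ratio falls outside a 95% confidence band
(1.96 standard errors, sqrt(p * (1 - p) / n)) around the requested ratio. A
minimal standalone sketch of that acceptance check follows -- it is not part of
this patch, and the helper name and sample counts are made up for illustration:

    #include <cmath>
    #include <cstdio>

    // Standalone illustration (not Caffe code): accept the observed dropout
    // ratio if it lies within 1.96 standard errors of the target ratio.
    bool DropoutRatioPlausible(double dropout_ratio, int num_kept, int count) {
      const double std_error =
          std::sqrt(dropout_ratio * (1.0 - dropout_ratio) / count);
      const double empirical = 1.0 - static_cast<double>(num_kept) / count;
      return std::fabs(empirical - dropout_ratio) <= 1.96 * std_error;
    }

    int main() {
      // The test blob holds 2 * 3 * 4 * 5 = 120 elements, so for ratio 0.5 the
      // band is 0.5 +/- 1.96 * sqrt(0.25 / 120), roughly 0.5 +/- 0.09.
      std::printf("%d\n", DropoutRatioPlausible(0.5, 55, 120));  // 1: within band
      std::printf("%d\n", DropoutRatioPlausible(0.5, 90, 120));  // 0: outside band
      return 0;
    }
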
src/caffe/test/test_neuron_layer.cpp

index 7a7af31..b3a536a 100644
@@ -17,6 +17,7 @@ namespace caffe {
 template <typename TypeParam>
 class NeuronLayerTest : public MultiDeviceTest<TypeParam> {
   typedef typename TypeParam::Dtype Dtype;
+
  protected:
   NeuronLayerTest()
       : blob_bottom_(new Blob<Dtype>(2, 3, 4, 5)),
@@ -34,6 +35,38 @@ class NeuronLayerTest : public MultiDeviceTest<TypeParam> {
   Blob<Dtype>* const blob_top_;
   vector<Blob<Dtype>*> blob_bottom_vec_;
   vector<Blob<Dtype>*> blob_top_vec_;
+
+  void TestDropoutForward(const float dropout_ratio) {
+    LayerParameter layer_param;
+    // Fill in the given dropout_ratio, unless it's 0.5, in which case we don't
+    // set it explicitly to test that 0.5 is the default.
+    if (dropout_ratio != 0.5) {
+      layer_param.mutable_dropout_param()->set_dropout_ratio(dropout_ratio);
+    }
+    Caffe::set_phase(Caffe::TRAIN);
+    DropoutLayer<Dtype> layer(layer_param);
+    layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
+    layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
+    // Now, check values
+    const Dtype* bottom_data = this->blob_bottom_->cpu_data();
+    const Dtype* top_data = this->blob_top_->cpu_data();
+    float scale = 1. / (1. - layer_param.dropout_param().dropout_ratio());
+    const int count = this->blob_bottom_->count();
+    // Initialize num_kept to count the number of inputs NOT dropped out.
+    int num_kept = 0;
+    for (int i = 0; i < count; ++i) {
+      if (top_data[i] != 0) {
+        ++num_kept;
+        EXPECT_EQ(top_data[i], bottom_data[i] * scale);
+      }
+    }
+    const Dtype std_error = sqrt(dropout_ratio * (1 - dropout_ratio) / count);
+    // Fail if the empirical dropout ratio is more than 1.96 * std_error away
+    // from dropout_ratio -- i.e., require 95% confidence that the layer is
+    // not obeying the given dropout_ratio before declaring a test failure.
+    const Dtype empirical_dropout_ratio = 1 - num_kept / Dtype(count);
+    EXPECT_NEAR(empirical_dropout_ratio, dropout_ratio, 1.96 * std_error);
+  }
 };
 
 TYPED_TEST_CASE(NeuronLayerTest, TestDtypesAndDevices);
@@ -114,22 +147,14 @@ TYPED_TEST(NeuronLayerTest, TestSigmoidGradient) {
       &(this->blob_top_vec_));
 }
 
-TYPED_TEST(NeuronLayerTest, TestDropout) {
-  typedef typename TypeParam::Dtype Dtype;
-  LayerParameter layer_param;
-  Caffe::set_phase(Caffe::TRAIN);
-  DropoutLayer<Dtype> layer(layer_param);
-  layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
-  layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
-  // Now, check values
-  const Dtype* bottom_data = this->blob_bottom_->cpu_data();
-  const Dtype* top_data = this->blob_top_->cpu_data();
-  float scale = 1. / (1. - layer_param.dropout_param().dropout_ratio());
-  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
-    if (top_data[i] != 0) {
-      EXPECT_EQ(top_data[i], bottom_data[i] * scale);
-    }
-  }
+TYPED_TEST(NeuronLayerTest, TestDropoutHalf) {
+  const float kDropoutRatio = 0.5;
+  this->TestDropoutForward(kDropoutRatio);
+}
+
+TYPED_TEST(NeuronLayerTest, TestDropoutThreeQuarters) {
+  const float kDropoutRatio = 0.75;
+  this->TestDropoutForward(kDropoutRatio);
 }
 
 TYPED_TEST(NeuronLayerTest, TestDropoutTestPhase) {