slightly relax batch norm check
author     Benedikt Wilbertz <benedikt.wilbertz@gmx.de>
           Thu, 29 Sep 2016 19:55:58 +0000 (21:55 +0200)
committer  Benedikt Wilbertz <benedikt.wilbertz@gmx.de>
           Thu, 29 Sep 2016 19:55:58 +0000 (21:55 +0200)
diff --git a/src/caffe/layers/batch_norm_layer.cpp b/src/caffe/layers/batch_norm_layer.cpp
index 0b1037e..e661abb 100644
--- a/src/caffe/layers/batch_norm_layer.cpp
+++ b/src/caffe/layers/batch_norm_layer.cpp
@@ -36,11 +36,15 @@ void BatchNormLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
   }
   // Mask statistics from optimization by setting local learning rates
   // for mean, variance, and the bias correction to zero.
-  CHECK_EQ(this->layer_param_.param_size(), 0)
-      << "Cannot configure batch normalization statistics as layer parameters.";
   for (int i = 0; i < this->blobs_.size(); ++i) {
-    ParamSpec* fixed_param_spec = this->layer_param_.add_param();
-    fixed_param_spec->set_lr_mult(0.);
+    if (this->layer_param_.param_size() == i) {
+      ParamSpec* fixed_param_spec = this->layer_param_.add_param();
+      fixed_param_spec->set_lr_mult(0.f);
+    } else {
+      CHECK_EQ(this->layer_param_.param(i).lr_mult(), 0.f)
+          << "Cannot configure batch normalization statistics as layer "
+          << "parameters.";
+    }
   }
 }
 
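
What the relaxed check permits, sketched as a hypothetical prototxt layer (the layer and blob names below are made up, not from the patch): before this change, any param entry on a BatchNorm layer tripped CHECK_EQ(this->layer_param_.param_size(), 0); afterwards, explicitly pinning the three statistic blobs (mean, variance, bias correction) with lr_mult: 0 is accepted, while a nonzero lr_mult still fails the per-blob CHECK_EQ.

    layer {
      name: "conv1/bn"   # hypothetical layer and blob names
      type: "BatchNorm"
      bottom: "conv1"
      top: "conv1"
      # One param entry per statistic blob: mean, variance, bias correction.
      # lr_mult: 0 now passes the relaxed check; any nonzero value still fails.
      param { lr_mult: 0 }
      param { lr_mult: 0 }
      param { lr_mult: 0 }
    }

A partially specified param list also works: for each blob index at or beyond param_size(), the loop appends a ParamSpec with lr_mult set to 0, exactly as before the patch.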