cleaning up some logging prints
author     Yangqing Jia <jiayq84@gmail.com>
           Wed, 9 Oct 2013 23:53:56 +0000 (16:53 -0700)
committer  Yangqing Jia <jiayq84@gmail.com>
           Wed, 9 Oct 2013 23:53:56 +0000 (16:53 -0700)
src/caffe/optimization/solver.cpp
src/caffe/test/test_gradient_check_util.hpp
src/caffe/test/test_net_proto.cpp

diff --git a/src/caffe/optimization/solver.cpp b/src/caffe/optimization/solver.cpp
index 3459cc4..324433f 100644
@@ -99,8 +99,8 @@ void SGDSolver<Dtype>::ComputeUpdateValue() {
   Dtype rate = GetLearningRate();
   Dtype momentum = this->param_.momentum();
   Dtype weight_decay = this->param_.weight_decay();
-  LOG(ERROR) << "rate:" << rate << " momentum:" << momentum
-      << " weight_decay:" << weight_decay;
+  // LOG(ERROR) << "rate:" << rate << " momentum:" << momentum
+  //     << " weight_decay:" << weight_decay;
   switch (Caffe::mode()) {
   case Caffe::CPU:
     for (int param_id = 0; param_id < net_params.size(); ++param_id) {
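For context, the values being silenced here feed the standard SGD-with-momentum
update that the switch below computes per parameter blob. A minimal standalone
sketch of that rule, assuming the usual L2 weight decay and a per-blob history
buffer (the names SgdStep and history are illustrative, not Caffe's actual
members):

    #include <vector>

    // One SGD-with-momentum step with L2 weight decay for a single
    // parameter blob; `history` holds the running momentum term.
    void SgdStep(std::vector<float>& param, const std::vector<float>& grad,
                 std::vector<float>& history, float rate, float momentum,
                 float weight_decay) {
      for (size_t k = 0; k < param.size(); ++k) {
        history[k] = momentum * history[k]
                   + rate * (grad[k] + weight_decay * param[k]);
        param[k] -= history[k];
      }
    }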
diff --git a/src/caffe/test/test_gradient_check_util.hpp b/src/caffe/test/test_gradient_check_util.hpp
index 4955394..4973483 100644
@@ -143,7 +143,7 @@ void GradientChecker<Dtype>::CheckGradientExhaustive(Layer<Dtype>& layer,
   layer.SetUp(bottom, &top);
   // LOG(ERROR) << "Exhaustive Mode.";
   for (int i = 0; i < top.size(); ++i) {
-    LOG(ERROR) << "Exhaustive: blob " << i << " size " << top[i]->count();
+    // LOG(ERROR) << "Exhaustive: blob " << i << " size " << top[i]->count();
     for (int j = 0; j < top[i]->count(); ++j) {
       // LOG(ERROR) << "Exhaustive: blob " << i << " data " << j;
       CheckGradientSingle(layer, bottom, top, check_bottom, i, j);
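CheckGradientSingle compares the layer's analytic gradient at one top element
against a numeric estimate. A minimal sketch of the centered finite-difference
idea behind such a check (the function and parameter names here are
hypothetical, not the checker's actual API):

    #include <cmath>
    #include <functional>

    // Estimate df/dx by perturbing x in both directions, then compare the
    // estimate against the analytic gradient within a tolerance.
    bool GradientMatches(const std::function<double(double)>& f, double x,
                         double analytic, double eps = 1e-3,
                         double tol = 1e-4) {
      const double numeric = (f(x + eps) - f(x - eps)) / (2.0 * eps);
      return std::fabs(numeric - analytic) <= tol;
    }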
diff --git a/src/caffe/test/test_net_proto.cpp b/src/caffe/test/test_net_proto.cpp
index 642ce30..38b5b68 100644
@@ -38,6 +38,7 @@ TYPED_TEST(NetProtoTest, TestSetup) {
   EXPECT_EQ(caffe_net.layer_names().size(), 10);
   EXPECT_EQ(caffe_net.blob_names().size(), 10);
 
+  /*
   // Print a few statistics to see if things are correct
   for (int i = 0; i < caffe_net.blobs().size(); ++i) {
     LOG(ERROR) << "Blob: " << caffe_net.blob_names()[i];
@@ -46,13 +47,14 @@ TYPED_TEST(NetProtoTest, TestSetup) {
         << caffe_net.blobs()[i]->height() << ", "
         << caffe_net.blobs()[i]->width();
   }
+  */
   Caffe::set_mode(Caffe::CPU);
   // Run the network without training.
   LOG(ERROR) << "Performing Forward";
   caffe_net.Forward(bottom_vec);
   LOG(ERROR) << "Performing Backward";
   LOG(ERROR) << caffe_net.Backward();
-  
+
   Caffe::set_mode(Caffe::GPU);
   // Run the network without training.
   LOG(ERROR) << "Performing Forward";
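As a hedged aside (the commit itself simply comments the lines out): an
alternative to disabling diagnostics this way is glog's VLOG, which keeps the
statements in the tree but silences them unless verbosity is raised at
runtime, e.g. with --v=1 or GLOG_v=1. The function name below is illustrative:

    #include <glog/logging.h>

    // Verbose-only diagnostics: compiled in, but printed only when the
    // process runs with --v=1 (or GLOG_v=1 in the environment).
    void LogSolverParams(double rate, double momentum, double weight_decay) {
      VLOG(1) << "rate:" << rate << " momentum:" << momentum
              << " weight_decay:" << weight_decay;
    }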