Fix cpplint errors for Net, its tests, and 3 feature-related examples
authorKai Li <kaili_kloud@163.com>
Wed, 26 Feb 2014 14:07:48 +0000 (22:07 +0800)
committerKai Li <kaili_kloud@163.com>
Wed, 19 Mar 2014 15:04:42 +0000 (23:04 +0800)
src/caffe/net.cpp
src/caffe/test/test_math_functions.cpp
src/caffe/test/test_net.cpp
tools/binarize_features.cpp
tools/extract_features.cpp
tools/retrieve_images.cpp

index 5e06dce..c979a96 100644 (file)
@@ -339,13 +339,14 @@ bool Net<Dtype>::has_blob(const string& blob_name) {
 }
 
 template <typename Dtype>
-const shared_ptr<Blob<Dtype> > Net<Dtype>::blob_by_name(const string& blob_name) {
+const shared_ptr<Blob<Dtype> > Net<Dtype>::blob_by_name(
+    const string& blob_name) {
   shared_ptr<Blob<Dtype> > blob_ptr;
   if (has_blob(blob_name)) {
     blob_ptr = blobs_[blob_names_index_[blob_name]];
   } else {
     blob_ptr.reset((Blob<Dtype>*)(NULL));
-    LOG(ERROR) << "Unknown blob name " << blob_name;
+    LOG(WARNING) << "Unknown blob name " << blob_name;
   }
   return blob_ptr;
 }
@@ -356,13 +357,14 @@ bool Net<Dtype>::has_layer(const string& layer_name) {
 }
 
 template <typename Dtype>
-const shared_ptr<Layer<Dtype> > Net<Dtype>::layer_by_name(const string& layer_name) {
+const shared_ptr<Layer<Dtype> > Net<Dtype>::layer_by_name(
+    const string& layer_name) {
   shared_ptr<Layer<Dtype> > layer_ptr;
   if (has_layer(layer_name)) {
     layer_ptr = layers_[layer_names_index_[layer_name]];
   } else {
     layer_ptr.reset((Layer<Dtype>*)(NULL));
-    LOG(ERROR) << "Unknown layer name " << layer_name;
+    LOG(WARNING) << "Unknown layer name " << layer_name;
   }
   return layer_ptr;
 }
index 0e313ee..45d43cc 100644 (file)
@@ -1,6 +1,6 @@
 // Copyright 2014 kloudkl@github
 
-#include <stdint.h> // for uint32_t & uint64_t
+#include <stdint.h>  // for uint32_t & uint64_t
 
 #include "gtest/gtest.h"
 #include "caffe/blob.hpp"
@@ -66,7 +66,7 @@ REF_HAMMING_DIST(double, uint64_t);
 typedef ::testing::Types<float, double> Dtypes;
 TYPED_TEST_CASE(MathFunctionsTest, Dtypes);
 
-TYPED_TEST(MathFunctionsTest, TestHammingDistance){
+TYPED_TEST(MathFunctionsTest, TestHammingDistance) {
   int n = this->blob_bottom_->count();
   const TypeParam* x = this->blob_bottom_->cpu_data();
   const TypeParam* y = this->blob_top_->cpu_data();
@@ -74,4 +74,4 @@ TYPED_TEST(MathFunctionsTest, TestHammingDistance){
            caffe_hamming_distance<TypeParam>(n, x, y));
 }
 
-}
+}  // namespace caffe
index 0cd39b4..fd7265c 100644 (file)
@@ -1,8 +1,9 @@
 // Copyright 2014 kloudkl@github
 
-#include <sstream>
 #include <google/protobuf/text_format.h>
 #include <leveldb/db.h>
+#include <sstream>
+#include <string>
 
 #include "gtest/gtest.h"
 #include "caffe/common.hpp"
@@ -18,10 +19,10 @@ template <typename Dtype>
 class NetTest : public ::testing::Test {
  protected:
   NetTest() : filename(NULL) {
-  };
-  virtual void SetUp() {
-    // Create the leveldb
-    filename = tmpnam(NULL); // get temp name
+  }
+
+  virtual void SetUp() {  // Create the leveldb
+    filename = tmpnam(NULL);  // get temp name
     LOG(INFO) << "Using temporary leveldb " << filename;
     leveldb::DB* db;
     leveldb::Options options;
@@ -50,8 +51,7 @@ class NetTest : public ::testing::Test {
         "layers: { "
         "  layer { "
         "    name: 'data' "
-        "    type: 'data' "
-        ;
+        "    type: 'data' ";
     const string& proto_suffix =
         "    batchsize: 1 "
         "  } "
@@ -86,8 +86,7 @@ class NetTest : public ::testing::Test {
         "  } "
         "  bottom: 'innerproduct' "
         "  bottom: 'label' "
-        "} "
-        ;
+        "} ";
     proto = proto_prefix + "source: '" + string(this->filename) +
         "' " + proto_suffix;
   }
@@ -146,5 +145,4 @@ TYPED_TEST(NetTest, TestGetLayerByName) {
   EXPECT_FALSE(net.layer_by_name("label"));
 }
 
-
-}
+}  // namespace caffe
index 881755a..e15e125 100644 (file)
@@ -1,8 +1,10 @@
 // Copyright 2014 kloudkl@github
 
-#include <cmath> // for std::signbit
 #include <cuda_runtime.h>
 #include <google/protobuf/text_format.h>
+#include <cmath>  // for std::signbit
+#include <string>
+#include <vector>
 
 #include "caffe/blob.hpp"
 #include "caffe/common.hpp"
@@ -11,7 +13,7 @@
 #include "caffe/proto/caffe.pb.h"
 #include "caffe/util/io.hpp"
 
-using namespace caffe;
+using namespace caffe;  // NOLINT(build/namespaces)
 
 template<typename Dtype>
 void binarize(const vector<shared_ptr<Blob<Dtype> > >& feature_blob_vector,
@@ -31,8 +33,9 @@ int features_binarization_pipeline(int argc, char** argv) {
   if (argc < num_required_args) {
     LOG(ERROR)<<
     "This program compresses real valued features into compact binary codes.\n"
-    "Usage: demo_binarize_features  real_valued_feature_prototxt  feature_blob_name"
-    "  save_binarized_feature_binaryproto_file  num_mini_batches  [CPU/GPU]  [DEVICE_ID=0]";
+    "Usage: demo_binarize_features  real_valued_feature_prototxt"
+    "  feature_blob_name  save_binarized_feature_binaryproto_file"
+    "  num_mini_batches  [CPU/GPU]  [DEVICE_ID=0]";
     return 1;
   }
   int arg_pos = num_required_args;
@@ -57,7 +60,8 @@ int features_binarization_pipeline(int argc, char** argv) {
 
   arg_pos = 0;  // the name of the executable
 
-  // Expected prototxt contains at least one data layer as the real valued features.
+  // Expected prototxt contains at least one data layer as the real valued
+  // features.
   /*
    layers {
    layer {
@@ -79,8 +83,8 @@ int features_binarization_pipeline(int argc, char** argv) {
 
   string feature_blob_name(argv[++arg_pos]);
   CHECK(real_valued_feature_net->has_blob(feature_blob_name))
-      << "Unknown feature blob name " << feature_blob_name << " in the network "
-      << real_valued_feature_prototxt;
+      << "Unknown feature blob name " << feature_blob_name
+      << " in the network " << real_valued_feature_prototxt;
 
   string save_binarized_feature_binaryproto_file(argv[++arg_pos]);
 
@@ -101,11 +105,13 @@ int features_binarization_pipeline(int argc, char** argv) {
   BlobProto blob_proto;
   feature_binary_codes->ToProto(&blob_proto);
   WriteProtoToBinaryFile(blob_proto, save_binarized_feature_binaryproto_file);
-  LOG(ERROR)<< "Successfully binarized " << feature_binary_codes->num() << " features!";
+  LOG(ERROR) << "Successfully binarized " << feature_binary_codes->num()
+      << " features!";
   return 0;
 }
 
-// http://scikit-learn.org/stable/modules/preprocessing.html#feature-binarization
+// http://scikit-learn.org/stable/modules/preprocessing.html
+//   #feature-binarization
 template<typename Dtype>
 void binarize(const vector<shared_ptr<Blob<Dtype> > >& feature_blob_vector,
               shared_ptr<Blob<Dtype> > binary_codes) {
index 1902aad..0766eea 100644 (file)
@@ -1,10 +1,12 @@
 // Copyright 2014 kloudkl@github
 
-#include <stdio.h> // for snprintf
+#include <stdio.h>  // for snprintf
 #include <cuda_runtime.h>
 #include <google/protobuf/text_format.h>
 #include <leveldb/db.h>
 #include <leveldb/write_batch.h>
+#include <string>
+#include <vector>
 
 #include "caffe/blob.hpp"
 #include "caffe/common.hpp"
@@ -13,7 +15,7 @@
 #include "caffe/proto/caffe.pb.h"
 #include "caffe/util/io.hpp"
 
-using namespace caffe;
+using namespace caffe;  // NOLINT(build/namespaces)
 
 template<typename Dtype>
 int feature_extraction_pipeline(int argc, char** argv);
@@ -89,7 +91,6 @@ int feature_extraction_pipeline(int argc, char** argv) {
    }
    */
   NetParameter feature_extraction_net_param;
-  ;
   string feature_extraction_proto(argv[++arg_pos]);
   ReadProtoFromTextFile(feature_extraction_proto,
                         &feature_extraction_net_param);
@@ -120,8 +121,8 @@ int feature_extraction_pipeline(int argc, char** argv) {
 
   Datum datum;
   leveldb::WriteBatch* batch = new leveldb::WriteBatch();
-  const int max_key_str_length = 100;
-  char key_str[max_key_str_length];
+  const int kMaxKeyStrLength = 100;
+  char key_str[kMaxKeyStrLength];
   int num_bytes_of_binary_code = sizeof(Dtype);
   vector<Blob<float>*> input_vec;
   int image_index = 0;
@@ -138,18 +139,20 @@ int feature_extraction_pipeline(int argc, char** argv) {
       datum.set_channels(1);
       datum.clear_data();
       datum.clear_float_data();
-      feature_blob_data = feature_blob->mutable_cpu_data() + feature_blob->offset(n);
+      feature_blob_data = feature_blob->mutable_cpu_data() +
+          feature_blob->offset(n);
       for (int d = 0; d < dim_features; ++d) {
         datum.add_float_data(feature_blob_data[d]);
       }
       string value;
       datum.SerializeToString(&value);
-      snprintf(key_str, max_key_str_length, "%d", image_index);
+      snprintf(key_str, kMaxKeyStrLength, "%d", image_index);
       batch->Put(string(key_str), value);
       ++image_index;
       if (image_index % 1000 == 0) {
         db->Write(leveldb::WriteOptions(), batch);
-        LOG(ERROR)<< "Extracted features of " << image_index << " query images.";
+        LOG(ERROR)<< "Extracted features of " << image_index <<
+            " query images.";
         delete batch;
         batch = new leveldb::WriteBatch();
       }
@@ -158,7 +161,8 @@ int feature_extraction_pipeline(int argc, char** argv) {
   // write the last batch
   if (image_index % 1000 != 0) {
     db->Write(leveldb::WriteOptions(), batch);
-    LOG(ERROR)<< "Extracted features of " << image_index << " query images.";
+    LOG(ERROR)<< "Extracted features of " << image_index <<
+        " query images.";
     delete batch;
     batch = new leveldb::WriteBatch();
   }
index f339981..dddff69 100644 (file)
@@ -1,9 +1,12 @@
 // Copyright 2014 kloudkl@github
 
-#include <fstream> // for std::ofstream
-#include <queue> // for std::priority_queue
 #include <cuda_runtime.h>
 #include <google/protobuf/text_format.h>
+#include <stdio.h>
+#include <queue>  // for std::priority_queue
+#include <string>
+#include <utility>  // for pair
+#include <vector>
 
 #include "caffe/blob.hpp"
 #include "caffe/common.hpp"
@@ -13,7 +16,7 @@
 #include "caffe/util/io.hpp"
 #include "caffe/util/math_functions.hpp"
 
-using namespace caffe;
+using namespace caffe;  // NOLINT(build/namespaces)
 
 template<typename Dtype>
 void similarity_search(
@@ -92,8 +95,8 @@ int image_retrieval_pipeline(int argc, char** argv) {
 
   string save_retrieval_result_filename(argv[++arg_pos]);
   LOG(ERROR)<< "Opening result file " << save_retrieval_result_filename;
-  std::ofstream retrieval_result_ofs(save_retrieval_result_filename.c_str(),
-                                     std::ofstream::out);
+  FILE * result_fileid = fopen(save_retrieval_result_filename.c_str(),
+                                         "w");
 
   LOG(ERROR)<< "Retrieving images";
   vector<vector<std::pair<int, int> > > retrieval_results;
@@ -104,16 +107,18 @@ int image_retrieval_pipeline(int argc, char** argv) {
                            &retrieval_results);
   int num_results = retrieval_results.size();
   for (int i = 0; i < num_results; ++i) {
-    retrieval_result_ofs << query_image_index++;
+    fprintf(result_fileid, "%d", query_image_index++);
     for (int j = 0; j < retrieval_results[i].size(); ++j) {
-      retrieval_result_ofs << " " << retrieval_results[i][j].first << ":"
-                           << retrieval_results[i][j].second;
+      fprintf(result_fileid, " %d:%d", retrieval_results[i][j].first,
+              retrieval_results[i][j].second);
     }
-    retrieval_result_ofs << "\n";
+    fprintf(result_fileid, "\n");
   }
-
-  retrieval_result_ofs.close();
-  LOG(ERROR)<< "Successfully retrieved similar images for " << num_results << " queries!";
+  if (result_fileid != NULL) {
+    fclose(result_fileid);
+  }
+  LOG(ERROR) << "Successfully retrieved similar images for " << num_results
+      << " queries!";
   return 0;
 }
 
@@ -134,7 +139,8 @@ void similarity_search(
   int num_samples = sample_images_feature_blob->num();
   int num_queries = query_binary_feature_blob->num();
   int dim = query_binary_feature_blob->count() / num_queries;
-  LOG(ERROR)<< "num_samples " << num_samples << ", num_queries " << num_queries << ", dim " << dim;
+  LOG(ERROR)<< "num_samples " << num_samples << ", num_queries " <<
+  num_queries << ", dim " << dim;
   int hamming_dist;
   int neighbor_index;
   retrieval_results->resize(num_queries);
@@ -152,7 +158,8 @@ void similarity_search(
       hamming_dist = caffe_hamming_distance(dim, query_data, sample_data);
       if (results.size() < top_k_results) {
         results.push(std::make_pair(-hamming_dist, k));
-      } else if (-hamming_dist > results.top().first) {  // smaller hamming dist, nearer neighbor
+      } else if (-hamming_dist > results.top().first) {
+        // smaller hamming dist, nearer neighbor
         results.pop();
         results.push(std::make_pair(-hamming_dist, k));
       }
@@ -161,7 +168,8 @@ void similarity_search(
     for (int k = results.size() - 1; k >= 0; --k) {
       hamming_dist = -results.top().first;
       neighbor_index = results.top().second;
-      retrieval_results->at(i)[k] = std::make_pair<int, int>(neighbor_index, hamming_dist);
+      retrieval_results->at(i)[k] = std::make_pair(neighbor_index,
+                                                   hamming_dist);
       results.pop();
     }
   }  // for (int i = 0; i < num_queries; ++i) {