From: Yangqing Jia
Date: Tue, 24 Sep 2013 22:55:25 +0000 (-0700)
Subject: cpplint
X-Git-Tag: submit/tizen/20180823.020014~1007
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=9ff84bf68b46d72b3e61486bbb2c2121bf224d2e;p=platform%2Fupstream%2Fcaffeonacl.git

cpplint
---

diff --git a/src/caffe/common.hpp b/src/caffe/common.hpp
index c9b9f65..67177a6 100644
--- a/src/caffe/common.hpp
+++ b/src/caffe/common.hpp
@@ -7,7 +7,7 @@
 #include <cublas_v2.h>
 #include <cuda.h>
 #include <curand.h>
-//cuda driver types
+// cuda driver types
 #include <driver_types.h>
 #include <glog/logging.h>
 #include <mkl_vsl.h>
diff --git a/src/caffe/syncedmem.cpp b/src/caffe/syncedmem.cpp
index 8e6996d..c991d8d 100644
--- a/src/caffe/syncedmem.cpp
+++ b/src/caffe/syncedmem.cpp
@@ -1,8 +1,9 @@
 // Copyright 2013 Yangqing Jia

-#include <cuda_runtime.h>
 #include <cstring>
+#include <cuda_runtime.h>
+
 #include "caffe/common.hpp"
 #include "caffe/syncedmem.hpp"

diff --git a/src/caffe/vision_layers.hpp b/src/caffe/vision_layers.hpp
index 4744a95..336ed0f 100644
--- a/src/caffe/vision_layers.hpp
+++ b/src/caffe/vision_layers.hpp
@@ -5,6 +5,8 @@

 #include <leveldb/db.h>

+#include <vector>
+
 #include "caffe/layer.hpp"

 namespace caffe {
@@ -15,7 +17,7 @@ template <typename Dtype>
 class NeuronLayer : public Layer<Dtype> {
  public:
   explicit NeuronLayer(const LayerParameter& param)
-      : Layer<Dtype>(param) {};
+      : Layer<Dtype>(param) {}
   virtual void SetUp(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
 };
@@ -25,7 +27,8 @@ template <typename Dtype>
 class ReLULayer : public NeuronLayer<Dtype> {
  public:
   explicit ReLULayer(const LayerParameter& param)
-      : NeuronLayer<Dtype>(param) {};
+      : NeuronLayer<Dtype>(param) {}
+
  protected:
   virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
@@ -43,9 +46,10 @@ template <typename Dtype>
 class DropoutLayer : public NeuronLayer<Dtype> {
  public:
   explicit DropoutLayer(const LayerParameter& param)
-      : NeuronLayer<Dtype>(param) {};
+      : NeuronLayer<Dtype>(param) {}
   virtual void SetUp(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
+
  protected:
   virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
@@ -67,9 +71,10 @@ template <typename Dtype>
 class InnerProductLayer : public Layer<Dtype> {
  public:
   explicit InnerProductLayer(const LayerParameter&
      param)
-      : Layer<Dtype>(param) {};
+      : Layer<Dtype>(param) {}
   virtual void SetUp(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
+
  protected:
   virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
@@ -91,9 +96,10 @@ template <typename Dtype>
 class PaddingLayer : public Layer<Dtype> {
  public:
   explicit PaddingLayer(const LayerParameter& param)
-      : Layer<Dtype>(param) {};
+      : Layer<Dtype>(param) {}
   virtual void SetUp(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
+
  protected:
   virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
@@ -116,9 +122,10 @@ template <typename Dtype>
 class LRNLayer : public Layer<Dtype> {
  public:
   explicit LRNLayer(const LayerParameter& param)
-      : Layer<Dtype>(param) {};
+      : Layer<Dtype>(param) {}
   virtual void SetUp(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
+
  protected:
   virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
@@ -144,9 +151,10 @@ template <typename Dtype>
 class Im2colLayer : public Layer<Dtype> {
  public:
   explicit Im2colLayer(const LayerParameter& param)
-      : Layer<Dtype>(param) {};
+      : Layer<Dtype>(param) {}
   virtual void SetUp(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
+
  protected:
   virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
@@ -167,9 +175,10 @@ template <typename Dtype>
 class PoolingLayer : public Layer<Dtype> {
  public:
   explicit PoolingLayer(const LayerParameter& param)
-      : Layer<Dtype>(param) {};
+      : Layer<Dtype>(param) {}
   virtual void SetUp(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
+
  protected:
   virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
@@ -192,9 +201,10 @@ template <typename Dtype>
 class ConvolutionLayer : public Layer<Dtype> {
  public:
   explicit ConvolutionLayer(const LayerParameter& param)
-      : Layer<Dtype>(param) {};
+      : Layer<Dtype>(param) {}
   virtual void SetUp(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
+
  protected:
   virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
@@ -226,9 +236,10 @@ template <typename Dtype>
 class DataLayer : public Layer<Dtype> {
  public:
   explicit DataLayer(const LayerParameter& param)
-      : Layer<Dtype>(param) {};
+      : Layer<Dtype>(param) {}
   virtual void SetUp(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);
+
  protected:
   virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
       vector<Blob<Dtype>*>* top);