From dbd83195206f02bf3996cc5d97894571e7c26756 Mon Sep 17 00:00:00 2001
From: Jonathan L Long
Date: Mon, 18 May 2015 00:45:26 -0700
Subject: [PATCH] clean up redundant message comments

---
 src/caffe/proto/caffe.proto | 33 +++------------------------------
 1 file changed, 3 insertions(+), 30 deletions(-)

diff --git a/src/caffe/proto/caffe.proto b/src/caffe/proto/caffe.proto
index 580e36e..307015f 100644
--- a/src/caffe/proto/caffe.proto
+++ b/src/caffe/proto/caffe.proto
@@ -368,7 +368,9 @@ message LossParameter {
   optional bool normalize = 2 [default = true];
 }
 
-// Message that stores parameters used by AccuracyLayer
+// Messages that store parameters used by individual layer types follow, in
+// alphabetical order.
+
 message AccuracyParameter {
   // When computing accuracy, count as correct by comparing the true label to
   // the top k scoring classes. By default, only compare to the top scoring
@@ -386,14 +388,12 @@ message AccuracyParameter {
   optional int32 ignore_label = 3;
 }
 
-// Message that stores parameters used by ArgMaxLayer
 message ArgMaxParameter {
   // If true produce pairs (argmax, maxval)
   optional bool out_max_val = 1 [default = false];
   optional uint32 top_k = 2 [default = 1];
 }
 
-// Message that stores parameters used by ConcatLayer
 message ConcatParameter {
   // The axis along which to concatenate -- may be negative to index from the
   // end (e.g., -1 for the last axis). Other axes must have the
@@ -405,7 +405,6 @@ message ConcatParameter {
   optional uint32 concat_dim = 1 [default = 1];
 }
 
-// Message that stores parameters used by ContrastiveLossLayer
 message ContrastiveLossParameter {
   // margin for dissimilar pair
   optional float margin = 1 [default = 1.0];
@@ -418,7 +417,6 @@ message ContrastiveLossParameter {
   optional bool legacy_version = 2 [default = false];
 }
 
-// Message that stores parameters used by ConvolutionLayer
 message ConvolutionParameter {
   optional uint32 num_output = 1; // The number of outputs for the layer
   optional bool bias_term = 2 [default = true]; // whether to have bias terms
@@ -444,7 +442,6 @@ message ConvolutionParameter {
   optional Engine engine = 15 [default = DEFAULT];
 }
 
-// Message that stores parameters used by DataLayer
 message DataParameter {
   enum DB {
     LEVELDB = 0;
@@ -475,12 +472,10 @@ message DataParameter {
   optional bool force_encoded_color = 9 [default = false];
 }
 
-// Message that stores parameters used by DropoutLayer
 message DropoutParameter {
   optional float dropout_ratio = 1 [default = 0.5]; // dropout ratio
 }
 
-// Message that stores parameters used by DummyDataLayer.
 // DummyDataLayer fills any number of arbitrarily shaped blobs with random
 // (or constant) data generated by "Fillers" (see "message FillerParameter").
 message DummyDataParameter {
@@ -500,7 +495,6 @@ message DummyDataParameter {
   repeated uint32 width = 5;
 }
 
-// Message that stores parameters used by EltwiseLayer
 message EltwiseParameter {
   enum EltwiseOp {
     PROD = 0;
@@ -515,7 +509,6 @@ message EltwiseParameter {
   optional bool stable_prod_grad = 3 [default = true];
 }
 
-// Message that stores parameters used by ExpLayer
 message ExpParameter {
   // ExpLayer computes outputs y = base ^ (shift + scale * x), for base > 0.
   // Or if base is set to the default (-1), base is set to e,
@@ -525,7 +518,6 @@ message ExpParameter {
   optional float shift = 3 [default = 0.0];
 }
 
-// Message that stores parameters used by HDF5DataLayer
 message HDF5DataParameter {
   // Specify the data source.
   optional string source = 1;
@@ -540,7 +532,6 @@ message HDF5DataParameter {
   optional bool shuffle = 3 [default = false];
 }
 
-// Message that stores parameters used by HDF5OutputLayer
 message HDF5OutputParameter {
   optional string file_name = 1;
 }
@@ -554,7 +545,6 @@ message HingeLossParameter {
   optional Norm norm = 1 [default = L1];
 }
 
-// Message that stores parameters used by ImageDataLayer
 message ImageDataParameter {
   // Specify the data source.
   optional string source = 1;
@@ -586,13 +576,11 @@ message ImageDataParameter {
   optional string root_folder = 12 [default = ""];
 }
 
-// Message that stores parameters InfogainLossLayer
 message InfogainLossParameter {
   // Specify the infogain matrix source.
   optional string source = 1;
 }
 
-// Message that stores parameters used by InnerProductLayer
 message InnerProductParameter {
   optional uint32 num_output = 1; // The number of outputs for the layer
   optional bool bias_term = 2 [default = true]; // whether to have bias terms
@@ -605,7 +593,6 @@ message InnerProductParameter {
   optional int32 axis = 5 [default = 1];
 }
 
-// Message that stores parameters used by LRNLayer
 message LRNParameter {
   optional uint32 local_size = 1 [default = 5];
   optional float alpha = 2 [default = 1.];
@@ -618,7 +605,6 @@ message LRNParameter {
   optional float k = 5 [default = 1.];
 }
 
-// Message that stores parameters used by MemoryDataLayer
 message MemoryDataParameter {
   optional uint32 batch_size = 1;
   optional uint32 channels = 2;
@@ -626,7 +612,6 @@ message MemoryDataParameter {
   optional uint32 width = 4;
 }
 
-// Message that stores parameters used by MVNLayer
 message MVNParameter {
   // This parameter can be set to false to normalize mean only
   optional bool normalize_variance = 1 [default = true];
@@ -638,7 +623,6 @@ message MVNParameter {
   optional float eps = 3 [default = 1e-9];
 }
 
-// Message that stores parameters used by PoolingLayer
 message PoolingParameter {
   enum PoolMethod {
     MAX = 0;
@@ -668,7 +652,6 @@ message PoolingParameter {
   optional bool global_pooling = 12 [default = false];
 }
 
-// Message that stores parameters used by PowerLayer
 message PowerParameter {
   // PowerLayer computes outputs y = (shift + scale * x) ^ power.
   optional float power = 1 [default = 1.0];
@@ -676,13 +659,11 @@ message PowerParameter {
   optional float shift = 3 [default = 0.0];
 }
 
-// Message that stores parameters used by PythonLayer
 message PythonParameter {
   optional string module = 1;
   optional string layer = 2;
 }
 
-// Message that stores parameters used by ReLULayer
 message ReLUParameter {
   // Allow non-zero slope for negative inputs to speed up optimization
   // Described in:
@@ -698,7 +679,6 @@ message ReLUParameter {
   optional Engine engine = 2 [default = DEFAULT];
 }
 
-// Message that stores parameters used by ReshapeLayer
 message ReshapeParameter {
   // Specify the output dimensions. If some of the dimensions are set to 0,
   // the corresponding dimension from the bottom layer is used (unchanged).
@@ -763,7 +743,6 @@ message ReshapeParameter {
   optional int32 num_axes = 3 [default = -1];
 }
 
-// Message that stores parameters used by SigmoidLayer
 message SigmoidParameter {
   enum Engine {
     DEFAULT = 0;
@@ -773,7 +752,6 @@ message SigmoidParameter {
   optional Engine engine = 1 [default = DEFAULT];
 }
 
-// Message that stores parameters used by SliceLayer
 message SliceParameter {
   // The axis along which to slice -- may be negative to index from the end
   // (e.g., -1 for the last axis).
@@ -800,7 +778,6 @@ message SoftmaxParameter {
   optional int32 axis = 2 [default = 1];
 }
 
-// Message that stores parameters used by TanHLayer
 message TanHParameter {
   enum Engine {
     DEFAULT = 0;
@@ -810,12 +787,10 @@ message TanHParameter {
   optional Engine engine = 1 [default = DEFAULT];
 }
 
-// Message that stores parameters used by ThresholdLayer
 message ThresholdParameter {
   optional float threshold = 1 [default = 0]; // Strictly positive values
 }
 
-// Message that stores parameters used by WindowDataLayer
 message WindowDataParameter {
   // Specify the data source.
   optional string source = 1;
@@ -849,7 +824,6 @@ message WindowDataParameter {
   optional string root_folder = 13 [default = ""];
 }
 
-// Message that stores parameters used by SPPLayer
 message SPPParameter {
   enum PoolMethod {
     MAX = 0;
@@ -1053,7 +1027,6 @@ message V0LayerParameter {
   optional HDF5OutputParameter hdf5_output_param = 1001;
 }
 
-// Message that stores parameters used by PReLULayer
 message PReLUParameter {
   // Parametric ReLU described in K. He et al, Delving Deep into Rectifiers:
   // Surpassing Human-Level Performance on ImageNet Classification, 2015.
-- 
2.7.4