From bb5ba1b7e025d4bb2a0fc3caedbe17e288b266c4 Mon Sep 17 00:00:00 2001
From: Jeff Donahue
Date: Wed, 14 Jan 2015 15:07:20 -0800
Subject: [PATCH] restore upgrade_proto

---
 include/caffe/util/upgrade_proto.hpp |   6 +-
 src/caffe/proto/caffe.proto          |  98 +++++++++++++++++++++++++++--
 src/caffe/util/upgrade_proto.cpp     | 118 +++++++++++++++++------------------
 3 files changed, 154 insertions(+), 68 deletions(-)

diff --git a/include/caffe/util/upgrade_proto.hpp b/include/caffe/util/upgrade_proto.hpp
index 0627afe..e89756e 100644
--- a/include/caffe/util/upgrade_proto.hpp
+++ b/include/caffe/util/upgrade_proto.hpp
@@ -24,10 +24,10 @@ void UpgradeV0PaddingLayers(const NetParameter& param,
                             NetParameter* param_upgraded_pad);
 
 // Upgrade a single V0LayerConnection to the new LayerParameter format.
-bool UpgradeLayerParameter(const LayerParameter& v0_layer_connection,
-                           LayerParameter* layer_param);
+bool UpgradeLayerParameter(const V1LayerParameter& v0_layer_connection,
+                           V1LayerParameter* layer_param);
 
-const char* UpgradeV0LayerType(const string& type);
+V1LayerParameter_LayerType UpgradeV0LayerType(const string& type);
 
 // Return true iff any layer contains deprecated data transformation parameters.
 bool NetNeedsDataUpgrade(const NetParameter& net_param);
diff --git a/src/caffe/proto/caffe.proto b/src/caffe/proto/caffe.proto
index c62e727..d81acf3 100644
--- a/src/caffe/proto/caffe.proto
+++ b/src/caffe/proto/caffe.proto
@@ -69,7 +69,7 @@ message NetParameter {
   repeated LayerParameter layer = 100;  // ID 100 so layers are printed last.
 
   // DEPRECATED: use 'layer' instead.
-  // repeated LayerParameter layers = 2;  // a bunch of layers.
+  repeated V1LayerParameter layers = 2;
 }
 
 // NOTE
@@ -297,11 +297,6 @@ message LayerParameter {
   // for their implementation. These layers include an Engine type and
   // engine parameter for selecting the implementation.
   // The default for the engine is set by the ENGINE switch at compile-time.
-
-  // DEPRECATED: The layer parameters specified as a V0LayerParameter.
-  // This should never be used by any code except to upgrade to the new
-  // LayerParameter specification.
-  optional V0LayerParameter layer = 1;
 }
 
 // Message that stores parameters used to apply transformation
@@ -695,6 +690,97 @@ message WindowDataParameter {
   optional string root_folder = 13 [default = ""];
 }
 
+// DEPRECATED: use LayerParameter.
+message V1LayerParameter {
+  repeated string bottom = 2;
+  repeated string top = 3;
+  optional string name = 4;
+  repeated NetStateRule include = 32;
+  repeated NetStateRule exclude = 33;
+  enum LayerType {
+    NONE = 0;
+    ABSVAL = 35;
+    ACCURACY = 1;
+    ARGMAX = 30;
+    BNLL = 2;
+    CONCAT = 3;
+    CONTRASTIVE_LOSS = 37;
+    CONVOLUTION = 4;
+    DATA = 5;
+    DROPOUT = 6;
+    DUMMY_DATA = 32;
+    EUCLIDEAN_LOSS = 7;
+    ELTWISE = 25;
+    EXP = 38;
+    FLATTEN = 8;
+    HDF5_DATA = 9;
+    HDF5_OUTPUT = 10;
+    HINGE_LOSS = 28;
+    IM2COL = 11;
+    IMAGE_DATA = 12;
+    INFOGAIN_LOSS = 13;
+    INNER_PRODUCT = 14;
+    LRN = 15;
+    MEMORY_DATA = 29;
+    MULTINOMIAL_LOGISTIC_LOSS = 16;
+    MVN = 34;
+    POOLING = 17;
+    POWER = 26;
+    RELU = 18;
+    SIGMOID = 19;
+    SIGMOID_CROSS_ENTROPY_LOSS = 27;
+    SILENCE = 36;
+    SOFTMAX = 20;
+    SOFTMAX_LOSS = 21;
+    SPLIT = 22;
+    SLICE = 33;
+    TANH = 23;
+    WINDOW_DATA = 24;
+    THRESHOLD = 31;
+  }
+  optional LayerType type = 5;
+  repeated BlobProto blobs = 6;
+  repeated string param = 1001;
+  repeated DimCheckMode blob_share_mode = 1002;
+  enum DimCheckMode {
+    STRICT = 0;
+    PERMISSIVE = 1;
+  }
+  repeated float blobs_lr = 7;
+  repeated float weight_decay = 8;
+  repeated float loss_weight = 35;
+  optional AccuracyParameter accuracy_param = 27;
+  optional ArgMaxParameter argmax_param = 23;
+  optional ConcatParameter concat_param = 9;
+  optional ContrastiveLossParameter contrastive_loss_param = 40;
+  optional ConvolutionParameter convolution_param = 10;
+  optional DataParameter data_param = 11;
+  optional DropoutParameter dropout_param = 12;
+  optional DummyDataParameter dummy_data_param = 26;
+  optional EltwiseParameter eltwise_param = 24;
+  optional ExpParameter exp_param = 41;
+  optional HDF5DataParameter hdf5_data_param = 13;
+  optional HDF5OutputParameter hdf5_output_param = 14;
+  optional HingeLossParameter hinge_loss_param = 29;
+  optional ImageDataParameter image_data_param = 15;
+  optional InfogainLossParameter infogain_loss_param = 16;
+  optional InnerProductParameter inner_product_param = 17;
+  optional LRNParameter lrn_param = 18;
+  optional MemoryDataParameter memory_data_param = 22;
+  optional MVNParameter mvn_param = 34;
+  optional PoolingParameter pooling_param = 19;
+  optional PowerParameter power_param = 21;
+  optional ReLUParameter relu_param = 30;
+  optional SigmoidParameter sigmoid_param = 38;
+  optional SoftmaxParameter softmax_param = 39;
+  optional SliceParameter slice_param = 31;
+  optional TanHParameter tanh_param = 37;
+  optional ThresholdParameter threshold_param = 25;
+  optional WindowDataParameter window_data_param = 20;
+  optional TransformationParameter transform_param = 36;
+  optional V0LayerParameter layer = 1;
+}
+
 // DEPRECATED: V0LayerParameter is the old way of specifying layer parameters
 // in Caffe. We keep this message type around for legacy support.
 message V0LayerParameter {
diff --git a/src/caffe/util/upgrade_proto.cpp b/src/caffe/util/upgrade_proto.cpp
index 352d08e..16583b0 100644
--- a/src/caffe/util/upgrade_proto.cpp
+++ b/src/caffe/util/upgrade_proto.cpp
@@ -13,8 +13,8 @@ namespace caffe {
 
 bool NetNeedsUpgrade(const NetParameter& net_param) {
-  for (int i = 0; i < net_param.layer_size(); ++i) {
-    if (net_param.layer(i).has_layer()) {
+  for (int i = 0; i < net_param.layers_size(); ++i) {
+    if (net_param.layers(i).has_layer()) {
       return true;
     }
   }
@@ -32,9 +32,9 @@ bool UpgradeV0Net(const NetParameter& v0_net_param_padding_layers,
   if (v0_net_param.has_name()) {
     net_param->set_name(v0_net_param.name());
   }
-  for (int i = 0; i < v0_net_param.layer_size(); ++i) {
-    is_fully_compatible &= UpgradeLayerParameter(v0_net_param.layer(i),
-                                                 net_param->add_layer());
+  for (int i = 0; i < v0_net_param.layers_size(); ++i) {
+    is_fully_compatible &= UpgradeLayerParameter(v0_net_param.layers(i),
+                                                 net_param->add_layers());
   }
   for (int i = 0; i < v0_net_param.input_size(); ++i) {
     net_param->add_input(v0_net_param.input(i));
@@ -53,19 +53,19 @@ void UpgradeV0PaddingLayers(const NetParameter& param,
   // Copy everything other than the layers from the original param.
   param_upgraded_pad->Clear();
   param_upgraded_pad->CopyFrom(param);
-  param_upgraded_pad->clear_layer();
+  param_upgraded_pad->clear_layers();
   // Figure out which layer each bottom blob comes from.
   map<string, int> blob_name_to_last_top_idx;
   for (int i = 0; i < param.input_size(); ++i) {
     const string& blob_name = param.input(i);
     blob_name_to_last_top_idx[blob_name] = -1;
   }
-  for (int i = 0; i < param.layer_size(); ++i) {
-    const LayerParameter& layer_connection = param.layer(i);
+  for (int i = 0; i < param.layers_size(); ++i) {
+    const V1LayerParameter& layer_connection = param.layers(i);
     const V0LayerParameter& layer_param = layer_connection.layer();
     // Add the layer to the new net, unless it's a padding layer.
     if (layer_param.type() != "padding") {
-      param_upgraded_pad->add_layer()->CopyFrom(layer_connection);
+      param_upgraded_pad->add_layers()->CopyFrom(layer_connection);
     }
     for (int j = 0; j < layer_connection.bottom_size(); ++j) {
       const string& blob_name = layer_connection.bottom(j);
@@ -77,7 +77,7 @@ void UpgradeV0PaddingLayers(const NetParameter& param,
       if (top_idx == -1) {
         continue;
       }
-      LayerParameter source_layer = param.layer(top_idx);
+      const V1LayerParameter& source_layer = param.layers(top_idx);
       if (source_layer.layer().type() == "padding") {
         // This layer has a padding layer as input -- check that it is a conv
         // layer or a pooling layer and takes only one input.  Also check that
@@ -93,10 +93,10 @@ void UpgradeV0PaddingLayers(const NetParameter& param,
             << "Padding Layer takes a single blob as input.";
         CHECK_EQ(source_layer.top_size(), 1)
             << "Padding Layer produces a single blob as output.";
-        int layer_index = param_upgraded_pad->layer_size() - 1;
-        param_upgraded_pad->mutable_layer(layer_index)->mutable_layer()
+        int layer_index = param_upgraded_pad->layers_size() - 1;
+        param_upgraded_pad->mutable_layers(layer_index)->mutable_layer()
            ->set_pad(source_layer.layer().pad());
-        param_upgraded_pad->mutable_layer(layer_index)
+        param_upgraded_pad->mutable_layers(layer_index)
            ->set_bottom(j, source_layer.bottom(0));
      }
    }
@@ -107,8 +107,8 @@
   }
 }
 
-bool UpgradeLayerParameter(const LayerParameter& v0_layer_connection,
-                           LayerParameter* layer_param) {
+bool UpgradeLayerParameter(const V1LayerParameter& v0_layer_connection,
+                           V1LayerParameter* layer_param) {
   bool is_fully_compatible = true;
   layer_param->Clear();
   for (int i = 0; i < v0_layer_connection.bottom_size(); ++i) {
@@ -459,79 +459,79 @@ bool UpgradeLayerParameter(const LayerParameter& v0_layer_connection,
   return is_fully_compatible;
 }
 
-const char* UpgradeV0LayerType(const string& type) {
+V1LayerParameter_LayerType UpgradeV0LayerType(const string& type) {
   if (type == "accuracy") {
-    return "Accuracy";
+    return V1LayerParameter_LayerType_ACCURACY;
   } else if (type == "bnll") {
-    return "BNLL";
+    return V1LayerParameter_LayerType_BNLL;
   } else if (type == "concat") {
-    return "Concat";
+    return V1LayerParameter_LayerType_CONCAT;
   } else if (type == "conv") {
-    return "Convolution";
+    return V1LayerParameter_LayerType_CONVOLUTION;
   } else if (type == "data") {
-    return "Data";
+    return V1LayerParameter_LayerType_DATA;
   } else if (type == "dropout") {
-    return "Dropout";
+    return V1LayerParameter_LayerType_DROPOUT;
   } else if (type == "euclidean_loss") {
-    return "EuclideanLoss";
+    return V1LayerParameter_LayerType_EUCLIDEAN_LOSS;
   } else if (type == "flatten") {
-    return "Flatten";
+    return V1LayerParameter_LayerType_FLATTEN;
   } else if (type == "hdf5_data") {
-    return "HDF5Data";
+    return V1LayerParameter_LayerType_HDF5_DATA;
  } else if (type == "hdf5_output") {
-    return "HDF5Output";
+    return V1LayerParameter_LayerType_HDF5_OUTPUT;
   } else if (type == "im2col") {
-    return "Im2Col";
+    return V1LayerParameter_LayerType_IM2COL;
   } else if (type == "images") {
-    return "ImageData";
+    return V1LayerParameter_LayerType_IMAGE_DATA;
   } else if (type == "infogain_loss") {
-    return "InfogainLoss";
+    return V1LayerParameter_LayerType_INFOGAIN_LOSS;
   } else if (type == "innerproduct") {
-    return "InnerProduct";
+    return V1LayerParameter_LayerType_INNER_PRODUCT;
   } else if (type == "lrn") {
-    return "LRN";
+    return V1LayerParameter_LayerType_LRN;
   } else if (type == "multinomial_logistic_loss") {
-    return "MultinomialLogisticLoss";
+    return V1LayerParameter_LayerType_MULTINOMIAL_LOGISTIC_LOSS;
   } else if (type == "pool") {
-    return "Pooling";
+    return V1LayerParameter_LayerType_POOLING;
   } else if (type == "relu") {
-    return "ReLU";
+    return V1LayerParameter_LayerType_RELU;
   } else if (type == "sigmoid") {
-    return "Sigmoid";
+    return V1LayerParameter_LayerType_SIGMOID;
   } else if (type == "softmax") {
-    return "Softmax";
+    return V1LayerParameter_LayerType_SOFTMAX;
   } else if (type == "softmax_loss") {
-    return "SoftmaxWithLoss";
+    return V1LayerParameter_LayerType_SOFTMAX_LOSS;
   } else if (type == "split") {
-    return "Split";
+    return V1LayerParameter_LayerType_SPLIT;
   } else if (type == "tanh") {
-    return "TanH";
+    return V1LayerParameter_LayerType_TANH;
   } else if (type == "window_data") {
-    return "WindowData";
+    return V1LayerParameter_LayerType_WINDOW_DATA;
   } else {
-    LOG(FATAL) << "Unknown layer type: " << type;
-    return "";
+    LOG(FATAL) << "Unknown layer name: " << type;
+    return V1LayerParameter_LayerType_NONE;
   }
 }
 
 bool NetNeedsDataUpgrade(const NetParameter& net_param) {
-  for (int i = 0; i < net_param.layer_size(); ++i) {
-    if (net_param.layer(i).type() == "Data") {
-      DataParameter layer_param = net_param.layer(i).data_param();
+  for (int i = 0; i < net_param.layers_size(); ++i) {
+    if (net_param.layers(i).type() == V1LayerParameter_LayerType_DATA) {
+      DataParameter layer_param = net_param.layers(i).data_param();
       if (layer_param.has_scale()) { return true; }
       if (layer_param.has_mean_file()) { return true; }
       if (layer_param.has_crop_size()) { return true; }
       if (layer_param.has_mirror()) { return true; }
     }
-    if (net_param.layer(i).type() == "ImageData") {
-      ImageDataParameter layer_param = net_param.layer(i).image_data_param();
+    if (net_param.layers(i).type() == V1LayerParameter_LayerType_IMAGE_DATA) {
+      ImageDataParameter layer_param = net_param.layers(i).image_data_param();
       if (layer_param.has_scale()) { return true; }
       if (layer_param.has_mean_file()) { return true; }
       if (layer_param.has_crop_size()) { return true; }
       if (layer_param.has_mirror()) { return true; }
     }
-    if (net_param.layer(i).type() == "WindowData") {
-      WindowDataParameter layer_param = net_param.layer(i).window_data_param();
+    if (net_param.layers(i).type() == V1LayerParameter_LayerType_WINDOW_DATA) {
+      WindowDataParameter layer_param = net_param.layers(i).window_data_param();
       if (layer_param.has_scale()) { return true; }
       if (layer_param.has_mean_file()) { return true; }
       if (layer_param.has_crop_size()) { return true; }
@@ -541,13 +541,13 @@ bool NetNeedsDataUpgrade(const NetParameter& net_param) {
   return false;
 }
 
-#define CONVERT_LAYER_TRANSFORM_PARAM(TYPE_NAME, PARAM_NAME) \
+#define CONVERT_LAYER_TRANSFORM_PARAM(TYPE, Name, param_name) \
   do { \
-  if (net_param->layer(i).type() == #TYPE_NAME) { \
-    TYPE_NAME##Parameter* layer_param = \
-        net_param->mutable_layer(i)->mutable_##PARAM_NAME##_param(); \
+  if (net_param->layers(i).type() == V1LayerParameter_LayerType_##TYPE) { \
+    Name##Parameter* layer_param = \
+        net_param->mutable_layers(i)->mutable_##param_name##_param(); \
     TransformationParameter* transform_param = \
-        net_param->mutable_layer(i)->mutable_transform_param(); \
+        net_param->mutable_layers(i)->mutable_transform_param(); \
     if (layer_param->has_scale()) { \
       transform_param->set_scale(layer_param->scale()); \
       layer_param->clear_scale(); \
@@ -568,10 +568,10 @@ bool NetNeedsDataUpgrade(const NetParameter& net_param) {
   } while (0)
 
 void UpgradeNetDataTransformation(NetParameter* net_param) {
-  for (int i = 0; i < net_param->layer_size(); ++i) {
-    CONVERT_LAYER_TRANSFORM_PARAM(Data, data);
-    CONVERT_LAYER_TRANSFORM_PARAM(ImageData, image_data);
-    CONVERT_LAYER_TRANSFORM_PARAM(WindowData, window_data);
+  for (int i = 0; i < net_param->layers_size(); ++i) {
+    CONVERT_LAYER_TRANSFORM_PARAM(DATA, Data, data);
+    CONVERT_LAYER_TRANSFORM_PARAM(IMAGE_DATA, ImageData, image_data);
+    CONVERT_LAYER_TRANSFORM_PARAM(WINDOW_DATA, WindowData, window_data);
   }
 }
 
@@ -590,7 +590,7 @@ void NetParameterToPrettyPrint(const NetParameter& param,
   for (int i = 0; i < param.input_dim_size(); ++i) {
     pretty_param->add_input_dim(param.input_dim(i));
   }
-  for (int i = 0; i < param.layer_size(); ++i) {
-    pretty_param->add_layer()->CopyFrom(param.layer(i));
+  for (int i = 0; i < param.layers_size(); ++i) {
+    pretty_param->add_layers()->CopyFrom(param.layers(i));
   }
 }
-- 
2.7.4
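
Note (not part of the patch): a minimal sketch of how the restored helpers
are typically chained together; Caffe's net-upgrade tools follow roughly
this flow. ReadProtoFromTextFile and WriteProtoToTextFile are existing
helpers from caffe/util/io.hpp; the file names here are hypothetical.

#include <glog/logging.h>

#include "caffe/proto/caffe.pb.h"
#include "caffe/util/io.hpp"
#include "caffe/util/upgrade_proto.hpp"

int main() {
  caffe::NetParameter net_param;
  CHECK(caffe::ReadProtoFromTextFile("v0_net.prototxt", &net_param))
      << "Failed to parse input net";
  // A V0 net is detected via has_layer() on any entry of the restored
  // 'layers' field (each V0 layer rides inside V1LayerParameter.layer).
  if (caffe::NetNeedsUpgrade(net_param)) {
    caffe::NetParameter upgraded;
    // UpgradeV0Net first folds deprecated "padding" layers into their
    // consumers (via UpgradeV0PaddingLayers), then converts each
    // V0LayerParameter to a V1LayerParameter; it returns false if any
    // field could not be carried over.
    if (!caffe::UpgradeV0Net(net_param, &upgraded)) {
      LOG(ERROR) << "Net was not fully upgraded.";
    }
    net_param.Swap(&upgraded);
  }
  // Move deprecated per-layer scale/mean_file/crop_size/mirror settings
  // into the shared TransformationParameter.
  if (caffe::NetNeedsDataUpgrade(net_param)) {
    caffe::UpgradeNetDataTransformation(&net_param);
  }
  caffe::WriteProtoToTextFile(net_param, "v1_net.prototxt");
  return 0;
}

Folding padding before the per-layer conversion matters: V0 "padding"
layers have no V1 equivalent, so their pad value is absorbed into the
following convolution or pooling layer and the padding layer is dropped.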
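A second sketch, for the header change: UpgradeV0LayerType now returns the
V1LayerParameter_LayerType enum instead of a type-name string, so callers
can assign the result to V1LayerParameter.type directly. The _Name helper
below is standard protoc-generated code for proto enums, not something
added by this patch.

#include <iostream>

#include "caffe/proto/caffe.pb.h"
#include "caffe/util/upgrade_proto.hpp"

int main() {
  // "conv" is one of the V0 type strings handled by the function;
  // unknown strings abort via LOG(FATAL).
  caffe::V1LayerParameter_LayerType type = caffe::UpgradeV0LayerType("conv");
  std::cout << caffe::V1LayerParameter_LayerType_Name(type) << std::endl;
  // Prints: CONVOLUTION
  return 0;
}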