From af37eac80f01694a4845607bfcede595f50f893f Mon Sep 17 00:00:00 2001
From: Jeff Donahue
Date: Mon, 12 Jan 2015 17:02:15 -0800
Subject: [PATCH] 'layers' -> 'layer'

---
 src/caffe/net.cpp                        | 34 ++++++++++-----------
 src/caffe/proto/caffe.proto              |  8 ++++-
 src/caffe/proto/caffe_pretty_print.proto |  2 +-
 src/caffe/util/insert_splits.cpp         | 20 ++++++------
 src/caffe/util/upgrade_proto.cpp         | 52 ++++++++++++++++----------------
 5 files changed, 61 insertions(+), 55 deletions(-)

diff --git a/src/caffe/net.cpp b/src/caffe/net.cpp
index 4350e05..db9ac7d 100644
--- a/src/caffe/net.cpp
+++ b/src/caffe/net.cpp
@@ -55,14 +55,14 @@ void Net<Dtype>::Init(const NetParameter& in_param) {
   }
   DLOG(INFO) << "Memory required for data: " << memory_used_ * sizeof(Dtype);
   // For each layer, set up their input and output
-  bottom_vecs_.resize(param.layers_size());
-  top_vecs_.resize(param.layers_size());
-  bottom_id_vecs_.resize(param.layers_size());
-  param_id_vecs_.resize(param.layers_size());
-  top_id_vecs_.resize(param.layers_size());
-  bottom_need_backward_.resize(param.layers_size());
-  for (int layer_id = 0; layer_id < param.layers_size(); ++layer_id) {
-    const LayerParameter& layer_param = param.layers(layer_id);
+  bottom_vecs_.resize(param.layer_size());
+  top_vecs_.resize(param.layer_size());
+  bottom_id_vecs_.resize(param.layer_size());
+  param_id_vecs_.resize(param.layer_size());
+  top_id_vecs_.resize(param.layer_size());
+  bottom_need_backward_.resize(param.layer_size());
+  for (int layer_id = 0; layer_id < param.layer_size(); ++layer_id) {
+    const LayerParameter& layer_param = param.layer(layer_id);
     layers_.push_back(shared_ptr<Layer<Dtype> >(
         LayerRegistry<Dtype>::CreateLayer(layer_param)));
     layer_names_.push_back(layer_param.name());
@@ -242,9 +242,9 @@ void Net<Dtype>::FilterNet(const NetParameter& param,
     }
   }
   param_filtered->CopyFrom(param);
-  param_filtered->clear_layers();
-  for (int i = 0; i < param.layers_size(); ++i) {
-    const LayerParameter& layer_param = param.layers(i);
+  param_filtered->clear_layer();
+  for (int i = 0; i < param.layer_size(); ++i) {
+    const LayerParameter& layer_param = param.layer(i);
     const string& layer_name = layer_param.name();
     CHECK(layer_param.include_size() == 0 || layer_param.exclude_size() == 0)
         << "Specify either include rules or exclude rules; not both.";
@@ -262,7 +262,7 @@
       }
     }
     if (layer_included) {
-      param_filtered->add_layers()->CopyFrom(layer_param);
+      param_filtered->add_layer()->CopyFrom(layer_param);
     }
   }
 }
@@ -335,7 +335,7 @@ void Net<Dtype>::AppendTop(const NetParameter& param, const int layer_id,
                            const int top_id, set<string>* available_blobs,
                            map<string, int>* blob_name_to_idx) {
   shared_ptr<LayerParameter> layer_param((layer_id >= 0) ?
-    (new LayerParameter(param.layers(layer_id))) : NULL);
+    (new LayerParameter(param.layer(layer_id))) : NULL);
   const string& blob_name = layer_param ?
       (layer_param->top_size() > top_id ?
           layer_param->top(top_id) : "(automatic)") : param.input(top_id);
@@ -385,7 +385,7 @@ template <typename Dtype>
 int Net<Dtype>::AppendBottom(const NetParameter& param, const int layer_id,
     const int bottom_id, set<string>* available_blobs,
     map<string, int>* blob_name_to_idx) {
-  const LayerParameter& layer_param = param.layers(layer_id);
+  const LayerParameter& layer_param = param.layer(layer_id);
   const string& blob_name = layer_param.bottom(bottom_id);
   if (available_blobs->find(blob_name) == available_blobs->end()) {
     LOG(FATAL) << "Unknown blob input " << blob_name
@@ -730,9 +730,9 @@ void Net<Dtype>::Reshape() {
 
 template <typename Dtype>
 void Net<Dtype>::CopyTrainedLayersFrom(const NetParameter& param) {
-  int num_source_layers = param.layers_size();
+  int num_source_layers = param.layer_size();
   for (int i = 0; i < num_source_layers; ++i) {
-    const LayerParameter& source_layer = param.layers(i);
+    const LayerParameter& source_layer = param.layer(i);
     const string& source_layer_name = source_layer.name();
     int target_layer_id = 0;
     while (target_layer_id != layer_names_.size() &&
@@ -775,7 +775,7 @@ void Net<Dtype>::ToProto(NetParameter* param, bool write_diff) const {
   }
   DLOG(INFO) << "Serializing " << layers_.size() << " layers";
   for (int i = 0; i < layers_.size(); ++i) {
-    LayerParameter* layer_param = param->add_layers();
+    LayerParameter* layer_param = param->add_layer();
     for (int j = 0; j < bottom_id_vecs_[i].size(); ++j) {
       layer_param->add_bottom(blob_names_[bottom_id_vecs_[i][j]]);
     }
diff --git a/src/caffe/proto/caffe.proto b/src/caffe/proto/caffe.proto
index 1fea344..c62e727 100644
--- a/src/caffe/proto/caffe.proto
+++ b/src/caffe/proto/caffe.proto
@@ -45,7 +45,6 @@ message FillerParameter {
 
 message NetParameter {
   optional string name = 1; // consider giving the network a name
-  repeated LayerParameter layers = 2; // a bunch of layers.
   // The input blobs to the network.
   repeated string input = 3;
   // The dim of the input blobs. For each input blob there should be four
@@ -64,6 +63,13 @@
   // Print debugging information about results while running Net::Forward,
   // Net::Backward, and Net::Update.
   optional bool debug_info = 7 [default = false];
+
+  // The layers that make up the net.  Each of their configurations, including
+  // connectivity and behavior, is specified as a LayerParameter.
+  repeated LayerParameter layer = 100;  // ID 100 so layers are printed last.
+
+  // DEPRECATED: use 'layer' instead.
+  // repeated LayerParameter layers = 2; // a bunch of layers.
 }
 
 // NOTE
diff --git a/src/caffe/proto/caffe_pretty_print.proto b/src/caffe/proto/caffe_pretty_print.proto
index 6f0a5f6..056541f 100644
--- a/src/caffe/proto/caffe_pretty_print.proto
+++ b/src/caffe/proto/caffe_pretty_print.proto
@@ -14,5 +14,5 @@ message NetParameterPrettyPrint {
   optional bool force_backward = 2 [default = false];
   repeated string input = 3;
   repeated int32 input_dim = 4;
-  repeated LayerParameter layers = 5;
+  repeated LayerParameter layer = 5;
 }
diff --git a/src/caffe/util/insert_splits.cpp b/src/caffe/util/insert_splits.cpp
index 8a0ad53..416f80a 100644
--- a/src/caffe/util/insert_splits.cpp
+++ b/src/caffe/util/insert_splits.cpp
@@ -12,7 +12,7 @@ namespace caffe {
 void InsertSplits(const NetParameter& param, NetParameter* param_split) {
   // Initialize by copying from the input NetParameter.
   param_split->CopyFrom(param);
-  param_split->clear_layers();
+  param_split->clear_layer();
   map<string, pair<int, int> > blob_name_to_last_top_idx;
   map<pair<int, int>, pair<int, int> > bottom_idx_to_source_top_idx;
   map<pair<int, int>, int> top_idx_to_bottom_count;
@@ -25,8 +25,8 @@ void InsertSplits(const NetParameter& param, NetParameter* param_split) {
     const string& blob_name = param.input(i);
     blob_name_to_last_top_idx[blob_name] = make_pair(-1, i);
   }
-  for (int i = 0; i < param.layers_size(); ++i) {
-    const LayerParameter& layer_param = param.layers(i);
+  for (int i = 0; i < param.layer_size(); ++i) {
+    const LayerParameter& layer_param = param.layer(i);
     layer_idx_to_layer_name[i] = layer_param.name();
     for (int j = 0; j < layer_param.bottom_size(); ++j) {
       const string& blob_name = layer_param.bottom(j);
@@ -56,22 +56,22 @@ void InsertSplits(const NetParameter& param, NetParameter* param_split) {
       }
     }
   }
-  // Create split layer for any input blobs used by other layers as bottom
+  // Create split layer for any input blobs used by other layer as bottom
   // blobs more than once.
   for (int i = 0; i < param.input_size(); ++i) {
     const int split_count = top_idx_to_bottom_count[make_pair(-1, i)];
     if (split_count > 1) {
       const string& layer_name = layer_idx_to_layer_name[-1];
       const string& blob_name = param.input(i);
-      LayerParameter* split_layer_param = param_split->add_layers();
+      LayerParameter* split_layer_param = param_split->add_layer();
       const float kZeroLossWeight = 0;
       ConfigureSplitLayer(layer_name, blob_name, i, split_count,
           kZeroLossWeight, split_layer_param);
     }
   }
-  for (int i = 0; i < param.layers_size(); ++i) {
-    LayerParameter* layer_param = param_split->add_layers();
-    layer_param->CopyFrom(param.layers(i));
+  for (int i = 0; i < param.layer_size(); ++i) {
+    LayerParameter* layer_param = param_split->add_layer();
+    layer_param->CopyFrom(param.layer(i));
     // Replace any shared bottom blobs with split layer outputs.
     for (int j = 0; j < layer_param->bottom_size(); ++j) {
       const pair<int, int>& top_idx =
@@ -84,7 +84,7 @@ void InsertSplits(const NetParameter& param, NetParameter* param_split) {
             blob_name, top_idx.second, top_idx_to_bottom_split_idx[top_idx]++));
       }
     }
-    // Create split layer for any top blobs used by other layers as bottom
+    // Create split layer for any top blobs used by other layer as bottom
     // blobs more than once.
     for (int j = 0; j < layer_param->top_size(); ++j) {
       const pair<int, int>& top_idx = make_pair(i, j);
@@ -92,7 +92,7 @@
       if (split_count > 1) {
         const string& layer_name = layer_idx_to_layer_name[i];
         const string& blob_name = layer_param->top(j);
-        LayerParameter* split_layer_param = param_split->add_layers();
+        LayerParameter* split_layer_param = param_split->add_layer();
         const float loss_weight = top_idx_to_loss_weight[top_idx];
         ConfigureSplitLayer(layer_name, blob_name, j, split_count,
             loss_weight, split_layer_param);
diff --git a/src/caffe/util/upgrade_proto.cpp b/src/caffe/util/upgrade_proto.cpp
index 31e7d4d..352d08e 100644
--- a/src/caffe/util/upgrade_proto.cpp
+++ b/src/caffe/util/upgrade_proto.cpp
@@ -13,8 +13,8 @@ namespace caffe {
 
 bool NetNeedsUpgrade(const NetParameter& net_param) {
-  for (int i = 0; i < net_param.layers_size(); ++i) {
-    if (net_param.layers(i).has_layer()) {
+  for (int i = 0; i < net_param.layer_size(); ++i) {
+    if (net_param.layer(i).has_layer()) {
       return true;
     }
   }
@@ -32,9 +32,9 @@ bool UpgradeV0Net(const NetParameter& v0_net_param_padding_layers,
   if (v0_net_param.has_name()) {
     net_param->set_name(v0_net_param.name());
   }
-  for (int i = 0; i < v0_net_param.layers_size(); ++i) {
-    is_fully_compatible &= UpgradeLayerParameter(v0_net_param.layers(i),
-                                                 net_param->add_layers());
+  for (int i = 0; i < v0_net_param.layer_size(); ++i) {
+    is_fully_compatible &= UpgradeLayerParameter(v0_net_param.layer(i),
+                                                 net_param->add_layer());
   }
   for (int i = 0; i < v0_net_param.input_size(); ++i) {
     net_param->add_input(v0_net_param.input(i));
@@ -53,19 +53,19 @@ void UpgradeV0PaddingLayers(const NetParameter& param,
   // Copy everything other than the layers from the original param.
   param_upgraded_pad->Clear();
   param_upgraded_pad->CopyFrom(param);
-  param_upgraded_pad->clear_layers();
+  param_upgraded_pad->clear_layer();
   // Figure out which layer each bottom blob comes from.
   map<string, int> blob_name_to_last_top_idx;
   for (int i = 0; i < param.input_size(); ++i) {
     const string& blob_name = param.input(i);
     blob_name_to_last_top_idx[blob_name] = -1;
   }
-  for (int i = 0; i < param.layers_size(); ++i) {
-    const LayerParameter& layer_connection = param.layers(i);
+  for (int i = 0; i < param.layer_size(); ++i) {
+    const LayerParameter& layer_connection = param.layer(i);
     const V0LayerParameter& layer_param = layer_connection.layer();
     // Add the layer to the new net, unless it's a padding layer.
     if (layer_param.type() != "padding") {
-      param_upgraded_pad->add_layers()->CopyFrom(layer_connection);
+      param_upgraded_pad->add_layer()->CopyFrom(layer_connection);
     }
     for (int j = 0; j < layer_connection.bottom_size(); ++j) {
       const string& blob_name = layer_connection.bottom(j);
@@ -77,7 +77,7 @@ void UpgradeV0PaddingLayers(const NetParameter& param,
       if (top_idx == -1) {
         continue;
       }
-      LayerParameter source_layer = param.layers(top_idx);
+      LayerParameter source_layer = param.layer(top_idx);
       if (source_layer.layer().type() == "padding") {
         // This layer has a padding layer as input -- check that it is a conv
         // layer or a pooling layer and takes only one input.  Also check that
@@ -93,10 +93,10 @@ void UpgradeV0PaddingLayers(const NetParameter& param,
             << "Padding Layer takes a single blob as input.";
         CHECK_EQ(source_layer.top_size(), 1)
             << "Padding Layer produces a single blob as output.";
-        int layer_index = param_upgraded_pad->layers_size() - 1;
-        param_upgraded_pad->mutable_layers(layer_index)->mutable_layer()
+        int layer_index = param_upgraded_pad->layer_size() - 1;
+        param_upgraded_pad->mutable_layer(layer_index)->mutable_layer()
             ->set_pad(source_layer.layer().pad());
-        param_upgraded_pad->mutable_layers(layer_index)
+        param_upgraded_pad->mutable_layer(layer_index)
             ->set_bottom(j, source_layer.bottom(0));
       }
     }
@@ -515,23 +515,23 @@ const char* UpgradeV0LayerType(const string& type) {
 }
 
 bool NetNeedsDataUpgrade(const NetParameter& net_param) {
-  for (int i = 0; i < net_param.layers_size(); ++i) {
-    if (net_param.layers(i).type() == "Data") {
-      DataParameter layer_param = net_param.layers(i).data_param();
+  for (int i = 0; i < net_param.layer_size(); ++i) {
+    if (net_param.layer(i).type() == "Data") {
+      DataParameter layer_param = net_param.layer(i).data_param();
       if (layer_param.has_scale()) { return true; }
       if (layer_param.has_mean_file()) { return true; }
       if (layer_param.has_crop_size()) { return true; }
       if (layer_param.has_mirror()) { return true; }
     }
-    if (net_param.layers(i).type() == "ImageData") {
-      ImageDataParameter layer_param = net_param.layers(i).image_data_param();
+    if (net_param.layer(i).type() == "ImageData") {
+      ImageDataParameter layer_param = net_param.layer(i).image_data_param();
       if (layer_param.has_scale()) { return true; }
       if (layer_param.has_mean_file()) { return true; }
       if (layer_param.has_crop_size()) { return true; }
       if (layer_param.has_mirror()) { return true; }
     }
-    if (net_param.layers(i).type() == "WindowData") {
-      WindowDataParameter layer_param = net_param.layers(i).window_data_param();
+    if (net_param.layer(i).type() == "WindowData") {
+      WindowDataParameter layer_param = net_param.layer(i).window_data_param();
       if (layer_param.has_scale()) { return true; }
       if (layer_param.has_mean_file()) { return true; }
       if (layer_param.has_crop_size()) { return true; }
@@ -543,11 +543,11 @@ bool NetNeedsDataUpgrade(const NetParameter& net_param) {
 
 #define CONVERT_LAYER_TRANSFORM_PARAM(TYPE_NAME, PARAM_NAME) \
   do { \
-    if (net_param->layers(i).type() == #TYPE_NAME) { \
+    if (net_param->layer(i).type() == #TYPE_NAME) { \
       TYPE_NAME##Parameter* layer_param = \
-          net_param->mutable_layers(i)->mutable_##PARAM_NAME##_param(); \
+          net_param->mutable_layer(i)->mutable_##PARAM_NAME##_param(); \
       TransformationParameter* transform_param = \
-          net_param->mutable_layers(i)->mutable_transform_param(); \
+          net_param->mutable_layer(i)->mutable_transform_param(); \
       if (layer_param->has_scale()) { \
         transform_param->set_scale(layer_param->scale()); \
         layer_param->clear_scale(); \
@@ -568,7 +568,7 @@ bool NetNeedsDataUpgrade(const NetParameter& net_param) {
   } while (0)
 
 void UpgradeNetDataTransformation(NetParameter* net_param) {
-  for (int i = 0; i < net_param->layers_size(); ++i) {
+  for (int i = 0; i < net_param->layer_size(); ++i) {
     CONVERT_LAYER_TRANSFORM_PARAM(Data, data);
     CONVERT_LAYER_TRANSFORM_PARAM(ImageData, image_data);
     CONVERT_LAYER_TRANSFORM_PARAM(WindowData, window_data);
@@ -590,8 +590,8 @@ void NetParameterToPrettyPrint(const NetParameter& param,
   for (int i = 0; i < param.input_dim_size(); ++i) {
     pretty_param->add_input_dim(param.input_dim(i));
   }
-  for (int i = 0; i < param.layers_size(); ++i) {
-    pretty_param->add_layers()->CopyFrom(param.layers(i));
+  for (int i = 0; i < param.layer_size(); ++i) {
+    pretty_param->add_layer()->CopyFrom(param.layer(i));
   }
 }
-- 
2.7.4
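
The user-visible effect of this rename is that prototxt net definitions now spell the repeated field in the singular. A minimal before/after sketch (the layer below is illustrative, not taken from this patch):

    # Before: the deprecated 'layers' field (ID 2)
    layers {
      name: "ip1"
      bottom: "data"
      top: "ip1"
    }

    # After: the 'layer' field (ID 100, so layers are printed after the
    # net's other fields)
    layer {
      name: "ip1"
      bottom: "data"
      top: "ip1"
    }

On the C++ side, the protobuf-generated accessors follow the field name, which is what the bulk of the diff tracks: layers_size(), layers(i), add_layers(), clear_layers(), and mutable_layers(i) become layer_size(), layer(i), add_layer(), clear_layer(), and mutable_layer(i).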