}
DLOG(INFO) << "Memory required for data: " << memory_used_ * sizeof(Dtype);
// For each layer, set up their input and output
- bottom_vecs_.resize(param.layers_size());
- top_vecs_.resize(param.layers_size());
- bottom_id_vecs_.resize(param.layers_size());
- param_id_vecs_.resize(param.layers_size());
- top_id_vecs_.resize(param.layers_size());
- bottom_need_backward_.resize(param.layers_size());
- for (int layer_id = 0; layer_id < param.layers_size(); ++layer_id) {
- const LayerParameter& layer_param = param.layers(layer_id);
+ bottom_vecs_.resize(param.layer_size());
+ top_vecs_.resize(param.layer_size());
+ bottom_id_vecs_.resize(param.layer_size());
+ param_id_vecs_.resize(param.layer_size());
+ top_id_vecs_.resize(param.layer_size());
+ bottom_need_backward_.resize(param.layer_size());
+ for (int layer_id = 0; layer_id < param.layer_size(); ++layer_id) {
+ const LayerParameter& layer_param = param.layer(layer_id);
layers_.push_back(shared_ptr<Layer<Dtype> >(
LayerRegistry<Dtype>::CreateLayer(layer_param)));
layer_names_.push_back(layer_param.name());
}
}
param_filtered->CopyFrom(param);
- param_filtered->clear_layers();
- for (int i = 0; i < param.layers_size(); ++i) {
- const LayerParameter& layer_param = param.layers(i);
+ param_filtered->clear_layer();
+ for (int i = 0; i < param.layer_size(); ++i) {
+ const LayerParameter& layer_param = param.layer(i);
const string& layer_name = layer_param.name();
CHECK(layer_param.include_size() == 0 || layer_param.exclude_size() == 0)
<< "Specify either include rules or exclude rules; not both.";
}
}
if (layer_included) {
- param_filtered->add_layers()->CopyFrom(layer_param);
+ param_filtered->add_layer()->CopyFrom(layer_param);
}
}
}
const int top_id, set<string>* available_blobs,
map<string, int>* blob_name_to_idx) {
shared_ptr<LayerParameter> layer_param((layer_id >= 0) ?
- (new LayerParameter(param.layers(layer_id))) : NULL);
+ (new LayerParameter(param.layer(layer_id))) : NULL);
const string& blob_name = layer_param ?
(layer_param->top_size() > top_id ?
layer_param->top(top_id) : "(automatic)") : param.input(top_id);
int Net<Dtype>::AppendBottom(const NetParameter& param,
const int layer_id, const int bottom_id,
set<string>* available_blobs, map<string, int>* blob_name_to_idx) {
- const LayerParameter& layer_param = param.layers(layer_id);
+ const LayerParameter& layer_param = param.layer(layer_id);
const string& blob_name = layer_param.bottom(bottom_id);
if (available_blobs->find(blob_name) == available_blobs->end()) {
LOG(FATAL) << "Unknown blob input " << blob_name
template <typename Dtype>
void Net<Dtype>::CopyTrainedLayersFrom(const NetParameter& param) {
- int num_source_layers = param.layers_size();
+ int num_source_layers = param.layer_size();
for (int i = 0; i < num_source_layers; ++i) {
- const LayerParameter& source_layer = param.layers(i);
+ const LayerParameter& source_layer = param.layer(i);
const string& source_layer_name = source_layer.name();
int target_layer_id = 0;
while (target_layer_id != layer_names_.size() &&
}
DLOG(INFO) << "Serializing " << layers_.size() << " layers";
for (int i = 0; i < layers_.size(); ++i) {
- LayerParameter* layer_param = param->add_layers();
+ LayerParameter* layer_param = param->add_layer();
for (int j = 0; j < bottom_id_vecs_[i].size(); ++j) {
layer_param->add_bottom(blob_names_[bottom_id_vecs_[i][j]]);
}
message NetParameter {
optional string name = 1; // consider giving the network a name
- repeated LayerParameter layers = 2; // a bunch of layers.
// The input blobs to the network.
repeated string input = 3;
// The dim of the input blobs. For each input blob there should be four
// Print debugging information about results while running Net::Forward,
// Net::Backward, and Net::Update.
optional bool debug_info = 7 [default = false];
+
+ // The layers that make up the net. Each of their configurations, including
+ // connectivity and behavior, is specified as a LayerParameter.
+ repeated LayerParameter layer = 100; // ID 100 so layers are printed last.
+
+ // DEPRECATED: use 'layer' instead. Field ID 2 is retired and must not be
+ // reused for any new field, or previously serialized NetParameters that
+ // still carry the old 'layers' field would misparse.
+ // repeated LayerParameter layers = 2; // a bunch of layers.
}
// NOTE
optional bool force_backward = 2 [default = false];
repeated string input = 3;
repeated int32 input_dim = 4;
- repeated LayerParameter layers = 5;
+ repeated LayerParameter layer = 5;
}
void InsertSplits(const NetParameter& param, NetParameter* param_split) {
// Initialize by copying from the input NetParameter.
param_split->CopyFrom(param);
- param_split->clear_layers();
+ param_split->clear_layer();
map<string, pair<int, int> > blob_name_to_last_top_idx;
map<pair<int, int>, pair<int, int> > bottom_idx_to_source_top_idx;
map<pair<int, int>, int> top_idx_to_bottom_count;
const string& blob_name = param.input(i);
blob_name_to_last_top_idx[blob_name] = make_pair(-1, i);
}
- for (int i = 0; i < param.layers_size(); ++i) {
- const LayerParameter& layer_param = param.layers(i);
+ for (int i = 0; i < param.layer_size(); ++i) {
+ const LayerParameter& layer_param = param.layer(i);
layer_idx_to_layer_name[i] = layer_param.name();
for (int j = 0; j < layer_param.bottom_size(); ++j) {
const string& blob_name = layer_param.bottom(j);
}
}
}
- // Create split layer for any input blobs used by other layers as bottom
+ // Create split layer for any input blobs used by other layers as bottom
// blobs more than once.
for (int i = 0; i < param.input_size(); ++i) {
const int split_count = top_idx_to_bottom_count[make_pair(-1, i)];
if (split_count > 1) {
const string& layer_name = layer_idx_to_layer_name[-1];
const string& blob_name = param.input(i);
- LayerParameter* split_layer_param = param_split->add_layers();
+ LayerParameter* split_layer_param = param_split->add_layer();
const float kZeroLossWeight = 0;
ConfigureSplitLayer(layer_name, blob_name, i, split_count,
kZeroLossWeight, split_layer_param);
}
}
- for (int i = 0; i < param.layers_size(); ++i) {
- LayerParameter* layer_param = param_split->add_layers();
- layer_param->CopyFrom(param.layers(i));
+ for (int i = 0; i < param.layer_size(); ++i) {
+ LayerParameter* layer_param = param_split->add_layer();
+ layer_param->CopyFrom(param.layer(i));
// Replace any shared bottom blobs with split layer outputs.
for (int j = 0; j < layer_param->bottom_size(); ++j) {
const pair<int, int>& top_idx =
blob_name, top_idx.second, top_idx_to_bottom_split_idx[top_idx]++));
}
}
- // Create split layer for any top blobs used by other layers as bottom
+ // Create split layer for any top blobs used by other layers as bottom
// blobs more than once.
for (int j = 0; j < layer_param->top_size(); ++j) {
const pair<int, int>& top_idx = make_pair(i, j);
if (split_count > 1) {
const string& layer_name = layer_idx_to_layer_name[i];
const string& blob_name = layer_param->top(j);
- LayerParameter* split_layer_param = param_split->add_layers();
+ LayerParameter* split_layer_param = param_split->add_layer();
const float loss_weight = top_idx_to_loss_weight[top_idx];
ConfigureSplitLayer(layer_name, blob_name, j, split_count,
loss_weight, split_layer_param);
namespace caffe {
bool NetNeedsUpgrade(const NetParameter& net_param) {
- for (int i = 0; i < net_param.layers_size(); ++i) {
- if (net_param.layers(i).has_layer()) {
+ for (int i = 0; i < net_param.layer_size(); ++i) {
+ if (net_param.layer(i).has_layer()) {
return true;
}
}
if (v0_net_param.has_name()) {
net_param->set_name(v0_net_param.name());
}
- for (int i = 0; i < v0_net_param.layers_size(); ++i) {
- is_fully_compatible &= UpgradeLayerParameter(v0_net_param.layers(i),
- net_param->add_layers());
+ for (int i = 0; i < v0_net_param.layer_size(); ++i) {
+ is_fully_compatible &= UpgradeLayerParameter(v0_net_param.layer(i),
+ net_param->add_layer());
}
for (int i = 0; i < v0_net_param.input_size(); ++i) {
net_param->add_input(v0_net_param.input(i));
// Copy everything other than the layers from the original param.
param_upgraded_pad->Clear();
param_upgraded_pad->CopyFrom(param);
- param_upgraded_pad->clear_layers();
+ param_upgraded_pad->clear_layer();
// Figure out which layer each bottom blob comes from.
map<string, int> blob_name_to_last_top_idx;
for (int i = 0; i < param.input_size(); ++i) {
const string& blob_name = param.input(i);
blob_name_to_last_top_idx[blob_name] = -1;
}
- for (int i = 0; i < param.layers_size(); ++i) {
- const LayerParameter& layer_connection = param.layers(i);
+ for (int i = 0; i < param.layer_size(); ++i) {
+ const LayerParameter& layer_connection = param.layer(i);
const V0LayerParameter& layer_param = layer_connection.layer();
// Add the layer to the new net, unless it's a padding layer.
if (layer_param.type() != "padding") {
- param_upgraded_pad->add_layers()->CopyFrom(layer_connection);
+ param_upgraded_pad->add_layer()->CopyFrom(layer_connection);
}
for (int j = 0; j < layer_connection.bottom_size(); ++j) {
const string& blob_name = layer_connection.bottom(j);
if (top_idx == -1) {
continue;
}
- LayerParameter source_layer = param.layers(top_idx);
+ LayerParameter source_layer = param.layer(top_idx);
if (source_layer.layer().type() == "padding") {
// This layer has a padding layer as input -- check that it is a conv
// layer or a pooling layer and takes only one input. Also check that
<< "Padding Layer takes a single blob as input.";
CHECK_EQ(source_layer.top_size(), 1)
<< "Padding Layer produces a single blob as output.";
- int layer_index = param_upgraded_pad->layers_size() - 1;
- param_upgraded_pad->mutable_layers(layer_index)->mutable_layer()
+ int layer_index = param_upgraded_pad->layer_size() - 1;
+ param_upgraded_pad->mutable_layer(layer_index)->mutable_layer()
->set_pad(source_layer.layer().pad());
- param_upgraded_pad->mutable_layers(layer_index)
+ param_upgraded_pad->mutable_layer(layer_index)
->set_bottom(j, source_layer.bottom(0));
}
}
}
bool NetNeedsDataUpgrade(const NetParameter& net_param) {
- for (int i = 0; i < net_param.layers_size(); ++i) {
- if (net_param.layers(i).type() == "Data") {
- DataParameter layer_param = net_param.layers(i).data_param();
+ for (int i = 0; i < net_param.layer_size(); ++i) {
+ if (net_param.layer(i).type() == "Data") {
+ DataParameter layer_param = net_param.layer(i).data_param();
if (layer_param.has_scale()) { return true; }
if (layer_param.has_mean_file()) { return true; }
if (layer_param.has_crop_size()) { return true; }
if (layer_param.has_mirror()) { return true; }
}
- if (net_param.layers(i).type() == "ImageData") {
- ImageDataParameter layer_param = net_param.layers(i).image_data_param();
+ if (net_param.layer(i).type() == "ImageData") {
+ ImageDataParameter layer_param = net_param.layer(i).image_data_param();
if (layer_param.has_scale()) { return true; }
if (layer_param.has_mean_file()) { return true; }
if (layer_param.has_crop_size()) { return true; }
if (layer_param.has_mirror()) { return true; }
}
- if (net_param.layers(i).type() == "WindowData") {
- WindowDataParameter layer_param = net_param.layers(i).window_data_param();
+ if (net_param.layer(i).type() == "WindowData") {
+ WindowDataParameter layer_param = net_param.layer(i).window_data_param();
if (layer_param.has_scale()) { return true; }
if (layer_param.has_mean_file()) { return true; }
if (layer_param.has_crop_size()) { return true; }
#define CONVERT_LAYER_TRANSFORM_PARAM(TYPE_NAME, PARAM_NAME) \
do { \
- if (net_param->layers(i).type() == #TYPE_NAME) { \
+ if (net_param->layer(i).type() == #TYPE_NAME) { \
TYPE_NAME##Parameter* layer_param = \
- net_param->mutable_layers(i)->mutable_##PARAM_NAME##_param(); \
+ net_param->mutable_layer(i)->mutable_##PARAM_NAME##_param(); \
TransformationParameter* transform_param = \
- net_param->mutable_layers(i)->mutable_transform_param(); \
+ net_param->mutable_layer(i)->mutable_transform_param(); \
if (layer_param->has_scale()) { \
transform_param->set_scale(layer_param->scale()); \
layer_param->clear_scale(); \
} while (0)
void UpgradeNetDataTransformation(NetParameter* net_param) {
- for (int i = 0; i < net_param->layers_size(); ++i) {
+ for (int i = 0; i < net_param->layer_size(); ++i) {
CONVERT_LAYER_TRANSFORM_PARAM(Data, data);
CONVERT_LAYER_TRANSFORM_PARAM(ImageData, image_data);
CONVERT_LAYER_TRANSFORM_PARAM(WindowData, window_data);
for (int i = 0; i < param.input_dim_size(); ++i) {
pretty_param->add_input_dim(param.input_dim(i));
}
- for (int i = 0; i < param.layers_size(); ++i) {
- pretty_param->add_layers()->CopyFrom(param.layers(i));
+ for (int i = 0; i < param.layer_size(); ++i) {
+ pretty_param->add_layer()->CopyFrom(param.layer(i));
}
}