//
// LayerParameter next available ID: 43 (last added: loss_param)
message LayerParameter {
- repeated string bottom = 2; // the name of the bottom blobs
- repeated string top = 3; // the name of the top blobs
- optional string name = 4; // the layer name
-
- // Rules controlling whether and when a layer is included in the network,
- // based on the current NetState. You may specify a non-zero number of rules
- // to include OR exclude, but not both. If no include or exclude rules are
- // specified, the layer is always included. If the current NetState meets
- // ANY (i.e., one or more) of the specified rules, the layer is
- // included/excluded.
- repeated NetStateRule include = 32;
- repeated NetStateRule exclude = 33;
+ optional string name = 1; // the layer name
+ optional string type = 2; // the layer type
+ repeated string bottom = 3; // the name of each bottom blob
+ repeated string top = 4; // the name of each top blob
- optional string type = 5; // the layer type from the enum above
+ // The amount of weight to assign each top blob in the objective.
+ // Each layer assigns a default value, usually of either 0 or 1,
+ // to each top blob.
+ repeated float loss_weight = 5;
// The blobs containing the numeric parameters of the layer
repeated BlobProto blobs = 6;
+
// The names of the parameter blobs -- useful for sharing parameters among
// layers (but never required).
- repeated string param = 1001;
+ repeated string param = 7;
+
// Whether to require shared weights to have the same shape, or just the same
// count -- defaults to STRICT if unspecified.
- repeated DimCheckMode blob_share_mode = 1002;
+ repeated DimCheckMode param_share_mode = 8;
enum DimCheckMode {
// STRICT (default) requires that num, channels, height, width each match.
STRICT = 0;
// PERMISSIVE requires only the count (num*channels*height*width) to match.
PERMISSIVE = 1;
}
+
// The ratio that is multiplied on the global learning rate. If you want to
// set the learning ratio for one blob, you need to set it for all blobs.
- repeated float blobs_lr = 7;
- // The weight decay that is multiplied on the global weight decay.
- repeated float weight_decay = 8;
+ repeated float blobs_lr = 9;
- // The amount of weight to assign each top blob in the objective.
- // Each layer assigns a default value, usually of either 0 or 1,
- // to each top blob.
- repeated float loss_weight = 35;
+ // The weight decay that is multiplied on the global weight decay.
+ repeated float weight_decay = 10;
- optional AccuracyParameter accuracy_param = 27;
- optional ArgMaxParameter argmax_param = 23;
- optional ConcatParameter concat_param = 9;
- optional ContrastiveLossParameter contrastive_loss_param = 40;
- optional ConvolutionParameter convolution_param = 10;
- optional DataParameter data_param = 11;
- optional DropoutParameter dropout_param = 12;
- optional DummyDataParameter dummy_data_param = 26;
- optional EltwiseParameter eltwise_param = 24;
- optional ExpParameter exp_param = 41;
- optional HDF5DataParameter hdf5_data_param = 13;
- optional HDF5OutputParameter hdf5_output_param = 14;
- optional HingeLossParameter hinge_loss_param = 29;
- optional ImageDataParameter image_data_param = 15;
- optional InfogainLossParameter infogain_loss_param = 16;
- optional InnerProductParameter inner_product_param = 17;
- optional LRNParameter lrn_param = 18;
- optional MemoryDataParameter memory_data_param = 22;
- optional MVNParameter mvn_param = 34;
- optional PoolingParameter pooling_param = 19;
- optional PowerParameter power_param = 21;
- optional ReLUParameter relu_param = 30;
- optional SigmoidParameter sigmoid_param = 38;
- optional SoftmaxParameter softmax_param = 39;
- optional SliceParameter slice_param = 31;
- optional TanHParameter tanh_param = 37;
- optional ThresholdParameter threshold_param = 25;
- optional WindowDataParameter window_data_param = 20;
+ // Rules controlling whether and when a layer is included in the network,
+ // based on the current NetState. You may specify a non-zero number of rules
+ // to include OR exclude, but not both. If no include or exclude rules are
+ // specified, the layer is always included. If the current NetState meets
+ // ANY (i.e., one or more) of the specified rules, the layer is
+ // included/excluded.
+ repeated NetStateRule include = 11;
+ repeated NetStateRule exclude = 12;
// Parameters for data pre-processing.
- optional TransformationParameter transform_param = 36;
+ optional TransformationParameter transform_param = 13;
+
+ optional AccuracyParameter accuracy_param = 14;
+ optional ArgMaxParameter argmax_param = 15;
+ optional ConcatParameter concat_param = 16;
+ optional ContrastiveLossParameter contrastive_loss_param = 17;
+ optional ConvolutionParameter convolution_param = 18;
+ optional DataParameter data_param = 19;
+ optional DropoutParameter dropout_param = 20;
+ optional DummyDataParameter dummy_data_param = 21;
+ optional EltwiseParameter eltwise_param = 22;
+ optional ExpParameter exp_param = 23;
+ optional HDF5DataParameter hdf5_data_param = 24;
+ optional HDF5OutputParameter hdf5_output_param = 25;
+ optional HingeLossParameter hinge_loss_param = 26;
+ optional ImageDataParameter image_data_param = 27;
+ optional InfogainLossParameter infogain_loss_param = 28;
+ optional InnerProductParameter inner_product_param = 29;
+ optional LRNParameter lrn_param = 30;
+ optional MemoryDataParameter memory_data_param = 31;
+ optional MVNParameter mvn_param = 32;
+ optional PoolingParameter pooling_param = 33;
+ optional PowerParameter power_param = 34;
+ optional ReLUParameter relu_param = 35;
+ optional SigmoidParameter sigmoid_param = 36;
+ optional SoftmaxParameter softmax_param = 37;
+ optional SliceParameter slice_param = 38;
+ optional TanHParameter tanh_param = 39;
+ optional ThresholdParameter threshold_param = 40;
+ optional WindowDataParameter window_data_param = 41;
// Parameters shared by loss layers.
optional LossParameter loss_param = 42;
namespace caffe {
+// True if this NetParameter is in any deprecated on-disk format and must be
+// upgraded before use: V0 (layers embedded as 'layer' sub-messages) or
+// V1 (the repeated 'layers' field, superseded by 'layer').
bool NetNeedsUpgrade(const NetParameter& net_param) {
+ return NetNeedsV0ToV1Upgrade(net_param) || NetNeedsV1ToV2Upgrade(net_param);
+}
+
+bool NetNeedsV0ToV1Upgrade(const NetParameter& net_param) {
for (int i = 0; i < net_param.layers_size(); ++i) {
if (net_param.layers(i).has_layer()) {
return true;
return false;
}
+// True if the spec still uses the deprecated V1 repeated 'layers' field;
+// such nets must be converted to the new repeated 'layer' field (V2).
+bool NetNeedsV1ToV2Upgrade(const NetParameter& net_param) {
+ return net_param.layers_size() > 0;
+}
+
bool UpgradeV0Net(const NetParameter& v0_net_param_padding_layers,
NetParameter* net_param) {
// First upgrade padding layers to padded conv layers.
net_param->set_name(v0_net_param.name());
}
for (int i = 0; i < v0_net_param.layers_size(); ++i) {
- is_fully_compatible &= UpgradeLayerParameter(v0_net_param.layers(i),
- net_param->add_layers());
+ is_fully_compatible &= UpgradeV0LayerParameter(v0_net_param.layers(i),
+ net_param->add_layers());
}
for (int i = 0; i < v0_net_param.input_size(); ++i) {
net_param->add_input(v0_net_param.input(i));
}
}
-bool UpgradeLayerParameter(const V1LayerParameter& v0_layer_connection,
- V1LayerParameter* layer_param) {
+bool UpgradeV0LayerParameter(const V1LayerParameter& v0_layer_connection,
+ V1LayerParameter* layer_param) {
bool is_fully_compatible = true;
layer_param->Clear();
for (int i = 0; i < v0_layer_connection.bottom_size(); ++i) {
for (int i = 0; i < param.input_dim_size(); ++i) {
pretty_param->add_input_dim(param.input_dim(i));
}
- for (int i = 0; i < param.layers_size(); ++i) {
+ for (int i = 0; i < param.layer_size(); ++i) {
pretty_param->add_layer()->CopyFrom(param.layer(i));
}
}
-void UpgradeNetAsNeeded(const string& param_file, NetParameter* param) {
- if (NetNeedsUpgrade(*param)) {
+// Upgrades *param in place through each needed format revision in order
+// (V0 -> V1, then V1 -> V2). Now returns bool (previously void): true iff
+// every applied upgrade step was fully compatible. On a partial failure it
+// logs a warning and continues anyway, returning false.
+bool UpgradeNetAsNeeded(const string& param_file, NetParameter* param) {
+ bool success = true;
+ if (NetNeedsV0ToV1Upgrade(*param)) {
// NetParameter was specified using the old style (V0LayerParameter); try to
// upgrade it.
LOG(ERROR) << "Attempting to upgrade input file specified using deprecated "
<< "V0LayerParameter: " << param_file;
NetParameter original_param(*param);
if (!UpgradeV0Net(original_param, param)) {
+ success = false;
LOG(ERROR) << "Warning: had one or more problems upgrading "
<< "V0NetParameter to NetParameter (see above); continuing anyway.";
} else {
LOG(ERROR) << "Note that future Caffe releases will only support "
<< "transform_param messages for transformation fields.";
}
+ // NOTE(review): the '}' closing the V0 branch appears to be elided by the
+ // diff context between here and the previous line — confirm in full file.
+ // Next, apply the V1 -> V2 upgrade if the spec still uses 'layers'.
+ if (NetNeedsV1ToV2Upgrade(*param)) {
+ LOG(ERROR) << "Attempting to upgrade input file specified using deprecated "
+ << "V1LayerParameter: " << param_file;
+ NetParameter original_param(*param);
+ if (!UpgradeV1Net(original_param, param)) {
+ success = false;
+ LOG(ERROR) << "Warning: had one or more problems upgrading "
+ << "V1LayerParameter (see above); continuing anyway.";
+ } else {
+ LOG(INFO) << "Successfully upgraded file specified using deprecated "
+ << "V1LayerParameter";
+ }
+ }
+ return success;
+}
+
+// Converts a V1-format net (repeated 'layers') into the V2 format (repeated
+// 'layer'), translating each layer via UpgradeV1LayerParameter. Returns
+// false if any layer could not be converted losslessly, or if the input
+// confusingly mixes both 'layer' and 'layers'; conversion proceeds anyway.
+bool UpgradeV1Net(const NetParameter& v1_net_param, NetParameter* net_param) {
+ bool is_fully_compatible = true;
+ if (v1_net_param.layer_size() > 0) {
+ LOG(ERROR) << "Input NetParameter to be upgraded already specifies 'layer' "
+ << "fields; these will be ignored for the upgrade.";
+ is_fully_compatible = false;
+ }
+ // Keep every non-layer field of the net, then rebuild the layer list in
+ // the new representation from scratch.
+ net_param->CopyFrom(v1_net_param);
+ net_param->clear_layers();
+ for (int i = 0; i < v1_net_param.layers_size(); ++i) {
+ if (!UpgradeV1LayerParameter(v1_net_param.layers(i),
+ net_param->add_layer())) {
+ LOG(ERROR) << "Upgrade of input layer " << i << " failed.";
+ is_fully_compatible = false;
+ }
+ }
+ return is_fully_compatible;
+}
+
+// Translates one deprecated V1LayerParameter into the new LayerParameter,
+// copying every field across; the enum 'type' becomes a string name via
+// UpgradeV1LayerType. Returns false only when information had to be dropped
+// (an embedded V0 'layer' message, which V2 cannot represent).
+bool UpgradeV1LayerParameter(const V1LayerParameter& v1_layer_param,
+ LayerParameter* layer_param) {
+ layer_param->Clear();
+ bool is_fully_compatible = true;
+ for (int i = 0; i < v1_layer_param.bottom_size(); ++i) {
+ layer_param->add_bottom(v1_layer_param.bottom(i));
+ }
+ for (int i = 0; i < v1_layer_param.top_size(); ++i) {
+ layer_param->add_top(v1_layer_param.top(i));
+ }
+ if (v1_layer_param.has_name()) {
+ layer_param->set_name(v1_layer_param.name());
+ }
+ for (int i = 0; i < v1_layer_param.include_size(); ++i) {
+ layer_param->add_include()->CopyFrom(v1_layer_param.include(i));
+ }
+ for (int i = 0; i < v1_layer_param.exclude_size(); ++i) {
+ layer_param->add_exclude()->CopyFrom(v1_layer_param.exclude(i));
+ }
+ if (v1_layer_param.has_type()) {
+ layer_param->set_type(UpgradeV1LayerType(v1_layer_param.type()));
+ }
+ for (int i = 0; i < v1_layer_param.blobs_size(); ++i) {
+ layer_param->add_blobs()->CopyFrom(v1_layer_param.blobs(i));
+ }
+ for (int i = 0; i < v1_layer_param.param_size(); ++i) {
+ layer_param->add_param(v1_layer_param.param(i));
+ }
+ // 'blob_share_mode' was renamed 'param_share_mode' in V2; translate each
+ // enum value explicitly rather than relying on matching numeric values.
+ for (int i = 0; i < v1_layer_param.blob_share_mode_size(); ++i) {
+ switch (v1_layer_param.blob_share_mode(i)) {
+ case V1LayerParameter_DimCheckMode_STRICT:
+ layer_param->add_param_share_mode(LayerParameter_DimCheckMode_STRICT);
+ break;
+ case V1LayerParameter_DimCheckMode_PERMISSIVE:
+ layer_param->add_param_share_mode(LayerParameter_DimCheckMode_PERMISSIVE);
+ break;
+ default:
+ LOG(FATAL) << "Unknown blob_share_mode: "
+ << v1_layer_param.blob_share_mode(i);
+ break;
+ }
+ }
+ for (int i = 0; i < v1_layer_param.blobs_lr_size(); ++i) {
+ layer_param->add_blobs_lr(v1_layer_param.blobs_lr(i));
+ }
+ for (int i = 0; i < v1_layer_param.weight_decay_size(); ++i) {
+ layer_param->add_weight_decay(v1_layer_param.weight_decay(i));
+ }
+ for (int i = 0; i < v1_layer_param.loss_weight_size(); ++i) {
+ layer_param->add_loss_weight(v1_layer_param.loss_weight(i));
+ }
+ // Copy each layer-specific parameter sub-message verbatim when present.
+ if (v1_layer_param.has_accuracy_param()) {
+ layer_param->mutable_accuracy_param()->CopyFrom(
+ v1_layer_param.accuracy_param());
+ }
+ if (v1_layer_param.has_argmax_param()) {
+ layer_param->mutable_argmax_param()->CopyFrom(
+ v1_layer_param.argmax_param());
+ }
+ if (v1_layer_param.has_concat_param()) {
+ layer_param->mutable_concat_param()->CopyFrom(
+ v1_layer_param.concat_param());
+ }
+ if (v1_layer_param.has_contrastive_loss_param()) {
+ layer_param->mutable_contrastive_loss_param()->CopyFrom(
+ v1_layer_param.contrastive_loss_param());
+ }
+ if (v1_layer_param.has_convolution_param()) {
+ layer_param->mutable_convolution_param()->CopyFrom(
+ v1_layer_param.convolution_param());
+ }
+ if (v1_layer_param.has_data_param()) {
+ layer_param->mutable_data_param()->CopyFrom(
+ v1_layer_param.data_param());
+ }
+ if (v1_layer_param.has_dropout_param()) {
+ layer_param->mutable_dropout_param()->CopyFrom(
+ v1_layer_param.dropout_param());
+ }
+ if (v1_layer_param.has_dummy_data_param()) {
+ layer_param->mutable_dummy_data_param()->CopyFrom(
+ v1_layer_param.dummy_data_param());
+ }
+ if (v1_layer_param.has_eltwise_param()) {
+ layer_param->mutable_eltwise_param()->CopyFrom(
+ v1_layer_param.eltwise_param());
+ }
+ if (v1_layer_param.has_exp_param()) {
+ layer_param->mutable_exp_param()->CopyFrom(
+ v1_layer_param.exp_param());
+ }
+ if (v1_layer_param.has_hdf5_data_param()) {
+ layer_param->mutable_hdf5_data_param()->CopyFrom(
+ v1_layer_param.hdf5_data_param());
+ }
+ if (v1_layer_param.has_hdf5_output_param()) {
+ layer_param->mutable_hdf5_output_param()->CopyFrom(
+ v1_layer_param.hdf5_output_param());
+ }
+ if (v1_layer_param.has_hinge_loss_param()) {
+ layer_param->mutable_hinge_loss_param()->CopyFrom(
+ v1_layer_param.hinge_loss_param());
+ }
+ if (v1_layer_param.has_image_data_param()) {
+ layer_param->mutable_image_data_param()->CopyFrom(
+ v1_layer_param.image_data_param());
+ }
+ if (v1_layer_param.has_infogain_loss_param()) {
+ layer_param->mutable_infogain_loss_param()->CopyFrom(
+ v1_layer_param.infogain_loss_param());
+ }
+ if (v1_layer_param.has_inner_product_param()) {
+ layer_param->mutable_inner_product_param()->CopyFrom(
+ v1_layer_param.inner_product_param());
+ }
+ if (v1_layer_param.has_lrn_param()) {
+ layer_param->mutable_lrn_param()->CopyFrom(
+ v1_layer_param.lrn_param());
+ }
+ if (v1_layer_param.has_memory_data_param()) {
+ layer_param->mutable_memory_data_param()->CopyFrom(
+ v1_layer_param.memory_data_param());
+ }
+ if (v1_layer_param.has_mvn_param()) {
+ layer_param->mutable_mvn_param()->CopyFrom(
+ v1_layer_param.mvn_param());
+ }
+ if (v1_layer_param.has_pooling_param()) {
+ layer_param->mutable_pooling_param()->CopyFrom(
+ v1_layer_param.pooling_param());
+ }
+ if (v1_layer_param.has_power_param()) {
+ layer_param->mutable_power_param()->CopyFrom(
+ v1_layer_param.power_param());
+ }
+ if (v1_layer_param.has_relu_param()) {
+ layer_param->mutable_relu_param()->CopyFrom(
+ v1_layer_param.relu_param());
+ }
+ if (v1_layer_param.has_sigmoid_param()) {
+ layer_param->mutable_sigmoid_param()->CopyFrom(
+ v1_layer_param.sigmoid_param());
+ }
+ if (v1_layer_param.has_softmax_param()) {
+ layer_param->mutable_softmax_param()->CopyFrom(
+ v1_layer_param.softmax_param());
+ }
+ if (v1_layer_param.has_slice_param()) {
+ layer_param->mutable_slice_param()->CopyFrom(
+ v1_layer_param.slice_param());
+ }
+ if (v1_layer_param.has_tanh_param()) {
+ layer_param->mutable_tanh_param()->CopyFrom(
+ v1_layer_param.tanh_param());
+ }
+ if (v1_layer_param.has_threshold_param()) {
+ layer_param->mutable_threshold_param()->CopyFrom(
+ v1_layer_param.threshold_param());
+ }
+ if (v1_layer_param.has_window_data_param()) {
+ layer_param->mutable_window_data_param()->CopyFrom(
+ v1_layer_param.window_data_param());
+ }
+ if (v1_layer_param.has_transform_param()) {
+ layer_param->mutable_transform_param()->CopyFrom(
+ v1_layer_param.transform_param());
+ }
+ // A nested V0 'layer' message has no V2 representation; drop it and
+ // report the conversion as lossy.
+ if (v1_layer_param.has_layer()) {
+ LOG(ERROR) << "Input NetParameter has V0 layer -- ignoring.";
+ is_fully_compatible = false;
+ }
+ return is_fully_compatible;
+}
+
+// Maps a deprecated V1 LayerType enum value to the string type name used by
+// the new LayerParameter 'type' field. Returns "" for NONE; LOG(FATAL)s on
+// any value not covered below.
+const char* UpgradeV1LayerType(const V1LayerParameter_LayerType type) {
+ switch (type) {
+ case V1LayerParameter_LayerType_NONE:
+ return "";
+ case V1LayerParameter_LayerType_ABSVAL:
+ return "AbsVal";
+ case V1LayerParameter_LayerType_ACCURACY:
+ return "Accuracy";
+ case V1LayerParameter_LayerType_ARGMAX:
+ return "ArgMax";
+ case V1LayerParameter_LayerType_BNLL:
+ return "BNLL";
+ case V1LayerParameter_LayerType_CONCAT:
+ return "Concat";
+ case V1LayerParameter_LayerType_CONTRASTIVE_LOSS:
+ return "ContrastiveLoss";
+ case V1LayerParameter_LayerType_CONVOLUTION:
+ return "Convolution";
+ case V1LayerParameter_LayerType_DATA:
+ return "Data";
+ case V1LayerParameter_LayerType_DROPOUT:
+ return "Dropout";
+ case V1LayerParameter_LayerType_DUMMY_DATA:
+ return "DummyData";
+ case V1LayerParameter_LayerType_EUCLIDEAN_LOSS:
+ return "EuclideanLoss";
+ case V1LayerParameter_LayerType_ELTWISE:
+ return "Eltwise";
+ case V1LayerParameter_LayerType_EXP:
+ return "Exp";
+ case V1LayerParameter_LayerType_FLATTEN:
+ return "Flatten";
+ case V1LayerParameter_LayerType_HDF5_DATA:
+ return "HDF5Data";
+ case V1LayerParameter_LayerType_HDF5_OUTPUT:
+ return "HDF5Output";
+ case V1LayerParameter_LayerType_HINGE_LOSS:
+ return "HingeLoss";
+ case V1LayerParameter_LayerType_IM2COL:
+ return "Im2col";
+ case V1LayerParameter_LayerType_IMAGE_DATA:
+ return "ImageData";
+ case V1LayerParameter_LayerType_INFOGAIN_LOSS:
+ return "InfogainLoss";
+ case V1LayerParameter_LayerType_INNER_PRODUCT:
+ return "InnerProduct";
+ case V1LayerParameter_LayerType_LRN:
+ return "LRN";
+ case V1LayerParameter_LayerType_MEMORY_DATA:
+ return "MemoryData";
+ case V1LayerParameter_LayerType_MULTINOMIAL_LOGISTIC_LOSS:
+ return "MultinomialLogisticLoss";
+ case V1LayerParameter_LayerType_MVN:
+ return "MVN";
+ case V1LayerParameter_LayerType_POOLING:
+ return "Pooling";
+ case V1LayerParameter_LayerType_POWER:
+ return "Power";
+ case V1LayerParameter_LayerType_RELU:
+ return "ReLU";
+ case V1LayerParameter_LayerType_SIGMOID:
+ return "Sigmoid";
+ case V1LayerParameter_LayerType_SIGMOID_CROSS_ENTROPY_LOSS:
+ return "SigmoidCrossEntropyLoss";
+ case V1LayerParameter_LayerType_SILENCE:
+ return "Silence";
+ case V1LayerParameter_LayerType_SOFTMAX:
+ return "Softmax";
+ case V1LayerParameter_LayerType_SOFTMAX_LOSS:
+ return "SoftmaxWithLoss";
+ case V1LayerParameter_LayerType_SPLIT:
+ return "Split";
+ case V1LayerParameter_LayerType_SLICE:
+ return "Slice";
+ case V1LayerParameter_LayerType_TANH:
+ return "TanH";
+ case V1LayerParameter_LayerType_WINDOW_DATA:
+ return "WindowData";
+ case V1LayerParameter_LayerType_THRESHOLD:
+ return "Threshold";
+ default:
+ LOG(FATAL) << "Unknown V1LayerParameter layer type: " << type;
+ return "";
+ }
}
void ReadNetParamsFromTextFileOrDie(const string& param_file,