automagic upgrade for v1->v2
author Jeff Donahue <jeff.donahue@gmail.com>
Thu, 15 Jan 2015 00:55:14 +0000 (16:55 -0800)
committer Jeff Donahue <jeff.donahue@gmail.com>
Thu, 5 Feb 2015 22:49:22 +0000 (14:49 -0800)
include/caffe/util/upgrade_proto.hpp
src/caffe/net.cpp
src/caffe/proto/caffe.proto
src/caffe/util/upgrade_proto.cpp
tools/upgrade_net_proto_binary.cpp
tools/upgrade_net_proto_text.cpp

diff --git a/include/caffe/util/upgrade_proto.hpp b/include/caffe/util/upgrade_proto.hpp
index e89756e..0df3a89 100644
@@ -8,9 +8,12 @@
 
 namespace caffe {
 
+// Return true iff the net is not the current version.
+bool NetNeedsUpgrade(const NetParameter& net_param);
+
 // Return true iff any layer contains parameters specified using
 // deprecated V0LayerParameter.
-bool NetNeedsUpgrade(const NetParameter& net_param);
+bool NetNeedsV0ToV1Upgrade(const NetParameter& net_param);
 
 // Perform all necessary transformations to upgrade a V0NetParameter into a
 // NetParameter (including upgrading padding layers and LayerParameters).
@@ -23,9 +26,9 @@ bool UpgradeV0Net(const NetParameter& v0_net_param, NetParameter* net_param);
 void UpgradeV0PaddingLayers(const NetParameter& param,
                             NetParameter* param_upgraded_pad);
 
-// Upgrade a single V0LayerConnection to the new LayerParameter format.
-bool UpgradeLayerParameter(const V1LayerParameter& v0_layer_connection,
-                           V1LayerParameter* layer_param);
+// Upgrade a single V0LayerConnection to the V1LayerParameter format.
+bool UpgradeV0LayerParameter(const V1LayerParameter& v0_layer_connection,
+                             V1LayerParameter* layer_param);
 
 V1LayerParameter_LayerType UpgradeV0LayerType(const string& type);
 
@@ -36,13 +39,25 @@ bool NetNeedsDataUpgrade(const NetParameter& net_param);
 // into a TransformationParameter.
 void UpgradeNetDataTransformation(NetParameter* net_param);
 
+// Return true iff the Net contains any layers specified as V1LayerParameters.
+bool NetNeedsV1ToV2Upgrade(const NetParameter& net_param);
+
+// Perform all necessary transformations to upgrade a NetParameter with
+// deprecated V1LayerParameters.
+bool UpgradeV1Net(const NetParameter& v1_net_param, NetParameter* net_param);
+
+bool UpgradeV1LayerParameter(const V1LayerParameter& v1_layer_param,
+                             LayerParameter* layer_param);
+
+const char* UpgradeV1LayerType(const V1LayerParameter_LayerType type);
+
 // Convert a NetParameter to NetParameterPrettyPrint used for dumping to
 // proto text files.
 void NetParameterToPrettyPrint(const NetParameter& param,
                                NetParameterPrettyPrint* pretty_param);
 
 // Check for deprecations and upgrade the NetParameter as needed.
-void UpgradeNetAsNeeded(NetParameter* param);
+bool UpgradeNetAsNeeded(const string& param_file, NetParameter* param);
 
 // Read parameters from a file into a NetParameter proto message.
 void ReadNetParamsFromTextFileOrDie(const string& param_file,
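
To see how the reworked entry points compose, here is a minimal sketch,
assuming the standard Caffe I/O helpers and a hypothetical net.prototxt path:

    #include "caffe/caffe.hpp"
    #include "caffe/util/io.hpp"
    #include "caffe/util/upgrade_proto.hpp"

    int main() {
      caffe::NetParameter param;
      // Hypothetical input path; ReadProtoFromTextFileOrDie() aborts on a
      // parse failure, so param is valid past this point.
      caffe::ReadProtoFromTextFileOrDie("net.prototxt", &param);
      // NetNeedsUpgrade() is now an umbrella over the V0->V1 and V1->V2
      // checks; UpgradeNetAsNeeded() returns false on a lossy upgrade
      // instead of aborting.
      if (caffe::NetNeedsUpgrade(param) &&
          !caffe::UpgradeNetAsNeeded("net.prototxt", &param)) {
        LOG(ERROR) << "Upgrade completed with warnings; see messages above.";
      }
      return 0;
    }
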
diff --git a/src/caffe/net.cpp b/src/caffe/net.cpp
index db9ac7d..4461fa8 100644
@@ -138,9 +138,10 @@ void Net<Dtype>::Init(const NetParameter& in_param) {
     CHECK(param_size == num_param_blobs || param_size == 0)
         << "Incorrect param size: should be either 0 or the same as "
            "the number of the layer's parameter blobs: " << num_param_blobs;
-    const int blob_share_mode_size = layer_param.blob_share_mode_size();
-    CHECK(blob_share_mode_size == num_param_blobs || blob_share_mode_size == 0)
-        << "Incorrect blob_share_mode size: should be either 0 or the same as "
+    const int param_share_mode_size = layer_param.param_share_mode_size();
+    CHECK(param_share_mode_size == num_param_blobs ||
+          param_share_mode_size == 0)
+        << "Incorrect param_share_mode size: should be either 0 or the same as "
            "the number of the layer's parameter blobs: " << num_param_blobs;
     for (int param_id = 0; param_id < num_param_blobs; ++param_id) {
       AppendParam(param, layer_id, param_id);
@@ -441,9 +442,9 @@ void Net<Dtype>::AppendParam(const NetParameter& param, const int layer_id,
     Blob<Dtype>* this_blob = layers_[layer_id]->blobs()[param_id].get();
     Blob<Dtype>* owner_blob =
         layers_[owner_layer_id]->blobs()[owner_param_id].get();
-    const int blob_share_mode_size = layer_param.blob_share_mode_size();
-    if (blob_share_mode_size > param_id &&
-        (layer_param.blob_share_mode(param_id) ==
+    const int param_share_mode_size = layer_param.param_share_mode_size();
+    if (param_share_mode_size > param_id &&
+        (layer_param.param_share_mode(param_id) ==
          LayerParameter_DimCheckMode_PERMISSIVE)) {
       // Permissive dimension checking -- only check counts are the same.
       CHECK_EQ(this_blob->count(), owner_blob->count())
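
For reference, a layer exercising the renamed field might be written as
follows (a hypothetical definition parsed through protobuf's TextFormat; the
layer and parameter names are made up):

    #include <google/protobuf/text_format.h>

    #include "caffe/proto/caffe.pb.h"

    int main() {
      // Two named param blobs shared by name, each checked permissively:
      // only the total count must match, not the full shape.  Per the
      // CHECK in Net<Dtype>::Init above, param_share_mode must have zero
      // entries or exactly one per parameter blob (two for InnerProduct).
      const char* kLayer =
          "name: 'fc_shared' type: 'InnerProduct' "
          "param: 'shared_w' param: 'shared_b' "
          "param_share_mode: PERMISSIVE param_share_mode: PERMISSIVE";
      caffe::LayerParameter layer;
      bool ok = google::protobuf::TextFormat::ParseFromString(kLayer, &layer);
      return ok ? 0 : 1;
    }
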
diff --git a/src/caffe/proto/caffe.proto b/src/caffe/proto/caffe.proto
index d81acf3..1178efe 100644
@@ -218,77 +218,80 @@ message NetStateRule {
 //
 // LayerParameter next available ID: 43 (last added: loss_param)
 message LayerParameter {
-  repeated string bottom = 2; // the name of the bottom blobs
-  repeated string top = 3; // the name of the top blobs
-  optional string name = 4; // the layer name
-
-  // Rules controlling whether and when a layer is included in the network,
-  // based on the current NetState.  You may specify a non-zero number of rules
-  // to include OR exclude, but not both.  If no include or exclude rules are
-  // specified, the layer is always included.  If the current NetState meets
-  // ANY (i.e., one or more) of the specified rules, the layer is
-  // included/excluded.
-  repeated NetStateRule include = 32;
-  repeated NetStateRule exclude = 33;
+  optional string name = 1; // the layer name
+  optional string type = 2; // the layer type
+  repeated string bottom = 3; // the name of each bottom blob
+  repeated string top = 4; // the name of each top blob
 
-  optional string type = 5; // the layer type from the enum above
+  // The amount of weight to assign each top blob in the objective.
+  // Each layer assigns a default value, usually of either 0 or 1,
+  // to each top blob.
+  repeated float loss_weight = 5;
 
   // The blobs containing the numeric parameters of the layer
   repeated BlobProto blobs = 6;
+
   // The names of the parameter blobs -- useful for sharing parameters among
   // layers (but never required).
-  repeated string param = 1001;
+  repeated string param = 7;
+
   // Whether to require shared weights to have the same shape, or just the same
   // count -- defaults to STRICT if unspecified.
-  repeated DimCheckMode blob_share_mode = 1002;
+  repeated DimCheckMode param_share_mode = 8;
   enum DimCheckMode {
     // STRICT (default) requires that num, channels, height, width each match.
     STRICT = 0;
     // PERMISSIVE requires only the count (num*channels*height*width) to match.
     PERMISSIVE = 1;
   }
+
   // The ratio that is multiplied on the global learning rate. If you want to
   // set the learning ratio for one blob, you need to set it for all blobs.
-  repeated float blobs_lr = 7;
-  // The weight decay that is multiplied on the global weight decay.
-  repeated float weight_decay = 8;
+  repeated float blobs_lr = 9;
 
-  // The amount of weight to assign each top blob in the objective.
-  // Each layer assigns a default value, usually of either 0 or 1,
-  // to each top blob.
-  repeated float loss_weight = 35;
+  // The weight decay that is multiplied on the global weight decay.
+  repeated float weight_decay = 10;
 
-  optional AccuracyParameter accuracy_param = 27;
-  optional ArgMaxParameter argmax_param = 23;
-  optional ConcatParameter concat_param = 9;
-  optional ContrastiveLossParameter contrastive_loss_param = 40;
-  optional ConvolutionParameter convolution_param = 10;
-  optional DataParameter data_param = 11;
-  optional DropoutParameter dropout_param = 12;
-  optional DummyDataParameter dummy_data_param = 26;
-  optional EltwiseParameter eltwise_param = 24;
-  optional ExpParameter exp_param = 41;
-  optional HDF5DataParameter hdf5_data_param = 13;
-  optional HDF5OutputParameter hdf5_output_param = 14;
-  optional HingeLossParameter hinge_loss_param = 29;
-  optional ImageDataParameter image_data_param = 15;
-  optional InfogainLossParameter infogain_loss_param = 16;
-  optional InnerProductParameter inner_product_param = 17;
-  optional LRNParameter lrn_param = 18;
-  optional MemoryDataParameter memory_data_param = 22;
-  optional MVNParameter mvn_param = 34;
-  optional PoolingParameter pooling_param = 19;
-  optional PowerParameter power_param = 21;
-  optional ReLUParameter relu_param = 30;
-  optional SigmoidParameter sigmoid_param = 38;
-  optional SoftmaxParameter softmax_param = 39;
-  optional SliceParameter slice_param = 31;
-  optional TanHParameter tanh_param = 37;
-  optional ThresholdParameter threshold_param = 25;
-  optional WindowDataParameter window_data_param = 20;
+  // Rules controlling whether and when a layer is included in the network,
+  // based on the current NetState.  You may specify a non-zero number of rules
+  // to include OR exclude, but not both.  If no include or exclude rules are
+  // specified, the layer is always included.  If the current NetState meets
+  // ANY (i.e., one or more) of the specified rules, the layer is
+  // included/excluded.
+  repeated NetStateRule include = 11;
+  repeated NetStateRule exclude = 12;
 
   // Parameters for data pre-processing.
-  optional TransformationParameter transform_param = 36;
+  optional TransformationParameter transform_param = 13;
+
+  optional AccuracyParameter accuracy_param = 14;
+  optional ArgMaxParameter argmax_param = 15;
+  optional ConcatParameter concat_param = 16;
+  optional ContrastiveLossParameter contrastive_loss_param = 17;
+  optional ConvolutionParameter convolution_param = 18;
+  optional DataParameter data_param = 19;
+  optional DropoutParameter dropout_param = 20;
+  optional DummyDataParameter dummy_data_param = 21;
+  optional EltwiseParameter eltwise_param = 22;
+  optional ExpParameter exp_param = 23;
+  optional HDF5DataParameter hdf5_data_param = 24;
+  optional HDF5OutputParameter hdf5_output_param = 25;
+  optional HingeLossParameter hinge_loss_param = 26;
+  optional ImageDataParameter image_data_param = 27;
+  optional InfogainLossParameter infogain_loss_param = 28;
+  optional InnerProductParameter inner_product_param = 29;
+  optional LRNParameter lrn_param = 30;
+  optional MemoryDataParameter memory_data_param = 31;
+  optional MVNParameter mvn_param = 32;
+  optional PoolingParameter pooling_param = 33;
+  optional PowerParameter power_param = 34;
+  optional ReLUParameter relu_param = 35;
+  optional SigmoidParameter sigmoid_param = 36;
+  optional SoftmaxParameter softmax_param = 37;
+  optional SliceParameter slice_param = 38;
+  optional TanHParameter tanh_param = 39;
+  optional ThresholdParameter threshold_param = 40;
+  optional WindowDataParameter window_data_param = 41;
 
   // Parameters shared by loss layers.
   optional LossParameter loss_param = 42;
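
To make the renumbering concrete, here is a hypothetical training-only loss
layer built against the reordered message (assuming the Phase enum generated
from this proto; the blob names are illustrative):

    #include "caffe/proto/caffe.pb.h"

    int main() {
      caffe::LayerParameter loss;
      loss.set_name("loss");
      loss.set_type("SoftmaxWithLoss");  // type is now a free-form string
      loss.add_bottom("fc8");
      loss.add_bottom("label");
      loss.add_top("loss");
      loss.add_loss_weight(1);  // weight of this top in the objective
      // Include rules: the layer is kept if the NetState matches ANY rule,
      // here restricting the layer to the TRAIN phase.
      loss.add_include()->set_phase(caffe::TRAIN);
      return 0;
    }
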
diff --git a/src/caffe/util/upgrade_proto.cpp b/src/caffe/util/upgrade_proto.cpp
index 16583b0..575d7a7 100644
 namespace caffe {
 
 bool NetNeedsUpgrade(const NetParameter& net_param) {
+  return NetNeedsV0ToV1Upgrade(net_param) || NetNeedsV1ToV2Upgrade(net_param);
+}
+
+bool NetNeedsV0ToV1Upgrade(const NetParameter& net_param) {
   for (int i = 0; i < net_param.layers_size(); ++i) {
     if (net_param.layers(i).has_layer()) {
       return true;
@@ -21,6 +25,10 @@ bool NetNeedsUpgrade(const NetParameter& net_param) {
   return false;
 }
 
+bool NetNeedsV1ToV2Upgrade(const NetParameter& net_param) {
+  return net_param.layers_size() > 0;
+}
+
 bool UpgradeV0Net(const NetParameter& v0_net_param_padding_layers,
                   NetParameter* net_param) {
   // First upgrade padding layers to padded conv layers.
@@ -33,8 +41,8 @@ bool UpgradeV0Net(const NetParameter& v0_net_param_padding_layers,
     net_param->set_name(v0_net_param.name());
   }
   for (int i = 0; i < v0_net_param.layers_size(); ++i) {
-    is_fully_compatible &= UpgradeLayerParameter(v0_net_param.layers(i),
-                                                 net_param->add_layers());
+    is_fully_compatible &= UpgradeV0LayerParameter(v0_net_param.layers(i),
+                                                   net_param->add_layers());
   }
   for (int i = 0; i < v0_net_param.input_size(); ++i) {
     net_param->add_input(v0_net_param.input(i));
@@ -107,8 +115,8 @@ void UpgradeV0PaddingLayers(const NetParameter& param,
   }
 }
 
-bool UpgradeLayerParameter(const V1LayerParameter& v0_layer_connection,
-                           V1LayerParameter* layer_param) {
+bool UpgradeV0LayerParameter(const V1LayerParameter& v0_layer_connection,
+                             V1LayerParameter* layer_param) {
   bool is_fully_compatible = true;
   layer_param->Clear();
   for (int i = 0; i < v0_layer_connection.bottom_size(); ++i) {
@@ -590,19 +598,21 @@ void NetParameterToPrettyPrint(const NetParameter& param,
   for (int i = 0; i < param.input_dim_size(); ++i) {
     pretty_param->add_input_dim(param.input_dim(i));
   }
-  for (int i = 0; i < param.layers_size(); ++i) {
+  for (int i = 0; i < param.layer_size(); ++i) {
     pretty_param->add_layer()->CopyFrom(param.layer(i));
   }
 }
 
-void UpgradeNetAsNeeded(const string& param_file, NetParameter* param) {
-  if (NetNeedsUpgrade(*param)) {
+bool UpgradeNetAsNeeded(const string& param_file, NetParameter* param) {
+  bool success = true;
+  if (NetNeedsV0ToV1Upgrade(*param)) {
     // NetParameter was specified using the old style (V0LayerParameter); try to
     // upgrade it.
     LOG(ERROR) << "Attempting to upgrade input file specified using deprecated "
                << "V0LayerParameter: " << param_file;
     NetParameter original_param(*param);
     if (!UpgradeV0Net(original_param, param)) {
+      success = false;
       LOG(ERROR) << "Warning: had one or more problems upgrading "
           << "V0NetParameter to NetParameter (see above); continuing anyway.";
     } else {
@@ -624,6 +634,299 @@ void UpgradeNetAsNeeded(const string& param_file, NetParameter* param) {
     LOG(ERROR) << "Note that future Caffe releases will only support "
                << "transform_param messages for transformation fields.";
   }
+  if (NetNeedsV1ToV2Upgrade(*param)) {
+    LOG(ERROR) << "Attempting to upgrade input file specified using deprecated "
+               << "V1LayerParameter: " << param_file;
+    NetParameter original_param(*param);
+    if (!UpgradeV1Net(original_param, param)) {
+      success = false;
+      LOG(ERROR) << "Warning: had one or more problems upgrading "
+          << "V1LayerParameter (see above); continuing anyway.";
+    } else {
+      LOG(INFO) << "Successfully upgraded file specified using deprecated "
+                << "V1LayerParameter";
+    }
+  }
+  return success;
+}
+
+bool UpgradeV1Net(const NetParameter& v1_net_param, NetParameter* net_param) {
+  bool is_fully_compatible = true;
+  if (v1_net_param.layer_size() > 0) {
+    LOG(ERROR) << "Input NetParameter to be upgraded already specifies 'layer' "
+               << "fields; these will be ignored for the upgrade.";
+    is_fully_compatible = false;
+  }
+  net_param->CopyFrom(v1_net_param);
+  net_param->clear_layers();
+  for (int i = 0; i < v1_net_param.layers_size(); ++i) {
+    if (!UpgradeV1LayerParameter(v1_net_param.layers(i),
+                                 net_param->add_layer())) {
+      LOG(ERROR) << "Upgrade of input layer " << i << " failed.";
+      is_fully_compatible = false;
+    }
+  }
+  return is_fully_compatible;
+}
+
+bool UpgradeV1LayerParameter(const V1LayerParameter& v1_layer_param,
+                             LayerParameter* layer_param) {
+  layer_param->Clear();
+  bool is_fully_compatible = true;
+  for (int i = 0; i < v1_layer_param.bottom_size(); ++i) {
+    layer_param->add_bottom(v1_layer_param.bottom(i));
+  }
+  for (int i = 0; i < v1_layer_param.top_size(); ++i) {
+    layer_param->add_top(v1_layer_param.top(i));
+  }
+  if (v1_layer_param.has_name()) {
+    layer_param->set_name(v1_layer_param.name());
+  }
+  for (int i = 0; i < v1_layer_param.include_size(); ++i) {
+    layer_param->add_include()->CopyFrom(v1_layer_param.include(i));
+  }
+  for (int i = 0; i < v1_layer_param.exclude_size(); ++i) {
+    layer_param->add_exclude()->CopyFrom(v1_layer_param.exclude(i));
+  }
+  if (v1_layer_param.has_type()) {
+    layer_param->set_type(UpgradeV1LayerType(v1_layer_param.type()));
+  }
+  for (int i = 0; i < v1_layer_param.blobs_size(); ++i) {
+    layer_param->add_blobs()->CopyFrom(v1_layer_param.blobs(i));
+  }
+  for (int i = 0; i < v1_layer_param.param_size(); ++i) {
+    layer_param->add_param(v1_layer_param.param(i));
+  }
+  for (int i = 0; i < v1_layer_param.blob_share_mode_size(); ++i) {
+    switch (v1_layer_param.blob_share_mode(i)) {
+    case V1LayerParameter_DimCheckMode_STRICT:
+      layer_param->add_param_share_mode(LayerParameter_DimCheckMode_STRICT);
+      break;
+    case V1LayerParameter_DimCheckMode_PERMISSIVE:
+      layer_param->add_param_share_mode(LayerParameter_DimCheckMode_PERMISSIVE);
+      break;
+    default:
+      LOG(FATAL) << "Unknown blob_share_mode: "
+                 << v1_layer_param.blob_share_mode(i);
+      break;
+    }
+  }
+  for (int i = 0; i < v1_layer_param.blobs_lr_size(); ++i) {
+    layer_param->add_blobs_lr(v1_layer_param.blobs_lr(i));
+  }
+  for (int i = 0; i < v1_layer_param.weight_decay_size(); ++i) {
+    layer_param->add_weight_decay(v1_layer_param.weight_decay(i));
+  }
+  for (int i = 0; i < v1_layer_param.loss_weight_size(); ++i) {
+    layer_param->add_loss_weight(v1_layer_param.loss_weight(i));
+  }
+  if (v1_layer_param.has_accuracy_param()) {
+    layer_param->mutable_accuracy_param()->CopyFrom(
+        v1_layer_param.accuracy_param());
+  }
+  if (v1_layer_param.has_argmax_param()) {
+    layer_param->mutable_argmax_param()->CopyFrom(
+        v1_layer_param.argmax_param());
+  }
+  if (v1_layer_param.has_concat_param()) {
+    layer_param->mutable_concat_param()->CopyFrom(
+        v1_layer_param.concat_param());
+  }
+  if (v1_layer_param.has_contrastive_loss_param()) {
+    layer_param->mutable_contrastive_loss_param()->CopyFrom(
+        v1_layer_param.contrastive_loss_param());
+  }
+  if (v1_layer_param.has_convolution_param()) {
+    layer_param->mutable_convolution_param()->CopyFrom(
+        v1_layer_param.convolution_param());
+  }
+  if (v1_layer_param.has_data_param()) {
+    layer_param->mutable_data_param()->CopyFrom(
+        v1_layer_param.data_param());
+  }
+  if (v1_layer_param.has_dropout_param()) {
+    layer_param->mutable_dropout_param()->CopyFrom(
+        v1_layer_param.dropout_param());
+  }
+  if (v1_layer_param.has_dummy_data_param()) {
+    layer_param->mutable_dummy_data_param()->CopyFrom(
+        v1_layer_param.dummy_data_param());
+  }
+  if (v1_layer_param.has_eltwise_param()) {
+    layer_param->mutable_eltwise_param()->CopyFrom(
+        v1_layer_param.eltwise_param());
+  }
+  if (v1_layer_param.has_exp_param()) {
+    layer_param->mutable_exp_param()->CopyFrom(
+        v1_layer_param.exp_param());
+  }
+  if (v1_layer_param.has_hdf5_data_param()) {
+    layer_param->mutable_hdf5_data_param()->CopyFrom(
+        v1_layer_param.hdf5_data_param());
+  }
+  if (v1_layer_param.has_hdf5_output_param()) {
+    layer_param->mutable_hdf5_output_param()->CopyFrom(
+        v1_layer_param.hdf5_output_param());
+  }
+  if (v1_layer_param.has_hinge_loss_param()) {
+    layer_param->mutable_hinge_loss_param()->CopyFrom(
+        v1_layer_param.hinge_loss_param());
+  }
+  if (v1_layer_param.has_image_data_param()) {
+    layer_param->mutable_image_data_param()->CopyFrom(
+        v1_layer_param.image_data_param());
+  }
+  if (v1_layer_param.has_infogain_loss_param()) {
+    layer_param->mutable_infogain_loss_param()->CopyFrom(
+        v1_layer_param.infogain_loss_param());
+  }
+  if (v1_layer_param.has_inner_product_param()) {
+    layer_param->mutable_inner_product_param()->CopyFrom(
+        v1_layer_param.inner_product_param());
+  }
+  if (v1_layer_param.has_lrn_param()) {
+    layer_param->mutable_lrn_param()->CopyFrom(
+        v1_layer_param.lrn_param());
+  }
+  if (v1_layer_param.has_memory_data_param()) {
+    layer_param->mutable_memory_data_param()->CopyFrom(
+        v1_layer_param.memory_data_param());
+  }
+  if (v1_layer_param.has_mvn_param()) {
+    layer_param->mutable_mvn_param()->CopyFrom(
+        v1_layer_param.mvn_param());
+  }
+  if (v1_layer_param.has_pooling_param()) {
+    layer_param->mutable_pooling_param()->CopyFrom(
+        v1_layer_param.pooling_param());
+  }
+  if (v1_layer_param.has_power_param()) {
+    layer_param->mutable_power_param()->CopyFrom(
+        v1_layer_param.power_param());
+  }
+  if (v1_layer_param.has_relu_param()) {
+    layer_param->mutable_relu_param()->CopyFrom(
+        v1_layer_param.relu_param());
+  }
+  if (v1_layer_param.has_sigmoid_param()) {
+    layer_param->mutable_sigmoid_param()->CopyFrom(
+        v1_layer_param.sigmoid_param());
+  }
+  if (v1_layer_param.has_softmax_param()) {
+    layer_param->mutable_softmax_param()->CopyFrom(
+        v1_layer_param.softmax_param());
+  }
+  if (v1_layer_param.has_slice_param()) {
+    layer_param->mutable_slice_param()->CopyFrom(
+        v1_layer_param.slice_param());
+  }
+  if (v1_layer_param.has_tanh_param()) {
+    layer_param->mutable_tanh_param()->CopyFrom(
+        v1_layer_param.tanh_param());
+  }
+  if (v1_layer_param.has_threshold_param()) {
+    layer_param->mutable_threshold_param()->CopyFrom(
+        v1_layer_param.threshold_param());
+  }
+  if (v1_layer_param.has_window_data_param()) {
+    layer_param->mutable_window_data_param()->CopyFrom(
+        v1_layer_param.window_data_param());
+  }
+  if (v1_layer_param.has_transform_param()) {
+    layer_param->mutable_transform_param()->CopyFrom(
+        v1_layer_param.transform_param());
+  }
+  if (v1_layer_param.has_layer()) {
+    LOG(ERROR) << "Input NetParameter has V0 layer -- ignoring.";
+    is_fully_compatible = false;
+  }
+  return is_fully_compatible;
+}
+
+const char* UpgradeV1LayerType(const V1LayerParameter_LayerType type) {
+  switch (type) {
+  case V1LayerParameter_LayerType_NONE:
+    return "";
+  case V1LayerParameter_LayerType_ABSVAL:
+    return "AbsVal";
+  case V1LayerParameter_LayerType_ACCURACY:
+    return "Accuracy";
+  case V1LayerParameter_LayerType_ARGMAX:
+    return "ArgMax";
+  case V1LayerParameter_LayerType_BNLL:
+    return "BNLL";
+  case V1LayerParameter_LayerType_CONCAT:
+    return "Concat";
+  case V1LayerParameter_LayerType_CONTRASTIVE_LOSS:
+    return "ContrastiveLoss";
+  case V1LayerParameter_LayerType_CONVOLUTION:
+    return "Convolution";
+  case V1LayerParameter_LayerType_DATA:
+    return "Data";
+  case V1LayerParameter_LayerType_DROPOUT:
+    return "Dropout";
+  case V1LayerParameter_LayerType_DUMMY_DATA:
+    return "DummyData";
+  case V1LayerParameter_LayerType_EUCLIDEAN_LOSS:
+    return "EuclideanLoss";
+  case V1LayerParameter_LayerType_ELTWISE:
+    return "Eltwise";
+  case V1LayerParameter_LayerType_EXP:
+    return "Exp";
+  case V1LayerParameter_LayerType_FLATTEN:
+    return "Flatten";
+  case V1LayerParameter_LayerType_HDF5_DATA:
+    return "HDF5Data";
+  case V1LayerParameter_LayerType_HDF5_OUTPUT:
+    return "HDF5Output";
+  case V1LayerParameter_LayerType_HINGE_LOSS:
+    return "HingeLoss";
+  case V1LayerParameter_LayerType_IM2COL:
+    return "Im2col";
+  case V1LayerParameter_LayerType_IMAGE_DATA:
+    return "ImageData";
+  case V1LayerParameter_LayerType_INFOGAIN_LOSS:
+    return "InfogainLoss";
+  case V1LayerParameter_LayerType_INNER_PRODUCT:
+    return "InnerProduct";
+  case V1LayerParameter_LayerType_LRN:
+    return "LRN";
+  case V1LayerParameter_LayerType_MEMORY_DATA:
+    return "MemoryData";
+  case V1LayerParameter_LayerType_MULTINOMIAL_LOGISTIC_LOSS:
+    return "MultinomialLogisticLoss";
+  case V1LayerParameter_LayerType_MVN:
+    return "MVN";
+  case V1LayerParameter_LayerType_POOLING:
+    return "Pooling";
+  case V1LayerParameter_LayerType_POWER:
+    return "Power";
+  case V1LayerParameter_LayerType_RELU:
+    return "ReLU";
+  case V1LayerParameter_LayerType_SIGMOID:
+    return "Sigmoid";
+  case V1LayerParameter_LayerType_SIGMOID_CROSS_ENTROPY_LOSS:
+    return "SigmoidCrossEntropyLoss";
+  case V1LayerParameter_LayerType_SILENCE:
+    return "Silence";
+  case V1LayerParameter_LayerType_SOFTMAX:
+    return "Softmax";
+  case V1LayerParameter_LayerType_SOFTMAX_LOSS:
+    return "SoftmaxWithLoss";
+  case V1LayerParameter_LayerType_SPLIT:
+    return "Split";
+  case V1LayerParameter_LayerType_SLICE:
+    return "Slice";
+  case V1LayerParameter_LayerType_TANH:
+    return "TanH";
+  case V1LayerParameter_LayerType_WINDOW_DATA:
+    return "WindowData";
+  case V1LayerParameter_LayerType_THRESHOLD:
+    return "Threshold";
+  default:
+    LOG(FATAL) << "Unknown V1LayerParameter layer type: " << type;
+    return "";
+  }
 }
 
 void ReadNetParamsFromTextFileOrDie(const string& param_file,
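
A sketch of the conversion these helpers perform on a single layer, assuming
the declarations above (the layer itself is made up):

    #include "caffe/proto/caffe.pb.h"
    #include "caffe/util/upgrade_proto.hpp"

    int main() {
      // A made-up V1 layer: enum-typed, with the old blob_share_mode field.
      caffe::V1LayerParameter v1;
      v1.set_name("relu1");
      v1.set_type(caffe::V1LayerParameter_LayerType_RELU);
      v1.add_blob_share_mode(caffe::V1LayerParameter_DimCheckMode_PERMISSIVE);

      caffe::LayerParameter v2;
      // Copies every recognized field: the enum type becomes the string
      // "ReLU" via UpgradeV1LayerType(), and blob_share_mode becomes
      // param_share_mode.  Returns false only when a deprecated V0 'layer'
      // field has to be dropped.
      bool ok = caffe::UpgradeV1LayerParameter(v1, &v2);
      return ok ? 0 : 1;
    }
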
diff --git a/tools/upgrade_net_proto_binary.cpp b/tools/upgrade_net_proto_binary.cpp
index d7a62e3..8a0dd7a 100644
@@ -5,6 +5,7 @@
 #include <cstring>
 #include <fstream>  // NOLINT(readability/streams)
 #include <iostream>  // NOLINT(readability/streams)
+#include <string>
 
 #include "caffe/caffe.hpp"
 #include "caffe/util/io.hpp"
@@ -23,16 +24,20 @@ int main(int argc, char** argv) {
   }
 
   NetParameter net_param;
-  if (!ReadProtoFromBinaryFile(argv[1], &net_param)) {
+  string input_filename(argv[1]);
+  if (!ReadProtoFromBinaryFile(input_filename, &net_param)) {
     LOG(ERROR) << "Failed to parse input binary file as NetParameter: "
-               << argv[1];
+               << input_filename;
     return 2;
   }
   bool need_upgrade = NetNeedsUpgrade(net_param);
   bool success = true;
   if (need_upgrade) {
-    NetParameter v0_net_param(net_param);
-    success = UpgradeV0Net(v0_net_param, &net_param);
+    success = UpgradeNetAsNeeded(input_filename, &net_param);
+    if (!success) {
+      LOG(ERROR) << "Encountered error(s) while upgrading prototxt; "
+                 << "see details above.";
+    }
   } else {
     LOG(ERROR) << "File already in V1 proto format: " << argv[1];
   }
diff --git a/tools/upgrade_net_proto_text.cpp b/tools/upgrade_net_proto_text.cpp
index 2f290fc..0fe420a 100644
@@ -5,6 +5,7 @@
 #include <cstring>
 #include <fstream>  // NOLINT(readability/streams)
 #include <iostream>  // NOLINT(readability/streams)
+#include <string>
 
 #include "caffe/caffe.hpp"
 #include "caffe/util/io.hpp"
@@ -23,19 +24,23 @@ int main(int argc, char** argv) {
   }
 
   NetParameter net_param;
-  if (!ReadProtoFromTextFile(argv[1], &net_param)) {
+  string input_filename(argv[1]);
+  if (!ReadProtoFromTextFile(input_filename, &net_param)) {
     LOG(ERROR) << "Failed to parse input text file as NetParameter: "
-               << argv[1];
+               << input_filename;
     return 2;
   }
   bool need_upgrade = NetNeedsUpgrade(net_param);
   bool need_data_upgrade = NetNeedsDataUpgrade(net_param);
   bool success = true;
   if (need_upgrade) {
-    NetParameter v0_net_param(net_param);
-    success = UpgradeV0Net(v0_net_param, &net_param);
+    success = UpgradeNetAsNeeded(input_filename, &net_param);
+    if (!success) {
+      LOG(ERROR) << "Encountered error(s) while upgrading prototxt; "
+                 << "see details above.";
+    }
   } else {
-    LOG(ERROR) << "File already in V1 proto format: " << argv[1];
+    LOG(ERROR) << "File already in latest proto format: " << input_filename;
   }
 
   if (need_data_upgrade) {
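
The text-format tool runs the same way and, when NetNeedsDataUpgrade()
reports legacy fields, additionally rewrites them into transform_param
messages (paths again assume the usual build layout; filenames are
hypothetical):

    build/tools/upgrade_net_proto_text old_train_val.prototxt upgraded_train_val.prototxt
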