Reduce memory consumption in the Caffe importer
author    Dmitry Kurtaev <dmitry.kurtaev+github@gmail.com>
          Fri, 4 May 2018 06:23:22 +0000 (09:23 +0300)
committer Dmitry Kurtaev <dmitry.kurtaev+github@gmail.com>
          Fri, 4 May 2018 06:24:13 +0000 (09:24 +0300)
modules/dnn/src/caffe/caffe_importer.cpp
modules/dnn/src/caffe/caffe_io.cpp

diff --git a/modules/dnn/src/caffe/caffe_importer.cpp b/modules/dnn/src/caffe/caffe_importer.cpp
index 1cc6d28..37db7f0 100644
@@ -250,16 +250,13 @@ public:
         blobShapeFromProto(pbBlob, shape);
 
         dstBlob.create((int)shape.size(), &shape[0], CV_32F);
-        float *dstData = dstBlob.ptr<float>();
         if (pbBlob.data_size())
         {
             // Single precision floats.
             CV_Assert(pbBlob.data_size() == (int)dstBlob.total());
 
             CV_DbgAssert(pbBlob.GetDescriptor()->FindFieldByLowercaseName("data")->cpp_type() == FieldDescriptor::CPPTYPE_FLOAT);
-
-            for (int i = 0; i < pbBlob.data_size(); i++)
-                dstData[i] = pbBlob.data(i);
+            Mat(dstBlob.dims, &dstBlob.size[0], CV_32F, (void*)pbBlob.data().data()).copyTo(dstBlob);
         }
         else
         {
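This hunk replaces the element-by-element copy out of the protobuf message with a single bulk copy: a temporary Mat header is wrapped around the float storage owned by pbBlob, and copyTo() transfers it in one pass, so the per-element loop and the dstData pointer are no longer needed. A minimal standalone sketch of the same pattern, with a std::vector standing in for pbBlob.data():

```cpp
#include <opencv2/core.hpp>
#include <vector>

int main()
{
    std::vector<float> protoData(2 * 3 * 4 * 4, 1.f);  // stand-in for pbBlob.data()
    int shape[] = {2, 3, 4, 4};

    cv::Mat dstBlob(4, shape, CV_32F);

    // Wrap the existing storage in a Mat header: no allocation, no copy yet.
    // The (void*) cast mirrors the importer's cast, since this Mat
    // constructor takes a non-const data pointer.
    cv::Mat src(4, shape, CV_32F, (void*)protoData.data());

    src.copyTo(dstBlob);  // one contiguous copy instead of an element loop
    CV_Assert(dstBlob.total() == protoData.size());
    return 0;
}
```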
@@ -288,11 +285,18 @@ public:
         if (li == netBinary.layer_size() || netBinary.layer(li).blobs_size() == 0)
             return;
 
-        const caffe::LayerParameter &binLayer = netBinary.layer(li);
-        layerParams.blobs.resize(binLayer.blobs_size());
-        for (int bi = 0; bi < binLayer.blobs_size(); bi++)
+        caffe::LayerParameter* binLayer = netBinary.mutable_layer(li);
+        const int numBlobs = binLayer->blobs_size();
+        layerParams.blobs.resize(numBlobs);
+        for (int bi = 0; bi < numBlobs; bi++)
+        {
+            blobFromProto(binLayer->blobs(bi), layerParams.blobs[bi]);
+        }
+        binLayer->clear_blobs();
+        CV_Assert(numBlobs == binLayer->blobs().ClearedCount());
+        for (int bi = 0; bi < numBlobs; bi++)
         {
-            blobFromProto(binLayer.blobs(bi), layerParams.blobs[bi]);
+            delete binLayer->mutable_blobs()->ReleaseCleared();
         }
     }
 
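The second hunk releases netBinary's weight buffers as soon as they have been copied into layerParams.blobs. A plain Clear() is not enough: a protobuf RepeatedPtrField parks cleared elements in an internal pool for later reuse, so the memory stays resident. ReleaseCleared() transfers ownership of each pooled element back to the caller, and deleting it actually frees the blob. A minimal sketch with RepeatedPtrField<std::string> standing in for the repeated BlobProto field (ClearedCount()/ReleaseCleared() exist in 2018-era protobuf, though newer releases deprecate them):

```cpp
#include <google/protobuf/repeated_field.h>
#include <cassert>
#include <string>

int main()
{
    google::protobuf::RepeatedPtrField<std::string> blobs;
    *blobs.Add() = std::string(1 << 20, 'w');  // stand-in for a ~1 MB weight blob
    const int numBlobs = blobs.size();

    // Clear() detaches the elements but keeps them pooled for reuse;
    // the payload above is still resident at this point.
    blobs.Clear();
    assert(blobs.ClearedCount() == numBlobs);

    // ReleaseCleared() pops an element from the pool and hands ownership
    // to the caller, so deleting it actually returns the memory.
    for (int i = 0; i < numBlobs; ++i)
        delete blobs.ReleaseCleared();
    assert(blobs.ClearedCount() == 0);
    return 0;
}
```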
diff --git a/modules/dnn/src/caffe/caffe_io.cpp b/modules/dnn/src/caffe/caffe_io.cpp
index 7fc5fea..02d1aa1 100644
@@ -132,7 +132,7 @@ void UpgradeV0PaddingLayers(const NetParameter& param,
                             NetParameter* param_upgraded_pad);
 
 // Upgrade a single V0LayerConnection to the V1LayerParameter format.
-bool UpgradeV0LayerParameter(const V1LayerParameter& v0_layer_connection,
+bool UpgradeV0LayerParameter(V1LayerParameter* v0_layer_connection,
                              V1LayerParameter* layer_param);
 
 V1LayerParameter_LayerType UpgradeV0LayerType(const string& type);
@@ -149,9 +149,9 @@ bool NetNeedsV1ToV2Upgrade(const NetParameter& net_param);
 
 // Perform all necessary transformations to upgrade a NetParameter with
 // deprecated V1LayerParameters.
-bool UpgradeV1Net(const NetParameter& v1_net_param, NetParameter* net_param);
+bool UpgradeV1Net(NetParameter* net_param);
 
-bool UpgradeV1LayerParameter(const V1LayerParameter& v1_layer_param,
+bool UpgradeV1LayerParameter(V1LayerParameter* v1_layer_param,
                              LayerParameter* layer_param);
 
 const char* UpgradeV1LayerType(const V1LayerParameter_LayerType type);
@@ -194,7 +194,7 @@ bool UpgradeV0Net(const NetParameter& v0_net_param_padding_layers,
     net_param->set_name(v0_net_param.name());
   }
   for (int i = 0; i < v0_net_param.layers_size(); ++i) {
-    is_fully_compatible &= UpgradeV0LayerParameter(v0_net_param.layers(i),
+    is_fully_compatible &= UpgradeV0LayerParameter(v0_net_param.mutable_layers(i),
                                                    net_param->add_layers());
   }
   for (int i = 0; i < v0_net_param.input_size(); ++i) {
@@ -268,8 +268,9 @@ void UpgradeV0PaddingLayers(const NetParameter& param,
   }
 }
 
-bool UpgradeV0LayerParameter(const V1LayerParameter& v0_layer_connection,
+bool UpgradeV0LayerParameter(V1LayerParameter* v0_layer_connection_,
                              V1LayerParameter* layer_param) {
+  const V1LayerParameter& v0_layer_connection = *v0_layer_connection_;
   bool is_fully_compatible = true;
   layer_param->Clear();
   for (int i = 0; i < v0_layer_connection.bottom_size(); ++i) {
@@ -287,9 +288,7 @@ bool UpgradeV0LayerParameter(const V1LayerParameter& v0_layer_connection,
     if (v0_layer_param.has_type()) {
       layer_param->set_type(UpgradeV0LayerType(type));
     }
-    for (int i = 0; i < v0_layer_param.blobs_size(); ++i) {
-      layer_param->add_blobs()->CopyFrom(v0_layer_param.blobs(i));
-    }
+    layer_param->mutable_blobs()->Swap(v0_layer_connection_->mutable_blobs());
     for (int i = 0; i < v0_layer_param.blobs_lr_size(); ++i) {
       layer_param->add_blobs_lr(v0_layer_param.blobs_lr(i));
     }
@@ -770,8 +769,7 @@ bool UpgradeNetAsNeeded(const string& param_file, NetParameter* param) {
   if (NetNeedsV1ToV2Upgrade(*param)) {
     LOG(ERROR) << "Attempting to upgrade input file specified using deprecated "
                << "V1LayerParameter: " << param_file;
-    NetParameter original_param(*param);
-    if (!UpgradeV1Net(original_param, param)) {
+    if (!UpgradeV1Net(param)) {
       success = false;
       LOG(ERROR) << "Warning: had one or more problems upgrading "
           << "V1LayerParameter (see above); continuing anyway.";
@@ -791,23 +789,23 @@ bool UpgradeNetAsNeeded(const string& param_file, NetParameter* param) {
   return success;
 }
 
-bool UpgradeV1Net(const NetParameter& v1_net_param, NetParameter* net_param) {
+bool UpgradeV1Net(NetParameter* net_param) {
+  // V1LayerParameter layers -> LayerParameter layer
   bool is_fully_compatible = true;
-  if (v1_net_param.layer_size() > 0) {
+  if (net_param->layer_size() > 0) {
     LOG(ERROR) << "Input NetParameter to be upgraded already specifies 'layer' "
                << "fields; these will be ignored for the upgrade.";
     is_fully_compatible = false;
   }
-  net_param->CopyFrom(v1_net_param);
-  net_param->clear_layers();
   net_param->clear_layer();
-  for (int i = 0; i < v1_net_param.layers_size(); ++i) {
-    if (!UpgradeV1LayerParameter(v1_net_param.layers(i),
+  for (int i = 0; i < net_param->layers_size(); ++i) {
+    if (!UpgradeV1LayerParameter(net_param->mutable_layers(i),
                                  net_param->add_layer())) {
       LOG(ERROR) << "Upgrade of input layer " << i << " failed.";
       is_fully_compatible = false;
     }
   }
+  net_param->clear_layers();
   return is_fully_compatible;
 }
 
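UpgradeV1Net now upgrades the message in place: both the caller's deep copy (NetParameter original_param(*param)) and the internal net_param->CopyFrom(v1_net_param) are gone, so the model, weights included, is never held twice. The ordering matters: the loop reads from the deprecated layers field while appending to the new layer field, so clear_layers() may only run after the loop. A toy analogue of this consume-while-migrating pattern, with std::string swaps standing in for the per-layer upgrade:

```cpp
#include <google/protobuf/repeated_field.h>
#include <cassert>
#include <string>

using google::protobuf::RepeatedPtrField;

// Migrate elements from a deprecated field ("layers") to its replacement
// ("layer") on the same message, without holding a second copy of the data.
int main()
{
    RepeatedPtrField<std::string> layers, layer;  // old field, new field
    *layers.Add() = std::string(1 << 20, 'a');    // big payloads (weights)
    *layers.Add() = std::string(1 << 20, 'b');

    for (int i = 0; i < layers.size(); ++i)
        layer.Add()->swap(*layers.Mutable(i));    // O(1) handoff per element

    layers.Clear();  // only after the loop: the loop reads from 'layers'
    assert(layer.size() == 2 && layer.Get(0)[0] == 'a');
    return 0;
}
```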
@@ -834,8 +832,9 @@ void UpgradeNetBatchNorm(NetParameter* net_param) {
   }
 }
 
-bool UpgradeV1LayerParameter(const V1LayerParameter& v1_layer_param,
+bool UpgradeV1LayerParameter(V1LayerParameter* v1_layer_param_,
                              LayerParameter* layer_param) {
+  const V1LayerParameter& v1_layer_param = *v1_layer_param_;
   layer_param->Clear();
   bool is_fully_compatible = true;
   for (int i = 0; i < v1_layer_param.bottom_size(); ++i) {
@@ -856,9 +855,7 @@ bool UpgradeV1LayerParameter(const V1LayerParameter& v1_layer_param,
   if (v1_layer_param.has_type()) {
     layer_param->set_type(UpgradeV1LayerType(v1_layer_param.type()));
   }
-  for (int i = 0; i < v1_layer_param.blobs_size(); ++i) {
-    layer_param->add_blobs()->CopyFrom(v1_layer_param.blobs(i));
-  }
+  layer_param->mutable_blobs()->Swap(v1_layer_param_->mutable_blobs());
   for (int i = 0; i < v1_layer_param.param_size(); ++i) {
     while (layer_param->param_size() <= i) { layer_param->add_param(); }
     layer_param->mutable_param(i)->set_name(v1_layer_param.param(i));