Merge pull request #20283 from SamFC10:fix-batchnorm
author    Alexander Alekhin <alexander.a.alekhin@gmail.com>
Mon, 21 Jun 2021 11:27:12 +0000 (11:27 +0000)
committer Alexander Alekhin <alexander.a.alekhin@gmail.com>
Mon, 21 Jun 2021 11:27:12 +0000 (11:27 +0000)
modules/dnn/src/layers/batch_norm_layer.cpp

index 27c3db6..42676c7 100644
@@ -29,6 +29,7 @@ namespace dnn
 class BatchNormLayerImpl CV_FINAL : public BatchNormLayer
 {
 public:
+    Mat origin_weights, origin_bias;
     Mat weights_, bias_;
     UMat umat_weight, umat_bias;
     mutable int dims;
@@ -82,11 +83,11 @@ public:
         const float* weightsData = hasWeights ? blobs[weightsBlobIndex].ptr<float>() : 0;
         const float* biasData = hasBias ? blobs[biasBlobIndex].ptr<float>() : 0;
 
-        weights_.create(1, (int)n, CV_32F);
-        bias_.create(1, (int)n, CV_32F);
+        origin_weights.create(1, (int)n, CV_32F);
+        origin_bias.create(1, (int)n, CV_32F);
 
-        float* dstWeightsData = weights_.ptr<float>();
-        float* dstBiasData = bias_.ptr<float>();
+        float* dstWeightsData = origin_weights.ptr<float>();
+        float* dstBiasData = origin_bias.ptr<float>();
 
         for (size_t i = 0; i < n; ++i)
         {
@@ -94,15 +95,12 @@ public:
             dstWeightsData[i] = w;
             dstBiasData[i] = (hasBias ? biasData[i] : 0.0f) - w * meanData[i] * varMeanScale;
         }
-        // We will use blobs to store origin weights and bias to restore them in case of reinitialization.
-        weights_.copyTo(blobs[0].reshape(1, 1));
-        bias_.copyTo(blobs[1].reshape(1, 1));
     }
 
     virtual void finalize(InputArrayOfArrays, OutputArrayOfArrays) CV_OVERRIDE
     {
-        blobs[0].reshape(1, 1).copyTo(weights_);
-        blobs[1].reshape(1, 1).copyTo(bias_);
+        origin_weights.reshape(1, 1).copyTo(weights_);
+        origin_bias.reshape(1, 1).copyTo(bias_);
     }
 
     void getScaleShift(Mat& scale, Mat& shift) const CV_OVERRIDE
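
Taken together, the hunks replace the old blob round-trip (the fused weights and bias were written back into blobs[0]/blobs[1] and read out again in finalize()) with two dedicated member Mats, origin_weights and origin_bias, so the layer's blobs are left untouched across reinitializations. Below is a minimal, self-contained sketch of the resulting pattern; the class name BatchNormSketch, the fuse() entry point, and the simplified fusion math (a plain eps term, with no varMeanScale or hasWeights/hasBias handling) are illustrative assumptions, not the actual BatchNormLayerImpl code.

    // Minimal sketch of the caching pattern this patch introduces
    // (illustrative only; names and the simplified math are assumptions,
    // not the real OpenCV implementation).
    #include <opencv2/core.hpp>
    #include <cmath>

    class BatchNormSketch
    {
    public:
        cv::Mat origin_weights, origin_bias; // pristine fused parameters, computed once
        cv::Mat weights_, bias_;             // working copies used at inference time

        // Fuse mean/var/gamma/beta into a per-channel scale and shift, caching
        // the result in the dedicated members instead of overwriting any blob.
        void fuse(const cv::Mat& mean, const cv::Mat& var,
                  const cv::Mat& gamma, const cv::Mat& beta, float eps)
        {
            const int n = mean.cols;
            origin_weights.create(1, n, CV_32F);
            origin_bias.create(1, n, CV_32F);
            for (int i = 0; i < n; ++i)
            {
                const float w = gamma.at<float>(i) / std::sqrt(var.at<float>(i) + eps);
                origin_weights.at<float>(i) = w;
                origin_bias.at<float>(i)    = beta.at<float>(i) - w * mean.at<float>(i);
            }
        }

        // On every (re)initialization, restore the working copies from the
        // cached originals; repeated calls stay correct because nothing ever
        // mutates origin_weights/origin_bias.
        void finalize()
        {
            origin_weights.reshape(1, 1).copyTo(weights_);
            origin_bias.reshape(1, 1).copyTo(bias_);
        }
    };

The design point mirrored from the diff: finalize() can run more than once over a network's lifetime, so the restore source must be data that fusion never mutates. The old code reused blobs[0]/blobs[1] for that purpose, at the cost of clobbering the layer's stored blobs with fused values; the new members keep the blobs pristine.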