class BatchNormLayerImpl CV_FINAL : public BatchNormLayer
{
public:
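+    // Pristine copies of the fused weights and bias, used to restore
+    // weights_ and bias_ when the layer is reinitialized (previously these
+    // values were round-tripped through blobs[0] and blobs[1]).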
+    Mat origin_weights, origin_bias;
    Mat weights_, bias_;
    UMat umat_weight, umat_bias;
    mutable int dims;
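+        // Optional per-channel scale and shift learned with the model;
+        // the pointers stay null when the layer has no such parameters.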
        const float* weightsData = hasWeights ? blobs[weightsBlobIndex].ptr<float>() : 0;
        const float* biasData = hasBias ? blobs[biasBlobIndex].ptr<float>() : 0;
-        weights_.create(1, (int)n, CV_32F);
-        bias_.create(1, (int)n, CV_32F);
+        origin_weights.create(1, (int)n, CV_32F);
+        origin_bias.create(1, (int)n, CV_32F);
-        float* dstWeightsData = weights_.ptr<float>();
-        float* dstBiasData = bias_.ptr<float>();
+        float* dstWeightsData = origin_weights.ptr<float>();
+        float* dstBiasData = origin_bias.ptr<float>();
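+        // Fold batch normalization into a per-channel affine transform
+        // y = w * x + b'. The scale w (computed per channel; its derivation
+        // is elided in this hunk) absorbs the variance term, and the running
+        // mean folds into the fused bias below.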
        for (size_t i = 0; i < n; ++i)
        {
            dstWeightsData[i] = w;
            dstBiasData[i] = (hasBias ? biasData[i] : 0.0f) - w * meanData[i] * varMeanScale;
        }
-        // We will use blobs to store origin weights and bias to restore them in case of reinitialization.
-        weights_.copyTo(blobs[0].reshape(1, 1));
-        bias_.copyTo(blobs[1].reshape(1, 1));
    }

    virtual void finalize(InputArrayOfArrays, OutputArrayOfArrays) CV_OVERRIDE
    {
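+        // Re-derive weights_/bias_ from the pristine copies; blobs no longer
+        // double as the backing store across reinitializations.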
-        blobs[0].reshape(1, 1).copyTo(weights_);
-        blobs[1].reshape(1, 1).copyTo(bias_);
+        origin_weights.reshape(1, 1).copyTo(weights_);
+        origin_bias.reshape(1, 1).copyTo(bias_);
    }

    void getScaleShift(Mat& scale, Mat& shift) const CV_OVERRIDE