Fix BatchNorm reinitialization

pull/20283/head
SamFC10 3 years ago
parent c2263db7bc
commit 55e1dfb778
1 changed file: modules/dnn/src/layers/batch_norm_layer.cpp (16 lines changed)

@@ -29,6 +29,7 @@ namespace dnn
 class BatchNormLayerImpl CV_FINAL : public BatchNormLayer
 {
 public:
+    Mat origin_weights, origin_bias;
     Mat weights_, bias_;
     UMat umat_weight, umat_bias;
     mutable int dims;
@@ -82,11 +83,11 @@ public:
         const float* weightsData = hasWeights ? blobs[weightsBlobIndex].ptr<float>() : 0;
         const float* biasData = hasBias ? blobs[biasBlobIndex].ptr<float>() : 0;

-        weights_.create(1, (int)n, CV_32F);
-        bias_.create(1, (int)n, CV_32F);
+        origin_weights.create(1, (int)n, CV_32F);
+        origin_bias.create(1, (int)n, CV_32F);

-        float* dstWeightsData = weights_.ptr<float>();
-        float* dstBiasData = bias_.ptr<float>();
+        float* dstWeightsData = origin_weights.ptr<float>();
+        float* dstBiasData = origin_bias.ptr<float>();

         for (size_t i = 0; i < n; ++i)
         {
@@ -94,15 +95,12 @@ public:
             dstWeightsData[i] = w;
             dstBiasData[i] = (hasBias ? biasData[i] : 0.0f) - w * meanData[i] * varMeanScale;
         }
-        // We will use blobs to store origin weights and bias to restore them in case of reinitialization.
-        weights_.copyTo(blobs[0].reshape(1, 1));
-        bias_.copyTo(blobs[1].reshape(1, 1));
     }

     virtual void finalize(InputArrayOfArrays, OutputArrayOfArrays) CV_OVERRIDE
     {
-        blobs[0].reshape(1, 1).copyTo(weights_);
-        blobs[1].reshape(1, 1).copyTo(bias_);
+        origin_weights.reshape(1, 1).copyTo(weights_);
+        origin_bias.reshape(1, 1).copyTo(bias_);
     }

     void getScaleShift(Mat& scale, Mat& shift) const CV_OVERRIDE
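
In short: the old code cached the fused scale and shift by copying weights_ and bias_ back into blobs[0] and blobs[1] (the blobs that hold the layer's stored statistics), and finalize() then re-read the fused values from those same blobs. This commit keeps the pristine fused parameters in the new origin_weights/origin_bias members instead, so finalize() can restore weights_/bias_ on every (re)initialization without touching blobs. Below is a minimal standalone sketch of that pattern, not OpenCV code: everything except the origin_weights/origin_bias naming is hypothetical, and it uses the textbook BatchNorm folding rather than OpenCV's exact varMeanScale bookkeeping.

    #include <cmath>
    #include <cstdio>
    #include <vector>

    // Hypothetical stand-in for the layer: raw BN parameters are folded once
    // into a scale/shift pair, pristine copies are kept, and finalize()
    // restores the working buffers from those copies instead of from shared
    // state that an earlier pass may have overwritten.
    struct FoldedBatchNorm
    {
        std::vector<float> mean, var, gamma, beta;      // raw model parameters
        float eps = 1e-5f;

        std::vector<float> origin_weights, origin_bias; // pristine fused values
        std::vector<float> weights, bias;               // working copies

        void init()
        {
            const size_t n = mean.size();
            origin_weights.resize(n);
            origin_bias.resize(n);
            for (size_t i = 0; i < n; ++i)
            {
                // Textbook BatchNorm folding:
                //   w = gamma / sqrt(var + eps),  b = beta - w * mean
                const float w = gamma[i] / std::sqrt(var[i] + eps);
                origin_weights[i] = w;
                origin_bias[i]    = beta[i] - w * mean[i];
            }
            finalize();
        }

        // finalize() can run more than once (reinitialization); restoring
        // from the pristine copies makes it idempotent.
        void finalize()
        {
            weights = origin_weights;
            bias    = origin_bias;
        }
    };

    int main()
    {
        FoldedBatchNorm bn;
        bn.mean = {0.5f}; bn.var = {4.0f}; bn.gamma = {2.0f}; bn.beta = {1.0f};
        bn.init();
        bn.weights[0] *= 3.0f;  // simulate a later pass mutating the working copy
        bn.finalize();          // reinitialization restores the original folding
        std::printf("w=%f b=%f\n", bn.weights[0], bn.bias[0]);
        return 0;
    }

The key design point is that finalize() becomes idempotent: running it any number of times yields the same weights/bias, which is exactly what reinitialization requires, and the model's blobs are never clobbered as a side channel.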
