dnn: don't use aligned load without alignment checks

- weights are unaligned in the dasiamrpn sample (they come from numpy)
pull/21426/head
Alexander Alekhin 3 years ago
parent 53b89e1ee4
commit 80d9f624d0
Changed files:
  1. modules/dnn/src/layers/convolution_layer.cpp (5 lines changed)
  2. modules/dnn/src/layers/layers_common.simd.hpp (2 lines changed)

@@ -347,7 +347,9 @@ public:
         if (!blobs.empty())
         {
             Mat wm = blobs[0].reshape(1, numOutput);
-            if( wm.step1() % VEC_ALIGN != 0 )
+            if ((wm.step1() % VEC_ALIGN != 0) ||
+                !isAligned<VEC_ALIGN * sizeof(float)>(wm.data)
+            )
             {
                 int newcols = (int)alignSize(wm.step1(), VEC_ALIGN);
                 Mat wm_buffer = Mat(numOutput, newcols, wm.type());
@@ -1299,7 +1301,6 @@ public:
                 }
             }
         }
-
             // now compute dot product of the weights
             // and im2row-transformed part of the tensor
 #if CV_TRY_AVX512_SKX
[NOTE(review): the hunk header (-1299,7 → +1301,6) records exactly one deleted line; the scraped side-by-side view did not capture it, so it is shown here as a removed blank line — confirm against the original commit.]

@@ -81,6 +81,8 @@ void fastConv( const float* weights, size_t wstep, const float* bias,
                int blockSize, int vecsize, int vecsize_aligned,
                const float* relu, bool initOutput )
 {
+    CV_Assert(isAligned<32>(weights));
+
     int outCn = outShape[1];
     size_t outPlaneSize = outShape[2]*outShape[3];
     float r0 = 1.f, r1 = 1.f, r2 = 1.f;

Loading…
Cancel
Save