From fb570b0172d2166e9282dfd72cc391d5c5832fc9 Mon Sep 17 00:00:00 2001
From: Alexander Alekhin
Date: Wed, 12 Jan 2022 04:14:48 +0000
Subject: [PATCH] dnn: don't use aligned load without alignment checks

- weights are unaligned in dasiamrpn sample (comes from numpy)

original commit: 80d9f624d0e9d5de216f0920545423db17806835
---
 modules/dnn/src/layers/convolution_layer.cpp  | 5 +++--
 modules/dnn/src/layers/layers_common.simd.hpp | 2 ++
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/modules/dnn/src/layers/convolution_layer.cpp b/modules/dnn/src/layers/convolution_layer.cpp
index bcc783d8a0..194397f557 100644
--- a/modules/dnn/src/layers/convolution_layer.cpp
+++ b/modules/dnn/src/layers/convolution_layer.cpp
@@ -421,7 +421,9 @@ public:
         if (!blobs.empty())
         {
             Mat wm = blobs[0].reshape(1, numOutput);
-            if( wm.step1() % VEC_ALIGN != 0 )
+            if ((wm.step1() % VEC_ALIGN != 0) ||
+                !isAligned<VEC_ALIGN * sizeof(float)>(wm.data)
+            )
             {
                 int newcols = (int)alignSize(wm.step1(), VEC_ALIGN);
                 Mat wm_buffer = Mat(numOutput, newcols, wm.type());
@@ -1660,7 +1662,6 @@ public:
                     }
                 }
             }
-
             // now compute dot product of the weights
             // and im2row-transformed part of the tensor
         #if CV_TRY_AVX512_SKX
diff --git a/modules/dnn/src/layers/layers_common.simd.hpp b/modules/dnn/src/layers/layers_common.simd.hpp
index 67a4b3c065..fd88a3c3d2 100644
--- a/modules/dnn/src/layers/layers_common.simd.hpp
+++ b/modules/dnn/src/layers/layers_common.simd.hpp
@@ -81,6 +81,8 @@ void fastConv( const float* weights, size_t wstep, const float* bias,
                int blockSize, int vecsize, int vecsize_aligned,
                const float* relu, bool initOutput )
 {
+    CV_Assert(isAligned<32>(weights));
+
     int outCn = outShape[1];
     size_t outPlaneSize = outShape[2]*outShape[3];
     float r0 = 1.f, r1 = 1.f, r2 = 1.f;