From: Dmitry Kurtaev
Date: Wed, 4 Oct 2017 09:23:35 +0000 (+0300)
Subject: Multidimensional eltwise layer.
X-Git-Tag: accepted/tizen/6.0/unified/20201030.111113~540^2
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=ad8bbaf0081b3c6b66117eea4ac5f1fe9927bb58;p=platform%2Fupstream%2Fopencv.git

Multidimensional eltwise layer.

Fixed fully-connected layer axis.
---

diff --git a/modules/dnn/src/layers/eltwise_layer.cpp b/modules/dnn/src/layers/eltwise_layer.cpp
index fa49109..9ccb87b 100644
--- a/modules/dnn/src/layers/eltwise_layer.cpp
+++ b/modules/dnn/src/layers/eltwise_layer.cpp
@@ -119,6 +119,8 @@ public:
     EltwiseOp op;
     int nstripes;
     const ActivationLayer* activ;
+    int channels;
+    size_t planeSize;
 
     EltwiseInvoker() : srcs(0), nsrcs(0), dst(0), coeffs(0), op(EltwiseLayer::PROD), nstripes(0), activ(0) {}
 
@@ -126,7 +128,7 @@ public:
                     const std::vector<float>& coeffs, EltwiseOp op,
                     const ActivationLayer* activ, int nstripes)
     {
-        CV_Assert(dst.dims == 4 && dst.type() == CV_32F && dst.isContinuous());
+        CV_Assert(1 < dst.dims && dst.dims <= 4, dst.type() == CV_32F, dst.isContinuous());
         CV_Assert(coeffs.empty() || coeffs.size() == (size_t)nsrcs);
 
        for( int i = 0; i < nsrcs; i++ )
@@ -142,6 +144,11 @@ public:
         p.dst = &dst;
         p.op = op;
         p.nstripes = nstripes;
+        p.channels = (dst.dims == 4 ? dst.size[1] : 1);
+        p.planeSize = (dst.dims >= 3 ? dst.size[dst.dims - 1] * dst.size[dst.dims - 2] :
+                                       dst.size[dst.dims - 1]);
+        CV_Assert(dst.total() == dst.size[0] * p.channels * p.planeSize);
+
         bool simpleCoeffs = true;
         if( op == EltwiseLayer::SUM && !coeffs.empty() )
         {
@@ -162,13 +169,11 @@ public:
 
     void operator()(const Range& r) const
     {
-        size_t planeSize = dst->size[2]*dst->size[3];
         size_t total = dst->size[0]*planeSize;
         size_t stripeSize = (total + nstripes - 1)/nstripes;
         size_t stripeStart = r.start*stripeSize;
         size_t stripeEnd = std::min(r.end*stripeSize, total);
         int c, j, k, n = nsrcs;
-        int channels = dst->size[1];
         const float* coeffsptr = coeffs && !coeffs->empty() ? &coeffs->at(0) : 0;
         float* dstptr0 = dst->ptr<float>();
         int blockSize0 = 1 << 12, blockSize = blockSize0;
diff --git a/modules/dnn/src/layers/fully_connected_layer.cpp b/modules/dnn/src/layers/fully_connected_layer.cpp
index 7893a2f..6067b3f 100644
--- a/modules/dnn/src/layers/fully_connected_layer.cpp
+++ b/modules/dnn/src/layers/fully_connected_layer.cpp
@@ -107,14 +107,18 @@ public:
                          std::vector<MatShape> &outputs,
                          std::vector<MatShape> &) const
     {
-        CV_Assert(inputs.size() > 0);
+        CV_Assert(inputs.size() == 1);
         CV_Assert(1 <= blobs.size() && blobs.size() <= 2);
         CV_Assert(blobs[0].dims == 2);
 
         int cAxis = clamp(axis, inputs[0]);
-        int outerSize = total(inputs[0], 0, cAxis);
         int numOutput = blobs[0].size[0];
-        outputs.resize(inputs.size(), shape(outerSize, numOutput));
+        MatShape outShape(cAxis + 1);
+        for (int i = 0; i < cAxis; ++i)
+            outShape[i] = inputs[0][i];
+        outShape.back() = numOutput;
+
+        outputs.resize(inputs.size(), outShape);
 
         CV_Assert(!bias || (size_t)numOutput == blobs[1].total());
         return false;
@@ -278,8 +282,8 @@ public:
         for (size_t i = 0; i < input.size(); i++)
         {
             UMat srcMat, dstMat;
-            srcMat = input[i]->getUMat(ACCESS_READ);
-            dstMat = output[i].getUMat(ACCESS_WRITE);
+            srcMat = input[i]->reshape(1, outerSize).getUMat(ACCESS_READ);
+            dstMat = output[i].reshape(1, outerSize).getUMat(ACCESS_WRITE);
             dstMat.setTo(0.0f);
 
             if (!innerProductOp->Forward(srcMat, umat_blobs[0], (bias) ? umat_blobs[1] : UMat(), dstMat))
diff --git a/modules/dnn/test/test_layers.cpp b/modules/dnn/test/test_layers.cpp
index 27c460c..75861c9 100644
--- a/modules/dnn/test/test_layers.cpp
+++ b/modules/dnn/test/test_layers.cpp
@@ -274,6 +274,11 @@ OCL_TEST(Layer_Test_Concat, Accuracy)
     testLayerUsingCaffeModels("layer_concat", DNN_TARGET_OPENCL);
 }
 
+TEST(Layer_Test_Eltwise, Accuracy)
+{
+    testLayerUsingCaffeModels("layer_eltwise");
+}
+
 //template<typename XMat>
 //static void test_Layer_Concat()
 //{
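
The patch changes two shape computations. The fully-connected layer now keeps
every axis before `axis` and appends a single `numOutput` axis, instead of
always flattening to a 2D outerSize x numOutput blob; the eltwise invoker now
derives `channels` and `planeSize` from blobs of 2 to 4 dimensions instead of
requiring 4D input. A minimal standalone sketch of both rules follows,
assuming MatShape is std::vector<int> (as in OpenCV's dnn module); the helper
names fcOutShape and eltwiseGeometry are hypothetical, introduced here only to
mirror the logic in the hunks above:

    #include <cstdio>
    #include <vector>

    // Sketch of the new fully-connected output shape (see getMemoryShapes):
    // axes before cAxis are preserved; everything from cAxis onward
    // collapses into a single axis of size numOutput.
    static std::vector<int> fcOutShape(const std::vector<int>& in,
                                       int cAxis, int numOutput)
    {
        std::vector<int> out(cAxis + 1);
        for (int i = 0; i < cAxis; ++i)
            out[i] = in[i];
        out.back() = numOutput;
        return out;
    }

    // Sketch of the new EltwiseInvoker geometry for 2D-4D blobs: only a 4D
    // (NCHW) blob has a channel axis; planeSize is the product of the last
    // two axes, or just the last axis for a 2D blob. Returns the implied
    // element count, which must equal Mat::total().
    static size_t eltwiseGeometry(const std::vector<int>& sz,
                                  int& channels, size_t& planeSize)
    {
        int dims = (int)sz.size();
        channels = (dims == 4) ? sz[1] : 1;
        planeSize = (dims >= 3) ? (size_t)sz[dims - 1] * sz[dims - 2]
                                : (size_t)sz[dims - 1];
        return (size_t)sz[0] * channels * planeSize;
    }

    int main()
    {
        // Fully connected with axis = 2 on a 2x3x4x5 input: the old code
        // produced a (2*3)x10 output; the patched code produces 2x3x10.
        std::vector<int> out = fcOutShape(std::vector<int>{2, 3, 4, 5}, 2, 10);
        for (size_t i = 0; i < out.size(); ++i)
            std::printf("%d ", out[i]);            // prints: 2 3 10
        std::printf("\n");

        int channels;
        size_t planeSize;
        size_t total = eltwiseGeometry(std::vector<int>{2, 3, 4, 5},
                                       channels, planeSize);
        std::printf("channels=%d planeSize=%zu total=%zu\n",
                    channels, planeSize, total);   // 4D: 3, 20, 120

        total = eltwiseGeometry(std::vector<int>{2, 5}, channels, planeSize);
        std::printf("channels=%d planeSize=%zu total=%zu\n",
                    channels, planeSize, total);   // 2D: 1, 5, 10
        return 0;
    }

Note that the OpenCL path still runs a 2D inner product: the added
reshape(1, outerSize) calls flatten the preserved leading axes back into
matrix rows before innerProductOp->Forward is invoked, so only the reported
output shape changes, not the underlying 2D computation.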