if( nextData )
    nextActivLayer = nextData->layerInstance.dynamicCast<ActivationLayer>();
+ Ptr<PowerLayer> activ_power;
if( !nextActivLayer.empty() &&
        (!nextData->type.compare("ReLU") ||
         !nextData->type.compare("ChannelsPReLU") ||
-        !nextData->type.compare("Power")) &&
+        (!nextData->type.compare("Power") && (activ_power = nextActivLayer.dynamicCast<PowerLayer>()) && activ_power->scale == 1.0f)
+        ) &&
        currLayer->setActivation(nextActivLayer) )
{
    CV_Assert_N(biasLayerData->outputBlobsWrappers.size() == 1, ld.inputBlobsWrappers.size() == 1);
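For context, a minimal standalone sketch (not OpenCV code; the helper name is invented) of what the new scale check guards: OpenCV's Power activation computes f(x) = (shift + scale * x)^power, and the fusion above is now only kept for the non-rescaling case flagged by #17964.

#include <cmath>
#include <cstdio>

// Hypothetical helper mirroring PowerLayer's power/scale/shift parameters.
static float powerActivation(float x, float power, float scale, float shift)
{
    return std::pow(shift + scale * x, power);
}

int main()
{
    const float convOutput = 2.0f;
    // scale == 1.0f: a plain elementwise transform of the convolution output;
    // this is the only Power case the fusion condition above still accepts.
    std::printf("%g\n", powerActivation(convOutput, 2.0f, 1.0f, 0.0f)); // 4
    // scale != 1.0f: excluded from fusion by the new check (see #17964).
    std::printf("%g\n", powerActivation(convOutput, 2.0f, 0.5f, 0.0f)); // 1
    return 0;
}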
#include "../op_inf_engine.hpp"
#include "../ie_ngraph.hpp"
+#include <opencv2/core/utils/logger.hpp>
+
#include "opencv2/core/hal/hal.hpp"
#include "opencv2/core/hal/intrin.hpp"
#include <iostream>
Ptr<PowerLayer> activ_power = activ.dynamicCast<PowerLayer>();
if (!activ_power.empty())
{
+   if (activ_power->scale != 1.0f) // not supported well by implementation, #17964
+   {
+       // FIXIT no way to check number of blobs (like, eltwise input)
+       CV_LOG_INFO(NULL, "DNN/OpenCL: can't configure Power activation (scale != 1.0f)");
+       activ.release();
+       newActiv = false;
+       return false;
+   }
    if (activ_power->scale != 1.f || activ_power->shift != 0.f)
    {
        const int outCh = blobs[0].size[0];
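As a sketch of what the early return is protecting (an assumption about the surrounding code, not a copy of it; the struct and helper below are invented): the pre-existing branch folds the activation's affine part, shift + scale*y, into the convolution blobs using scale*(W*x + b) + shift == (scale*W)*x + (scale*b + shift), so the OpenCL kernel only applies pow(., power) afterwards. With the new guard, only the shift-only case can still reach it.

#include <vector>

struct ConvBlobs
{
    std::vector<float> weights; // flattened [outCh x inCh x kH x kW]
    std::vector<float> bias;    // [outCh]
};

// Hypothetical helper: fold the Power activation's scale/shift into the
// convolution weights and bias; the guard above now rejects fusion outright
// when scale != 1.0f (#17964), so only shift folding remains in practice.
static void foldPowerAffinePart(ConvBlobs& conv, float scale, float shift)
{
    for (float& w : conv.weights)
        w *= scale;
    for (float& b : conv.bias)
        b = b * scale + shift;
}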
double l1 /*= 0.00001*/, double lInf /*= 0.0001*/)
{
    double normL1 = cvtest::norm(ref, test, cv::NORM_L1) / ref.getMat().total();
-   EXPECT_LE(normL1, l1) << comment;
+   EXPECT_LE(normL1, l1) << comment << " |ref| = " << cvtest::norm(ref, cv::NORM_INF);
    double normInf = cvtest::norm(ref, test, cv::NORM_INF);
-   EXPECT_LE(normInf, lInf) << comment;
+   EXPECT_LE(normInf, lInf) << comment << " |ref| = " << cvtest::norm(ref, cv::NORM_INF);
}
std::vector<cv::Rect2d> matToBoxes(const cv::Mat& m)
Backend backendId = get<0>(get<2>(GetParam()));
Target targetId = get<1>(get<2>(GetParam()));
- // bug: https://github.com/opencv/opencv/issues/17964
- if (actType == "Power" && backendId == DNN_BACKEND_OPENCV && (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16))
- applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL);
-
Net net;
int convId = net.addLayer(convParams.name, convParams.type, convParams);
int activId = net.addLayerToPrev(activationParams.name, activationParams.type, activationParams);
    expectedFusedLayers.push_back(activId); // all activations are fused
else if (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16)
{
-   if (actType == "ReLU" || actType == "ChannelsPReLU" || actType == "ReLU6" || actType == "TanH" || actType == "Power")
+   if (actType == "ReLU" || actType == "ChannelsPReLU" || actType == "ReLU6" || actType == "TanH" /*|| actType == "Power"*/)
        expectedFusedLayers.push_back(activId);
}
}
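To make concrete what the "Power" case in these fusion tests exercises, an illustrative sketch (parameter values are invented; this is not the tests' actual setup) of a small Conv + Power net built through cv::dnn::LayerParams; with scale != 1.0f the OpenCL backend no longer fuses the activation.

#include <opencv2/dnn.hpp>

static cv::dnn::Net buildConvPowerNet()
{
    cv::dnn::Net net;

    cv::dnn::LayerParams convParams;
    convParams.name = "conv";
    convParams.type = "Convolution";
    convParams.set("kernel_size", 1);
    convParams.set("num_output", 3);
    convParams.set("bias_term", false);
    int wshape[] = {3, 3, 1, 1}; // [outCh, inCh, kH, kW]
    convParams.blobs.push_back(cv::Mat::ones(4, wshape, CV_32F));

    cv::dnn::LayerParams powerParams;
    powerParams.name = "act";
    powerParams.type = "Power";
    powerParams.set("power", 2.0f);
    powerParams.set("scale", 0.5f); // scale != 1.0f: Power is not fused on OpenCL (#17964)
    powerParams.set("shift", 0.3f);

    int convId = net.addLayer(convParams.name, convParams.type, convParams);
    net.connect(0, 0, convId, 0); // feed the network input into the convolution
    net.addLayerToPrev(powerParams.name, powerParams.type, powerParams);
    return net;
}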
if ((eltwiseOp != "sum" || weightedEltwise) && backendId == DNN_BACKEND_OPENCV && (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16))
applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL);
- // bug: https://github.com/opencv/opencv/issues/17964
- if (actType == "Power" && backendId == DNN_BACKEND_OPENCV && (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16))
- applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL);
-
Net net;
int convId = net.addLayer(convParams.name, convParams.type, convParams);
int eltwiseId = net.addLayer(eltwiseParams.name, eltwiseParams.type, eltwiseParams);
    expectedFusedLayers.push_back(activId); // activation is fused with eltwise layer
else if (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16)
{
-   if (actType == "ReLU" || actType == "ChannelsPReLU" || actType == "Power")
+   if (actType == "ReLU" || actType == "ChannelsPReLU" /*|| actType == "Power"*/)
    {
        expectedFusedLayers.push_back(eltwiseId);
        expectedFusedLayers.push_back(activId);
Backend backendId = get<0>(get<4>(GetParam()));
Target targetId = get<1>(get<4>(GetParam()));
- // bug: https://github.com/opencv/opencv/issues/17964
- if (actType == "Power" && backendId == DNN_BACKEND_OPENCV && (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16))
- applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL);
-
Net net;
int convId = net.addLayer(convParams.name, convParams.type, convParams);
int activId = net.addLayer(activationParams.name, activationParams.type, activationParams);
    expectedFusedLayers.push_back(activId); // activation fused with convolution
else if (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16)
{
-   if (actType == "ReLU" || actType == "ChannelsPReLU" || actType == "ReLU6" || actType == "TanH" || actType == "Power")
+   if (actType == "ReLU" || actType == "ChannelsPReLU" || actType == "ReLU6" || actType == "TanH" /*|| actType == "Power"*/)
        expectedFusedLayers.push_back(activId); // activation fused with convolution
}
}