COMPMID-3069: Requantize in int32_t in RELUs of NEActivationLayerKernel
author: Michele Di Giorgio <michele.digiorgio@arm.com>
Wed, 22 Apr 2020 11:26:10 +0000 (12:26 +0100)
committer: Michele Di Giorgio <michele.digiorgio@arm.com>
Wed, 22 Apr 2020 16:16:28 +0000 (16:16 +0000)
Requantization must be in 32 bits as there is a multiplication by
scaling factor and an offset addition. In this case, clamp was
interpreting the input as an 8-bit value, therefore cutting the
remaining 24 bits.

Change-Id: I31a5837a94e229201a023b1617fc2df5c35f61e3
Signed-off-by: Michele Di Giorgio <michele.digiorgio@arm.com>
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/3076
Reviewed-by: Michalis Spyrou <michalis.spyrou@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
Tested-by: Arm Jenkins <bsgcomp@arm.com>
src/core/NEON/kernels/NEActivationLayerKernel.cpp

index a1652447f419087362adb0d3e1cbf42a362ee46a..8e91e6b4d1ba723b9a8057771312bc3d69cd6862 100644 (file)
@@ -533,17 +533,17 @@ typename std::enable_if<std::is_same<T, qasymm8_t>::value, void>::type NEActivat
             if(act == ActivationFunction::RELU)
             {
                 tmp = std::max(const_0, in);
-                tmp = utility::clamp<qasymm8_t>(tmp * s + o);
+                tmp = utility::clamp<int32_t, qasymm8_t>(tmp * s + o);
             }
             else if(act == ActivationFunction::BOUNDED_RELU)
             {
                 tmp = std::min(a, std::max(const_0, in));
-                tmp = utility::clamp<qasymm8_t>(tmp * s + o);
+                tmp = utility::clamp<int32_t, qasymm8_t>(tmp * s + o);
             }
             else if(act == ActivationFunction::LU_BOUNDED_RELU)
             {
                 tmp = std::min(a, std::max(b, in));
-                tmp = utility::clamp<qasymm8_t>(tmp * s + o);
+                tmp = utility::clamp<int32_t, qasymm8_t>(tmp * s + o);
             }
             else if(act == ActivationFunction::LOGISTIC)
             {
@@ -710,17 +710,17 @@ typename std::enable_if<std::is_same<T, qasymm8_signed_t>::value, void>::type NE
             if(act == ActivationFunction::RELU)
             {
                 tmp = std::max(const_0, in);
-                tmp = utility::clamp<qasymm8_signed_t>(tmp * s + o);
+                tmp = utility::clamp<int32_t, qasymm8_signed_t>(tmp * s + o);
             }
             else if(act == ActivationFunction::BOUNDED_RELU)
             {
                 tmp = std::min(a, std::max(const_0, in));
-                tmp = utility::clamp<qasymm8_signed_t>(tmp * s + o);
+                tmp = utility::clamp<int32_t, qasymm8_signed_t>(tmp * s + o);
             }
             else if(act == ActivationFunction::LU_BOUNDED_RELU)
             {
                 tmp = std::min(a, std::max(b, in));
-                tmp = utility::clamp<qasymm8_signed_t>(tmp * s + o);
+                tmp = utility::clamp<int32_t, qasymm8_signed_t>(tmp * s + o);
             }
             else if(act == ActivationFunction::LOGISTIC)
             {