1 //
2 // Copyright © 2017 Arm Ltd. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5
6 #include "RefLayerSupport.hpp"
7 #include "RefBackendId.hpp"
8
9 #include <armnn/Types.hpp>
10 #include <armnn/Descriptors.hpp>
11 #include <armnn/BackendRegistry.hpp>
12
13 #include <armnnUtils/DataLayoutIndexed.hpp>
14
15 #include <InternalTypes.hpp>
16 #include <LayerSupportCommon.hpp>
17
18 #include <backendsCommon/LayerSupportRules.hpp>
19
20 #include <boost/cast.hpp>
21 #include <boost/core/ignore_unused.hpp>
22
23 #include <vector>
24 #include <algorithm>
25 #include <array>
26
27 using namespace boost;
28
29 namespace armnn
30 {
31
32 namespace
33 {
34
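// Helper that dispatches a data-type check to the supplied functors: Float32 and QAsymmU8 are
// routed to floatFuncPtr/uint8FuncPtr, while Float16, Signed32 and Boolean are rejected outright.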
35 template<typename Float32Func, typename Uint8Func, typename ... Params>
36 bool IsSupportedForDataTypeRef(Optional<std::string&> reasonIfUnsupported,
37                                DataType dataType,
38                                Float32Func floatFuncPtr,
39                                Uint8Func uint8FuncPtr,
40                                Params&&... params)
41 {
42     return IsSupportedForDataTypeGeneric(reasonIfUnsupported,
43                                          dataType,
44                                          &FalseFunc<Params...>,
45                                          floatFuncPtr,
46                                          uint8FuncPtr,
47                                          &FalseFunc<Params...>,
48                                          &FalseFunc<Params...>,
49                                          std::forward<Params>(params)...);
50 }
51
52 } // anonymous namespace
53
54 namespace
55 {
56
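// Builds the standard error message reported when a tensor has an unexpected number of dimensions.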
57 std::string CreateIncorrectDimensionsErrorMsg(unsigned int expected,
58                                               unsigned int actual,
59                                               std::string& layerStr,
60                                               std::string& tensorName)
61 {
62     std::string errorMsg = "Reference " + layerStr + ": Expected " + std::to_string(expected) + " dimensions but got" +
63                            " " + std::to_string(actual) + " dimensions instead, for the '" + tensorName + "' tensor.";
64
65     return errorMsg;
66 }
67
68 } // anonymous namespace
69
70 bool RefLayerSupport::IsAbsSupported(const TensorInfo& input, const TensorInfo& output,
71                                      Optional<std::string&> reasonIfUnsupported) const
72 {
73     return IsElementwiseUnarySupported(input,
74                                        output,
75                                        ElementwiseUnaryDescriptor(UnaryOperation::Abs),
76                                        reasonIfUnsupported);
77 }
78
79 bool RefLayerSupport::IsActivationSupported(const TensorInfo& input,
80                                             const TensorInfo& output,
81                                             const ActivationDescriptor& descriptor,
82                                             Optional<std::string&> reasonIfUnsupported) const
83 {
84     bool supported = true;
85
86     // Define supported types.
87     std::array<DataType,4> supportedTypes = {
88         DataType::Float32,
89         DataType::Float16,
90         DataType::QAsymmU8,
91         DataType::QSymmS16
92     };
93
94     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
95                                   "Reference activation: input type not supported.");
96
97     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
98                                   "Reference activation: output type not supported.");
99
100     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
101                                   "Reference activation: input and output types mismatched.");
102
103     supported &= CheckSupportRule(ShapesAreSameRank(input, output), reasonIfUnsupported,
104                                   "Reference activation: input and output shapes are of different rank.");
105
106
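    // Local rule: flags whether the requested activation function is one the reference backend implements.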
107     struct ActivationFunctionSupported : public Rule
108     {
109         ActivationFunctionSupported(const ActivationDescriptor& desc)
110         {
111             switch(desc.m_Function)
112             {
113                 case ActivationFunction::Abs:
114                 case ActivationFunction::BoundedReLu:
115                 case ActivationFunction::LeakyReLu:
116                 case ActivationFunction::Linear:
117                 case ActivationFunction::ReLu:
118                 case ActivationFunction::Sigmoid:
119                 case ActivationFunction::SoftReLu:
120                 case ActivationFunction::Sqrt:
121                 case ActivationFunction::Square:
122                 case ActivationFunction::TanH:
123                 {
124                     m_Res = true;
125                     break;
126                 }
127                 default:
128                 {
129                     m_Res = false;
130                     break;
131                 }
132             }
133         }
134     };
135
136     // Function is supported
137     supported &= CheckSupportRule(ActivationFunctionSupported(descriptor), reasonIfUnsupported,
138                                   "Reference activation: function not supported.");
139
140     return supported;
141 }
142
143 bool RefLayerSupport::IsAdditionSupported(const TensorInfo& input0,
144                                           const TensorInfo& input1,
145                                           const TensorInfo& output,
146                                           Optional<std::string&> reasonIfUnsupported) const
147 {
148     bool supported = true;
149
150     std::array<DataType,5> supportedTypes = {
151         DataType::Float32,
152         DataType::Float16,
153         DataType::QSymmS8,
154         DataType::QAsymmU8,
155         DataType::QSymmS16
156     };
157
158     supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
159                                   "Reference addition: input 0 is not a supported type.");
160
161     supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
162                                   "Reference addition: input 1 is not a supported type.");
163
164     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
165                                   "Reference addition: output is not a supported type.");
166
167     supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
168                                   "Reference addition: input 0 and input 1 types are mismatched");
169
170     supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
171                                   "Reference addition: input and output types are mismatched");
172
173     supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
174                                   "Reference addition: shapes are not suitable for implicit broadcast.");
175
176     return supported;
177 }
178
179 bool RefLayerSupport::IsArgMinMaxSupported(const armnn::TensorInfo &input, const armnn::TensorInfo &output,
180                                            const armnn::ArgMinMaxDescriptor &descriptor,
181                                            armnn::Optional<std::string &> reasonIfUnsupported) const
182 {
183     ignore_unused(descriptor);
184
185     std::array<DataType, 4> supportedTypes =
186     {
187         DataType::Float32,
188         DataType::QAsymmU8,
189         DataType::QSymmS16,
190         DataType::Signed32
191     };
192
193     bool supported = true;
194
195     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
196                                   "Reference ArgMinMax: input is not a supported type.");
197     supported &= CheckSupportRule(TypeIs(output, DataType::Signed32), reasonIfUnsupported,
198                                   "Reference ArgMinMax: output type not supported");
199
200     return supported;
201 }
202
203 bool RefLayerSupport::IsBatchNormalizationSupported(const TensorInfo& input,
204                                                     const TensorInfo& output,
205                                                     const TensorInfo& mean,
206                                                     const TensorInfo& variance,
207                                                     const TensorInfo& beta,
208                                                     const TensorInfo& gamma,
209                                                     const BatchNormalizationDescriptor& descriptor,
210                                                     Optional<std::string&> reasonIfUnsupported) const
211 {
212     ignore_unused(descriptor);
213
214     std::array<DataType, 4> supportedTypes =
215     {
216         DataType::Float32,
217         DataType::Float16,
218         DataType::QAsymmU8,
219         DataType::QSymmS16
220     };
221
222     bool supported = true;
223
224     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
225                                   "Reference batch normalization: input is not a supported type.");
226
227     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
228                                   "Reference batch normalization: output is not a supported type.");
229
230     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
231                                   "Reference batch normalization: input and output types are mismatched");
232
233     supported &= CheckSupportRule(TypeAnyOf(mean, supportedTypes), reasonIfUnsupported,
234                                   "Reference batch normalization: mean is not a supported type.");
235
236     supported &= CheckSupportRule(TypeAnyOf(variance, supportedTypes), reasonIfUnsupported,
237                                   "Reference batch normalization: variance is not a supported type.");
238
239     supported &= CheckSupportRule(TypeAnyOf(beta, supportedTypes), reasonIfUnsupported,
240                                   "Reference batch normalization: beta is not a supported type.");
241
242     supported &= CheckSupportRule(TypeAnyOf(gamma, supportedTypes), reasonIfUnsupported,
243                                   "Reference batch normalization: gamma is not a supported type.");
244
245     return supported;
246 }
247
248 bool RefLayerSupport::IsBatchToSpaceNdSupported(const TensorInfo& input,
249                                                 const TensorInfo& output,
250                                                 const BatchToSpaceNdDescriptor& descriptor,
251                                                 Optional<std::string&> reasonIfUnsupported) const
252 {
253     ignore_unused(descriptor);
254
255     bool supported = true;
256
257     std::string batchToSpaceNdLayerStr = "batchToSpaceNd";
258     std::string inputTensorStr = "input";
259     std::string outputTensorStr = "output";
260
261     // Define supported types.
262     std::array<DataType,4> supportedTypes =
263     {
264             DataType::Float32,
265             DataType::Float16,
266             DataType::QAsymmU8,
267             DataType::QSymmS16
268     };
269
270     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
271                                   "Reference BatchToSpaceNd: input type not supported.");
272
273     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
274                                   "Reference BatchToSpaceNd: output type not supported.");
275
276     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
277                                   "Reference BatchToSpaceNd: input and output types mismatched.");
278
279     supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 4),
280                                   reasonIfUnsupported,
281                                   CreateIncorrectDimensionsErrorMsg(4,
282                                                                     output.GetNumDimensions(),
283                                                                     batchToSpaceNdLayerStr,
284                                                                     outputTensorStr).data());
285
286     supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(input, 4),
287                                   reasonIfUnsupported,
288                                   CreateIncorrectDimensionsErrorMsg(4,
289                                                                     input.GetNumDimensions(),
290                                                                     batchToSpaceNdLayerStr,
291                                                                     inputTensorStr).data());
292
293     return supported;
294 }
295
296 bool RefLayerSupport::IsComparisonSupported(const TensorInfo& input0,
297                                             const TensorInfo& input1,
298                                             const TensorInfo& output,
299                                             const ComparisonDescriptor& descriptor,
300                                             Optional<std::string&> reasonIfUnsupported) const
301 {
302     boost::ignore_unused(descriptor);
303
304     std::array<DataType, 4> supportedInputTypes =
305     {
306         DataType::Float32,
307         DataType::Float16,
308         DataType::QAsymmU8,
309         DataType::QSymmS16
310     };
311
312     bool supported = true;
313     supported &= CheckSupportRule(TypeAnyOf(input0, supportedInputTypes), reasonIfUnsupported,
314                                   "Reference comparison: input 0 is not a supported type");
315
316     supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
317                                   "Reference comparison: input 0 and input 1 types are mismatched");
318
319     supported &= CheckSupportRule(TypeIs(output, DataType::Boolean), reasonIfUnsupported,
320                                   "Reference comparison: output is not of type Boolean");
321
322     return supported;
323 }
324
325 bool RefLayerSupport::IsConcatSupported(const std::vector<const TensorInfo*> inputs,
326                                         const TensorInfo& output,
327                                         const ConcatDescriptor& descriptor,
328                                         Optional<std::string&> reasonIfUnsupported) const
329 {
330     ignore_unused(descriptor);
331
332     bool supported = true;
333     std::array<DataType,5> supportedTypes =
334     {
335             DataType::Float32,
336             DataType::Float16,
337             DataType::QSymmS8,
338             DataType::QAsymmU8,
339             DataType::QSymmS16
340     };
341
342     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
343                                   "Reference concatenation: output type not supported");
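    // Each input must be non-null, of a supported type, and of the same type as the output.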
344     for (const TensorInfo* input : inputs)
345     {
346         BOOST_ASSERT(input != nullptr);
347         supported &= CheckSupportRule(TypeAnyOf(*input, supportedTypes), reasonIfUnsupported,
348             "Reference concatenation: input type not supported");
349
350         supported &= CheckSupportRule(TypesAreEqual(*input, output), reasonIfUnsupported,
351             "Reference concatenation: input and output types mismatched.");
352     }
353
354     return supported;
355 }
356
357 bool RefLayerSupport::IsConstantSupported(const TensorInfo& output,
358                                           Optional<std::string&> reasonIfUnsupported) const
359 {
360     std::array<DataType,5> supportedTypes =
361     {
362         DataType::Float32,
363         DataType::Signed32,
364         DataType::QAsymmU8,
365         DataType::QSymmS8,
366         DataType::QSymmS16
367     };
368
369     return CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
370                                   "Reference constant: output is not a supported type.");
371 }
372
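// For the conversion layers the input and output types are checked independently:
// Fp16->Fp32 requires a Float16 input and a Float32 output, and Fp32->Fp16 the reverse.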
373 bool RefLayerSupport::IsConvertFp16ToFp32Supported(const TensorInfo& input,
374                                                    const TensorInfo& output,
375                                                    Optional<std::string&> reasonIfUnsupported) const
376 {
377     return (IsSupportedForDataTypeGeneric(reasonIfUnsupported,
378                                           input.GetDataType(),
379                                           &TrueFunc<>,
380                                           &FalseInputFuncF32<>,
381                                           &FalseFuncU8<>,
382                                           &FalseFuncI32<>,
383                                           &FalseFuncU8<>) &&
384             IsSupportedForDataTypeGeneric(reasonIfUnsupported,
385                                           output.GetDataType(),
386                                           &FalseOutputFuncF16<>,
387                                           &TrueFunc<>,
388                                           &FalseFuncU8<>,
389                                           &FalseFuncI32<>,
390                                           &FalseFuncU8<>));
391 }
392
393 bool RefLayerSupport::IsConvertFp32ToFp16Supported(const TensorInfo& input,
394                                                    const TensorInfo& output,
395                                                    Optional<std::string&> reasonIfUnsupported) const
396 {
397     return (IsSupportedForDataTypeGeneric(reasonIfUnsupported,
398                                           input.GetDataType(),
399                                           &FalseInputFuncF16<>,
400                                           &TrueFunc<>,
401                                           &FalseFuncU8<>,
402                                           &FalseFuncI32<>,
403                                           &FalseFuncU8<>) &&
404             IsSupportedForDataTypeGeneric(reasonIfUnsupported,
405                                           output.GetDataType(),
406                                           &TrueFunc<>,
407                                           &FalseOutputFuncF32<>,
408                                           &FalseFuncU8<>,
409                                           &FalseFuncI32<>,
410                                           &FalseFuncU8<>));
411 }
412
413 bool RefLayerSupport::IsConvolution2dSupported(const TensorInfo& input,
414                                                const TensorInfo& output,
415                                                const Convolution2dDescriptor& descriptor,
416                                                const TensorInfo& weights,
417                                                const Optional<TensorInfo>& biases,
418                                                Optional<std::string&> reasonIfUnsupported) const
419 {
420     bool supported = true;
421
422     // Define supported types.
423     std::array<DataType,5> supportedTypes =
424     {
425         DataType::Float32,
426         DataType::Float16,
427         DataType::QAsymmU8,
428         DataType::QSymmS8,
429         DataType::QSymmS16
430     };
431
432     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
433                                   "Reference Convolution2d: input is not a supported type.");
434
435     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
436                                   "Reference Convolution2d: output is not a supported type.");
437
438     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
439                                   "Reference Convolution2d: input and output types mismatched.");
440
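    // A quantized QAsymmU8 input may be paired with QAsymmU8 or QSymmS8 weights (including the
    // deprecated per-axis symmetric type); any other input type requires weights of the same type.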
441     const DataType inputType = input.GetDataType();
442     if (inputType == DataType::QAsymmU8)
443     {
444         ARMNN_NO_DEPRECATE_WARN_BEGIN
445         std::array<DataType, 3> supportedWeightTypes =
446         {
447             DataType::QAsymmU8,
448             DataType::QSymmS8,
449             DataType::QuantizedSymm8PerAxis // deprecated
450         };
451         ARMNN_NO_DEPRECATE_WARN_END
452
453         supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
454                                       "Reference Convolution2d: weights type not supported for quantized input.");
455     }
456     else
457     {
458         supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
459                                       "Reference Convolution2d: weights is not a supported type.");
460
461         supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
462                                       "Reference Convolution2d: input and weights types mismatched.");
463     }
464
465     if (biases.has_value())
466     {
467         std::array<DataType,3> biasesSupportedTypes =
468         {
469             DataType::Float32,
470             DataType::Float16,
471             DataType::Signed32
472         };
473
474         supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
475                                       "Reference Convolution2d: biases is not a supported type.");
476     }
477     ignore_unused(descriptor);
478
479     return supported;
480 }
481
482 bool RefLayerSupport::IsDebugSupported(const TensorInfo& input,
483                                        const TensorInfo& output,
484                                        Optional<std::string&> reasonIfUnsupported) const
485 {
486     bool supported = true;
487
488     std::array<DataType, 6> supportedTypes =
489     {
490         DataType::Float16,
491         DataType::Float32,
492         DataType::QAsymmU8,
493         DataType::QSymmS8,
494         DataType::QSymmS16,
495         DataType::Signed32
496     };
497
498     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
499                                   "Reference for Debug layer: input type not supported");
500
501     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
502                                   "Reference for Debug layer: output type not supported");
503
504     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
505                                   "Reference for Debug layer: input and output types are mismatched");
506
507     return supported;
508 }
509
510 bool RefLayerSupport::IsDepthToSpaceSupported(const TensorInfo& input,
511                                               const TensorInfo& output,
512                                               const DepthToSpaceDescriptor& descriptor,
513                                               Optional<std::string&> reasonIfUnsupported) const
514 {
515     ignore_unused(descriptor);
516     bool supported = true;
517
518     std::array<DataType,4> supportedTypes =
519     {
520         DataType::Float32,
521         DataType::Float16,
522         DataType::QAsymmU8,
523         DataType::QSymmS16
524     };
525
526     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
527         "Reference DepthToSpace: input type not supported");
528
529     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
530         "Reference DepthToSpace: output type not supported");
531
532     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
533         "Reference DepthToSpace: input and output types are mismatched");
534
535     return supported;
536 }
537
538 bool RefLayerSupport::IsDepthwiseConvolutionSupported(const TensorInfo& input,
539                                                       const TensorInfo& output,
540                                                       const DepthwiseConvolution2dDescriptor& descriptor,
541                                                       const TensorInfo& weights,
542                                                       const Optional<TensorInfo>& biases,
543                                                       Optional<std::string&> reasonIfUnsupported) const
544 {
545     bool supported = true;
546
547     // Define supported types.
548     std::array<DataType,4> supportedTypes =
549     {
550         DataType::Float32,
551         DataType::Float16,
552         DataType::QAsymmU8,
553         DataType::QSymmS16
554     };
555
556     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
557                                   "Reference DepthwiseConvolution2d: input is not a supported type.");
558
559     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
560                                   "Reference DepthwiseConvolution2d: output is not a supported type.");
561
562     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
563                                   "Reference DepthwiseConvolution2d: input and output types mismatched.");
564
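    // As with Convolution2d, quantized inputs accept 8-bit weight types, including the deprecated
    // per-axis symmetric type, hence the deprecation-warning guards around the array.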
565     ARMNN_NO_DEPRECATE_WARN_BEGIN
566     std::array<DataType, 3> supportedWeightTypes =
567         {
568             DataType::QAsymmU8,
569             DataType::QSymmS8,
570             DataType::QuantizedSymm8PerAxis // deprecated
571         };
572     ARMNN_NO_DEPRECATE_WARN_END
573
574     const DataType inputType = input.GetDataType();
575     if (inputType == DataType::QAsymmU8)
576     {
577
578         supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
579                                       "Reference DepthwiseConvolution2d: weights type not supported for quantized input.");
580     }
581     else
582     {
583         supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
584                                       "Reference DepthwiseConvolution2d: weights is not a supported type.");
585
586         supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
587                                       "Reference DepthwiseConvolution2d: input and weights types mismatched.");
588     }
589
590     if (biases.has_value())
591     {
592         std::array<DataType,3> biasesSupportedTypes =
593         {
594             DataType::Float32,
595             DataType::Float16,
596             DataType::Signed32
597         };
598         supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
599                                       "Reference DepthwiseConvolution2d: biases is not a supported type.");
600     }
601     ignore_unused(descriptor);
602
603     return supported;
604
605 }
606
607 bool RefLayerSupport::IsDequantizeSupported(const TensorInfo& input,
608                                             const TensorInfo& output,
609                                             Optional<std::string&> reasonIfUnsupported) const
610 {
611     bool supported = true;
612
613     std::array<DataType,3> supportedInputTypes = {
614         DataType::QAsymmU8,
615         DataType::QSymmS8,
616         DataType::QSymmS16
617     };
618
619     supported &= CheckSupportRule(TypeAnyOf(input, supportedInputTypes), reasonIfUnsupported,
620                                   "Reference for Dequantize layer: input type not supported.");
621
622     supported &= CheckSupportRule(TypeNotPerAxisQuantized(input), reasonIfUnsupported,
623                                   "Reference for Dequantize layer: per-axis quantized input is not supported.");
627
628     std::array<DataType,2> supportedOutputTypes = {
629         DataType::Float32,
630         DataType::Float16
631     };
632
633     supported &= CheckSupportRule(TypeAnyOf(output, supportedOutputTypes), reasonIfUnsupported,
634                                   "Reference for Dequantize layer: output type not supported.");
635
636     supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
637                                   "Reference for Dequantize layer: input/output shapes have different num total "
638                                   "elements.");
639
640     return supported;
641 }
642
643 bool RefLayerSupport::IsDetectionPostProcessSupported(const TensorInfo& boxEncodings,
644                                                       const TensorInfo& scores,
645                                                       const TensorInfo& anchors,
646                                                       const TensorInfo& detectionBoxes,
647                                                       const TensorInfo& detectionClasses,
648                                                       const TensorInfo& detectionScores,
649                                                       const TensorInfo& numDetections,
650                                                       const DetectionPostProcessDescriptor& descriptor,
651                                                       Optional<std::string&> reasonIfUnsupported) const
652 {
653     boost::ignore_unused(anchors, detectionBoxes, detectionClasses, detectionScores, numDetections, descriptor);
654
655     bool supported = true;
656
657     std::array<DataType,3> supportedInputTypes =
658     {
659         DataType::Float32,
660         DataType::QAsymmU8,
661         DataType::QSymmS16
662     };
663
664     supported &= CheckSupportRule(TypeAnyOf(boxEncodings, supportedInputTypes), reasonIfUnsupported,
665                                   "Reference DetectionPostProcess: input 0 is not a supported type.");
666
667     supported &= CheckSupportRule(TypeAnyOf(scores, supportedInputTypes), reasonIfUnsupported,
668                                   "Reference DetectionPostProcess: input 1 is not a supported type.");
669
670     return supported;
671 }
672
673 bool RefLayerSupport::IsDilatedDepthwiseConvolutionSupported(const TensorInfo& input,
674                                                              const TensorInfo& output,
675                                                              const DepthwiseConvolution2dDescriptor& descriptor,
676                                                              const TensorInfo& weights,
677                                                              const Optional<TensorInfo>& biases,
678                                                              Optional<std::string&> reasonIfUnsupported) const
679 {
680     return IsDepthwiseConvolutionSupported(input, output, descriptor, weights, biases, reasonIfUnsupported);
681 }
682
683 bool RefLayerSupport::IsDivisionSupported(const TensorInfo& input0,
684                                           const TensorInfo& input1,
685                                           const TensorInfo& output,
686                                           Optional<std::string&> reasonIfUnsupported) const
687 {
688     bool supported = true;
689
690     std::array<DataType,4> supportedTypes = {
691         DataType::Float32,
692         DataType::Float16,
693         DataType::QAsymmU8,
694         DataType::QSymmS16
695     };
696
697     supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
698                                   "Reference division: input 0 is not a supported type.");
699
700     supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
701                                   "Reference division: input 1 is not a supported type.");
702
703     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
704                                   "Reference division: output is not a supported type.");
705
706     supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
707                                   "Reference division: input 0 and input 1 types are mismatched");
708
709     supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
710                                   "Reference division: input and output types are mismatched");
711
712     supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
713                                   "Reference division: shapes are not suitable for implicit broadcast.");
714
715     return supported;
716 }
717
718 bool RefLayerSupport::IsElementwiseUnarySupported(const TensorInfo& input,
719                                                   const TensorInfo& output,
720                                                   const ElementwiseUnaryDescriptor& descriptor,
721                                                   Optional<std::string&> reasonIfUnsupported) const
722 {
723     boost::ignore_unused(descriptor);
724
725     std::array<DataType, 4> supportedTypes =
726     {
727         DataType::Float32,
728         DataType::Float16,
729         DataType::QAsymmU8,
730         DataType::QSymmS16
731     };
732
733     bool supported = true;
734
735     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
736                                   "Reference elementwise unary: input type not supported");
737
738     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
739                                   "Reference elementwise unary: output type not supported");
740
741     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
742                                   "Reference elementwise unary: input and output types not matching");
743
744     supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
745                                   "Reference elementwise unary: input and output shapes "
746                                   "have different number of total elements");
747
748     return supported;
749 }
750
751 bool RefLayerSupport::IsEqualSupported(const TensorInfo& input0,
752                                        const TensorInfo& input1,
753                                        const TensorInfo& output,
754                                        Optional<std::string&> reasonIfUnsupported) const
755 {
756     return IsComparisonSupported(input0,
757                                  input1,
758                                  output,
759                                  ComparisonDescriptor(ComparisonOperation::Equal),
760                                  reasonIfUnsupported);
761 }
762
763 bool RefLayerSupport::IsFakeQuantizationSupported(const TensorInfo& input,
764                                                   const FakeQuantizationDescriptor& descriptor,
765                                                   Optional<std::string&> reasonIfUnsupported) const
766 {
767     ignore_unused(descriptor);
768     bool supported = true;
769
770     std::array<DataType,1> supportedTypes =
771     {
772         DataType::Float32
773     };
774
775     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
776                                   "Reference fake quantization: input type not supported.");
777
778     return supported;
779 }
780
781 bool RefLayerSupport::IsFloorSupported(const TensorInfo& input,
782                                        const TensorInfo& output,
783                                        Optional<std::string&> reasonIfUnsupported) const
784 {
786     bool supported = true;
787
788     std::array<DataType,3> supportedTypes =
789     {
790         DataType::Float32,
791         DataType::Float16,
792         DataType::QSymmS16
793     };
794
795     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
796                                   "Reference Floor: input type not supported.");
797
798     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
799                                   "Reference Floor: output type not supported.");
800
801     return supported;
802 }
803
804 bool RefLayerSupport::IsFullyConnectedSupported(const TensorInfo& input,
805                                                 const TensorInfo& output,
806                                                 const TensorInfo& weights,
807                                                 const TensorInfo& biases,
808                                                 const FullyConnectedDescriptor& descriptor,
809                                                 Optional<std::string&> reasonIfUnsupported) const
810 {
811     bool supported = true;
812
813     // Define supported types.
814     std::array<DataType,4> supportedTypes =
815     {
816             DataType::Float32,
817             DataType::Float16,
818             DataType::QAsymmU8,
819             DataType::QSymmS16
820     };
821
822     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
823                                   "Reference Fully Connected: input type not supported.");
824
825     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
826                                   "Reference Fully Connected: output type not supported.");
827
828     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
829                                   "Reference Fully Connected: input and output types mismatched.");
830
831     supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
832                                   "Reference Fully Connected: weights type not supported.");
833
834     supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
835                                   "Reference Fully Connected: input and weight types mismatched.");
836
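    // Bias rules only apply when the descriptor enables bias: the bias must use a supported type
    // that is consistent with the weight type.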
837     if (descriptor.m_BiasEnabled)
838     {
839         // Define supported types for bias
840         std::array<DataType, 3>
841         supportedBiasTypes =
842         {
843             DataType::Float32,
844             DataType::Float16,
845             DataType::Signed32
846         };
847
848         supported &= CheckSupportRule(TypeAnyOf(biases, supportedBiasTypes), reasonIfUnsupported,
849                                       "Reference Fully Connected: bias type not supported.");
850
851         supported &= CheckSupportRule(BiasAndWeightsTypesMatch(biases, weights), reasonIfUnsupported,
852                                       "Reference Fully Connected: bias and weight types mismatch.");
853
854         supported &= CheckSupportRule(BiasAndWeightsTypesCompatible(weights, supportedBiasTypes), reasonIfUnsupported,
855                                       "Reference Fully Connected: bias type inferred from weights is incompatible.");
856
857     }
858
859     return supported;
860 }
861
862 bool RefLayerSupport::IsGatherSupported(const armnn::TensorInfo& input0,
863                                         const armnn::TensorInfo& input1,
864                                         const armnn::TensorInfo& output,
865                                         armnn::Optional<std::string&> reasonIfUnsupported) const
866 {
867     bool supported = true;
868     std::array<DataType,4> supportedTypes =
869     {
870         DataType::Float32,
871         DataType::Float16,
872         DataType::QAsymmU8,
873         DataType::QSymmS16
874     };
875
876     supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
877                                   "Reference Gather: input type not supported");
878
879     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
880                                   "Reference Gather: output type not supported");
881
882     supported &= CheckSupportRule(TypeIs(input1, DataType::Signed32), reasonIfUnsupported,
883                                   "Reference Gather: indices (input1) type not supported");
884
885     supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
886                                   "Reference Gather: input and output types not matching");
887
888     return supported;
889 }
890
891 bool RefLayerSupport::IsGreaterSupported(const TensorInfo& input0,
892                                          const TensorInfo& input1,
893                                          const TensorInfo& output,
894                                          Optional<std::string&> reasonIfUnsupported) const
895 {
896     return IsComparisonSupported(input0,
897                                  input1,
898                                  output,
899                                  ComparisonDescriptor(ComparisonOperation::Greater),
900                                  reasonIfUnsupported);
901 }
902
903 bool RefLayerSupport::IsInputSupported(const TensorInfo& /*input*/,
904                                        Optional<std::string&> /*reasonIfUnsupported*/) const
905 {
906     return true;
907 }
908
909 bool RefLayerSupport::IsInstanceNormalizationSupported(const TensorInfo& input,
910                                                        const TensorInfo& output,
911                                                        const InstanceNormalizationDescriptor& descriptor,
912                                                        Optional<std::string&> reasonIfUnsupported) const
913 {
914     ignore_unused(descriptor);
915     // Define supported types
916     std::array<DataType, 2> supportedTypes =
917     {
918         DataType::Float32,
919         DataType::Float16
920     };
921
922     bool supported = true;
923
924     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
925                                   "Reference Instance Normalization: input type not supported.");
926
927     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
928                                   "Reference Instance Normalization: output type not supported.");
929
930     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
931                                   "Reference Instance Normalization: input and output types mismatched.");
932
933     supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
934                                   "Reference Instance Normalization: input and output shapes have different "
935                                   "num total elements.");
936
937     return supported;
938 }
939
940 bool RefLayerSupport::IsL2NormalizationSupported(const TensorInfo& input,
941                                                  const TensorInfo& output,
942                                                  const L2NormalizationDescriptor& descriptor,
943                                                  Optional<std::string&> reasonIfUnsupported) const
944 {
945     ignore_unused(descriptor);
946     // Define supported types
947     std::array<DataType, 4> supportedTypes =
948     {
949         DataType::Float32,
950         DataType::Float16,
951         DataType::QAsymmU8,
952         DataType::QSymmS16
953     };
954
955     bool supported = true;
956
957     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
958                                   "Reference L2normalization: input type not supported.");
959
960     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
961                                   "Reference L2normalization: output type not supported.");
962
963     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
964                                   "Reference L2normalization: input and output types mismatched.");
965
966     supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
967                                   "Reference L2normalization: input and output shapes have different "
968                                   "num total elements.");
969
970     return supported;
971 }
972
973 bool RefLayerSupport::IsLogSoftmaxSupported(const TensorInfo& input,
974                                             const TensorInfo& output,
975                                             const LogSoftmaxDescriptor& descriptor,
976                                             Optional<std::string&> reasonIfUnsupported) const
977 {
978     ignore_unused(descriptor);
979
980     std::array<DataType, 2> supportedTypes =
981     {
982             DataType::Float32,
983             DataType::Float16
984     };
985
986     bool supported = true;
987     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
988                                   "Reference LogSoftmax: input type not supported");
989
990     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
991                                   "Reference LogSoftmax: output type not supported");
992
993     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
994                                   "Reference LogSoftmax: input and output types do not match");
995
996     return supported;
997 }
998
999 bool RefLayerSupport::IsLstmSupported(const TensorInfo& input,
1000                                       const TensorInfo& outputStateIn,
1001                                       const TensorInfo& cellStateIn,
1002                                       const TensorInfo& scratchBuffer,
1003                                       const TensorInfo& outputStateOut,
1004                                       const TensorInfo& cellStateOut,
1005                                       const TensorInfo& output,
1006                                       const LstmDescriptor& descriptor,
1007                                       const LstmInputParamsInfo& paramsInfo,
1008                                       Optional<std::string&> reasonIfUnsupported) const
1009 {
1012
1013     bool supported = true;
1014
1015     std::array<DataType,2> supportedTypes = {
1016         DataType::Float32,
1017         DataType::QSymmS16
1018     };
1019
1020     // check inputs and outputs
1021     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1022                                   "Reference Lstm: input is not a supported type.");
1023     supported &= CheckSupportRule(TypesAreEqual(input, outputStateIn), reasonIfUnsupported,
1024                                   "Reference Lstm: input and outputStateIn types are mismatched");
1025     supported &= CheckSupportRule(TypesAreEqual(input, cellStateIn), reasonIfUnsupported,
1026                                   "Reference Lstm: input and cellStateIn types are mismatched");
1027     supported &= CheckSupportRule(TypesAreEqual(input, scratchBuffer), reasonIfUnsupported,
1028                                   "Reference Lstm: input and scratchBuffer types are mismatched");
1029     supported &= CheckSupportRule(TypesAreEqual(input, outputStateOut), reasonIfUnsupported,
1030                                   "Reference Lstm: input and outputStateOut types are mismatched");
1031     supported &= CheckSupportRule(TypesAreEqual(input, cellStateOut), reasonIfUnsupported,
1032                                   "Reference Lstm: input and cellStateOut types are mismatched");
1033     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1034                                   "Reference Lstm: input and output types are mismatched");
1035     // check layer parameters
1036     supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToForgetWeights()), reasonIfUnsupported,
1037                                   "Reference Lstm: input and InputToForgetWeights types are mismatched");
1038     supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToCellWeights()), reasonIfUnsupported,
1039                                   "Reference Lstm: input and InputToCellWeights types are mismatched");
1040     supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToOutputWeights()), reasonIfUnsupported,
1041                                   "Reference Lstm: input and InputToOutputWeights types are mismatched");
1042     supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToForgetWeights()), reasonIfUnsupported,
1043                                   "Reference Lstm: input and RecurrentToForgetWeights types are mismatched");
1044     supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToCellWeights()), reasonIfUnsupported,
1045                                   "Reference Lstm: input and RecurrentToCellWeights types are mismatched");
1046     supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToOutputWeights()), reasonIfUnsupported,
1047                                   "Reference Lstm: input and RecurrentToOutputWeights types are mismatched");
1048     supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetForgetGateBias()), reasonIfUnsupported,
1049                                   "Reference Lstm: input and ForgetGateBias types are mismatched");
1050     supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellBias()), reasonIfUnsupported,
1051                                   "Reference Lstm: input and CellBias types are mismatched");
1052     supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetOutputGateBias()), reasonIfUnsupported,
1053                                   "Reference Lstm: input and OutputGateBias types are mismatched");
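    // Optional parameter tensors are only validated when the corresponding descriptor flag
    // enables them (CIFG disabled, peephole, projection, layer normalization).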
1054     if (!descriptor.m_CifgEnabled)
1055     {
1056         supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToInputWeights()), reasonIfUnsupported,
1057                                       "Reference Lstm: input and InputToInputWeights types are mismatched");
1058         supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToInputWeights()),
1059                                       reasonIfUnsupported,
1060                                       "Reference Lstm: input and RecurrentToInputWeights types are mismatched");
1061         supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputGateBias()), reasonIfUnsupported,
1062                                       "Reference Lstm: input and InputGateBias types are mismatched");
1063         if (descriptor.m_PeepholeEnabled)
1064         {
1065             supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToInputWeights()),
1066                                           reasonIfUnsupported,
1067                                           "Reference Lstm: input and CellToInputWeights types are mismatched");
1068         }
1069     }
1070     if (descriptor.m_PeepholeEnabled)
1071     {
1072         supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToForgetWeights()), reasonIfUnsupported,
1073                                       "Reference Lstm: input and CellToForgetWeights types are mismatched");
1074         supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToOutputWeights()), reasonIfUnsupported,
1075                                       "Reference Lstm: input and CellToOutputWeights types are mismatched");
1076     }
1077     if (descriptor.m_ProjectionEnabled)
1078     {
1079         supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetProjectionWeights()), reasonIfUnsupported,
1080                                       "Reference Lstm: input and ProjectionWeights types are mismatched");
1081         if (paramsInfo.m_ProjectionBias != nullptr)
1082         {
1083             supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetProjectionBias()), reasonIfUnsupported,
1084                                           "Reference Lstm: input and ProjectionBias types are mismatched");
1085         }
1086     }
1087     if (descriptor.m_LayerNormEnabled)
1088     {
1089         if (!descriptor.m_CifgEnabled)
1090         {
1091             supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputLayerNormWeights()),
1092                                           reasonIfUnsupported,
1093                                           "Reference Lstm: input and InputLayerNormWeights types are mismatched");
1094         }
1095         supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetForgetLayerNormWeights()),
1096                                       reasonIfUnsupported,
1097                                       "Reference Lstm: input and ForgetLayerNormWeights types are mismatched");
1098         supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellLayerNormWeights()),
1099                                       reasonIfUnsupported,
1100                                       "Reference Lstm: input and CellLayerNormWeights types are mismatched");
1101         supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetOutputLayerNormWeights()),
1102                                       reasonIfUnsupported,
1103                                       "Reference Lstm: input and OutputLayerNormWeights types are mismatched");
1104     }
1105
1106     return supported;
1107 }
1108
1109 bool RefLayerSupport::IsMaximumSupported(const TensorInfo& input0,
1110                                          const TensorInfo& input1,
1111                                          const TensorInfo& output,
1112                                          Optional<std::string&> reasonIfUnsupported) const
1113 {
1114     bool supported = true;
1115
1116     std::array<DataType,5> supportedTypes = {
1117         DataType::Float32,
1118         DataType::Float16,
1119         DataType::QSymmS8,
1120         DataType::QAsymmU8,
1121         DataType::QSymmS16
1122     };
1123
1124     supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1125                                   "Reference maximum: input 0 is not a supported type.");
1126
1127     supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1128                                   "Reference maximum: input 1 is not a supported type.");
1129
1130     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1131                                   "Reference maximum: output is not a supported type.");
1132
1133     supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1134                                   "Reference maximum: input 0 and input 1 types are mismatched");
1135
1136     supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1137                                   "Reference maximum: input and output types are mismatched");
1138
1139     supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1140                                   "Reference maximum: shapes are not suitable for implicit broadcast.");
1141
1142     return supported;
1143 }
1144
1145 bool RefLayerSupport::IsMeanSupported(const TensorInfo& input,
1146                                       const TensorInfo& output,
1147                                       const MeanDescriptor& descriptor,
1148                                       Optional<std::string&> reasonIfUnsupported) const
1149 {
1150     bool supported = true;
1151     std::string meanLayerStr = "Mean";
1152     std::string outputTensorStr = "output";
1153
1154     std::array<DataType,4> supportedTypes =
1155     {
1156         DataType::Float32,
1157         DataType::Float16,
1158         DataType::QAsymmU8,
1159         DataType::QSymmS16
1160     };
1161
1162     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1163                                   "Reference Mean: input type not supported.");
1164
1165     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1166                                   "Reference Mean: input and output types are mismatched");
1167
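    // Check the output rank against what the reduction produces: unchanged when
    // KeepDims is set, otherwise the input rank minus the number of reduced axes
    // (with a minimum rank of 1 when every axis is reduced).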
1168     if (descriptor.m_KeepDims)
1169     {
1170         supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, input.GetNumDimensions()),
1171                                       reasonIfUnsupported,
1172                                       CreateIncorrectDimensionsErrorMsg(input.GetNumDimensions(),
1173                                                                         output.GetNumDimensions(),
1174                                                                         meanLayerStr, outputTensorStr).data());
1175     }
1176     else if (descriptor.m_Axis.empty())
1177     {
1178         supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 1),
1179                                       reasonIfUnsupported,
1180                                       CreateIncorrectDimensionsErrorMsg(1, output.GetNumDimensions(),
1181                                                                         meanLayerStr, outputTensorStr).data());
1182     }
1183     else
1184     {
1185         auto outputDim = input.GetNumDimensions() - boost::numeric_cast<unsigned int>(descriptor.m_Axis.size());
1186
1187         if (outputDim > 0)
1188         {
1189             supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, outputDim),
1190                                           reasonIfUnsupported,
1191                                           CreateIncorrectDimensionsErrorMsg(outputDim, output.GetNumDimensions(),
1192                                                                             meanLayerStr, outputTensorStr).data());
1193         }
1194         else
1195         {
1196             supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 1),
1197                                           reasonIfUnsupported,
1198                                           CreateIncorrectDimensionsErrorMsg(1, output.GetNumDimensions(),
1199                                                                             meanLayerStr, outputTensorStr).data());
1200         }
1201     }
1202
1203     return supported;
1204 }
1205
1206 bool RefLayerSupport::IsMergerSupported(const std::vector<const TensorInfo*> inputs,
1207                                         const TensorInfo& output,
1208                                         const MergerDescriptor& descriptor,
1209                                         Optional<std::string&> reasonIfUnsupported) const
1210 {
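    // Merger is the older name for Concat, so defer to the Concat support check.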
1211     return IsConcatSupported(inputs, output, descriptor, reasonIfUnsupported);
1212 }
1213
1214 bool RefLayerSupport::IsMemCopySupported(const TensorInfo &input,
1215                                          const TensorInfo &output,
1216                                          Optional<std::string &> reasonIfUnsupported) const
1217 {
1218     bool supported = true;
1219
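    // Boolean is included here so that, for example, comparison-layer outputs can be copied.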
1220     std::array<DataType,5> supportedTypes =
1221     {
1222         DataType::Float32,
1223         DataType::Float16,
1224         DataType::QAsymmU8,
1225         DataType::QSymmS16,
1226         DataType::Boolean
1227     };
1228
1229     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1230                                   "Reference MemCopy: input type not supported");
1231
1232     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1233                                   "Reference MemCopy: output type not supported");
1234
1235     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1236                                   "Reference MemCopy: input and output types are mismatched");
1237
1238     return supported;
1239 }
1240
1241 bool RefLayerSupport::IsMinimumSupported(const TensorInfo& input0,
1242                                          const TensorInfo& input1,
1243                                          const TensorInfo& output,
1244                                          Optional<std::string&> reasonIfUnsupported) const
1245 {
1246     bool supported = true;
1247
1248     std::array<DataType,4> supportedTypes = {
1249         DataType::Float32,
1250         DataType::Float16,
1251         DataType::QAsymmU8,
1252         DataType::QSymmS16
1253     };
1254
1255     supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1256                                   "Reference minimum: input 0 is not a supported type.");
1257
1258     supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1259                                   "Reference minimum: input 1 is not a supported type.");
1260
1261     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1262                                   "Reference minimum: output is not a supported type.");
1263
1264     supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1265                                   "Reference minimum: input 0 and input 1 types are mismatched");
1266
1267     supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1268                                   "Reference minimum: input and output types are mismatched");
1269
1270     supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1271                                   "Reference minimum: shapes are not suitable for implicit broadcast.");
1272
1273     return supported;
1274 }
1275
1276 bool RefLayerSupport::IsMultiplicationSupported(const TensorInfo& input0,
1277                                                 const TensorInfo& input1,
1278                                                 const TensorInfo& output,
1279                                                 Optional<std::string&> reasonIfUnsupported) const
1280 {
1281     bool supported = true;
1282
1283     std::array<DataType,5> supportedTypes = {
1284         DataType::Float32,
1285         DataType::Float16,
1286         DataType::QSymmS8,
1287         DataType::QAsymmU8,
1288         DataType::QSymmS16
1289     };
1290
1291     supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1292                                   "Reference multiplication: input 0 is not a supported type.");
1293
1294     supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1295                                   "Reference multiplication: input 1 is not a supported type.");
1296
1297     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1298                                   "Reference multiplication: output is not a supported type.");
1299
1300     supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1301                                   "Reference multiplication: input 0 and input 1 types are mismatched");
1302
1303     supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1304                                   "Reference multiplication: input and output types are mismatched");
1305
1306     supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1307                                   "Reference multiplication: shapes are not suitable for implicit broadcast.");
1308
1309     return supported;
1310 }
1311
1312 bool RefLayerSupport::IsNormalizationSupported(const TensorInfo& input,
1313                                                const TensorInfo& output,
1314                                                const NormalizationDescriptor& descriptor,
1315                                                Optional<std::string&> reasonIfUnsupported) const
1316 {
1317     ignore_unused(descriptor);
1318
1319     // Define supported types
1320     std::array<DataType, 4> supportedTypes =
1321     {
1322         DataType::Float16,
1323         DataType::Float32,
1324         DataType::QAsymmU8,
1325         DataType::QSymmS16
1326     };
1327
1328     bool supported = true;
1329
1330     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1331                                   "Reference normalization: input type not supported.");
1332
1333     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1334                                   "Reference normalization: output type not supported.");
1335
1336     supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
1337                                   "Reference normalization: input and output shapes have different "
1338                                   "num total elements.");
1339
1340     return supported;
1341 }
1342
1343 bool RefLayerSupport::IsOutputSupported(const TensorInfo& /*output*/,
1344                                         Optional<std::string&> /*reasonIfUnsupported*/) const
1345 {
1346     return true;
1347 }
1348
1349 bool RefLayerSupport::IsPadSupported(const TensorInfo& input,
1350                                      const TensorInfo& output,
1351                                      const PadDescriptor& descriptor,
1352                                      Optional<std::string&> reasonIfUnsupported) const
1353 {
1354     ignore_unused(descriptor);
1355     bool supported = true;
1356
1357     // Define supported input and output types.
1358     std::array<DataType,4> supportedTypes =
1359     {
1360         DataType::Float32,
1361         DataType::Float16,
1362         DataType::QAsymmU8,
1363         DataType::QSymmS16
1364     };
1365
1366     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1367                                   "Reference pad: input is not a supported type.");
1368
1369     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1370                                   "Reference pad: output is not a supported type.");
1371
1372     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1373                                   "Reference pad: input and output types are mismatched.");
1374
1375     return supported;
1376 }
1377
1378 bool RefLayerSupport::IsPermuteSupported(const TensorInfo& input,
1379                                          const TensorInfo& output,
1380                                          const PermuteDescriptor& descriptor,
1381                                          Optional<std::string&> reasonIfUnsupported) const
1382 {
1383     ignore_unused(descriptor);
1384     bool supported = true;
1385
1386     // Define supported input and output types.
1387     std::array<DataType,3> supportedTypes =
1388     {
1389         DataType::Float32,
1390         DataType::QAsymmU8,
1391         DataType::QSymmS16
1392     };
1393
1394     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1395                                   "Reference permute: input is not a supported type.");
1396
1397     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1398                                   "Reference permute: output is not a supported type.");
1399
1400     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1401                                   "Reference permute: input and output types are mismatched.");
1402
1403     return supported;
1404 }
1405
1406 bool RefLayerSupport::IsPooling2dSupported(const TensorInfo& input,
1407                                            const TensorInfo& output,
1408                                            const Pooling2dDescriptor& descriptor,
1409                                            Optional<std::string&> reasonIfUnsupported) const
1410 {
1411     ignore_unused(descriptor);
1412     bool supported = true;
1413
1414     // Define supported input and output types.
1415     std::array<DataType,4> supportedTypes =
1416     {
1417         DataType::Float32,
1418         DataType::Float16,
1419         DataType::QAsymmU8,
1420         DataType::QSymmS16
1421     };
1422
1423     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1424                                   "Reference pooling2d: input is not a supported type.");
1425
1426     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1427                                   "Reference pooling2d: output is not a supported type.");
1428
1429     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1430                                   "Reference pooling2d: input and output types are mismatched.");
1431
1432     return supported;
1433 }
1434
1435 bool RefLayerSupport::IsQuantizeSupported(const TensorInfo& input,
1436                                           const TensorInfo& output,
1437                                           Optional<std::string&> reasonIfUnsupported) const
1438 {
1439     bool supported = true;
1440
1441     // Define supported input types.
1442     std::array<DataType,5> supportedInputTypes = {
1443         DataType::Float32,
1444         DataType::Float16,
1445         DataType::QAsymmU8,
1446         DataType::QSymmS8,
1447         DataType::QSymmS16
1448     };
1449
1450     supported &= CheckSupportRule(TypeAnyOf(input, supportedInputTypes), reasonIfUnsupported,
1451                                   "Reference quantize: input type not supported.");
1452
1453     // Define supported output types.
1454     std::array<DataType,3> supportedOutputTypes = {
1455         DataType::QAsymmU8,
1456         DataType::QSymmS8,
1457         DataType::QSymmS16
1458     };
1459     supported &= CheckSupportRule(TypeAnyOf(output, supportedOutputTypes), reasonIfUnsupported,
1460                                   "Reference quantize: output type not supported.");
1461
1462     supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
1463                                   "Reference quantize: input and output shapes have different num total elements.");
1464
1465     return supported;
1466 }
1467
1468 bool RefLayerSupport::IsReshapeSupported(const TensorInfo& input,
1469                                          const TensorInfo& output,
1470                                          const ReshapeDescriptor& descriptor,
1471                                          Optional<std::string&> reasonIfUnsupported) const
1472 {
1473     ignore_unused(output);
1474     ignore_unused(descriptor);
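    // The output tensor is intentionally ignored: reshape does not change the data
    // type, so validating the input type is sufficient.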
1475     // Define supported types.
1476     std::array<DataType,6> supportedTypes =
1477     {
1478         DataType::Float32,
1479         DataType::Float16,
1480         DataType::Signed32,
1481         DataType::QAsymmU8,
1482         DataType::QSymmS8,
1483         DataType::QSymmS16
1484     };
1485     return CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1486         "Reference reshape: input type not supported.");
1487 }
1488
1489 bool RefLayerSupport::IsResizeBilinearSupported(const TensorInfo& input,
1490                                                 const TensorInfo& output,
1491                                                 Optional<std::string&> reasonIfUnsupported) const
1492 {
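    // Older ResizeBilinear entry point; the generic check is IsResizeSupported below.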
1493     bool supported = true;
1494     std::array<DataType,4> supportedTypes =
1495     {
1496         DataType::Float32,
1497         DataType::Float16,
1498         DataType::QAsymmU8,
1499         DataType::QSymmS16
1500     };
1501
1502     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1503                                   "Reference ResizeBilinear: input type not supported");
1504
1505     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1506                                   "Reference ResizeBilinear: output type not supported");
1507
1508     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1509                                   "Reference ResizeBilinear: input and output types not matching");
1510
1511     return supported;
1512 }
1513
1514 bool RefLayerSupport::IsResizeSupported(const TensorInfo& input,
1515                                         const TensorInfo& output,
1516                                         const ResizeDescriptor& descriptor,
1517                                         Optional<std::string&> reasonIfUnsupported) const
1518 {
1519     boost::ignore_unused(descriptor);
1520     bool supported = true;
1521     std::array<DataType,5> supportedTypes =
1522     {
1523         DataType::Float32,
1524         DataType::Float16,
1525         DataType::QSymmS8,
1526         DataType::QAsymmU8,
1527         DataType::QSymmS16
1528     };
1529
1530     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1531                                   "Reference Resize: input type not supported");
1532
1533     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1534                                   "Reference Resize: output type not supported");
1535
1536     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1537                                   "Reference Resize: input and output types not matching");
1538
1539     return supported;
1540 }
1541
1542 bool RefLayerSupport::IsRsqrtSupported(const TensorInfo& input,
1543                                        const TensorInfo& output,
1544                                        Optional<std::string&> reasonIfUnsupported) const
1545 {
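    // Rsqrt is handled through the generic elementwise unary support check.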
1546     return IsElementwiseUnarySupported(input,
1547                                        output,
1548                                        ElementwiseUnaryDescriptor(UnaryOperation::Rsqrt),
1549                                        reasonIfUnsupported);
1550 }
1551
1552 bool RefLayerSupport::IsSliceSupported(const TensorInfo& input,
1553                                        const TensorInfo& output,
1554                                        const SliceDescriptor& descriptor,
1555                                        Optional<std::string&> reasonIfUnsupported) const
1556 {
1557     boost::ignore_unused(descriptor);
1558     bool supported = true;
1559
1560     std::array<DataType, 3> supportedTypes =
1561     {
1562         DataType::Float32,
1563         DataType::QAsymmU8,
1564         DataType::QSymmS16
1565     };
1566
1567     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1568                                   "Reference Slice: input type not supported");
1569
1570     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1571                                   "Reference Slice: output type not supported");
1572
1573     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1574                                   "Reference Slice: input and output types are mismatched");
1575
1576     return supported;
1577 }
1578
1579 bool RefLayerSupport::IsSoftmaxSupported(const TensorInfo& input,
1580                                          const TensorInfo& output,
1581                                          const SoftmaxDescriptor& descriptor,
1582                                          Optional<std::string&> reasonIfUnsupported) const
1583 {
1584     boost::ignore_unused(descriptor);
1585     bool supported = true;
1586     std::array<DataType,4> supportedTypes =
1587     {
1588             DataType::Float32,
1589             DataType::Float16,
1590             DataType::QAsymmU8,
1591             DataType::QSymmS16
1592     };
1593
1594     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1595                                   "Reference Softmax: input type not supported");
1596
1597     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1598                                   "Reference Softmax: output type not supported");
1599
1600     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1601                                   "Reference Softmax: input and output types are mismatched");
1602
1603     return supported;
1604 }
1605
1606 bool RefLayerSupport::IsSpaceToBatchNdSupported(const TensorInfo& input,
1607                                                 const TensorInfo& output,
1608                                                 const SpaceToBatchNdDescriptor& descriptor,
1609                                                 Optional<std::string&> reasonIfUnsupported) const
1610 {
1611     boost::ignore_unused(descriptor);
1612     bool supported = true;
1613     std::array<DataType,4> supportedTypes =
1614     {
1615             DataType::Float32,
1616             DataType::Float16,
1617             DataType::QAsymmU8,
1618             DataType::QSymmS16
1619     };
1620
1621     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1622                                   "Reference SpaceToBatchNd: input type not supported");
1623
1624     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1625                                   "Reference SpaceToBatchNd: output type not supported");
1626
1627     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1628                                   "Reference SpaceToBatchNd: input and output types are mismatched");
1629
1630     return supported;
1631 }
1632
1633 bool RefLayerSupport::IsSpaceToDepthSupported(const TensorInfo& input,
1634                                               const TensorInfo& output,
1635                                               const SpaceToDepthDescriptor& descriptor,
1636                                               Optional<std::string&> reasonIfUnsupported) const
1637 {
1638
1639     ignore_unused(descriptor);
1640     bool supported = true;
1641
1642     std::array<DataType,4> supportedTypes =
1643     {
1644         DataType::Float32,
1645         DataType::Float16,
1646         DataType::QAsymmU8,
1647         DataType::QSymmS16
1648     };
1649
1650     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1651         "Reference SpaceToDepth: input type not supported");
1652
1653     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1654         "Reference SpaceToDepth: output type not supported");
1655
1656     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1657         "Reference SpaceToDepth: input and output types are mismatched");
1658
1659     return supported;
1660 }
1661
1662 bool RefLayerSupport::IsSplitterSupported(const TensorInfo& input,
1663                                           const ViewsDescriptor& descriptor,
1664                                           Optional<std::string&> reasonIfUnsupported) const
1665 {
1666     ignore_unused(descriptor);
1667     bool supported = true;
1668     std::array<DataType,4> supportedTypes =
1669     {
1670         DataType::Float32,
1671         DataType::Float16,
1672         DataType::QAsymmU8,
1673         DataType::QSymmS16
1674     };
1675
1676     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1677                                   "Reference splitter: input type not supported");
1678
1679     return supported;
1680 }
1681
1682 bool RefLayerSupport::IsSplitterSupported(const TensorInfo& input,
1683                                           const std::vector<std::reference_wrapper<TensorInfo>>& outputs,
1684                                           const ViewsDescriptor& descriptor,
1685                                           Optional<std::string&> reasonIfUnsupported) const
1686 {
1687     ignore_unused(descriptor);
1688     bool supported = true;
1689     std::array<DataType,4> supportedTypes =
1690     {
1691         DataType::Float32,
1692         DataType::Float16,
1693         DataType::QAsymmU8,
1694         DataType::QSymmS16
1695     };
1696
1697     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1698                                   "Reference splitter: input type not supported");
1699     for (const TensorInfo& output : outputs)
1700     {
1701         supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1702                                       "Reference splitter: output type not supported");
1703
1704         supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1705                                       "Reference splitter: input and output types mismatched.");
1706     }
1707
1708     return supported;
1709 }
1710
1711 bool RefLayerSupport::IsStackSupported(const std::vector<const TensorInfo*>& inputs,
1712                                        const TensorInfo& output,
1713                                        const StackDescriptor& descriptor,
1714                                        Optional<std::string&> reasonIfUnsupported) const
1715 {
1716     ignore_unused(descriptor);
1717
1718     bool supported = true;
1719     std::array<DataType,4> supportedTypes =
1720     {
1721         DataType::Float32,
1722         DataType::Float16,
1723         DataType::QAsymmU8,
1724         DataType::QSymmS16
1725     };
1726
1727     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1728                                   "Reference stack: output type not supported");
1729     for (const TensorInfo* input : inputs)
1730     {
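        // Every stacked input must be a supported type and must match the output type.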
1731         BOOST_ASSERT(input != nullptr);
1732         supported &= CheckSupportRule(TypeAnyOf(*input, supportedTypes), reasonIfUnsupported,
1733             "Reference stack: input type not supported");
1734
1735         supported &= CheckSupportRule(TypesAreEqual(*input, output), reasonIfUnsupported,
1736             "Reference stack: input and output types mismatched.");
1737     }
1738
1739     return supported;
1740 }
1741
1742 bool RefLayerSupport::IsStridedSliceSupported(const TensorInfo& input,
1743                                               const TensorInfo& output,
1744                                               const StridedSliceDescriptor& descriptor,
1745                                               Optional<std::string&> reasonIfUnsupported) const
1746 {
1747     ignore_unused(descriptor);
1748     bool supported = true;
1749
1750     std::array<DataType,3> supportedTypes =
1751     {
1752         DataType::Float32,
1753         DataType::QAsymmU8,
1754         DataType::QSymmS16
1755     };
1756
1757     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1758                                   "Reference StridedSlice: input type not supported");
1759
1760     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1761                                   "Reference StridedSlice: output type not supported");
1762
1763     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1764                                   "Reference StridedSlice: input and output types are mismatched");
1765
1766     return supported;
1767 }
1768
1769 bool RefLayerSupport::IsSubtractionSupported(const TensorInfo& input0,
1770                                              const TensorInfo& input1,
1771                                              const TensorInfo& output,
1772                                              Optional<std::string&> reasonIfUnsupported) const
1773 {
1774     bool supported = true;
1775
1776     std::array<DataType,4> supportedTypes = {
1777         DataType::Float32,
1778         DataType::Float16,
1779         DataType::QAsymmU8,
1780         DataType::QSymmS16
1781     };
1782
1783     supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1784                                   "Reference subtraction: input 0 is not a supported type.");
1785
1786     supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1787                                   "Reference subtraction: input 1 is not a supported type.");
1788
1789     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1790                                   "Reference subtraction: output is not a supported type.");
1791
1792     supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1793                                   "Reference subtraction: input 0 and input 1 types are mismatched");
1794
1795     supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1796                                   "Reference subtraction: input and output types are mismatched");
1797
1798     supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1799                                   "Reference subtraction: shapes are not suitable for implicit broadcast.");
1800
1801     return supported;
1802 }
1803
1804 bool RefLayerSupport::IsPreluSupported(const TensorInfo& input,
1805                                        const TensorInfo& alpha,
1806                                        const TensorInfo& output,
1807                                        Optional<std::string&> reasonIfUnsupported) const
1808 {
1809     bool supported = true;
1810
1811     std::array<DataType, 4> supportedTypes
1812     {
1813         DataType::Float32,
1814         DataType::Float16,
1815         DataType::QAsymmU8,
1816         DataType::QSymmS16
1817     };
1818
1819     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1820                                   "PReLU: input is not a supported type.");
1821
1822     supported &= CheckSupportRule(TypeAnyOf(alpha, supportedTypes), reasonIfUnsupported,
1823                                   "PReLU: alpha is not a supported type.");
1824
1825     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1826                                   "PReLU: output is not a supported type.");
1827
1828     supported &= CheckSupportRule(TypesAreEqual(input, alpha, output), reasonIfUnsupported,
1829                                   "PReLU: input, alpha and output types are mismatched");
1830
1831     supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input, alpha, output), reasonIfUnsupported,
1832                                   "PReLU: shapes are not suitable for implicit broadcast");
1833
1834     return supported;
1835 }
1836
1837 bool RefLayerSupport::IsTransposeConvolution2dSupported(const TensorInfo& input,
1838                                                         const TensorInfo& output,
1839                                                         const TransposeConvolution2dDescriptor& descriptor,
1840                                                         const TensorInfo& weights,
1841                                                         const Optional<TensorInfo>& biases,
1842                                                         Optional<std::string&> reasonIfUnsupported) const
1843 {
1844     boost::ignore_unused(descriptor);
1845     bool supported = true;
1846
1847     std::array<DataType,4> supportedTypes =
1848     {
1849             DataType::Float32,
1850             DataType::Float16,
1851             DataType::QAsymmU8,
1852             DataType::QSymmS16
1853     };
1854
1855     supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1856                                   "Reference TransposeConvolution2d: input is not a supported type.");
1857
1858     supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1859                                   "Reference TransposeConvolution2d: output is not a supported type.");
1860
1861     supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1862                                   "Reference TransposeConvolution2d: input and output types mismatched.");
1863
1864
1865     const DataType inputType = input.GetDataType();
1866     if (inputType == DataType::QAsymmU8)
1867     {
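        // For quantized (QAsymmU8) inputs the weights may use symmetric 8-bit quantization,
        // including the deprecated per-axis variant, so they need not match the input type.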
1868         ARMNN_NO_DEPRECATE_WARN_BEGIN
1869         std::array<DataType, 3> supportedWeightTypes =
1870         {
1871             DataType::QAsymmU8,
1872             DataType::QSymmS8,
1873             DataType::QuantizedSymm8PerAxis //Deprecated
1874         };
1875         ARMNN_NO_DEPRECATE_WARN_END
1876
1877         supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
1878                                       "Reference TransposeConvolution2d: weights type not supported for "
1879                                       "quantized input.");
1880     }
1881     else
1882     {
1883         supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
1884                                     "Reference TransposeConvolution2d: weights is not a supported type.");
1885
1886         supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
1887                                     "Reference TransposeConvolution2d: input and weights types mismatched.");
1888     }
1889
1890     if (biases.has_value())
1891     {
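        // Biases, when present, use the accumulator types (Float32, Float16 or Signed32)
        // rather than the input type.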
1892         std::array<DataType,3> biasesSupportedTypes =
1893         {
1894                 DataType::Float32,
1895                 DataType::Float16,
1896                 DataType::Signed32
1897         };
1898         supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
1899                                       "Reference TransposeConvolution2d: biases is not a supported type.");
1900     }
1901
1902     return supported;
1903 }
1904
1905 } // namespace armnn