2 // Copyright © 2017 Arm Ltd. All rights reserved.
3 // SPDX-License-Identifier: MIT
6 #include "RefLayerSupport.hpp"
7 #include "RefBackendId.hpp"
9 #include <DataLayoutIndexed.hpp>
10 #include <InternalTypes.hpp>
11 #include <LayerSupportCommon.hpp>
13 #include <armnn/Types.hpp>
14 #include <armnn/Descriptors.hpp>
16 #include <backendsCommon/BackendRegistry.hpp>
17 #include <backendsCommon/LayerSupportRules.hpp>
18 #include <backendsCommon/test/WorkloadTestUtils.hpp>
20 #include <boost/core/ignore_unused.hpp>
26 using namespace boost;
// Helper that forwards to IsSupportedForDataTypeGeneric, supplying FalseFunc
// for the data types the reference backend rejects outright and the given
// function pointers for Float32/Uint8 support decisions.
// NOTE(review): the embedded original numbering skips lines (36, 39-40, 42,
// 44-45), so some parameter and argument lines are elided from this listing;
// the surviving text below is preserved verbatim.
34 template<typename Float32Func, typename Uint8Func, typename ... Params>
35 bool IsSupportedForDataTypeRef(Optional<std::string&> reasonIfUnsupported,
37 Float32Func floatFuncPtr,
38 Uint8Func uint8FuncPtr,
41 return IsSupportedForDataTypeGeneric(reasonIfUnsupported,
43 &FalseFunc<Params...>,
46 &FalseFunc<Params...>,
47 &FalseFunc<Params...>,
48 std::forward<Params>(params)...);
51 } // anonymous namespace
/// Builds the standard diagnostic message reported when a tensor has the
/// wrong number of dimensions for a reference-backend layer.
/// @param expected   Number of dimensions the layer requires.
/// @param actual     Number of dimensions the tensor actually has.
/// @param layerStr   Human-readable layer name (e.g. "Mean").
/// @param tensorName Name of the offending tensor (e.g. "output").
/// @return The formatted message, e.g.
///         "Reference Mean: Expected 4 dimensions but got 3 dimensions
///          instead, for the 'output' tensor."
std::string CreateIncorrectDimensionsErrorMsg(unsigned int expected,
                                              unsigned int actual,
                                              const std::string& layerStr,
                                              const std::string& tensorName)
{
    // The strings are read-only here, so they are taken by const reference
    // (the original listing took them by mutable reference for no reason).
    std::string errorMsg = "Reference " + layerStr + ": Expected " + std::to_string(expected) + " dimensions but got" +
                           " " + std::to_string(actual) + " dimensions instead, for the '" + tensorName + "' tensor.";
    return errorMsg;
}
67 } // anonymous namespace
// Support check for the Activation layer on the reference backend:
// input/output must be one of the supported data types, types must match,
// and shapes must have the same rank; the local Rule subclass then accepts
// only the activation functions listed in its switch.
// NOTE(review): the embedded numbering skips lines (braces, a third
// supportedTypes initializer for the size-3 array, and the return statement
// are elided from this listing); surviving text is preserved verbatim.
69 bool RefLayerSupport::IsActivationSupported(const TensorInfo& input,
70 const TensorInfo& output,
71 const ActivationDescriptor& descriptor,
72 Optional<std::string&> reasonIfUnsupported) const
74 bool supported = true;
76 // Define supported types.
77 std::array<DataType,3> supportedTypes = {
79 DataType::QuantisedAsymm8,
80 DataType::QuantisedSymm16
83 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
84 "Reference activation: input type not supported.");
86 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
87 "Reference activation: output type not supported.");
89 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
90 "Reference activation: input and output types mismatched.");
92 supported &= CheckSupportRule(ShapesAreSameRank(input, output), reasonIfUnsupported,
93 "Reference activation: input and output shapes are of different rank.");
// Local rule: whitelists the activation functions the ref backend implements.
96 struct ActivationFunctionSupported : public Rule
98 ActivationFunctionSupported(const ActivationDescriptor& desc)
100 switch(desc.m_Function)
102 case ActivationFunction::Abs:
103 case ActivationFunction::BoundedReLu:
104 case ActivationFunction::LeakyReLu:
105 case ActivationFunction::Linear:
106 case ActivationFunction::ReLu:
107 case ActivationFunction::Sigmoid:
108 case ActivationFunction::SoftReLu:
109 case ActivationFunction::Sqrt:
110 case ActivationFunction::Square:
111 case ActivationFunction::TanH:
125 // Function is supported
126 supported &= CheckSupportRule(ActivationFunctionSupported(descriptor), reasonIfUnsupported,
127 "Reference activation: function not supported.");
// Support check for elementwise Addition: both inputs and the output must be
// a supported type, all three types must match, and the input shapes must be
// broadcast-compatible with the output shape.
// NOTE(review): supportedTypes is declared size 3 but only two initializers
// are visible — the listing elides a line; return statement also elided.
132 bool RefLayerSupport::IsAdditionSupported(const TensorInfo& input0,
133 const TensorInfo& input1,
134 const TensorInfo& output,
135 Optional<std::string&> reasonIfUnsupported) const
137 bool supported = true;
139 std::array<DataType,3> supportedTypes = {
141 DataType::QuantisedAsymm8,
142 DataType::QuantisedSymm16
145 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
146 "Reference addition: input 0 is not a supported type.");
148 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
149 "Reference addition: input 1 is not a supported type.");
151 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
152 "Reference addition: output is not a supported type.");
154 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
155 "Reference addition: input 0 and Input 1 types are mismatched");
157 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
158 "Reference addition: input and output types are mismatched");
160 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
161 "Reference addition: shapes are not suitable for implicit broadcast.");
// Support check for BatchNormalization: input/output and all four parameter
// tensors (mean, variance, beta, gamma) must be a supported type, and the
// input/output types must match. The descriptor is not inspected.
// NOTE(review): the listing elides lines (braces, one supportedTypes entry
// for the size-3 array, and the return statement).
166 bool RefLayerSupport::IsBatchNormalizationSupported(const TensorInfo& input,
167 const TensorInfo& output,
168 const TensorInfo& mean,
169 const TensorInfo& variance,
170 const TensorInfo& beta,
171 const TensorInfo& gamma,
172 const BatchNormalizationDescriptor& descriptor,
173 Optional<std::string&> reasonIfUnsupported) const
175 ignore_unused(descriptor);
177 std::array<DataType, 3> supportedTypes =
180 DataType::QuantisedAsymm8,
181 DataType::QuantisedSymm16
184 bool supported = true;
186 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
187 "Reference batch normalization: input is not a supported type.");
189 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
190 "Reference batch normalization: output is not a supported type.");
192 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
193 "Reference batch normalization: input and output types are mismatched");
195 supported &= CheckSupportRule(TypeAnyOf(mean, supportedTypes), reasonIfUnsupported,
196 "Reference batch normalization: mean is not a supported type.");
198 supported &= CheckSupportRule(TypeAnyOf(variance, supportedTypes), reasonIfUnsupported,
199 "Reference batch normalization: variance is not a supported type.");
201 supported &= CheckSupportRule(TypeAnyOf(beta, supportedTypes), reasonIfUnsupported,
202 "Reference batch normalization: beta is not a supported type.");
204 supported &= CheckSupportRule(TypeAnyOf(gamma, supportedTypes), reasonIfUnsupported,
205 "Reference batch normalization: gamma is not a supported type.");
// Support check for BatchToSpaceNd: input/output must be a supported,
// matching type, and both tensors must be 4-dimensional (dimension failures
// produce a message built via CreateIncorrectDimensionsErrorMsg).
// NOTE(review): the listing elides lines, including a supportedTypes entry
// for the size-3 array, the reasonIfUnsupported argument lines of the last
// two CheckSupportRule calls, and the return statement.
210 bool RefLayerSupport::IsBatchToSpaceNdSupported(const TensorInfo& input,
211 const TensorInfo& output,
212 const BatchToSpaceNdDescriptor& descriptor,
213 Optional<std::string&> reasonIfUnsupported) const
215 ignore_unused(descriptor);
217 bool supported = true;
// Names used only to compose the dimension-error diagnostics below.
219 std::string batchToSpaceNdLayerStr = "batchToSpaceNd";
220 std::string inputTensorStr = "input";
221 std::string outputTensorStr = "output";
223 // Define supported types.
224 std::array<DataType,3> supportedTypes =
227 DataType::QuantisedAsymm8,
228 DataType::QuantisedSymm16
231 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
232 "Reference BatchToSpaceNd: input type not supported.");
234 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
235 "Reference BatchToSpaceNd: output type not supported.");
237 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
238 "Reference BatchToSpaceNd: input and output types mismatched.");
240 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 4),
242 CreateIncorrectDimensionsErrorMsg(4,
243 output.GetNumDimensions(),
244 batchToSpaceNdLayerStr,
245 outputTensorStr).data())
247 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(input, 4),
249 CreateIncorrectDimensionsErrorMsg(4,
250 input.GetNumDimensions(),
251 batchToSpaceNdLayerStr,
252 inputTensorStr).data());
// Support check for Concat: the output and every input must be a supported
// type, and each input's type must match the output's. Inputs are asserted
// non-null. The descriptor (axis etc.) is not inspected.
// NOTE(review): the listing elides lines (braces, one supportedTypes entry
// for the size-3 array, and the return statement).
257 bool RefLayerSupport::IsConcatSupported(const std::vector<const TensorInfo*> inputs,
258 const TensorInfo& output,
259 const ConcatDescriptor& descriptor,
260 Optional<std::string&> reasonIfUnsupported) const
262 ignore_unused(descriptor);
264 bool supported = true;
265 std::array<DataType,3> supportedTypes =
268 DataType::QuantisedAsymm8,
269 DataType::QuantisedSymm16
272 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
273 "Reference concatenation: output type not supported");
274 for (const TensorInfo* input : inputs)
276 BOOST_ASSERT(input != nullptr);
277 supported &= CheckSupportRule(TypeAnyOf(*input, supportedTypes), reasonIfUnsupported,
278 "Reference concatenation: input type not supported");
280 supported &= CheckSupportRule(TypesAreEqual(*input, output), reasonIfUnsupported,
281 "Reference concatenation: input and output types mismatched.");
// Support check for Constant: only requires the output tensor to be one of
// the supported types; the result of the single rule is returned directly.
// NOTE(review): supportedTypes is declared size 4 but only two initializers
// are visible — the listing elides two entries.
287 bool RefLayerSupport::IsConstantSupported(const TensorInfo& output,
288 Optional<std::string&> reasonIfUnsupported) const
290 std::array<DataType,4> supportedTypes =
294 DataType::QuantisedAsymm8,
295 DataType::QuantisedSymm16
298 return CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
299 "Reference constant: output is not a supported type.");
// Support check for ConvertFp16ToFp32: uses the generic data-type dispatcher
// twice — once keyed on the input type and once on the output type — with
// False* functors rejecting the disallowed type on each side.
// NOTE(review): several dispatcher argument lines (including the input
// data-type argument and trailing functors) are elided from this listing.
302 bool RefLayerSupport::IsConvertFp16ToFp32Supported(const TensorInfo& input,
303 const TensorInfo& output,
304 Optional<std::string&> reasonIfUnsupported) const
306 return (IsSupportedForDataTypeGeneric(reasonIfUnsupported,
309 &FalseInputFuncF32<>,
313 IsSupportedForDataTypeGeneric(reasonIfUnsupported,
314 output.GetDataType(),
315 &FalseOutputFuncF16<>,
// Support check for ConvertFp32ToFp16: mirror image of the Fp16->Fp32 check —
// the generic dispatcher rejects an Fp16 input and an Fp32 output.
// NOTE(review): several dispatcher argument lines are elided from this
// listing (the embedded numbering skips).
322 bool RefLayerSupport::IsConvertFp32ToFp16Supported(const TensorInfo& input,
323 const TensorInfo& output,
324 Optional<std::string&> reasonIfUnsupported) const
326 return (IsSupportedForDataTypeGeneric(reasonIfUnsupported,
328 &FalseInputFuncF16<>,
333 IsSupportedForDataTypeGeneric(reasonIfUnsupported,
334 output.GetDataType(),
336 &FalseOutputFuncF32<>,
// Support check for Convolution2d: input, output and weights must be a
// supported type; input/output and input/weights types must match; when a
// bias is supplied it must be one of the (narrower) bias types.
// NOTE(review): the listing elides lines — one supportedTypes entry for the
// size-3 array, both biasesSupportedTypes entries for the size-2 array, and
// the return statement.
342 bool RefLayerSupport::IsConvolution2dSupported(const TensorInfo& input,
343 const TensorInfo& output,
344 const Convolution2dDescriptor& descriptor,
345 const TensorInfo& weights,
346 const Optional<TensorInfo>& biases,
347 Optional<std::string&> reasonIfUnsupported) const
349 bool supported = true;
351 // Define supported types.
352 std::array<DataType,3> supportedTypes = {
354 DataType::QuantisedAsymm8,
355 DataType::QuantisedSymm16
358 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
359 "Reference convolution2d: input is not a supported type.");
361 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
362 "Reference convolution2d: output is not a supported type.");
364 supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
365 "Reference convolution2d: weights is not a supported type.");
367 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
368 "Reference convolution2d: input and output types mismatched.");
370 supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
371 "Reference convolution2d: input and weights types mismatched.");
// Bias is optional; only validated when present.
373 if (biases.has_value())
375 std::array<DataType,2> biasesSupportedTypes = {
379 supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
380 "Reference convolution2d: biases is not a supported type.");
// Stride/pad/dilation settings in the descriptor are not validated here.
382 ignore_unused(descriptor);
// Support check for the Debug layer: input and output must be a supported,
// matching type.
// NOTE(review): the listing elides lines (braces, one supportedTypes entry
// for the size-3 array, and the return statement).
387 bool RefLayerSupport::IsDebugSupported(const TensorInfo& input,
388 const TensorInfo& output,
389 Optional<std::string&> reasonIfUnsupported) const
391 bool supported = true;
393 std::array<DataType,3> supportedTypes =
396 DataType::QuantisedAsymm8,
397 DataType::QuantisedSymm16
400 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
401 "Reference debug: input type not supported");
403 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
404 "Reference debug: output type not supported");
406 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
407 "Reference debug: input and output types are mismatched");
// Support check for DepthwiseConvolution2d: same rule set as Convolution2d —
// input/output/weights in the supported set, input/output and input/weights
// types equal, and an optional bias restricted to the bias type set.
// NOTE(review): the listing elides lines — one supportedTypes entry for the
// size-3 array, both biasesSupportedTypes entries, and the return statement.
412 bool RefLayerSupport::IsDepthwiseConvolutionSupported(const TensorInfo& input,
413 const TensorInfo& output,
414 const DepthwiseConvolution2dDescriptor& descriptor,
415 const TensorInfo& weights,
416 const Optional<TensorInfo>& biases,
417 Optional<std::string&> reasonIfUnsupported) const
419 bool supported = true;
421 // Define supported types.
422 std::array<DataType,3> supportedTypes =
425 DataType::QuantisedAsymm8,
426 DataType::QuantisedSymm16
429 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
430 "Reference DepthwiseConvolution2d: input is not a supported type.");
432 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
433 "Reference DepthwiseConvolution2d: output is not a supported type.");
435 supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
436 "Reference DepthwiseConvolution2d: weights is not a supported type.");
438 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
439 "Reference DepthwiseConvolution2d: input and output types mismatched.");
441 supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
442 "Reference DepthwiseConvolution2d: input and weights types mismatched.");
// Bias is optional; only validated when present.
444 if (biases.has_value())
446 std::array<DataType,2> biasesSupportedTypes =
451 supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
452 "Reference DepthwiseConvolution2d: biases is not a supported type.");
// Descriptor settings are not validated here.
454 ignore_unused(descriptor);
// Support check for Dequantize: input must be one of the quantised types,
// the output must be in the (elided) output type set, and input/output must
// hold the same total number of elements.
// NOTE(review): the single entry of the size-1 supportedOutputTypes array is
// elided from this listing, as are braces and the return statement.
460 bool RefLayerSupport::IsDequantizeSupported(const TensorInfo& input,
461 const TensorInfo& output,
462 Optional<std::string&> reasonIfUnsupported) const
464 bool supported = true;
466 std::array<DataType,2> supportedInputTypes = {
467 DataType::QuantisedAsymm8,
468 DataType::QuantisedSymm16
471 supported &= CheckSupportRule(TypeAnyOf(input, supportedInputTypes), reasonIfUnsupported,
472 "Reference dequantize: input type not supported.");
474 std::array<DataType,1> supportedOutputTypes = {
478 supported &= CheckSupportRule(TypeAnyOf(output, supportedOutputTypes), reasonIfUnsupported,
479 "Reference dequantize: output type not supported.");
481 supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
482 "Reference dequantize: input and output shapes have different num total elements.");
// Support check for DetectionPostProcess: only the two input tensors are
// type-checked against the supported set; the descriptor is not inspected
// in the visible lines.
// NOTE(review): the listing elides lines (one supportedInputTypes entry for
// the size-3 array and the return statement).
487 bool RefLayerSupport::IsDetectionPostProcessSupported(const armnn::TensorInfo& input0,
488 const armnn::TensorInfo& input1,
489 const armnn::DetectionPostProcessDescriptor& descriptor,
490 armnn::Optional<std::string&> reasonIfUnsupported) const
492 bool supported = true;
494 std::array<DataType,3> supportedInputTypes =
497 DataType::QuantisedAsymm8,
498 DataType::QuantisedSymm16
501 supported &= CheckSupportRule(TypeAnyOf(input0, supportedInputTypes), reasonIfUnsupported,
502 "Reference DetectionPostProcess: input 0 is not a supported type.");
504 supported &= CheckSupportRule(TypeAnyOf(input1, supportedInputTypes), reasonIfUnsupported,
505 "Reference DetectionPostProcess: input 1 is not a supported type.");
// Dilated depthwise convolution shares the plain depthwise support rules:
// this method simply delegates to IsDepthwiseConvolutionSupported with the
// same arguments.
510 bool RefLayerSupport::IsDilatedDepthwiseConvolutionSupported(const TensorInfo& input,
511 const TensorInfo& output,
512 const DepthwiseConvolution2dDescriptor& descriptor,
513 const TensorInfo& weights,
514 const Optional<TensorInfo>& biases,
515 Optional<std::string&> reasonIfUnsupported) const
517 return IsDepthwiseConvolutionSupported(input, output, descriptor, weights, biases, reasonIfUnsupported);
// Support check for elementwise Division: identical rule structure to
// Addition — supported types for both inputs and output, matching types,
// and broadcast-compatible shapes.
// NOTE(review): one supportedTypes entry for the size-3 array and the
// return statement are elided from this listing.
520 bool RefLayerSupport::IsDivisionSupported(const TensorInfo& input0,
521 const TensorInfo& input1,
522 const TensorInfo& output,
523 Optional<std::string&> reasonIfUnsupported) const
525 bool supported = true;
527 std::array<DataType,3> supportedTypes = {
529 DataType::QuantisedAsymm8,
530 DataType::QuantisedSymm16
533 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
534 "Reference division: input 0 is not a supported type.");
536 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
537 "Reference division: input 1 is not a supported type.");
539 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
540 "Reference division: output is not a supported type.");
542 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
543 "Reference division: input 0 and Input 1 types are mismatched");
545 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
546 "Reference division: input and output types are mismatched");
548 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
549 "Reference division: shapes are not suitable for implicit broadcast.");
// Support check for elementwise Equal: both inputs must be a supported,
// matching type and the shapes broadcast-compatible with the output.
// Note: unlike the arithmetic ops, the visible lines do not type-check the
// output tensor (comparison outputs are a different type than the inputs).
// NOTE(review): one supportedTypes entry for the size-3 array and the
// return statement are elided from this listing.
554 bool RefLayerSupport::IsEqualSupported(const TensorInfo& input0,
555 const TensorInfo& input1,
556 const TensorInfo& output,
557 Optional<std::string&> reasonIfUnsupported) const
559 bool supported = true;
561 std::array<DataType,3> supportedTypes =
564 DataType::QuantisedAsymm8,
565 DataType::QuantisedSymm16
568 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
569 "Reference equal: input 0 is not a supported type.");
571 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
572 "Reference equal: input 1 is not a supported type.");
574 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
575 "Reference equal: input 0 and Input 1 types are mismatched");
577 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
578 "Reference equal: shapes are not suitable for implicit broadcast.");
// Support check for FakeQuantization: only the input tensor's type is
// validated against a single-entry type set; the descriptor is ignored.
// NOTE(review): the single entry of the size-1 supportedTypes array and the
// return statement are elided from this listing.
583 bool RefLayerSupport::IsFakeQuantizationSupported(const TensorInfo& input,
584 const FakeQuantizationDescriptor& descriptor,
585 Optional<std::string&> reasonIfUnsupported) const
587 ignore_unused(descriptor);
588 bool supported = true;
590 std::array<DataType,1> supportedTypes =
595 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
596 "Reference fake quantization: input type not supported.");
// Support check for Floor: input and output must each be one of the two
// supported types. (output is type-checked even though ignore_unused(output)
// is called — the ignore_unused is redundant in the visible code.)
// NOTE(review): one supportedTypes entry for the size-2 array and the
// return statement are elided from this listing.
601 bool RefLayerSupport::IsFloorSupported(const TensorInfo& input,
602 const TensorInfo& output,
603 Optional<std::string&> reasonIfUnsupported) const
605 ignore_unused(output);
606 bool supported = true;
608 std::array<DataType,2> supportedTypes =
611 DataType::QuantisedSymm16
614 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
615 "Reference Floor: input type not supported.");
617 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
618 "Reference Floor: output type not supported.");
// Support check for FullyConnected: input/output/weights must be a supported
// type; input/output and input/weights types must match. When the descriptor
// enables bias, the bias tensor must be a supported bias type, match the
// weights per BiasAndWeightsTypesMatch, and be compatible with the bias type
// inferred from the weights.
// NOTE(review): the listing elides lines — one supportedTypes entry for the
// size-3 array, the supportedBiasTypes variable name/initializers, and the
// return statement.
623 bool RefLayerSupport::IsFullyConnectedSupported(const TensorInfo& input,
624 const TensorInfo& output,
625 const TensorInfo& weights,
626 const TensorInfo& biases,
627 const FullyConnectedDescriptor& descriptor,
628 Optional<std::string&> reasonIfUnsupported) const
630 bool supported = true;
632 // Define supported types.
633 std::array<DataType,3> supportedTypes =
636 DataType::QuantisedAsymm8,
637 DataType::QuantisedSymm16
640 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
641 "Reference Fully Connected: input type not supported.");
643 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
644 "Reference Fully Connected: output type not supported.");
646 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
647 "Reference Fully Connected: input and output types mismatched.");
649 supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
650 "Reference Fully Connected: weights type not supported.");
652 supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
653 "Reference Fully Connected: input and weight types mismatched.");
// Bias rules only apply when the layer is configured with a bias.
655 if (descriptor.m_BiasEnabled)
657 // Defined supported types for bias
658 std::array<DataType, 2>
665 supported &= CheckSupportRule(TypeAnyOf(biases, supportedBiasTypes), reasonIfUnsupported,
666 "Reference Fully Connected: bias type not supported.");
668 supported &= CheckSupportRule(BiasAndWeightsTypesMatch(biases, weights), reasonIfUnsupported,
669 "Reference Fully Connected: bias and weight types mismatch.");
671 supported &= CheckSupportRule(BiasAndWeightsTypesCompatible(weights, supportedBiasTypes), reasonIfUnsupported,
672 "Reference Fully Connected: bias type inferred from weights is incompatible.");
// Support check for Gather: data tensor (input0) and output must be a
// supported, matching type; the indices tensor (input1) must be Signed32.
// NOTE(review): one supportedTypes entry for the size-3 array and the
// return statement are elided from this listing.
679 bool RefLayerSupport::IsGatherSupported(const armnn::TensorInfo& input0,
680 const armnn::TensorInfo& input1,
681 const armnn::TensorInfo& output,
682 armnn::Optional<std::string&> reasonIfUnsupported) const
684 bool supported = true;
685 std::array<DataType,3> supportedTypes =
688 DataType::QuantisedAsymm8,
689 DataType::QuantisedSymm16
692 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
693 "Reference Gather: input type not supported");
695 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
696 "Reference Gather: output type not supported");
// Gather indices are always 32-bit signed integers.
698 supported &= CheckSupportRule(TypeIs(input1, DataType::Signed32), reasonIfUnsupported,
699 "Reference Gather: indices (input1) type not supported");
701 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
702 "Reference Gather: input and output types not matching");
// Support check for elementwise Greater: same structure as Equal — both
// inputs in the supported set with matching types, shapes broadcast-
// compatible with the output; the output type itself is not checked in the
// visible lines.
// NOTE(review): one supportedTypes entry for the size-3 array and the
// return statement are elided from this listing.
707 bool RefLayerSupport::IsGreaterSupported(const TensorInfo& input0,
708 const TensorInfo& input1,
709 const TensorInfo& output,
710 Optional<std::string&> reasonIfUnsupported) const
712 bool supported = true;
714 std::array<DataType,3> supportedTypes =
717 DataType::QuantisedAsymm8,
718 DataType::QuantisedSymm16
721 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
722 "Reference greater: input 0 is not a supported type.");
724 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
725 "Reference greater: input 1 is not a supported type.");
727 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
728 "Reference greater: input 0 and Input 1 types are mismatched");
730 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
731 "Reference greater: shapes are not suitable for implicit broadcast.");
// Support check for the Input layer.
// NOTE(review): the entire body (original lines 738-741) is elided from this
// listing; its behavior cannot be documented from the visible text.
736 bool RefLayerSupport::IsInputSupported(const TensorInfo& input,
737 Optional<std::string&> reasonIfUnsupported) const
// Support check for L2Normalization: input/output must be a supported,
// matching type and hold the same total number of elements; the descriptor
// is ignored.
// NOTE(review): one supportedTypes entry for the size-3 array and the
// return statement are elided from this listing.
742 bool RefLayerSupport::IsL2NormalizationSupported(const TensorInfo& input,
743 const TensorInfo& output,
744 const L2NormalizationDescriptor& descriptor,
745 Optional<std::string&> reasonIfUnsupported) const
747 ignore_unused(descriptor);
748 // Define supported types
749 std::array<DataType, 3> supportedTypes =
752 DataType::QuantisedAsymm8,
753 DataType::QuantisedSymm16
756 bool supported = true;
758 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
759 "Reference L2normalization: input type not supported.");
761 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
762 "Reference L2normalization: output type not supported.");
764 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
765 "Reference L2normalization: input and output types mismatched.");
767 supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
768 "Reference L2normalization: input and output shapes have different "
769 "num total elements.");
// Support check for Lstm: the input must be a supported type, and every
// state/output tensor plus every weight/bias in paramsInfo must have the
// same data type as the input. Additional parameter checks are gated on the
// descriptor flags: CIFG (disabled -> input-gate params checked), peephole,
// projection, and layer normalization.
// NOTE(review): the listing elides lines throughout (braces, one
// supportedTypes entry for the size-2 array, several reasonIfUnsupported
// argument lines, and the return statement); surviving text is verbatim.
774 bool RefLayerSupport::IsLstmSupported(const TensorInfo& input,
775 const TensorInfo& outputStateIn,
776 const TensorInfo& cellStateIn,
777 const TensorInfo& scratchBuffer,
778 const TensorInfo& outputStateOut,
779 const TensorInfo& cellStateOut,
780 const TensorInfo& output,
781 const LstmDescriptor& descriptor,
782 const LstmInputParamsInfo& paramsInfo,
783 Optional<std::string&> reasonIfUnsupported) const
785 ignore_unused(descriptor);
786 ignore_unused(paramsInfo);
788 bool supported = true;
790 std::array<DataType,2> supportedTypes = {
792 DataType::QuantisedSymm16
795 // check inputs and outputs
796 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
797 "Reference Lstm: input is not a supported type.");
798 supported &= CheckSupportRule(TypesAreEqual(input, outputStateIn), reasonIfUnsupported,
799 "Reference Lstm: input and outputStateIn types are mismatched");
800 supported &= CheckSupportRule(TypesAreEqual(input, cellStateIn), reasonIfUnsupported,
801 "Reference Lstm: input and cellStateIn types are mismatched");
802 supported &= CheckSupportRule(TypesAreEqual(input, scratchBuffer), reasonIfUnsupported,
803 "Reference Lstm: input and scratchBuffer types are mismatched");
804 supported &= CheckSupportRule(TypesAreEqual(input, outputStateOut), reasonIfUnsupported,
805 "Reference Lstm: input and outputStateOut types are mismatched");
806 supported &= CheckSupportRule(TypesAreEqual(input, cellStateOut), reasonIfUnsupported,
807 "Reference Lstm: input and cellStateOut types are mismatched");
808 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
809 "Reference Lstm: input and output types are mismatched");
810 // check layer parameters
811 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToForgetWeights()), reasonIfUnsupported,
812 "Reference Lstm: input and InputToForgetWeights types are mismatched");
813 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToCellWeights()), reasonIfUnsupported,
814 "Reference Lstm: input and InputToCellWeights types are mismatched");
815 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToOutputWeights()), reasonIfUnsupported,
816 "Reference Lstm: input and InputToOutputWeights types are mismatched");
817 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToForgetWeights()), reasonIfUnsupported,
818 "Reference Lstm: input and RecurrentToForgetWeights types are mismatched");
819 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToCellWeights()), reasonIfUnsupported,
820 "Reference Lstm: input and RecurrentToCellWeights types are mismatched");
821 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToOutputWeights()), reasonIfUnsupported,
822 "Reference Lstm: input and RecurrentToOutputWeights types are mismatched");
823 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetForgetGateBias()), reasonIfUnsupported,
824 "Reference Lstm: input and ForgetGateBias types are mismatched");
825 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellBias()), reasonIfUnsupported,
826 "Reference Lstm: input and CellBias types are mismatched");
827 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetOutputGateBias()), reasonIfUnsupported,
828 "Reference Lstm: input and OutputGateBias types are mismatched");
// Input-gate parameters exist only when CIFG is disabled.
829 if (!descriptor.m_CifgEnabled)
831 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputToInputWeights()), reasonIfUnsupported,
832 "Reference Lstm: input and InputToInputWeights types are mismatched");
833 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetRecurrentToInputWeights()),
835 "Reference Lstm: input and RecurrentToInputWeights types are mismatched");
836 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputGateBias()), reasonIfUnsupported,
837 "Reference Lstm: input and InputGateBias types are mismatched");
// CellToInput peephole weight only exists with peephole AND no CIFG.
838 if (descriptor.m_PeepholeEnabled)
840 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToInputWeights()),
842 "Reference Lstm: input and CellToInputWeights types are mismatched");
845 if (descriptor.m_PeepholeEnabled)
847 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToForgetWeights()), reasonIfUnsupported,
848 "Reference Lstm: input and CellToForgetWeights types are mismatched");
849 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellToOutputWeights()), reasonIfUnsupported,
850 "Reference Lstm: input and CellToOutputWeights types are mismatched");
852 if (descriptor.m_ProjectionEnabled)
854 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetProjectionWeights()), reasonIfUnsupported,
855 "Reference Lstm: input and mProjectionWeights types are mismatched");
// Projection bias is optional even when projection is enabled.
856 if (paramsInfo.m_ProjectionBias != nullptr)
858 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetProjectionBias()), reasonIfUnsupported,
859 "Reference Lstm: input and ProjectionBias types are mismatched");
862 if (descriptor.m_LayerNormEnabled)
864 if (!descriptor.m_CifgEnabled)
866 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetInputLayerNormWeights()),
868 "Reference Lstm: input and InputLayerNormWeights types are mismatched");
870 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetForgetLayerNormWeights()),
872 "Reference Lstm: input and ForgetLayerNormWeights types are mismatched");
873 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetCellLayerNormWeights()),
875 "Reference Lstm: input and CellLayerNormWeights types are mismatched");
876 supported &= CheckSupportRule(TypesAreEqual(input, paramsInfo.GetOutputLayerNormWeights()),
878 "Reference Lstm: input and OutputLayerNormWeights types are mismatched");
// Support check for elementwise Maximum: same rule structure as Addition —
// supported types for both inputs and output, matching types, and
// broadcast-compatible shapes.
// NOTE(review): one supportedTypes entry for the size-3 array and the
// return statement are elided from this listing.
884 bool RefLayerSupport::IsMaximumSupported(const TensorInfo& input0,
885 const TensorInfo& input1,
886 const TensorInfo& output,
887 Optional<std::string&> reasonIfUnsupported) const
889 bool supported = true;
891 std::array<DataType,3> supportedTypes = {
893 DataType::QuantisedAsymm8,
894 DataType::QuantisedSymm16
897 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
898 "Reference maximum: input 0 is not a supported type.");
900 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
901 "Reference maximum: input 1 is not a supported type.");
903 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
904 "Reference maximum: output is not a supported type.");
906 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
907 "Reference maximum: input 0 and Input 1 types are mismatched");
909 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
910 "Reference maximum: input and output types are mismatched");
912 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
913 "Reference maximum: shapes are not suitable for implicit broadcast.");
// Support check for Mean: input must be a supported type matching the
// output, then the expected output rank is derived from the descriptor:
// - m_KeepDims        -> output rank equals input rank;
// - empty m_Axis      -> reduce over all dims, output rank 1;
// - otherwise         -> input rank minus number of reduced axes, with a
//                        rank-1 fallback branch (its guard condition is
//                        elided from this listing).
// NOTE(review): the listing elides lines (one supportedTypes entry for the
// size-3 array, reasonIfUnsupported argument lines of the dimension checks,
// an else/if guard around the final branch, and the return statement).
918 bool RefLayerSupport::IsMeanSupported(const TensorInfo& input,
919 const TensorInfo& output,
920 const MeanDescriptor& descriptor,
921 Optional<std::string&> reasonIfUnsupported) const
923 bool supported = true;
// Names used only to compose the dimension-error diagnostics below.
924 std::string meanLayerStr = "Mean";
925 std::string outputTensorStr = "output";
927 std::array<DataType,3> supportedTypes =
930 DataType::QuantisedAsymm8,
931 DataType::QuantisedSymm16
934 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
935 "Reference Mean: input type not supported.");
937 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
938 "Reference Mean: input and output types are mismatched");
940 if (descriptor.m_KeepDims)
942 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, input.GetNumDimensions()),
944 CreateIncorrectDimensionsErrorMsg(input.GetNumDimensions(),
945 output.GetNumDimensions(),
946 meanLayerStr, outputTensorStr).data());
948 else if (descriptor.m_Axis.empty())
950 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 1),
952 CreateIncorrectDimensionsErrorMsg(1, output.GetNumDimensions(),
953 meanLayerStr, outputTensorStr).data());
957 auto outputDim = input.GetNumDimensions() - boost::numeric_cast<unsigned int>(descriptor.m_Axis.size());
961 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, outputDim),
963 CreateIncorrectDimensionsErrorMsg(outputDim, output.GetNumDimensions(),
964 meanLayerStr, outputTensorStr).data());
968 supported &= CheckSupportRule(TensorNumDimensionsAreCorrect(output, 1),
970 CreateIncorrectDimensionsErrorMsg(1, output.GetNumDimensions(),
971 meanLayerStr, outputTensorStr).data());
// Merger is the legacy name for Concat; this method simply delegates to
// IsConcatSupported with the same arguments.
978 bool RefLayerSupport::IsMergerSupported(const std::vector<const TensorInfo*> inputs,
979 const TensorInfo& output,
980 const MergerDescriptor& descriptor,
981 Optional<std::string&> reasonIfUnsupported) const
983 return IsConcatSupported(inputs, output, descriptor, reasonIfUnsupported);
986 bool RefLayerSupport::IsMemCopySupported(const TensorInfo &input,
987 const TensorInfo &output,
988 Optional<std::string &> reasonIfUnsupported) const
990 bool supported = true;
992 std::array<DataType,5> supportedTypes =
996 DataType::QuantisedAsymm8,
997 DataType::QuantisedSymm16,
1001 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1002 "Reference MemCopy: input type not supported");
1004 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1005 "Reference MemCopy: output type not supported");
1007 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1008 "Reference MemCopy: input and output types are mismatched");
1013 bool RefLayerSupport::IsMinimumSupported(const TensorInfo& input0,
1014 const TensorInfo& input1,
1015 const TensorInfo& output,
1016 Optional<std::string&> reasonIfUnsupported) const
1018 bool supported = true;
1020 std::array<DataType,3> supportedTypes = {
1022 DataType::QuantisedAsymm8,
1023 DataType::QuantisedSymm16
1026 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1027 "Reference minimum: input 0 is not a supported type.");
1029 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1030 "Reference minimum: input 1 is not a supported type.");
1032 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1033 "Reference minimum: output is not a supported type.");
1035 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1036 "Reference minimum: input 0 and Input 1 types are mismatched");
1038 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1039 "Reference minimum: input and output types are mismatched");
1041 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1042 "Reference minimum: shapes are not suitable for implicit broadcast.");
1047 bool RefLayerSupport::IsMultiplicationSupported(const TensorInfo& input0,
1048 const TensorInfo& input1,
1049 const TensorInfo& output,
1050 Optional<std::string&> reasonIfUnsupported) const
1052 bool supported = true;
1054 std::array<DataType,3> supportedTypes = {
1056 DataType::QuantisedAsymm8,
1057 DataType::QuantisedSymm16
1060 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1061 "Reference multiplication: input 0 is not a supported type.");
1063 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1064 "Reference multiplication: input 1 is not a supported type.");
1066 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1067 "Reference multiplication: output is not a supported type.");
1069 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1070 "Reference multiplication: input 0 and Input 1 types are mismatched");
1072 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1073 "Reference multiplication: input and output types are mismatched");
1075 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1076 "Reference multiplication: shapes are not suitable for implicit broadcast.");
1081 bool RefLayerSupport::IsNormalizationSupported(const TensorInfo& input,
1082 const TensorInfo& output,
1083 const NormalizationDescriptor& descriptor,
1084 Optional<std::string&> reasonIfUnsupported) const
1086 ignore_unused(descriptor);
1088 // Define supported types
1089 std::array<DataType, 4> supportedTypes =
1093 DataType::QuantisedAsymm8,
1094 DataType::QuantisedSymm16
1097 bool supported = true;
1099 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1100 "Reference normalization: input type not supported.");
1102 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1103 "Reference normalization: output type not supported.");
1105 supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
1106 "Reference normalization: input and output shapes have different "
1107 "num total elements.");
/// An Output layer only hands the tensor back to the caller, so the reference
/// backend accepts any output tensor unconditionally.
/// NOTE(review): body reconstructed from the surrounding pattern — the original
/// lines were not fully visible; confirm against the upstream file.
bool RefLayerSupport::IsOutputSupported(const TensorInfo& output,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    ignore_unused(output);
    ignore_unused(reasonIfUnsupported);
    return true;
}
1118 bool RefLayerSupport::IsPadSupported(const TensorInfo& input,
1119 const TensorInfo& output,
1120 const PadDescriptor& descriptor,
1121 Optional<std::string&> reasonIfUnsupported) const
1123 ignore_unused(descriptor);
1124 bool supported = true;
1126 // Define supported output and inputs types.
1127 std::array<DataType,3> supportedTypes =
1130 DataType::QuantisedAsymm8,
1131 DataType::QuantisedSymm16
1134 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1135 "Reference pad: input is not a supported type.");
1137 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1138 "Reference pad: output is not a supported type.");
1140 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1141 "Reference pad: input and output types are mismatched.");
1146 bool RefLayerSupport::IsPermuteSupported(const TensorInfo& input,
1147 const TensorInfo& output,
1148 const PermuteDescriptor& descriptor,
1149 Optional<std::string&> reasonIfUnsupported) const
1151 ignore_unused(descriptor);
1152 bool supported = true;
1154 // Define supported output and inputs types.
1155 std::array<DataType,3> supportedTypes =
1158 DataType::QuantisedAsymm8,
1159 DataType::QuantisedSymm16
1162 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1163 "Reference permute: input is not a supported type.");
1165 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1166 "Reference permute: output is not a supported type.");
1168 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1169 "Reference permute: input and output types are mismatched.");
1174 bool RefLayerSupport::IsPooling2dSupported(const TensorInfo& input,
1175 const TensorInfo& output,
1176 const Pooling2dDescriptor& descriptor,
1177 Optional<std::string&> reasonIfUnsupported) const
1179 ignore_unused(descriptor);
1180 bool supported = true;
1182 // Define supported output and inputs types.
1183 std::array<DataType,3> supportedTypes =
1186 DataType::QuantisedAsymm8,
1187 DataType::QuantisedSymm16
1190 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1191 "Reference poolind2d: input is not a supported type.");
1193 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1194 "Reference poolind2d: output is not a supported type.");
1196 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1197 "Reference poolind2d: input and output types are mismatched.");
1202 bool RefLayerSupport::IsQuantizeSupported(const TensorInfo& input,
1203 const TensorInfo& output,
1204 Optional<std::string&> reasonIfUnsupported) const
1206 bool supported = true;
1208 // Define supported output types.
1209 std::array<DataType,1> supportedInputTypes = {
1213 supported &= CheckSupportRule(TypeAnyOf(input, supportedInputTypes), reasonIfUnsupported,
1214 "Reference quantize: input type not supported.");
1216 // Define supported output types.
1217 std::array<DataType,2> supportedOutputTypes = {
1218 DataType::QuantisedAsymm8,
1219 DataType::QuantisedSymm16
1221 supported &= CheckSupportRule(TypeAnyOf(output, supportedOutputTypes), reasonIfUnsupported,
1222 "Reference quantize: output type not supported.");
1224 supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
1225 "Reference quantize: input and output shapes have different num total elements.");
1230 bool RefLayerSupport::IsReshapeSupported(const TensorInfo& input,
1231 const ReshapeDescriptor& descriptor,
1232 Optional<std::string&> reasonIfUnsupported) const
1234 ignore_unused(descriptor);
1235 // Define supported output types.
1236 std::array<DataType,4> supportedOutputTypes =
1240 DataType::QuantisedAsymm8,
1241 DataType::QuantisedSymm16
1243 return CheckSupportRule(TypeAnyOf(input, supportedOutputTypes), reasonIfUnsupported,
1244 "Reference reshape: input type not supported.");
1247 bool RefLayerSupport::IsResizeBilinearSupported(const TensorInfo& input,
1248 const TensorInfo& output,
1249 Optional<std::string&> reasonIfUnsupported) const
1251 bool supported = true;
1252 std::array<DataType,3> supportedTypes =
1255 DataType::QuantisedAsymm8,
1256 DataType::QuantisedSymm16
1259 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1260 "Reference ResizeBilinear: input type not supported");
1262 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1263 "Reference ResizeBilinear: output type not supported");
1265 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1266 "Reference ResizeBilinear: input and output types not matching");
1271 bool RefLayerSupport::IsResizeSupported(const TensorInfo& input,
1272 const TensorInfo& output,
1273 const ResizeDescriptor& descriptor,
1274 Optional<std::string&> reasonIfUnsupported) const
1276 bool supported = true;
1277 std::array<DataType,3> supportedTypes =
1280 DataType::QuantisedAsymm8,
1281 DataType::QuantisedSymm16
1284 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1285 "Reference Resize: input type not supported");
1287 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1288 "Reference Resize: output type not supported");
1290 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1291 "Reference Resize: input and output types not matching");
1296 bool RefLayerSupport::IsRsqrtSupported(const TensorInfo& input,
1297 const TensorInfo& output,
1298 Optional<std::string&> reasonIfUnsupported) const
1300 bool supported = true;
1301 std::array<DataType,3> supportedTypes =
1304 DataType::QuantisedAsymm8,
1305 DataType::QuantisedSymm16
1308 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1309 "Reference rsqrt: input type not supported");
1311 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1312 "Reference rsqrt: output type not supported");
1314 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1315 "Reference rsqrt: input and output types not matching");
1317 supported &= CheckSupportRule(ShapesAreSameTotalSize(input, output), reasonIfUnsupported,
1318 "Reference Rsqrt: input and output shapes have different number of total elements");
1323 bool RefLayerSupport::IsSoftmaxSupported(const TensorInfo& input,
1324 const TensorInfo& output,
1325 const SoftmaxDescriptor& descriptor,
1326 Optional<std::string&> reasonIfUnsupported) const
1328 ignore_unused(output);
1329 bool supported = true;
1330 std::array<DataType,3> supportedTypes =
1333 DataType::QuantisedAsymm8,
1334 DataType::QuantisedSymm16
1337 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1338 "Reference concatenation: output type not supported");
1340 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1341 "Reference concatenation: input type not supported");
1343 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1344 "Reference concatenation: input type not supported");
1349 bool RefLayerSupport::IsSpaceToBatchNdSupported(const TensorInfo& input,
1350 const TensorInfo& output,
1351 const SpaceToBatchNdDescriptor& descriptor,
1352 Optional<std::string&> reasonIfUnsupported) const
1354 ignore_unused(output);
1355 bool supported = true;
1356 std::array<DataType,3> supportedTypes =
1359 DataType::QuantisedAsymm8,
1360 DataType::QuantisedSymm16
1363 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1364 "Reference SpaceToBatchNd: input type not supported");
1366 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1367 "Reference SpaceToBatchNd: output type not supported");
1369 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1370 "Reference SpaceToBatchNd: input and output types are mismatched");
1375 bool RefLayerSupport::IsSpaceToDepthSupported(const TensorInfo& input,
1376 const TensorInfo& output,
1377 const SpaceToDepthDescriptor& descriptor,
1378 Optional<std::string&> reasonIfUnsupported) const
1381 ignore_unused(descriptor);
1382 bool supported = true;
1384 std::array<DataType,3> supportedTypes =
1387 DataType::QuantisedAsymm8,
1388 DataType::QuantisedSymm16
1391 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1392 "Reference SpaceToDepth: input type not supported");
1394 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1395 "Reference SpaceToDepth: output type not supported");
1397 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1398 "Reference SpaceToDepth: input and output types are mismatched");
1403 bool RefLayerSupport::IsSplitterSupported(const TensorInfo& input,
1404 const ViewsDescriptor& descriptor,
1405 Optional<std::string&> reasonIfUnsupported) const
1407 ignore_unused(descriptor);
1408 bool supported = true;
1409 std::array<DataType,3> supportedTypes =
1412 DataType::QuantisedAsymm8,
1413 DataType::QuantisedSymm16
1416 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1417 "Reference splitter: input type not supported");
1422 bool RefLayerSupport::IsSplitterSupported(const TensorInfo& input,
1423 const std::vector<std::reference_wrapper<TensorInfo>>& outputs,
1424 const ViewsDescriptor& descriptor,
1425 Optional<std::string&> reasonIfUnsupported) const
1427 ignore_unused(descriptor);
1428 bool supported = true;
1429 std::array<DataType,3> supportedTypes =
1432 DataType::QuantisedAsymm8,
1433 DataType::QuantisedSymm16
1436 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1437 "Reference splitter: output type not supported");
1438 for (const TensorInfo output : outputs)
1440 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1441 "Reference splitter: input type not supported");
1443 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1444 "Reference splitter: input and output types mismatched.");
1450 bool RefLayerSupport::IsStackSupported(const std::vector<const TensorInfo*>& inputs,
1451 const TensorInfo& output,
1452 const StackDescriptor& descriptor,
1453 Optional<std::string&> reasonIfUnsupported) const
1455 ignore_unused(descriptor);
1457 bool supported = true;
1458 std::array<DataType,3> supportedTypes =
1461 DataType::QuantisedAsymm8,
1462 DataType::QuantisedSymm16
1465 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1466 "Reference stack: output type not supported");
1467 for (const TensorInfo* input : inputs)
1469 BOOST_ASSERT(input != nullptr);
1470 supported &= CheckSupportRule(TypeAnyOf(*input, supportedTypes), reasonIfUnsupported,
1471 "Reference stack: input type not supported");
1473 supported &= CheckSupportRule(TypesAreEqual(*input, output), reasonIfUnsupported,
1474 "Reference stack: input and output types mismatched.");
1480 bool RefLayerSupport::IsStridedSliceSupported(const TensorInfo& input,
1481 const TensorInfo& output,
1482 const StridedSliceDescriptor& descriptor,
1483 Optional<std::string&> reasonIfUnsupported) const
1485 ignore_unused(descriptor);
1486 bool supported = true;
1488 std::array<DataType,3> supportedTypes =
1491 DataType::QuantisedAsymm8,
1492 DataType::QuantisedSymm16
1495 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1496 "Reference StridedSlice: input type not supported");
1498 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1499 "Reference StridedSlice: output type not supported");
1501 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1502 "Reference StridedSlice: input and output types are mismatched");
1507 bool RefLayerSupport::IsSubtractionSupported(const TensorInfo& input0,
1508 const TensorInfo& input1,
1509 const TensorInfo& output,
1510 Optional<std::string&> reasonIfUnsupported) const
1512 bool supported = true;
1514 std::array<DataType,3> supportedTypes = {
1516 DataType::QuantisedAsymm8,
1517 DataType::QuantisedSymm16
1520 supported &= CheckSupportRule(TypeAnyOf(input0, supportedTypes), reasonIfUnsupported,
1521 "Reference subtraction: input 0 is not a supported type.");
1523 supported &= CheckSupportRule(TypeAnyOf(input1, supportedTypes), reasonIfUnsupported,
1524 "Reference subtraction: input 1 is not a supported type.");
1526 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1527 "Reference subtraction: output is not a supported type.");
1529 supported &= CheckSupportRule(TypesAreEqual(input0, input1), reasonIfUnsupported,
1530 "Reference subtraction: input 0 and Input 1 types are mismatched");
1532 supported &= CheckSupportRule(TypesAreEqual(input0, output), reasonIfUnsupported,
1533 "Reference subtraction: input and output types are mismatched");
1535 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input0, input1, output), reasonIfUnsupported,
1536 "Reference subtraction: shapes are not suitable for implicit broadcast.");
1541 bool RefLayerSupport::IsPreluSupported(const TensorInfo& input,
1542 const TensorInfo& alpha,
1543 const TensorInfo& output,
1544 Optional<std::string&> reasonIfUnsupported) const
1546 bool supported = true;
1548 std::array<DataType, 3> supportedTypes
1551 DataType::QuantisedAsymm8,
1552 DataType::QuantisedSymm16
1555 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1556 "PReLU: input is not a supported type.");
1558 supported &= CheckSupportRule(TypeAnyOf(alpha, supportedTypes), reasonIfUnsupported,
1559 "PReLU: alpha is not a supported type.");
1561 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1562 "PReLU: output is not a supported type.");
1564 supported &= CheckSupportRule(TypesAreEqual(input, alpha, output), reasonIfUnsupported,
1565 "PReLU: input, alpha and output types are mismatched");
1567 supported &= CheckSupportRule(ShapesAreBroadcastCompatible(input, alpha, output), reasonIfUnsupported,
1568 "PReLU: shapes are not suitable for implicit broadcast");
1573 bool RefLayerSupport::IsTransposeConvolution2dSupported(const TensorInfo& input,
1574 const TensorInfo& output,
1575 const TransposeConvolution2dDescriptor& descriptor,
1576 const TensorInfo& weights,
1577 const Optional<TensorInfo>& biases,
1578 Optional<std::string&> reasonIfUnsupported) const
1580 bool supported = true;
1582 std::array<DataType,3> supportedTypes =
1585 DataType::QuantisedAsymm8,
1586 DataType::QuantisedSymm16
1589 supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
1590 "Reference TransposeConvolution2d: input is not a supported type.");
1592 supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
1593 "Reference TransposeConvolution2d: output is not a supported type.");
1595 supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
1596 "Reference TransposeConvolution2d: weights is not a supported type.");
1598 supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
1599 "Reference TransposeConvolution2d: input and output types mismatched.");
1601 supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
1602 "Reference TransposeConvolution2d: input and weights types mismatched.");
1604 if (biases.has_value())
1606 std::array<DataType,2> biasesSupportedTypes =
1611 supported &= CheckSupportRule(TypeAnyOf(biases.value(), biasesSupportedTypes), reasonIfUnsupported,
1612 "Reference TransposeConvolution2d: biases is not a supported type.");
1618 } // namespace armnn