From 868eb14c463ee76668ad75d4cd4e718a840f7be6 Mon Sep 17 00:00:00 2001
From: Kevin May
Date: Wed, 4 Sep 2019 17:29:31 +0100
Subject: [PATCH] IVGCVSW-3739 Add Arm NN front end support for Abs Layer

* Add Abs Layer
* Add no-op factory implementations for CpuRef, CpuAcc, GpuAcc
* Add Queue Descriptor in WorkloadData
* Add IsAbsLayerSupported to LayerSupport
* Add LayerVisitor tests

Signed-off-by: Kevin May
Change-Id: Ib0992571d1c80de851cea466291be904e9bdb430
---
 Android.mk                                        |  1 +
 CMakeLists.txt                                    |  2 +
 include/armnn/ILayerSupport.hpp                   |  4 ++
 include/armnn/ILayerVisitor.hpp                   |  7 +++
 include/armnn/INetwork.hpp                        |  5 ++
 include/armnn/LayerVisitorBase.hpp                |  2 +
 src/armnn/InternalTypes.hpp                       |  3 +-
 src/armnn/LayersFwd.hpp                           |  2 +
 src/armnn/Network.cpp                             |  5 ++
 src/armnn/Network.hpp                             |  2 +
 src/armnn/layers/AbsLayer.cpp                     | 53 ++++++++++++++++++++++
 src/armnn/layers/AbsLayer.hpp                     | 42 +++++++++++++++++
 src/armnnSerializer/Serializer.cpp                |  5 ++
 src/armnnSerializer/Serializer.hpp                |  3 ++
 src/backends/backendsCommon/LayerSupportBase.cpp  |  7 +++
 src/backends/backendsCommon/LayerSupportBase.hpp  |  4 ++
 src/backends/backendsCommon/WorkloadData.cpp      | 24 ++++++++++
 src/backends/backendsCommon/WorkloadData.hpp      |  5 ++
 src/backends/backendsCommon/WorkloadFactory.cpp   | 15 ++++++
 src/backends/backendsCommon/WorkloadFactory.hpp   |  3 ++
 .../test/IsLayerSupportedTestImpl.hpp             |  2 +
 21 files changed, 195 insertions(+), 1 deletion(-)
 create mode 100644 src/armnn/layers/AbsLayer.cpp
 create mode 100644 src/armnn/layers/AbsLayer.hpp

diff --git a/Android.mk b/Android.mk
index 5dd5a74..21b186e 100644
--- a/Android.mk
+++ b/Android.mk
@@ -112,6 +112,7 @@ LOCAL_SRC_FILES := \
         src/armnnUtils/Permute.cpp \
         src/armnnUtils/TensorUtils.cpp \
         src/armnnUtils/VerificationHelpers.cpp \
+        src/armnn/layers/AbsLayer.cpp \
         src/armnn/layers/ActivationLayer.cpp \
         src/armnn/layers/AdditionLayer.cpp \
         src/armnn/layers/BatchNormalizationLayer.cpp \
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 5478536..6f2a41c 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -229,6 +229,8 @@ list(APPEND armnn_sources
     include/armnn/Version.hpp
     src/armnn/layers/LayerCloneBase.hpp
     src/armnn/layers/LayerWithParameters.hpp
+    src/armnn/layers/AbsLayer.hpp
+    src/armnn/layers/AbsLayer.cpp
     src/armnn/layers/ActivationLayer.hpp
     src/armnn/layers/ActivationLayer.cpp
     src/armnn/layers/AdditionLayer.hpp
diff --git a/include/armnn/ILayerSupport.hpp b/include/armnn/ILayerSupport.hpp
index 33f86de..c67569b 100644
--- a/include/armnn/ILayerSupport.hpp
+++ b/include/armnn/ILayerSupport.hpp
@@ -27,6 +27,10 @@ protected:
     virtual ~ILayerSupport() {}

 public:
+    virtual bool IsAbsSupported(const TensorInfo& input,
+                                const TensorInfo& output,
+                                Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const = 0;
+
     virtual bool IsActivationSupported(const TensorInfo& input,
                                        const TensorInfo& output,
                                        const ActivationDescriptor& descriptor,
diff --git a/include/armnn/ILayerVisitor.hpp b/include/armnn/ILayerVisitor.hpp
index 1ccbf98..a22de87 100644
--- a/include/armnn/ILayerVisitor.hpp
+++ b/include/armnn/ILayerVisitor.hpp
@@ -20,6 +20,13 @@ protected:
     virtual ~ILayerVisitor() {}

 public:
+    /// Function that an absolute layer should call back to when its Accept(ILayerVisitor&)
+    /// function is invoked.
+    /// @param layer - pointer to the layer which is calling back to this visit function.
+    /// @param name - Optional name for the layer.
+    virtual void VisitAbsLayer(const IConnectableLayer* layer,
+                               const char* name = nullptr) = 0;
+
     /// Function that an activation layer should call back to when its Accept(ILayerVisitor&) function is invoked.
     /// @param layer - pointer to the layer which is calling back to this visit function.
     /// @param activationDescriptor - ActivationDescriptor to configure the activation.
diff --git a/include/armnn/INetwork.hpp b/include/armnn/INetwork.hpp
index a2ff0dc..ce0fda2 100644
--- a/include/armnn/INetwork.hpp
+++ b/include/armnn/INetwork.hpp
@@ -265,6 +265,11 @@ public:
     virtual IConnectableLayer* AddMergerLayer(const MergerDescriptor& mergerDescriptor,
                                               const char* name = nullptr) = 0;

+    /// Adds an absolute layer to the network.
+    /// @param name - Optional name for the layer.
+    /// @return - Interface for configuring the layer.
+    virtual IConnectableLayer* AddAbsLayer(const char* name = nullptr) = 0;
+
     /// Adds an addition layer to the network.
     /// @param name - Optional name for the layer.
     /// @return - Interface for configuring the layer.
diff --git a/include/armnn/LayerVisitorBase.hpp b/include/armnn/LayerVisitorBase.hpp
index 8c5464c..363a091 100644
--- a/include/armnn/LayerVisitorBase.hpp
+++ b/include/armnn/LayerVisitorBase.hpp
@@ -29,6 +29,8 @@ protected:
     virtual ~LayerVisitorBase() {}

 public:
+    void VisitAbsLayer(const IConnectableLayer*,
+                       const char*) override { DefaultPolicy::Apply(__func__); }

     void VisitActivationLayer(const IConnectableLayer*,
                               const ActivationDescriptor&,
diff --git a/src/armnn/InternalTypes.hpp b/src/armnn/InternalTypes.hpp
index 7a0f9a1..6141f27 100644
--- a/src/armnn/InternalTypes.hpp
+++ b/src/armnn/InternalTypes.hpp
@@ -14,7 +14,8 @@ namespace armnn
 enum class LayerType
 {
     FirstLayer,
-    Activation = FirstLayer,
+    Abs = FirstLayer,
+    Activation,
     Addition,
     BatchNormalization,
     BatchToSpaceNd,
diff --git a/src/armnn/LayersFwd.hpp b/src/armnn/LayersFwd.hpp
index cadcd49..94a3b89 100644
--- a/src/armnn/LayersFwd.hpp
+++ b/src/armnn/LayersFwd.hpp
@@ -6,6 +6,7 @@

 #include "InternalTypes.hpp"

+#include "layers/AbsLayer.hpp"
 #include "layers/ActivationLayer.hpp"
 #include "layers/AdditionLayer.hpp"
 #include "layers/BatchNormalizationLayer.hpp"
@@ -85,6 +86,7 @@ constexpr LayerType LayerEnumOf(const T* = nullptr);

 #define DECLARE_LAYER(LayerName) DECLARE_LAYER_IMPL(, LayerName)

+DECLARE_LAYER(Abs)
 DECLARE_LAYER(Activation)
 DECLARE_LAYER(Addition)
 DECLARE_LAYER(BatchNormalization)
diff --git a/src/armnn/Network.cpp b/src/armnn/Network.cpp
index 0ef1407..dc26a1b 100644
--- a/src/armnn/Network.cpp
+++ b/src/armnn/Network.cpp
@@ -1151,6 +1151,11 @@ IConnectableLayer* Network::AddMergerLayer(const MergerDescriptor& mergerDescriptor,
     return AddConcatLayer(mergerDescriptor, name);
 }

+IConnectableLayer* Network::AddAbsLayer(const char * name)
+{
+    return m_Graph->AddLayer<AbsLayer>(name);
+}
+
 IConnectableLayer* Network::AddAdditionLayer(const char* name)
 {
     return m_Graph->AddLayer<AdditionLayer>(name);
diff --git a/src/armnn/Network.hpp b/src/armnn/Network.hpp
index 679ab51..4516c0a 100644
--- a/src/armnn/Network.hpp
+++ b/src/armnn/Network.hpp
@@ -124,6 +124,8 @@ public:
     IConnectableLayer* AddMergerLayer(const MergerDescriptor& mergerDescriptor,
                                       const char* name = nullptr) override;

+    IConnectableLayer* AddAbsLayer(const char* name = nullptr) override;
+
     IConnectableLayer* AddAdditionLayer(const char* name = nullptr) override;

     IConnectableLayer* AddMultiplicationLayer(const char* name = nullptr) override;
diff --git a/src/armnn/layers/AbsLayer.cpp b/src/armnn/layers/AbsLayer.cpp
new file mode 100644
index 0000000..f87706a
--- /dev/null
+++ b/src/armnn/layers/AbsLayer.cpp
@@ -0,0 +1,53 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#include "AbsLayer.hpp"
+
+#include "LayerCloneBase.hpp"
+
+#include <armnn/TypesUtils.hpp>
+#include <backendsCommon/WorkloadData.hpp>
+#include <backendsCommon/WorkloadFactory.hpp>
+
+namespace armnn
+{
+
+AbsLayer::AbsLayer(const char* name)
+    : Layer(1, 1, LayerType::Abs, name)
+{
+}
+
+std::unique_ptr<IWorkload> AbsLayer::CreateWorkload(const Graph& graph,
+                                                    const IWorkloadFactory& factory) const
+{
+    AbsQueueDescriptor descriptor;
+    return factory.CreateAbs(descriptor, PrepInfoAndDesc(descriptor, graph));
+}
+
+AbsLayer* AbsLayer::Clone(Graph& graph) const
+{
+    return CloneBase<AbsLayer>(graph, GetName());
+}
+
+void AbsLayer::ValidateTensorShapesFromInputs()
+{
+    VerifyLayerConnections(1, CHECK_LOCATION());
+
+    auto inferredShapes = InferOutputShapes({ GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape() });
+
+    BOOST_ASSERT(inferredShapes.size() == 1);
+
+    ConditionalThrowIfNotEqual<LayerValidationException>(
+        "AbsLayer: TensorShape set on OutputSlot[0] does not match the inferred shape.",
+        GetOutputSlot(0).GetTensorInfo().GetShape(),
+        inferredShapes[0]);
+}
+
+void AbsLayer::Accept(ILayerVisitor& visitor) const
+{
+    visitor.VisitAbsLayer(this, GetName());
+}
+
+} // namespace armnn
\ No newline at end of file
diff --git a/src/armnn/layers/AbsLayer.hpp b/src/armnn/layers/AbsLayer.hpp
new file mode 100644
index 0000000..643cf4b
--- /dev/null
+++ b/src/armnn/layers/AbsLayer.hpp
@@ -0,0 +1,42 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#pragma once
+
+#include <Layer.hpp>
+
+namespace armnn
+{
+
+class AbsLayer : public Layer
+{
+public:
+    /// Makes a workload for the Abs type.
+    /// @param [in] graph The graph where this layer can be found.
+    /// @param [in] factory The workload factory which will create the workload.
+    /// @return A pointer to the created workload, or nullptr if not created.
+    virtual std::unique_ptr<IWorkload> CreateWorkload(const Graph& graph,
+                                                      const IWorkloadFactory& factory) const override;
+
+    /// Creates a dynamically-allocated copy of this layer.
+    /// @param [in] graph The graph into which this layer is being cloned.
+    AbsLayer* Clone(Graph& graph) const override;
+
+    /// Check if the input tensor shape(s)
+    /// will lead to a valid configuration of @ref AbsLayer.
+    void ValidateTensorShapesFromInputs() override;
+
+    void Accept(ILayerVisitor& visitor) const override;
+
+protected:
+    /// Constructor to create an AbsLayer.
+    /// @param [in] name Optional name for the layer.
+    AbsLayer(const char* name);
+
+    /// Default destructor
+    ~AbsLayer() = default;
+};
+
+} // namespace
\ No newline at end of file
diff --git a/src/armnnSerializer/Serializer.cpp b/src/armnnSerializer/Serializer.cpp
index faf3d82..56d313f 100644
--- a/src/armnnSerializer/Serializer.cpp
+++ b/src/armnnSerializer/Serializer.cpp
@@ -105,6 +105,11 @@ void SerializerVisitor::VisitOutputLayer(const armnn::IConnectableLayer* layer,
     CreateAnyLayer(flatBufferOutputLayer.o, serializer::Layer::Layer_OutputLayer);
 }

+void SerializerVisitor::VisitAbsLayer(const armnn::IConnectableLayer* layer, const char* name)
+{
+    throw UnimplementedException("SerializerVisitor::VisitAbsLayer is not implemented");
+}
+
 // Build FlatBuffer for Activation Layer
 void SerializerVisitor::VisitActivationLayer(const armnn::IConnectableLayer* layer,
                                              const armnn::ActivationDescriptor& descriptor,
diff --git a/src/armnnSerializer/Serializer.hpp b/src/armnnSerializer/Serializer.hpp
index b859ae9..7400885 100644
--- a/src/armnnSerializer/Serializer.hpp
+++ b/src/armnnSerializer/Serializer.hpp
@@ -42,6 +42,9 @@ public:
         return m_serializedLayers;
     }

+    void VisitAbsLayer(const armnn::IConnectableLayer* layer,
+                       const char* name = nullptr) override;
+
     void VisitActivationLayer(const armnn::IConnectableLayer* layer,
                               const armnn::ActivationDescriptor& descriptor,
                               const char* name = nullptr) override;
diff --git a/src/backends/backendsCommon/LayerSupportBase.cpp b/src/backends/backendsCommon/LayerSupportBase.cpp
index ee8dc5f..464ec4e 100644
--- a/src/backends/backendsCommon/LayerSupportBase.cpp
+++ b/src/backends/backendsCommon/LayerSupportBase.cpp
@@ -34,6 +34,13 @@ bool DefaultLayerSupport(const char* func,
 namespace armnn
 {

+bool LayerSupportBase::IsAbsSupported(const TensorInfo &input,
+                                      const TensorInfo &output,
+                                      Optional<std::string&> reasonIfUnsupported) const
+{
+    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
+}
+
 bool LayerSupportBase::IsActivationSupported(const TensorInfo& input,
                                              const TensorInfo& output,
                                              const ActivationDescriptor& descriptor,
diff --git a/src/backends/backendsCommon/LayerSupportBase.hpp b/src/backends/backendsCommon/LayerSupportBase.hpp
index 0d5a2af..3cf3d4e 100644
--- a/src/backends/backendsCommon/LayerSupportBase.hpp
+++ b/src/backends/backendsCommon/LayerSupportBase.hpp
@@ -13,6 +13,10 @@ namespace armnn
 class LayerSupportBase : public ILayerSupport
 {
 public:
+    bool IsAbsSupported(const TensorInfo& input,
+                        const TensorInfo& output,
+                        Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const override;
+
     bool IsActivationSupported(const TensorInfo& input,
                                const TensorInfo& output,
                                const ActivationDescriptor& descriptor,
diff --git a/src/backends/backendsCommon/WorkloadData.cpp b/src/backends/backendsCommon/WorkloadData.cpp
index b590da1..fed159b 100644
--- a/src/backends/backendsCommon/WorkloadData.cpp
+++ b/src/backends/backendsCommon/WorkloadData.cpp
@@ -2534,4 +2534,28 @@ void QuantizedLstmQueueDescriptor::Validate(const WorkloadInfo& workloadInfo) const
     ValidateBiasTensorQuantization(outputGateBiasInfo, inputInfo, inputToInputWeightsInfo, descriptorName);
 }

+void AbsQueueDescriptor::Validate(const WorkloadInfo& workloadInfo) const
+{
+    const std::string descriptorName{"AbsQueueDescriptor"};
+
+    ValidateNumInputs(workloadInfo, descriptorName, 1);
+    ValidateNumOutputs(workloadInfo, descriptorName, 1);
+
+    const TensorInfo& inputTensorInfo  = workloadInfo.m_InputTensorInfos[0];
+    const TensorInfo& outputTensorInfo = workloadInfo.m_OutputTensorInfos[0];
+
+    ValidateTensorShapesMatch(inputTensorInfo, outputTensorInfo, descriptorName, "input", "output");
+
+    std::vector<DataType> supportedTypes =
+    {
+        DataType::Float16,
+        DataType::Float32,
+        DataType::QuantisedAsymm8,
+        DataType::QuantisedSymm16
+    };
+
+    ValidateDataTypes(inputTensorInfo, supportedTypes, descriptorName);
+    ValidateTensorDataTypesMatch(inputTensorInfo, outputTensorInfo, descriptorName, "input", "output");
+}
+
 } // namespace armnn
diff --git a/src/backends/backendsCommon/WorkloadData.hpp b/src/backends/backendsCommon/WorkloadData.hpp
index c055beb..a43c7cc 100644
--- a/src/backends/backendsCommon/WorkloadData.hpp
+++ b/src/backends/backendsCommon/WorkloadData.hpp
@@ -524,4 +524,9 @@ struct QuantizedLstmQueueDescriptor : QueueDescriptor
     void Validate(const WorkloadInfo& workloadInfo) const;
 };

+struct AbsQueueDescriptor : QueueDescriptor
+{
+    void Validate(const WorkloadInfo& workloadInfo) const;
+};
+
 } //namespace armnn
diff --git a/src/backends/backendsCommon/WorkloadFactory.cpp b/src/backends/backendsCommon/WorkloadFactory.cpp
index ffef5b4..9d081af 100644
--- a/src/backends/backendsCommon/WorkloadFactory.cpp
+++ b/src/backends/backendsCommon/WorkloadFactory.cpp
@@ -69,6 +69,15 @@ bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId,

     switch(layer.GetType())
     {
+        case LayerType::Abs:
+        {
+            const TensorInfo& input  = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
+            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
+            result = layerSupportObject->IsAbsSupported(OverrideDataType(input, dataType),
+                                                        OverrideDataType(output, dataType),
+                                                        reason);
+            break;
+        }
         case LayerType::Activation:
         {
             auto cLayer = boost::polymorphic_downcast<const ActivationLayer*>(&layer);
@@ -952,6 +961,12 @@ bool IWorkloadFactory::IsLayerSupported(const IConnectableLayer& connectableLayer,
 }

 // Default Implementations
+std::unique_ptr<IWorkload> IWorkloadFactory::CreateAbs(const AbsQueueDescriptor& descriptor,
+                                                       const WorkloadInfo& info) const
+{
+    return std::unique_ptr<IWorkload>();
+}
+
 std::unique_ptr<IWorkload> IWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
 {
diff --git a/src/backends/backendsCommon/WorkloadFactory.hpp b/src/backends/backendsCommon/WorkloadFactory.hpp
index a9c6049..d0164b2 100644
--- a/src/backends/backendsCommon/WorkloadFactory.hpp
+++ b/src/backends/backendsCommon/WorkloadFactory.hpp
@@ -49,6 +49,9 @@ public:
     virtual std::unique_ptr<ITensorHandle> CreateTensorHandle(const TensorInfo& tensorInfo,
                                                               DataLayout dataLayout) const = 0;

+    virtual std::unique_ptr<IWorkload> CreateAbs(const AbsQueueDescriptor& descriptor,
+                                                 const WorkloadInfo& info) const;
+
     virtual std::unique_ptr<IWorkload> CreateActivation(const ActivationQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const;

diff --git a/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp b/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp
index 1f43c98..caf5e58 100644
--- a/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp
+++ b/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp
@@ -385,6 +385,8 @@ template<armnn::LayerType Type, armnn::DataType DataType>
 struct LayerTypePolicy;

 // Every entry in the armnn::LayerType enum must be accounted for below.
+DECLARE_LAYER_POLICY_1_PARAM(Abs)
+
 DECLARE_LAYER_POLICY_2_PARAM(Activation)

 DECLARE_LAYER_POLICY_1_PARAM(Addition)
-- 
2.7.4
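
Usage sketch (not part of the patch): one way a caller might exercise the new INetwork::AddAbsLayer front end added above. The binding ids, layer names and tensor shape are illustrative, and because this change only adds no-op stubs (IWorkloadFactory::CreateAbs returns an empty pointer and the serializer visitor throws UnimplementedException), actually executing such a graph still needs a backend Abs workload in a follow-up change.

    // Illustrative sketch only: builds a one-operator graph using the
    // AddAbsLayer() front-end call introduced by this patch.
    #include <armnn/INetwork.hpp>
    #include <armnn/Tensor.hpp>
    #include <armnn/Types.hpp>

    armnn::INetworkPtr BuildAbsNetwork()
    {
        using namespace armnn;

        INetworkPtr network = INetwork::Create();

        // Binding ids (0) and names are placeholders.
        IConnectableLayer* input  = network->AddInputLayer(0, "input");
        IConnectableLayer* abs    = network->AddAbsLayer("abs");
        IConnectableLayer* output = network->AddOutputLayer(0, "output");

        // Wire input -> abs -> output.
        input->GetOutputSlot(0).Connect(abs->GetInputSlot(0));
        abs->GetOutputSlot(0).Connect(output->GetInputSlot(0));

        // Abs is element-wise, so input and output shapes match; this is what
        // AbsLayer::ValidateTensorShapesFromInputs() checks.
        TensorInfo info(TensorShape({ 1, 2, 2, 3 }), DataType::Float32);
        input->GetOutputSlot(0).SetTensorInfo(info);
        abs->GetOutputSlot(0).SetTensorInfo(info);

        return network;
    }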