IVGCVSW-3739 Add Arm NN front end support for Abs Layer
author Kevin May <kevin.may@arm.com>
Wed, 4 Sep 2019 16:29:31 +0000 (17:29 +0100)
committer Sadik Armagan <sadik.armagan@arm.com>
Thu, 5 Sep 2019 10:48:35 +0000 (10:48 +0000)
* Add Abs Layer
* Add no-op factory implementations for CpuRef, CpuAcc, GpuAcc
* Add Queue Descriptor in WorkloadData
* Add IsAbsLayerSupported to LayerSupport
* Add LayerVisitor tests

Signed-off-by: Kevin May <kevin.may@arm.com>
Change-Id: Ib0992571d1c80de851cea466291be904e9bdb430

21 files changed:
Android.mk
CMakeLists.txt
include/armnn/ILayerSupport.hpp
include/armnn/ILayerVisitor.hpp
include/armnn/INetwork.hpp
include/armnn/LayerVisitorBase.hpp
src/armnn/InternalTypes.hpp
src/armnn/LayersFwd.hpp
src/armnn/Network.cpp
src/armnn/Network.hpp
src/armnn/layers/AbsLayer.cpp [new file with mode: 0644]
src/armnn/layers/AbsLayer.hpp [new file with mode: 0644]
src/armnnSerializer/Serializer.cpp
src/armnnSerializer/Serializer.hpp
src/backends/backendsCommon/LayerSupportBase.cpp
src/backends/backendsCommon/LayerSupportBase.hpp
src/backends/backendsCommon/WorkloadData.cpp
src/backends/backendsCommon/WorkloadData.hpp
src/backends/backendsCommon/WorkloadFactory.cpp
src/backends/backendsCommon/WorkloadFactory.hpp
src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp

index 5dd5a748229188097a1905958145fe64c1a05aad..21b186e9cadfd27d2bdee37b52f43c4837ac5177 100644 (file)
@@ -112,6 +112,7 @@ LOCAL_SRC_FILES := \
         src/armnnUtils/Permute.cpp \
         src/armnnUtils/TensorUtils.cpp \
         src/armnnUtils/VerificationHelpers.cpp \
+        src/armnn/layers/AbsLayer.cpp \
         src/armnn/layers/ActivationLayer.cpp \
         src/armnn/layers/AdditionLayer.cpp \
         src/armnn/layers/BatchNormalizationLayer.cpp \
index 547853605027d647e0dd0285036fdd5a2d5ba18e..6f2a41c4d7abe188fd802158e84061648bda0c3d 100644 (file)
@@ -229,6 +229,8 @@ list(APPEND armnn_sources
     include/armnn/Version.hpp
     src/armnn/layers/LayerCloneBase.hpp
     src/armnn/layers/LayerWithParameters.hpp
+    src/armnn/layers/AbsLayer.hpp
+    src/armnn/layers/AbsLayer.cpp
     src/armnn/layers/ActivationLayer.hpp
     src/armnn/layers/ActivationLayer.cpp
     src/armnn/layers/AdditionLayer.hpp
index 33f86dea5906495130a9b67983f5b4bda46ec456..c67569bf008baa9df2a128d4f5b986485bcc5596 100644 (file)
@@ -27,6 +27,10 @@ protected:
     virtual ~ILayerSupport() {}
 
 public:
+    virtual bool IsAbsSupported(const TensorInfo& input,
+                                const TensorInfo& output,
+                                Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const = 0;
+
     virtual bool IsActivationSupported(const TensorInfo& input,
                                        const TensorInfo& output,
                                        const ActivationDescriptor& descriptor,
index 1ccbf98d95396a368566398072db9e0c46745b9c..a22de878ca028e337b4e64b7b5106b5203fc60c6 100644 (file)
@@ -20,6 +20,13 @@ protected:
     virtual ~ILayerVisitor() {}
 
 public:
+    /// Function that an absolute layer should call back to when its Accept(ILayerVisitor&)
+    /// function is invoked.
+    /// @param layer - pointer to the layer which is calling back to this visit function.
+    /// @param name - Optional name for the layer.
+    virtual void VisitAbsLayer(const IConnectableLayer* layer,
+                               const char* name = nullptr) = 0;
+
     /// Function that an activation layer should call back to when its Accept(ILayerVisitor&) function is invoked.
     /// @param layer - pointer to the layer which is calling back to this visit function.
     /// @param activationDescriptor - ActivationDescriptor to configure the activation.
index a2ff0dc575bcd28f0c4b09afe545248b446c2931..ce0fda2707d493905915daf61a788e66e60e4863 100644 (file)
@@ -265,6 +265,11 @@ public:
     virtual IConnectableLayer* AddMergerLayer(const MergerDescriptor& mergerDescriptor,
         const char* name = nullptr) = 0;
 
+    /// Adds an absolute layer to the network.
+    /// @param name - Optional name for the layer.
+    /// @return - Interface for configuring the layer.
+    virtual IConnectableLayer* AddAbsLayer(const char* name = nullptr) = 0;
+
     /// Adds an addition layer to the network.
     /// @param name - Optional name for the layer.
     /// @return - Interface for configuring the layer.
index 8c5464c29e9a5dc3890038de29dc30ed304fd650..363a09154d43e8b9bbc50df42ad9cb6d4671ec8a 100644 (file)
@@ -29,6 +29,8 @@ protected:
     virtual ~LayerVisitorBase() {}
 
 public:
+    void VisitAbsLayer(const IConnectableLayer*,
+                       const char*) override { DefaultPolicy::Apply(__func__); }
 
     void VisitActivationLayer(const IConnectableLayer*,
                               const ActivationDescriptor&,
index 7a0f9a1cb05c8e5ecc5eb04ff60cf160f38f198c..6141f27df5f74a8ef779f7daec17f6cbeec42d35 100644 (file)
@@ -14,7 +14,8 @@ namespace armnn
 enum class LayerType
 {
     FirstLayer,
-    Activation = FirstLayer,
+    Abs = FirstLayer,
+    Activation,
     Addition,
     BatchNormalization,
     BatchToSpaceNd,
index cadcd49acbcf61eb9b76b9a7d2ecd48d1595a402..94a3b899128e779e45b978d838c3f0052853b8b1 100644 (file)
@@ -6,6 +6,7 @@
 
 #include "InternalTypes.hpp"
 
+#include "layers/AbsLayer.hpp"
 #include "layers/ActivationLayer.hpp"
 #include "layers/AdditionLayer.hpp"
 #include "layers/BatchNormalizationLayer.hpp"
@@ -85,6 +86,7 @@ constexpr LayerType LayerEnumOf(const T* = nullptr);
 
 #define DECLARE_LAYER(LayerName) DECLARE_LAYER_IMPL(, LayerName)
 
+DECLARE_LAYER(Abs)
 DECLARE_LAYER(Activation)
 DECLARE_LAYER(Addition)
 DECLARE_LAYER(BatchNormalization)
index 0ef14075b6c45b1a9ec723b8f1b201f65d1c3ef5..dc26a1b3724a7b37788339c4a79f1a85c3c62526 100644 (file)
@@ -1151,6 +1151,11 @@ IConnectableLayer* Network::AddMergerLayer(const MergerDescriptor& mergerDescrip
     return AddConcatLayer(mergerDescriptor, name);
 }
 
+IConnectableLayer* Network::AddAbsLayer(const char * name)
+{
+    return m_Graph->AddLayer<AbsLayer>(name);
+}
+
 IConnectableLayer* Network::AddAdditionLayer(const char* name)
 {
     return m_Graph->AddLayer<AdditionLayer>(name);
index 679ab51d43415532e7961fc89bb469d63af4d1f7..4516c0a8f96d718c7485ed947176469ff60e1468 100644 (file)
@@ -124,6 +124,8 @@ public:
     IConnectableLayer* AddMergerLayer(const MergerDescriptor& mergerDescriptor,
                                       const char* name = nullptr) override;
 
+    IConnectableLayer* AddAbsLayer(const char* name = nullptr) override;
+
     IConnectableLayer* AddAdditionLayer(const char* name = nullptr) override;
 
     IConnectableLayer* AddMultiplicationLayer(const char* name = nullptr) override;
diff --git a/src/armnn/layers/AbsLayer.cpp b/src/armnn/layers/AbsLayer.cpp
new file mode 100644 (file)
index 0000000..f87706a
--- /dev/null
@@ -0,0 +1,53 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#include "AbsLayer.hpp"
+
+#include "LayerCloneBase.hpp"
+
+#include <armnn/TypesUtils.hpp>
+#include <backendsCommon/WorkloadData.hpp>
+#include <backendsCommon/WorkloadFactory.hpp>
+
+namespace armnn
+{
+
+AbsLayer::AbsLayer(const char* name)
+    : Layer(1, 1, LayerType::Abs, name)
+{
+}
+
+std::unique_ptr<IWorkload> AbsLayer::CreateWorkload(const Graph& graph,
+                                                    const IWorkloadFactory& factory) const
+{
+    AbsQueueDescriptor descriptor;
+    return factory.CreateAbs(descriptor, PrepInfoAndDesc(descriptor, graph));
+}
+
+AbsLayer* AbsLayer::Clone(Graph& graph) const
+{
+    return CloneBase<AbsLayer>(graph, GetName());
+}
+
+void AbsLayer::ValidateTensorShapesFromInputs()
+{
+    VerifyLayerConnections(1, CHECK_LOCATION());
+
+    auto inferredShapes = InferOutputShapes({ GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape() });
+
+    BOOST_ASSERT(inferredShapes.size() == 1);
+
+    ConditionalThrowIfNotEqual<LayerValidationException>(
+            "AbsLayer: TensorShape set on OutputSlot[0] does not match the inferred shape.",
+            GetOutputSlot(0).GetTensorInfo().GetShape(),
+            inferredShapes[0]);
+}
+
+void AbsLayer::Accept(ILayerVisitor& visitor) const
+{
+    visitor.VisitAbsLayer(this, GetName());
+}
+
+} // namespace armnn
\ No newline at end of file
diff --git a/src/armnn/layers/AbsLayer.hpp b/src/armnn/layers/AbsLayer.hpp
new file mode 100644 (file)
index 0000000..643cf4b
--- /dev/null
@@ -0,0 +1,42 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#pragma once
+
+#include <Layer.hpp>
+
+namespace armnn
+{
+
+class AbsLayer : public Layer
+{
+public:
+    /// Makes a workload for the Abs type.
+    /// @param [in] graph The graph where this layer can be found.
+    /// @param [in] factory The workload factory which will create the workload.
+    /// @return A pointer to the created workload, or nullptr if not created.
+    virtual std::unique_ptr<IWorkload> CreateWorkload(const Graph& graph,
+                                                      const IWorkloadFactory& factory) const override;
+
+    /// Creates a dynamically-allocated copy of this layer.
+    /// @param [in] graph The graph into which this layer is being cloned.
+    AbsLayer* Clone(Graph& graph) const override;
+
+    /// Check if the input tensor shape(s)
+    /// will lead to a valid configuration of @ref AbsLayer.
+    void ValidateTensorShapesFromInputs() override;
+
+    void Accept(ILayerVisitor& visitor) const override;
+
+protected:
+    /// Constructor to create an AbsLayer.
+    /// @param [in] name Optional name for the layer.
+    AbsLayer(const char* name);
+
+    /// Default destructor
+    ~AbsLayer() = default;
+};
+
+} // namespace
\ No newline at end of file
index faf3d82d24613e5e55ad94fa27fe8943034abb65..56d313f97b4568ac75eb614c484ec0eda3986fd3 100644 (file)
@@ -105,6 +105,11 @@ void SerializerVisitor::VisitOutputLayer(const armnn::IConnectableLayer* layer,
     CreateAnyLayer(flatBufferOutputLayer.o, serializer::Layer::Layer_OutputLayer);
 }
 
+void SerializerVisitor::VisitAbsLayer(const armnn::IConnectableLayer* layer, const char* name)
+{
+    throw UnimplementedException("SerializerVisitor::VisitAbsLayer is not implemented");
+}
+
 // Build FlatBuffer for Activation Layer
 void SerializerVisitor::VisitActivationLayer(const armnn::IConnectableLayer* layer,
                                              const armnn::ActivationDescriptor& descriptor,
index b859ae9cf0f0189a69d2887c2a6bb4eacb9ec713..7400885ab110277010e07c29b11da86e1ef6d078 100644 (file)
@@ -42,6 +42,9 @@ public:
         return m_serializedLayers;
     }
 
+    void VisitAbsLayer(const armnn::IConnectableLayer* layer,
+                       const char* name = nullptr) override;
+
     void VisitActivationLayer(const armnn::IConnectableLayer* layer,
                               const armnn::ActivationDescriptor& descriptor,
                               const char* name = nullptr) override;
index ee8dc5f7e9cb019e4d5a9375e3ed9e4df10ab29a..464ec4e1080bd18d39af72738d1a2b2994a30788 100644 (file)
@@ -34,6 +34,13 @@ bool DefaultLayerSupport(const char* func,
 namespace armnn
 {
 
+bool LayerSupportBase::IsAbsSupported(const TensorInfo &input,
+                                      const TensorInfo &output,
+                                      Optional<std::string &> reasonIfUnsupported) const
+{
+    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
+}
+
 bool LayerSupportBase::IsActivationSupported(const TensorInfo& input,
                                              const TensorInfo& output,
                                              const ActivationDescriptor& descriptor,
index 0d5a2af16e779aee94373ee5f46a2068aa073c7c..3cf3d4e6723ab6a367b9d52d82035db7a43e5f97 100644 (file)
@@ -13,6 +13,10 @@ namespace armnn
 class LayerSupportBase : public ILayerSupport
 {
 public:
+    bool IsAbsSupported(const TensorInfo& input,
+                        const TensorInfo& output,
+                        Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const override;
+
     bool IsActivationSupported(const TensorInfo& input,
                                const TensorInfo& output,
                                const ActivationDescriptor& descriptor,
index b590da15523af81266d48f6d536dec8386e6d7db..fed159bd606016c6aa9a5edba9ef4e36d0279323 100644 (file)
@@ -2534,4 +2534,28 @@ void QuantizedLstmQueueDescriptor::Validate(const WorkloadInfo& workloadInfo) co
     ValidateBiasTensorQuantization(outputGateBiasInfo, inputInfo, inputToInputWeightsInfo, descriptorName);
 }
 
+void AbsQueueDescriptor::Validate(const WorkloadInfo& workloadInfo) const
+{
+    const std::string descriptorName{"AbsQueueDescriptor"};
+
+    ValidateNumInputs(workloadInfo,  descriptorName, 1);
+    ValidateNumOutputs(workloadInfo, descriptorName, 1);
+
+    const TensorInfo& inputTensorInfo  = workloadInfo.m_InputTensorInfos[0];
+    const TensorInfo& outputTensorInfo = workloadInfo.m_OutputTensorInfos[0];
+
+    ValidateTensorShapesMatch(inputTensorInfo, outputTensorInfo, descriptorName, "input", "output");
+
+    std::vector<DataType> supportedTypes =
+        {
+            DataType::Float16,
+            DataType::Float32,
+            DataType::QuantisedAsymm8,
+            DataType::QuantisedSymm16
+        };
+
+    ValidateDataTypes(inputTensorInfo, supportedTypes, descriptorName);
+    ValidateTensorDataTypesMatch(inputTensorInfo, outputTensorInfo, descriptorName, "input", "output");
+}
+
 } // namespace armnn
index c055beb88d647662cfa29d85ee405052cffe03a0..a43c7cccdbd8533525203e1f8a52c513963777c9 100644 (file)
@@ -524,4 +524,9 @@ struct QuantizedLstmQueueDescriptor : QueueDescriptor
     void Validate(const WorkloadInfo& workloadInfo) const;
 };
 
+struct AbsQueueDescriptor : QueueDescriptor
+{
+    void Validate(const WorkloadInfo& workloadInfo) const;
+};
+
 } //namespace armnn
index ffef5b4eb772228a3279c4f004244e44f179d047..9d081af8e984c99701464fffce2b46f04ef9ecbc 100644 (file)
@@ -69,6 +69,15 @@ bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId,
 
     switch(layer.GetType())
     {
+        case LayerType::Abs:
+        {
+            const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
+            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
+            result = layerSupportObject->IsAbsSupported(OverrideDataType(input, dataType),
+                                                        OverrideDataType(output, dataType),
+                                                        reason);
+            break;
+        }
         case LayerType::Activation:
         {
             auto cLayer = boost::polymorphic_downcast<const ActivationLayer*>(&layer);
@@ -952,6 +961,12 @@ bool IWorkloadFactory::IsLayerSupported(const IConnectableLayer& connectableLaye
 }
 
 // Default Implementations
+std::unique_ptr<IWorkload> IWorkloadFactory::CreateAbs(const AbsQueueDescriptor& descriptor,
+                                                       const WorkloadInfo& info) const
+{
+    return std::unique_ptr<IWorkload>();
+}
+
 std::unique_ptr<IWorkload> IWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& descriptor,
                                                               const WorkloadInfo& info) const
 {
index a9c6049c372eb242a1930599bb164d3029e715ea..d0164b202d94fc3e4280e621065cd432407e902c 100644 (file)
@@ -49,6 +49,9 @@ public:
     virtual std::unique_ptr<ITensorHandle> CreateTensorHandle(const TensorInfo& tensorInfo,
                                                               DataLayout dataLayout) const = 0;
 
+    virtual std::unique_ptr<IWorkload> CreateAbs(const AbsQueueDescriptor& descriptor,
+                                                 const WorkloadInfo& info) const;
+
     virtual std::unique_ptr<IWorkload> CreateActivation(const ActivationQueueDescriptor& descriptor,
                                                         const WorkloadInfo&              info) const;
 
index 1f43c989d6f8921bae6e00d0934ca51ead80bb01..caf5e588f761202b0bb4090989ed85738ece6e64 100644 (file)
@@ -385,6 +385,8 @@ template<armnn::LayerType Type, armnn::DataType DataType>
 struct LayerTypePolicy;
 
 // Every entry in the armnn::LayerType enum must be accounted for below.
+DECLARE_LAYER_POLICY_1_PARAM(Abs)
+
 DECLARE_LAYER_POLICY_2_PARAM(Activation)
 
 DECLARE_LAYER_POLICY_1_PARAM(Addition)