IVGCVSW-5011 Implement GetCapabilities in NeonTensorHandleFactory
author    Narumol Prangnawarat <narumol.prangnawarat@arm.com>
Mon, 27 Jul 2020 14:52:13 +0000 (15:52 +0100)
committer Narumol Prangnawarat <narumol.prangnawarat@arm.com>
Mon, 27 Jul 2020 14:52:13 +0000 (15:52 +0100)
Signed-off-by: Narumol Prangnawarat <narumol.prangnawarat@arm.com>
Change-Id: I43591ec1250c1d84d286de85956a86eb5e2abc2a

src/backends/neon/NeonTensorHandleFactory.cpp
src/backends/neon/NeonTensorHandleFactory.hpp
src/backends/neon/test/CMakeLists.txt
src/backends/neon/test/NeonTensorHandleTests.cpp [new file with mode: 0644]

index a8b5b81..ec9e063 100644 (file)
@@ -6,6 +6,8 @@
 #include "NeonTensorHandleFactory.hpp"
 #include "NeonTensorHandle.hpp"
 
+#include "Layer.hpp"
+
 #include <armnn/utility/IgnoreUnused.hpp>
 #include <armnn/utility/PolymorphicDowncast.hpp>
 
@@ -108,4 +110,23 @@ MemorySourceFlags NeonTensorHandleFactory::GetImportFlags() const
     return 0;
 }
 
+std::vector<Capability> NeonTensorHandleFactory::GetCapabilities(const IConnectableLayer* layer,
+                                                                 const IConnectableLayer* connectedLayer,
+                                                                 CapabilityClass capabilityClass)
+
+{
+    IgnoreUnused(connectedLayer);
+    std::vector<Capability> capabilities;
+    if (capabilityClass == CapabilityClass::PaddingRequired)
+    {
+        auto search = paddingRequiredLayers.find((PolymorphicDowncast<const Layer*>(layer))->GetType());
+        if ( search != paddingRequiredLayers.end())
+        {
+            Capability paddingCapability(CapabilityClass::PaddingRequired, true);
+            capabilities.push_back(paddingCapability);
+        }
+    }
+    return capabilities;
+}
+
 } // namespace armnn
index e1cdc8b..0930d4e 100644 (file)
@@ -13,6 +13,30 @@ namespace armnn
 
 constexpr const char* NeonTensorHandleFactoryId() { return "Arm/Neon/TensorHandleFactory"; }
 
+const std::set<armnn::LayerType> paddingRequiredLayers {
+    LayerType::ArgMinMax,
+    LayerType::Concat,
+    LayerType::Convolution2d,
+    LayerType::DepthToSpace,
+    LayerType::DepthwiseConvolution2d,
+    LayerType::Dequantize,
+    LayerType::FullyConnected,
+    LayerType::Gather,
+    LayerType::L2Normalization,
+    LayerType::Lstm,
+    LayerType::Mean,
+    LayerType::Multiplication,
+    LayerType::Normalization,
+    LayerType::Permute,
+    LayerType::Pooling2d,
+    LayerType::Quantize,
+    LayerType::QuantizedLstm,
+    LayerType::Resize,
+    LayerType::Stack,
+    LayerType::Transpose,
+    LayerType::TransposeConvolution2d
+};
+
 class NeonTensorHandleFactory : public ITensorHandleFactory
 {
 public:
@@ -46,6 +70,10 @@ public:
 
     MemorySourceFlags GetImportFlags() const override;
 
+    std::vector<Capability> GetCapabilities(const IConnectableLayer* layer,
+                                            const IConnectableLayer* connectedLayer,
+                                            CapabilityClass capabilityClass) override;
+
 private:
     mutable std::shared_ptr<NeonMemoryManager> m_MemoryManager;
 };
index 0c3944f..16c066b 100644 (file)
@@ -11,6 +11,7 @@ list(APPEND armnnNeonBackendUnitTests_sources
     NeonLayerTests.cpp
     NeonOptimizedNetworkTests.cpp
     NeonRuntimeTests.cpp
+    NeonTensorHandleTests.cpp
     NeonTimerTest.cpp
     NeonWorkloadFactoryHelper.hpp
 )
diff --git a/src/backends/neon/test/NeonTensorHandleTests.cpp b/src/backends/neon/test/NeonTensorHandleTests.cpp
new file mode 100644 (file)
index 0000000..fe5e8f9
--- /dev/null
@@ -0,0 +1,80 @@
+//
+// Copyright © 2020 Arm Ltd and Contributors. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+#include <neon/NeonTensorHandle.hpp>
+#include <neon/NeonTensorHandleFactory.hpp>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_SUITE(NeonTensorHandleTests)
+using namespace armnn;
+
+BOOST_AUTO_TEST_CASE(NeonTensorHandleGetCapabilitiesNoPadding)
+{
+    std::shared_ptr<NeonMemoryManager> memoryManager = std::make_shared<NeonMemoryManager>();
+    NeonTensorHandleFactory handleFactory(memoryManager);
+
+    INetworkPtr network(INetwork::Create());
+
+    // Add the layers
+    IConnectableLayer* input = network->AddInputLayer(0);
+    SoftmaxDescriptor descriptor;
+    descriptor.m_Beta = 1.0f;
+    IConnectableLayer* softmax = network->AddSoftmaxLayer(descriptor);
+    IConnectableLayer* output = network->AddOutputLayer(2);
+
+    // Establish connections
+    input->GetOutputSlot(0).Connect(softmax->GetInputSlot(0));
+    softmax->GetOutputSlot(0).Connect(output->GetInputSlot(0));
+
+    // No padding required for input
+    std::vector<Capability> capabilities = handleFactory.GetCapabilities(input,
+                                                                         softmax,
+                                                                         CapabilityClass::PaddingRequired);
+    BOOST_TEST(capabilities.empty());
+
+    // No padding required for Softmax
+    capabilities = handleFactory.GetCapabilities(softmax, output, CapabilityClass::PaddingRequired);
+    BOOST_TEST(capabilities.empty());
+
+    // No padding required for output
+    capabilities = handleFactory.GetCapabilities(output, nullptr, CapabilityClass::PaddingRequired);
+    BOOST_TEST(capabilities.empty());
+}
+
+BOOST_AUTO_TEST_CASE(NeonTensorHandleGetCapabilitiesPadding)
+{
+    std::shared_ptr<NeonMemoryManager> memoryManager = std::make_shared<NeonMemoryManager>();
+    NeonTensorHandleFactory handleFactory(memoryManager);
+
+    INetworkPtr network(INetwork::Create());
+
+    // Add the layers
+    IConnectableLayer* input = network->AddInputLayer(0);
+    Pooling2dDescriptor descriptor;
+    IConnectableLayer* pooling = network->AddPooling2dLayer(descriptor);
+    IConnectableLayer* output = network->AddOutputLayer(2);
+
+    // Establish connections
+    input->GetOutputSlot(0).Connect(pooling->GetInputSlot(0));
+    pooling->GetOutputSlot(0).Connect(output->GetInputSlot(0));
+
+    // No padding required for input
+    std::vector<Capability> capabilities = handleFactory.GetCapabilities(input,
+                                                                         pooling,
+                                                                         CapabilityClass::PaddingRequired);
+    BOOST_TEST(capabilities.empty());
+
+    // No padding required for output
+    capabilities = handleFactory.GetCapabilities(output, nullptr, CapabilityClass::PaddingRequired);
+    BOOST_TEST(capabilities.empty());
+
+    // Padding required for Pooling2d
+    capabilities = handleFactory.GetCapabilities(pooling, output, CapabilityClass::PaddingRequired);
+    BOOST_TEST(capabilities.size() == 1);
+    BOOST_TEST((capabilities[0].m_CapabilityClass == CapabilityClass::PaddingRequired));
+    BOOST_TEST(capabilities[0].m_Value);
+}
+
+BOOST_AUTO_TEST_SUITE_END()