IVGCVSW-4009 StandInLayer frontend API
author: Derek Lamberti <derek.lamberti@arm.com>
Mon, 21 Oct 2019 09:46:16 +0000 (10:46 +0100)
committer: Matteo Martincigh <matteo.martincigh@arm.com>
Mon, 21 Oct 2019 12:59:23 +0000 (12:59 +0000)
Change-Id: I058c57b554769799c6775813215070ef47790e3d
Signed-off-by: Derek Lamberti <derek.lamberti@arm.com>
20 files changed:
Android.mk
CMakeLists.txt
include/armnn/Descriptors.hpp
include/armnn/DescriptorsFwd.hpp
include/armnn/ILayerSupport.hpp
include/armnn/ILayerVisitor.hpp
include/armnn/INetwork.hpp
include/armnn/LayerVisitorBase.hpp
src/armnn/InternalTypes.hpp
src/armnn/LayersFwd.hpp
src/armnn/Network.cpp
src/armnn/Network.hpp
src/armnn/layers/StandInLayer.cpp [new file with mode: 0644]
src/armnn/layers/StandInLayer.hpp [new file with mode: 0644]
src/armnnSerializer/Serializer.cpp
src/armnnSerializer/Serializer.hpp
src/backends/backendsCommon/LayerSupportBase.cpp
src/backends/backendsCommon/LayerSupportBase.hpp
src/backends/backendsCommon/WorkloadFactory.cpp
src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp

index 0294123..29ca083 100644 (file)
@@ -165,6 +165,7 @@ LOCAL_SRC_FILES := \
         src/armnn/layers/SpaceToDepthLayer.cpp \
         src/armnn/layers/SplitterLayer.cpp \
         src/armnn/layers/StackLayer.cpp \
+        src/armnn/layers/StandInLayer.cpp \
         src/armnn/layers/StridedSliceLayer.cpp \
         src/armnn/layers/SubtractionLayer.cpp \
         src/armnn/layers/SwitchLayer.cpp \
index 626478a..ed683da 100644 (file)
@@ -344,6 +344,8 @@ list(APPEND armnn_sources
     src/armnn/layers/SplitterLayer.cpp
     src/armnn/layers/StackLayer.hpp
     src/armnn/layers/StackLayer.cpp
+    src/armnn/layers/StandInLayer.cpp
+    src/armnn/layers/StandInLayer.hpp
     src/armnn/layers/StridedSliceLayer.cpp
     src/armnn/layers/StridedSliceLayer.hpp
     src/armnn/layers/SubtractionLayer.cpp
index 10d8ab7..425c526 100644 (file)
@@ -952,6 +952,22 @@ struct StackDescriptor
     TensorShape m_InputShape;
 };
 
+/// A StandInDescriptor for the StandIn layer
+struct StandInDescriptor
+{
+    StandInDescriptor() {};
+
+    StandInDescriptor(uint32_t numInputs, uint32_t numOutputs)
+        : m_NumInputs(numInputs)
+        , m_NumOutputs(numOutputs)
+    {}
+
+    /// Number of input tensors
+    uint32_t m_NumInputs = 0;
+    /// Number of output tensors
+    uint32_t m_NumOutputs = 0;
+};
+
 /// A StridedSliceDescriptor for the StridedSliceLayer.
 struct StridedSliceDescriptor
 {
index a978f77..cfdef8a 100644 (file)
@@ -36,6 +36,7 @@ struct SpaceToBatchNdDescriptor;
 struct SpaceToDepthDescriptor;
 struct SliceDescriptor;
 struct StackDescriptor;
+struct StandInDescriptor;
 struct StridedSliceDescriptor;
 struct TransposeConvolution2dDescriptor;
 struct ViewsDescriptor;
index 87197ee..54f4a28 100644 (file)
@@ -328,6 +328,12 @@ public:
                                   const StackDescriptor& descriptor,
                                   Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const = 0;
 
+    virtual bool IsStandInSupported(const std::vector<const TensorInfo*>& inputs,
+                                    const std::vector<const TensorInfo*>& outputs,
+                                    const StandInDescriptor& descriptor,
+                                    Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const = 0;
+
+
     virtual bool IsStridedSliceSupported(const TensorInfo& input,
                                          const TensorInfo& output,
                                          const StridedSliceDescriptor& descriptor,
index 80931eb..9669b3a 100644 (file)
@@ -443,6 +443,14 @@ public:
                                  const StackDescriptor& stackDescriptor,
                                  const char* name = nullptr) = 0;
 
+    /// Function a StandInLayer should call back to when its Accept(ILayerVisitor&) function is invoked.
+    /// @param layer - pointer to the layer which is calling back to this visit function.
+    /// @param standInDescriptor - Parameters for the stand-in layer.
+    /// @param name - Optional name for the layer.
+    virtual void VisitStandInLayer(const IConnectableLayer* layer,
+                                   const StandInDescriptor& standInDescriptor,
+                                   const char* name = nullptr) = 0;
+
     /// Function a strided slice layer should call back to when its Accept(ILayerVisitor&) function is invoked.
     /// @param layer - pointer to the layer which is calling back to this visit function.
     /// @param stridedSliceDescriptor - Parameters for the strided slice operation.
index b3fab82..1041492 100644 (file)
@@ -507,6 +507,16 @@ public:
     virtual IConnectableLayer* AddStackLayer(const StackDescriptor& descriptor,
                                              const char* name = nullptr) = 0;
 
+
+    /// Add a stand-in layer for a type unknown to the Arm NN framework.
+    /// Note: Due to the nature of this layer, no validation can be performed by the framework.
+    /// Furthermore, any model containing this layer cannot make use of dynamic tensors since the
+    /// tensor sizes cannot be inferred.
+    /// @param descriptor - Descriptor for the StandIn layer.
+    /// @return - Interface for configuring the layer.
+    virtual IConnectableLayer* AddStandInLayer(const StandInDescriptor& descriptor,
+                                               const char* name = nullptr) = 0;
+
     /// Add a QuantizedLstm layer to the network
     /// @param params - The weights and biases for the Quantized LSTM cell
     /// @param name - Optional name for the layer
index 5226fa2..388fc6f 100644 (file)
@@ -222,6 +222,10 @@ public:
                          const StackDescriptor&,
                          const char*) override { DefaultPolicy::Apply(__func__); }
 
+    void VisitStandInLayer(const IConnectableLayer*,
+                           const StandInDescriptor&,
+                           const char*) override { DefaultPolicy::Apply(__func__); }
+
     void VisitStridedSliceLayer(const IConnectableLayer*,
                                 const StridedSliceDescriptor&,
                                 const char*) override { DefaultPolicy::Apply(__func__); }
index d7f932f..36e7280 100644 (file)
@@ -66,6 +66,7 @@ enum class LayerType
     SpaceToDepth,
     Splitter,
     Stack,
+    StandIn,
     StridedSlice,
     Subtraction,
     Switch,
index 6c30749..13bf900 100644 (file)
@@ -58,6 +58,7 @@
 #include "layers/SpaceToDepthLayer.hpp"
 #include "layers/SplitterLayer.hpp"
 #include "layers/StackLayer.hpp"
+#include "layers/StandInLayer.hpp"
 #include "layers/StridedSliceLayer.hpp"
 #include "layers/SubtractionLayer.hpp"
 #include "layers/SwitchLayer.hpp"
@@ -142,6 +143,7 @@ DECLARE_LAYER(SpaceToBatchNd)
 DECLARE_LAYER(SpaceToDepth)
 DECLARE_LAYER(Splitter)
 DECLARE_LAYER(Stack)
+DECLARE_LAYER(StandIn)
 DECLARE_LAYER(StridedSlice)
 DECLARE_LAYER(Subtraction)
 DECLARE_LAYER(Switch)
index 857f6b3..1339a6e 100644 (file)
@@ -1503,6 +1503,13 @@ IConnectableLayer* Network::AddStackLayer(const StackDescriptor& stackDescriptor
     return m_Graph->AddLayer<StackLayer>(stackDescriptor, name);
 }
 
+
+IConnectableLayer* Network::AddStandInLayer(const StandInDescriptor& desc,
+                                            const char* name)
+{
+    return m_Graph->AddLayer<StandInLayer>(desc, name);
+}
+
 IConnectableLayer* Network::AddQuantizedLstmLayer(const QuantizedLstmInputParams& params,
                                                   const char* name)
 {
index c1d99a9..49cf4da 100644 (file)
@@ -222,6 +222,9 @@ public:
     IConnectableLayer* AddStackLayer(const StackDescriptor& stackDescriptor,
                                      const char* name = nullptr) override;
 
+    IConnectableLayer* AddStandInLayer(const StandInDescriptor& descriptor,
+                                       const char* name = nullptr) override;
+
     IConnectableLayer* AddQuantizedLstmLayer(const QuantizedLstmInputParams& params,
                                              const char* name = nullptr) override;
 
diff --git a/src/armnn/layers/StandInLayer.cpp b/src/armnn/layers/StandInLayer.cpp
new file mode 100644 (file)
index 0000000..fdc905f
--- /dev/null
@@ -0,0 +1,47 @@
+//
+// Copyright © 2019 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#include "StandInLayer.hpp"
+#include "LayerCloneBase.hpp"
+
+namespace armnn
+{
+
+StandInLayer::StandInLayer(const StandInDescriptor& param, const char* name)
+    : LayerWithParameters(param.m_NumInputs, 1, LayerType::StandIn, param, name)
+{
+}
+
+std::unique_ptr<IWorkload> StandInLayer::CreateWorkload(const Graph& graph, const IWorkloadFactory& factory) const
+{
+    // This throws in the event that it's called. We would expect that any backend that
+    // "claims" to support the StandInLayer type would actually substitute it with a PrecompiledLayer
+    // during graph optimization. There is no interface on the IWorkloadFactory to create a StandInWorkload.
+    throw Exception("Stand in layer does not support creating workloads");
+}
+
+StandInLayer* StandInLayer::Clone(Graph& graph) const
+{
+    return CloneBase<StandInLayer>(graph, m_Param, GetName());
+}
+
+std::vector<TensorShape> StandInLayer::InferOutputShapes(const std::vector<TensorShape>& inputShapes) const
+{
+    throw Exception("Stand in layer does not support infering output shapes");
+}
+
+void StandInLayer::ValidateTensorShapesFromInputs()
+{
+    // Cannot validate this layer since no implementation details can be known by the framework
+    // so do nothing here.
+}
+
+void StandInLayer::Accept(ILayerVisitor& visitor) const
+{
+    visitor.VisitStandInLayer(this, GetParameters(), GetName());
+}
+} //namespace armnn
+
+
diff --git a/src/armnn/layers/StandInLayer.hpp b/src/armnn/layers/StandInLayer.hpp
new file mode 100644 (file)
index 0000000..9fe1773
--- /dev/null
@@ -0,0 +1,56 @@
+//
+// Copyright © 2019 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#pragma once
+
+#include "LayerWithParameters.hpp"
+
+namespace armnn
+{
+
+/// This layer represents an unknown operation in the input graph.
+class StandInLayer : public LayerWithParameters<StandInDescriptor>
+{
+public:
+    /// Empty implementation explictly does NOT create a workload. Throws Exception if called.
+    /// @param [in] graph The graph where this layer can be found.
+    /// @param [in] factory The workload factory which will create the workload.
+    /// @return Does not return anything. Throws Exception if called.
+    virtual std::unique_ptr<IWorkload>CreateWorkload(const Graph& graph,
+                                                     const IWorkloadFactory& factory) const override;
+
+    /// Creates a dynamically-allocated copy of this layer.
+    /// @param [in] graph The graph into which this layer is being cloned.
+    StandInLayer* Clone(Graph& graph) const override;
+
+    /// Check if the input tensor shape(s)
+    /// Does nothing since cannot validate any properties of this layer.
+    void ValidateTensorShapesFromInputs() override;
+
+    /// Empty implementation that throws Exception if called.
+    /// otherwise infers the output shapes from given input shapes and layer properties.
+    /// @param [in] inputShapes The input shapes layer has.
+    /// @return Does not return anything. Throws Exception if called.
+    std::vector<TensorShape> InferOutputShapes(const std::vector<TensorShape>& inputShapes) const override;
+
+    /// Accepts a visitor object and calls VisitStandInLayer() method.
+    /// @param visitor The visitor on which to call VisitStandInLayer() method.
+    void Accept(ILayerVisitor& visitor) const override;
+
+protected:
+    /// Constructor to create a StandInLayer.
+    /// @param [in] param StandInDescriptor to configure the stand-in operation.
+    /// @param [in] name Optional name for the layer.
+    StandInLayer(const StandInDescriptor& param, const char* name);
+
+    /// Default destructor
+    ~StandInLayer() = default;
+};
+
+} //namespace armnn
+
+
+
+
index 6122351..d147d47 100644 (file)
@@ -1090,6 +1090,13 @@ void SerializerVisitor::VisitStackLayer(const armnn::IConnectableLayer* layer,
     CreateAnyLayer(stackLayer.o, serializer::Layer::Layer_StackLayer);
 }
 
+void SerializerVisitor::VisitStandInLayer(const armnn::IConnectableLayer *layer,
+                                          const armnn::StandInDescriptor& standInDescriptor,
+                                          const char *name)
+{
+    // TODO: IVGCVSW-4010 Implement serialization
+}
+
 void SerializerVisitor::VisitStridedSliceLayer(const armnn::IConnectableLayer* layer,
                                                const armnn::StridedSliceDescriptor& stridedSliceDescriptor,
                                                const char* name)
index 79dc17b..1fd507a 100644 (file)
@@ -235,6 +235,10 @@ public:
                             const armnn::ViewsDescriptor& viewsDescriptor,
                             const char* name = nullptr) override;
 
+    void VisitStandInLayer(const armnn::IConnectableLayer* layer,
+                           const armnn::StandInDescriptor& standInDescriptor,
+                           const char* name = nullptr) override;
+
     void VisitStackLayer(const armnn::IConnectableLayer* layer,
                          const armnn::StackDescriptor& stackDescriptor,
                          const char* name = nullptr) override;
index 358106e..9ffad7b 100644 (file)
@@ -502,6 +502,22 @@ bool LayerSupportBase::IsStackSupported(const std::vector<const TensorInfo*>& in
     return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
 }
 
+bool LayerSupportBase::IsStandInSupported(const std::vector<const TensorInfo*>& inputs,
+                                          const std::vector<const TensorInfo*>& outputs,
+                                          const StandInDescriptor& descriptor,
+                                          Optional<std::string&> reasonIfUnsupported) const
+{
+    if (reasonIfUnsupported)
+    {
+        std::stringstream message;
+        message << "StandIn layer is not executable via backends";
+
+        reasonIfUnsupported.value() = message.str();
+    }
+
+    return false;
+}
+
 bool LayerSupportBase::IsStridedSliceSupported(const TensorInfo& input,
                                                const TensorInfo& output,
                                                const StridedSliceDescriptor& descriptor,
index d4c37c1..e99cb67 100644 (file)
@@ -312,6 +312,11 @@ public:
                           const StackDescriptor& descriptor,
                           Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const override;
 
+    bool IsStandInSupported(const std::vector<const TensorInfo*>& inputs,
+                            const std::vector<const TensorInfo*>& outputs,
+                            const StandInDescriptor& descriptor,
+                            Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const override;
+
     bool IsStridedSliceSupported(const TensorInfo& input,
                                  const TensorInfo& output,
                                  const StridedSliceDescriptor& descriptor,
index 30dfa02..34e4cbe 100644 (file)
@@ -902,6 +902,47 @@ bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId,
 
             break;
         }
+        case LayerType::StandIn:
+        {
+            auto cLayer = boost::polymorphic_downcast<const StandInLayer*>(&layer);
+
+            // Get vector of all inputs.
+            auto getTensorInfoIn = [&dataType](const InputSlot& slot)
+                {
+                    return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
+                };
+            auto getTensorInfoOut = [&dataType](const OutputSlot& slot)
+                {
+                    return OverrideDataType(slot.GetTensorInfo(), dataType);
+                };
+            auto beginI = boost::make_transform_iterator(layer.GetInputSlots().begin(), getTensorInfoIn);
+            auto endI = boost::make_transform_iterator(layer.GetInputSlots().end(), getTensorInfoIn);
+            std::vector<TensorInfo> inputs(beginI, endI);
+
+            auto beginO = boost::make_transform_iterator(layer.GetOutputSlots().begin(), getTensorInfoOut);
+            auto endO = boost::make_transform_iterator(layer.GetOutputSlots().end(), getTensorInfoOut);
+            std::vector<TensorInfo> outputs(beginO, endO);
+
+
+            auto getTensorInfoPtr = [](const TensorInfo& info)
+                {
+                    return &info;
+                };
+            auto beginPtrI = boost::make_transform_iterator(inputs.begin(), getTensorInfoPtr);
+            auto endPtrI = boost::make_transform_iterator(inputs.end(), getTensorInfoPtr);
+            std::vector<const TensorInfo*> inputPtrs(beginPtrI, endPtrI);
+
+            auto beginPtrO = boost::make_transform_iterator(outputs.begin(), getTensorInfoPtr);
+            auto endPtrO = boost::make_transform_iterator(outputs.end(), getTensorInfoPtr);
+            std::vector<const TensorInfo*> outputPtrs(beginPtrO, endPtrO);
+
+
+            result = layerSupportObject->IsStandInSupported(inputPtrs,
+                                                            outputPtrs,
+                                                            cLayer->GetParameters(),
+                                                            reason);
+            break;
+        }
         case LayerType::StridedSlice:
         {
             auto cLayer = boost::polymorphic_downcast<const StridedSliceLayer*>(&layer);
index 9bddae9..c52d6a9 100644 (file)
@@ -380,6 +380,25 @@ struct LayerTypePolicy<armnn::LayerType::name, DataType> \
 // Use this version for layers whose constructor takes 2 parameters(descriptor and name).
 #define DECLARE_LAYER_POLICY_2_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, armnn::name##Descriptor)
 
+
+#define DECLARE_LAYER_POLICY_EXCEPTION(name, descType) \
+template<armnn::DataType DataType> \
+struct LayerTypePolicy<armnn::LayerType::name, DataType> \
+{ \
+    using Type = armnn::name##Layer; \
+    using Desc = descType; \
+    constexpr static const char* NameStr = #name; \
+    \
+    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory *factory, \
+        unsigned int nIn, unsigned int nOut) \
+    { \
+        return std::unique_ptr<armnn::IWorkload>(); \
+    } \
+};
+
+#define DECLARE_LAYER_POLICY_EXCEPTION_1_PARAM(name) DECLARE_LAYER_POLICY_EXCEPTION(name, void)
+#define DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(name) DECLARE_LAYER_POLICY_EXCEPTION(name, armnn::name##Descriptor)
+
 // Layer policy template.
 template<armnn::LayerType Type, armnn::DataType DataType>
 struct LayerTypePolicy;
@@ -489,6 +508,8 @@ DECLARE_LAYER_POLICY_2_PARAM(Splitter)
 
 DECLARE_LAYER_POLICY_2_PARAM(Stack)
 
+DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(StandIn)
+
 DECLARE_LAYER_POLICY_2_PARAM(StridedSlice)
 
 DECLARE_LAYER_POLICY_1_PARAM(Subtraction)