From 013c390c2d9829fede2d8b1d59c3f2a497730462 Mon Sep 17 00:00:00 2001
From: Derek Lamberti
Date: Mon, 21 Oct 2019 10:46:16 +0100
Subject: [PATCH] IVGCVSW-4009 StandInLayer frontend API

Change-Id: I058c57b554769799c6775813215070ef47790e3d
Signed-off-by: Derek Lamberti
---
 Android.mk                                       |  1 +
 CMakeLists.txt                                   |  2 +
 include/armnn/Descriptors.hpp                    | 16 +++++++
 include/armnn/DescriptorsFwd.hpp                 |  1 +
 include/armnn/ILayerSupport.hpp                  |  6 +++
 include/armnn/ILayerVisitor.hpp                  |  8 ++++
 include/armnn/INetwork.hpp                       | 10 ++++
 include/armnn/LayerVisitorBase.hpp               |  4 ++
 src/armnn/InternalTypes.hpp                      |  1 +
 src/armnn/LayersFwd.hpp                          |  2 +
 src/armnn/Network.cpp                            |  7 +++
 src/armnn/Network.hpp                            |  3 ++
 src/armnn/layers/StandInLayer.cpp                | 47 ++++++++++++++++++
 src/armnn/layers/StandInLayer.hpp                | 56 ++++++++++++++++++++++
 src/armnnSerializer/Serializer.cpp               |  7 +++
 src/armnnSerializer/Serializer.hpp               |  4 ++
 src/backends/backendsCommon/LayerSupportBase.cpp | 16 +++++++
 src/backends/backendsCommon/LayerSupportBase.hpp |  5 ++
 src/backends/backendsCommon/WorkloadFactory.cpp  | 41 ++++++++++++++++
 .../test/IsLayerSupportedTestImpl.hpp            | 21 ++++++++
 20 files changed, 258 insertions(+)
 create mode 100644 src/armnn/layers/StandInLayer.cpp
 create mode 100644 src/armnn/layers/StandInLayer.hpp

diff --git a/Android.mk b/Android.mk
index 0294123..29ca083 100644
--- a/Android.mk
+++ b/Android.mk
@@ -165,6 +165,7 @@ LOCAL_SRC_FILES := \
         src/armnn/layers/SpaceToDepthLayer.cpp \
         src/armnn/layers/SplitterLayer.cpp \
         src/armnn/layers/StackLayer.cpp \
+        src/armnn/layers/StandInLayer.cpp \
         src/armnn/layers/StridedSliceLayer.cpp \
         src/armnn/layers/SubtractionLayer.cpp \
         src/armnn/layers/SwitchLayer.cpp \
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 626478a..ed683da 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -344,6 +344,8 @@ list(APPEND armnn_sources
     src/armnn/layers/SplitterLayer.cpp
     src/armnn/layers/StackLayer.hpp
     src/armnn/layers/StackLayer.cpp
+    src/armnn/layers/StandInLayer.cpp
+    src/armnn/layers/StandInLayer.hpp
     src/armnn/layers/StridedSliceLayer.cpp
     src/armnn/layers/StridedSliceLayer.hpp
     src/armnn/layers/SubtractionLayer.cpp
diff --git a/include/armnn/Descriptors.hpp b/include/armnn/Descriptors.hpp
index 10d8ab7..425c526 100644
--- a/include/armnn/Descriptors.hpp
+++ b/include/armnn/Descriptors.hpp
@@ -952,6 +952,22 @@ struct StackDescriptor
     TensorShape m_InputShape;
 };
 
+/// A StandInDescriptor for the StandIn layer.
+struct StandInDescriptor
+{
+    StandInDescriptor() {};
+
+    StandInDescriptor(uint32_t numInputs, uint32_t numOutputs)
+        : m_NumInputs(numInputs)
+        , m_NumOutputs(numOutputs)
+    {}
+
+    /// Number of input tensors
+    uint32_t m_NumInputs = 0;
+    /// Number of output tensors
+    uint32_t m_NumOutputs = 0;
+};
+
 /// A StridedSliceDescriptor for the StridedSliceLayer.
 struct StridedSliceDescriptor
 {
diff --git a/include/armnn/DescriptorsFwd.hpp b/include/armnn/DescriptorsFwd.hpp
index a978f77..cfdef8a 100644
--- a/include/armnn/DescriptorsFwd.hpp
+++ b/include/armnn/DescriptorsFwd.hpp
@@ -36,6 +36,7 @@ struct SpaceToBatchNdDescriptor;
 struct SpaceToDepthDescriptor;
 struct SliceDescriptor;
 struct StackDescriptor;
+struct StandInDescriptor;
 struct StridedSliceDescriptor;
 struct TransposeConvolution2dDescriptor;
 struct ViewsDescriptor;
diff --git a/include/armnn/ILayerSupport.hpp b/include/armnn/ILayerSupport.hpp
index 87197ee..54f4a28 100644
--- a/include/armnn/ILayerSupport.hpp
+++ b/include/armnn/ILayerSupport.hpp
@@ -328,6 +328,12 @@ public:
                                   const StackDescriptor& descriptor,
                                   Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const = 0;
 
+    virtual bool IsStandInSupported(const std::vector<const TensorInfo*>& inputs,
+                                    const std::vector<const TensorInfo*>& outputs,
+                                    const StandInDescriptor& descriptor,
+                                    Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const = 0;
+
+
     virtual bool IsStridedSliceSupported(const TensorInfo& input,
                                          const TensorInfo& output,
                                          const StridedSliceDescriptor& descriptor,
diff --git a/include/armnn/ILayerVisitor.hpp b/include/armnn/ILayerVisitor.hpp
index 80931eb..9669b3a 100644
--- a/include/armnn/ILayerVisitor.hpp
+++ b/include/armnn/ILayerVisitor.hpp
@@ -443,6 +443,14 @@ public:
                                  const StackDescriptor& stackDescriptor,
                                  const char* name = nullptr) = 0;
 
+    /// Function a StandInLayer should call back to when its Accept(ILayerVisitor&) function is invoked.
+    /// @param layer - pointer to the layer which is calling back to this visit function.
+    /// @param standInDescriptor - Parameters for the stand-in layer.
+    /// @param name - Optional name for the layer.
+    virtual void VisitStandInLayer(const IConnectableLayer* layer,
+                                   const StandInDescriptor& standInDescriptor,
+                                   const char* name = nullptr) = 0;
+
     /// Function a strided slice layer should call back to when its Accept(ILayerVisitor&) function is invoked.
     /// @param layer - pointer to the layer which is calling back to this visit function.
     /// @param stridedSliceDescriptor - Parameters for the strided slice operation.
diff --git a/include/armnn/INetwork.hpp b/include/armnn/INetwork.hpp
index b3fab82..1041492 100644
--- a/include/armnn/INetwork.hpp
+++ b/include/armnn/INetwork.hpp
@@ -507,6 +507,16 @@ public:
     virtual IConnectableLayer* AddStackLayer(const StackDescriptor& descriptor,
                                              const char* name = nullptr) = 0;
 
+
+    /// Add a stand-in layer for a type unknown to the Arm NN framework.
+    /// Note: Due to the nature of this layer, no validation can be performed by the framework.
+    /// Furthermore, any model containing this layer cannot make use of dynamic tensors, since the
+    /// tensor sizes cannot be inferred.
+    /// @param descriptor - Descriptor for the StandIn layer.
+    /// @return - Interface for configuring the layer.
+    virtual IConnectableLayer* AddStandInLayer(const StandInDescriptor& descriptor,
+                                               const char* name = nullptr) = 0;
+
     /// Add a QuantizedLstm layer to the network
     /// @param params - The weights and biases for the Quantized LSTM cell
     /// @param name - Optional name for the layer
diff --git a/include/armnn/LayerVisitorBase.hpp b/include/armnn/LayerVisitorBase.hpp
index 5226fa2..388fc6f 100644
--- a/include/armnn/LayerVisitorBase.hpp
+++ b/include/armnn/LayerVisitorBase.hpp
@@ -222,6 +222,10 @@ public:
                          const StackDescriptor&,
                          const char*) override { DefaultPolicy::Apply(__func__); }
 
+    void VisitStandInLayer(const IConnectableLayer*,
+                           const StandInDescriptor&,
+                           const char*) override { DefaultPolicy::Apply(__func__); }
+
     void VisitStridedSliceLayer(const IConnectableLayer*,
                                 const StridedSliceDescriptor&,
                                 const char*) override { DefaultPolicy::Apply(__func__); }
diff --git a/src/armnn/InternalTypes.hpp b/src/armnn/InternalTypes.hpp
index d7f932f..36e7280 100644
--- a/src/armnn/InternalTypes.hpp
+++ b/src/armnn/InternalTypes.hpp
@@ -66,6 +66,7 @@ enum class LayerType
     SpaceToDepth,
     Splitter,
     Stack,
+    StandIn,
     StridedSlice,
     Subtraction,
     Switch,
diff --git a/src/armnn/LayersFwd.hpp b/src/armnn/LayersFwd.hpp
index 6c30749..13bf900 100644
--- a/src/armnn/LayersFwd.hpp
+++ b/src/armnn/LayersFwd.hpp
@@ -58,6 +58,7 @@
 #include "layers/SpaceToDepthLayer.hpp"
 #include "layers/SplitterLayer.hpp"
 #include "layers/StackLayer.hpp"
+#include "layers/StandInLayer.hpp"
 #include "layers/StridedSliceLayer.hpp"
 #include "layers/SubtractionLayer.hpp"
 #include "layers/SwitchLayer.hpp"
@@ -142,6 +143,7 @@ DECLARE_LAYER(SpaceToBatchNd)
 DECLARE_LAYER(SpaceToDepth)
 DECLARE_LAYER(Splitter)
 DECLARE_LAYER(Stack)
+DECLARE_LAYER(StandIn)
 DECLARE_LAYER(StridedSlice)
 DECLARE_LAYER(Subtraction)
 DECLARE_LAYER(Switch)
diff --git a/src/armnn/Network.cpp b/src/armnn/Network.cpp
index 857f6b3..1339a6e 100644
--- a/src/armnn/Network.cpp
+++ b/src/armnn/Network.cpp
@@ -1503,6 +1503,13 @@ IConnectableLayer* Network::AddStackLayer(const StackDescriptor& stackDescriptor
     return m_Graph->AddLayer<StackLayer>(stackDescriptor, name);
 }
 
+
+IConnectableLayer* Network::AddStandInLayer(const StandInDescriptor& desc,
+                                            const char* name)
+{
+    return m_Graph->AddLayer<StandInLayer>(desc, name);
+}
+
 IConnectableLayer* Network::AddQuantizedLstmLayer(const QuantizedLstmInputParams& params,
                                                   const char* name)
 {
diff --git a/src/armnn/Network.hpp b/src/armnn/Network.hpp
index c1d99a9..49cf4da 100644
--- a/src/armnn/Network.hpp
+++ b/src/armnn/Network.hpp
@@ -222,6 +222,9 @@ public:
     IConnectableLayer* AddStackLayer(const StackDescriptor& stackDescriptor,
                                      const char* name = nullptr) override;
 
+    IConnectableLayer* AddStandInLayer(const StandInDescriptor& descriptor,
+                                       const char* name = nullptr) override;
+
     IConnectableLayer* AddQuantizedLstmLayer(const QuantizedLstmInputParams& params,
                                              const char* name = nullptr) override;
 
diff --git a/src/armnn/layers/StandInLayer.cpp b/src/armnn/layers/StandInLayer.cpp
new file mode 100644
index 0000000..fdc905f
--- /dev/null
+++ b/src/armnn/layers/StandInLayer.cpp
@@ -0,0 +1,47 @@
+//
+// Copyright © 2019 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#include "StandInLayer.hpp"
+#include "LayerCloneBase.hpp"
+
+namespace armnn
+{
+
+StandInLayer::StandInLayer(const StandInDescriptor& param, const char* name)
+    : LayerWithParameters(param.m_NumInputs, 1, LayerType::StandIn, param, name)
+{
+}
+
+std::unique_ptr<IWorkload> StandInLayer::CreateWorkload(const Graph& graph, const IWorkloadFactory& factory) const
+{
+    // This throws in the event that it's called. We would expect that any backend that
+    // "claims" to support the StandInLayer type would actually substitute it with a PrecompiledLayer
+    // during graph optimization. There is no interface on the IWorkloadFactory to create a StandInWorkload.
+    throw Exception("Stand in layer does not support creating workloads");
+}
+
+StandInLayer* StandInLayer::Clone(Graph& graph) const
+{
+    return CloneBase<StandInLayer>(graph, m_Param, GetName());
+}
+
+std::vector<TensorShape> StandInLayer::InferOutputShapes(const std::vector<TensorShape>& inputShapes) const
+{
+    throw Exception("Stand in layer does not support inferring output shapes");
+}
+
+void StandInLayer::ValidateTensorShapesFromInputs()
+{
+    // Cannot validate this layer since no implementation details can be known by the framework,
+    // so do nothing here.
+}
+
+void StandInLayer::Accept(ILayerVisitor& visitor) const
+{
+    visitor.VisitStandInLayer(this, GetParameters(), GetName());
+}
+} //namespace armnn
+
+
diff --git a/src/armnn/layers/StandInLayer.hpp b/src/armnn/layers/StandInLayer.hpp
new file mode 100644
index 0000000..9fe1773
--- /dev/null
+++ b/src/armnn/layers/StandInLayer.hpp
@@ -0,0 +1,56 @@
+//
+// Copyright © 2019 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#pragma once
+
+#include "LayerWithParameters.hpp"
+
+namespace armnn
+{
+
+/// This layer represents an unknown operation in the input graph.
+class StandInLayer : public LayerWithParameters<StandInDescriptor>
+{
+public:
+    /// Empty implementation that explicitly does NOT create a workload. Throws Exception if called.
+    /// @param [in] graph The graph where this layer can be found.
+    /// @param [in] factory The workload factory which will create the workload.
+    /// @return Does not return anything. Throws Exception if called.
+    virtual std::unique_ptr<IWorkload> CreateWorkload(const Graph& graph,
+                                                      const IWorkloadFactory& factory) const override;
+
+    /// Creates a dynamically-allocated copy of this layer.
+    /// @param [in] graph The graph into which this layer is being cloned.
+    StandInLayer* Clone(Graph& graph) const override;
+
+    /// Check if the input tensor shape(s) will lead to a valid configuration of the layer.
+    /// Does nothing, since the framework cannot validate any properties of this layer.
+    void ValidateTensorShapesFromInputs() override;
+
+    /// Empty implementation that throws Exception if called;
+    /// the output shapes cannot be inferred from the given input shapes and the layer parameters.
+    /// @param [in] inputShapes The input shapes the layer has.
+    /// @return Does not return anything. Throws Exception if called.
+    std::vector<TensorShape> InferOutputShapes(const std::vector<TensorShape>& inputShapes) const override;
+
+    /// Accepts a visitor object and calls VisitStandInLayer() method.
+    /// @param visitor The visitor on which to call VisitStandInLayer() method.
+    void Accept(ILayerVisitor& visitor) const override;
+
+protected:
+    /// Constructor to create a StandInLayer.
+    /// @param [in] param StandInDescriptor to configure the stand-in operation.
+    /// @param [in] name Optional name for the layer.
+    StandInLayer(const StandInDescriptor& param, const char* name);
+
+    /// Default destructor
+    ~StandInLayer() = default;
+};
+
+} //namespace armnn
+
+
+
+
diff --git a/src/armnnSerializer/Serializer.cpp b/src/armnnSerializer/Serializer.cpp
index 6122351..d147d47 100644
--- a/src/armnnSerializer/Serializer.cpp
+++ b/src/armnnSerializer/Serializer.cpp
@@ -1090,6 +1090,13 @@ void SerializerVisitor::VisitStackLayer(const armnn::IConnectableLayer* layer,
     CreateAnyLayer(stackLayer.o, serializer::Layer::Layer_StackLayer);
 }
 
+void SerializerVisitor::VisitStandInLayer(const armnn::IConnectableLayer *layer,
+                                          const armnn::StandInDescriptor& standInDescriptor,
+                                          const char *name)
+{
+    // TODO: IVGCVSW-4010 Implement serialization
+}
+
 void SerializerVisitor::VisitStridedSliceLayer(const armnn::IConnectableLayer* layer,
                                                const armnn::StridedSliceDescriptor& stridedSliceDescriptor,
                                                const char* name)
diff --git a/src/armnnSerializer/Serializer.hpp b/src/armnnSerializer/Serializer.hpp
index 79dc17b..1fd507a 100644
--- a/src/armnnSerializer/Serializer.hpp
+++ b/src/armnnSerializer/Serializer.hpp
@@ -235,6 +235,10 @@ public:
                            const armnn::ViewsDescriptor& viewsDescriptor,
                            const char* name = nullptr) override;
 
+    void VisitStandInLayer(const armnn::IConnectableLayer* layer,
+                           const armnn::StandInDescriptor& standInDescriptor,
+                           const char* name = nullptr) override;
+
     void VisitStackLayer(const armnn::IConnectableLayer* layer,
                          const armnn::StackDescriptor& stackDescriptor,
                          const char* name = nullptr) override;
diff --git a/src/backends/backendsCommon/LayerSupportBase.cpp b/src/backends/backendsCommon/LayerSupportBase.cpp
index 358106e..9ffad7b 100644
--- a/src/backends/backendsCommon/LayerSupportBase.cpp
+++ b/src/backends/backendsCommon/LayerSupportBase.cpp
@@ -502,6 +502,22 @@ bool LayerSupportBase::IsStackSupported(const std::vector<const TensorInfo*>& in
     return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
 }
 
+bool LayerSupportBase::IsStandInSupported(const std::vector<const TensorInfo*>& inputs,
+                                          const std::vector<const TensorInfo*>& outputs,
+                                          const StandInDescriptor& descriptor,
+                                          Optional<std::string&> reasonIfUnsupported) const
+{
+    if (reasonIfUnsupported)
+    {
+        std::stringstream message;
+        message << "StandIn layer is not executable via backends";
+
+        reasonIfUnsupported.value() = message.str();
+    }
+
+    return false;
+}
+
 bool LayerSupportBase::IsStridedSliceSupported(const TensorInfo& input,
                                                const TensorInfo& output,
                                                const StridedSliceDescriptor& descriptor,
diff --git a/src/backends/backendsCommon/LayerSupportBase.hpp b/src/backends/backendsCommon/LayerSupportBase.hpp
index d4c37c1..e99cb67 100644
--- a/src/backends/backendsCommon/LayerSupportBase.hpp
+++ b/src/backends/backendsCommon/LayerSupportBase.hpp
@@ -312,6 +312,11 @@ public:
                           const StackDescriptor& descriptor,
                           Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const override;
 
+    bool IsStandInSupported(const std::vector<const TensorInfo*>& inputs,
+                            const std::vector<const TensorInfo*>& outputs,
+                            const StandInDescriptor& descriptor,
+                            Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const override;
+
     bool IsStridedSliceSupported(const TensorInfo& input,
                                  const TensorInfo& output,
                                  const StridedSliceDescriptor& descriptor,
diff --git a/src/backends/backendsCommon/WorkloadFactory.cpp b/src/backends/backendsCommon/WorkloadFactory.cpp
index 30dfa02..34e4cbe 100644
--- a/src/backends/backendsCommon/WorkloadFactory.cpp
+++ b/src/backends/backendsCommon/WorkloadFactory.cpp
@@ -902,6 +902,47 @@ bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId,
             break;
         }
+        case LayerType::StandIn:
+        {
+            auto cLayer = boost::polymorphic_downcast<const StandInLayer*>(&layer);
+
+            // Get vector of all inputs.
+            auto getTensorInfoIn = [&dataType](const InputSlot& slot)
+                {
+                    return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
+                };
+            auto getTensorInfoOut = [&dataType](const OutputSlot& slot)
+                {
+                    return OverrideDataType(slot.GetTensorInfo(), dataType);
+                };
+            auto beginI = boost::make_transform_iterator(layer.GetInputSlots().begin(), getTensorInfoIn);
+            auto endI = boost::make_transform_iterator(layer.GetInputSlots().end(), getTensorInfoIn);
+            std::vector<TensorInfo> inputs(beginI, endI);
+
+            auto beginO = boost::make_transform_iterator(layer.GetOutputSlots().begin(), getTensorInfoOut);
+            auto endO = boost::make_transform_iterator(layer.GetOutputSlots().end(), getTensorInfoOut);
+            std::vector<TensorInfo> outputs(beginO, endO);
+
+
+            auto getTensorInfoPtr = [](const TensorInfo& info)
+                {
+                    return &info;
+                };
+            auto beginPtrI = boost::make_transform_iterator(inputs.begin(), getTensorInfoPtr);
+            auto endPtrI = boost::make_transform_iterator(inputs.end(), getTensorInfoPtr);
+            std::vector<const TensorInfo*> inputPtrs(beginPtrI, endPtrI);
+
+            auto beginPtrO = boost::make_transform_iterator(outputs.begin(), getTensorInfoPtr);
+            auto endPtrO = boost::make_transform_iterator(outputs.end(), getTensorInfoPtr);
+            std::vector<const TensorInfo*> outputPtrs(beginPtrO, endPtrO);
+
+
+            result = layerSupportObject->IsStandInSupported(inputPtrs,
+                                                            outputPtrs,
+                                                            cLayer->GetParameters(),
+                                                            reason);
+            break;
+        }
         case LayerType::StridedSlice:
         {
             auto cLayer = boost::polymorphic_downcast<const StridedSliceLayer*>(&layer);
diff --git a/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp b/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp
index 9bddae9..c52d6a9 100644
--- a/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp
+++ b/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp
@@ -380,6 +380,25 @@ struct LayerTypePolicy<armnn::LayerType::name, DataType> \
 
 // Use this version for layers whose constructor takes 2 parameters(descriptor and name).
 #define DECLARE_LAYER_POLICY_2_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, armnn::name##Descriptor)
+
+#define DECLARE_LAYER_POLICY_EXCEPTION(name, descType) \
+template<armnn::DataType DataType> \
+struct LayerTypePolicy<armnn::LayerType::name, DataType> \
+{ \
+    using Type = armnn::name##Layer; \
+    using Desc = descType; \
+    constexpr static const char* NameStr = #name; \
+    \
+    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory *factory, \
+        unsigned int nIn, unsigned int nOut) \
+    { \
+        return std::unique_ptr<armnn::IWorkload>(); \
+    } \
+};
+
+#define DECLARE_LAYER_POLICY_EXCEPTION_1_PARAM(name) DECLARE_LAYER_POLICY_EXCEPTION(name, void)
+#define DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(name) DECLARE_LAYER_POLICY_EXCEPTION(name, armnn::name##Descriptor)
+
 // Layer policy template.
 template<armnn::LayerType Type, armnn::DataType DataType>
 struct LayerTypePolicy;
@@ -489,6 +508,8 @@ DECLARE_LAYER_POLICY_2_PARAM(Splitter)
 
 DECLARE_LAYER_POLICY_2_PARAM(Stack)
 
+DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(StandIn)
+
 DECLARE_LAYER_POLICY_2_PARAM(StridedSlice)
 
 DECLARE_LAYER_POLICY_1_PARAM(Subtraction)
-- 
2.7.4
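Usage note (not part of the patch): the sketch below shows how a client might exercise the new frontend API introduced above, using the existing Arm NN network-building calls (INetwork::Create, AddInputLayer, AddOutputLayer, IOutputSlot::Connect and SetTensorInfo) together with the new AddStandInLayer/StandInDescriptor additions. It is a minimal illustration, not a definitive example: the layer names, binding ids, tensor shape and data type are made up for the sketch. Because a StandInLayer cannot infer or validate shapes, the caller must set the output TensorInfo explicitly, and optimization will only succeed on a backend that substitutes the stand-in (for example with a precompiled layer); the reference backends report it as unsupported via LayerSupportBase::IsStandInSupported.

#include <armnn/ArmNN.hpp>   // aggregate Arm NN header (INetwork, Descriptors, Tensor, ...)

int main()
{
    using namespace armnn;

    INetworkPtr network = INetwork::Create();

    // One input and one output, matching the descriptor's tensor counts.
    StandInDescriptor standInDesc(/*numInputs=*/1, /*numOutputs=*/1);

    IConnectableLayer* input   = network->AddInputLayer(0, "input");              // binding id 0 is illustrative
    IConnectableLayer* standIn = network->AddStandInLayer(standInDesc, "unknown-op"); // name is illustrative
    IConnectableLayer* output  = network->AddOutputLayer(0, "output");

    // Wire up the graph: input -> stand-in -> output.
    input->GetOutputSlot(0).Connect(standIn->GetInputSlot(0));
    standIn->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    // Shapes cannot be inferred through a StandInLayer, so tensor info is set explicitly.
    // The shape and data type here are placeholders for whatever the unknown operation produces.
    TensorInfo tensorInfo(TensorShape({1, 16}), DataType::Float32);
    input->GetOutputSlot(0).SetTensorInfo(tensorInfo);
    standIn->GetOutputSlot(0).SetTensorInfo(tensorInfo);

    // Optimizing this network against CpuRef/CpuAcc/GpuAcc would fail at this point,
    // since no in-tree backend executes a StandInLayer directly.
    return 0;
}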