IVGCVSW-3932 Add frontend for INSTANCE_NORMALIZATION
author Kevin May <kevin.may@arm.com>
Wed, 2 Oct 2019 13:07:47 +0000 (14:07 +0100)
committer Kevin May <kevin.may@arm.com>
Thu, 3 Oct 2019 11:56:18 +0000 (11:56 +0000)
Signed-off-by: Kevin May <kevin.may@arm.com>
Change-Id: Ib152148ccd8d2733c617d0cf9402661fc6b71316

26 files changed:
Android.mk
CMakeLists.txt
include/armnn/Descriptors.hpp
include/armnn/DescriptorsFwd.hpp
include/armnn/ILayerSupport.hpp
include/armnn/ILayerVisitor.hpp
include/armnn/INetwork.hpp
include/armnn/LayerVisitorBase.hpp
src/armnn/InternalTypes.cpp
src/armnn/InternalTypes.hpp
src/armnn/LayersFwd.hpp
src/armnn/Network.cpp
src/armnn/Network.hpp
src/armnn/layers/InstanceNormalizationLayer.cpp [new file with mode: 0644]
src/armnn/layers/InstanceNormalizationLayer.hpp [new file with mode: 0644]
src/armnn/test/TestNameAndDescriptorLayerVisitor.cpp
src/armnn/test/TestNameAndDescriptorLayerVisitor.hpp
src/armnnSerializer/Serializer.cpp
src/armnnSerializer/Serializer.hpp
src/backends/backendsCommon/LayerSupportBase.cpp
src/backends/backendsCommon/LayerSupportBase.hpp
src/backends/backendsCommon/WorkloadData.cpp
src/backends/backendsCommon/WorkloadData.hpp
src/backends/backendsCommon/WorkloadFactory.cpp
src/backends/backendsCommon/WorkloadFactory.hpp
src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp

index 4c3789c..6bf9a50 100644 (file)
@@ -138,6 +138,7 @@ LOCAL_SRC_FILES := \
         src/armnn/layers/GatherLayer.cpp \
         src/armnn/layers/GreaterLayer.cpp \
         src/armnn/layers/InputLayer.cpp \
+        src/armnn/layers/InstanceNormalizationLayer.cpp \
         src/armnn/layers/L2NormalizationLayer.cpp \
         src/armnn/layers/LstmLayer.cpp \
         src/armnn/layers/MaximumLayer.cpp \
index 6ae352d..94da6bf 100644 (file)
@@ -280,6 +280,8 @@ list(APPEND armnn_sources
     src/armnn/layers/GreaterLayer.hpp
     src/armnn/layers/InputLayer.hpp
     src/armnn/layers/InputLayer.cpp
+    src/armnn/layers/InstanceNormalizationLayer.hpp
+    src/armnn/layers/InstanceNormalizationLayer.cpp
     src/armnn/layers/L2NormalizationLayer.hpp
     src/armnn/layers/L2NormalizationLayer.cpp
     src/armnn/layers/LstmLayer.cpp
index c973089..5bf4043 100644 (file)
@@ -468,6 +468,26 @@ struct BatchNormalizationDescriptor
     DataLayout m_DataLayout;
 };
 
+/// An InstanceNormalizationDescriptor for InstanceNormalizationLayer
+struct InstanceNormalizationDescriptor
+{
+    InstanceNormalizationDescriptor()
+        : m_Gamma(1.0f)
+        , m_Beta(0.0f)
+        , m_Eps(1e-12f)
+        , m_DataLayout(DataLayout::NCHW)
+    {}
+
+    /// Gamma, the scale scalar value applied for the normalized tensor. Defaults to 1.0.
+    float m_Gamma;
+    /// Beta, the offset scalar value applied for the normalized tensor. Defaults to 0.0.
+    float m_Beta;
+    /// Epsilon, small scalar value added to variance to avoid dividing by zero. Defaults to 1e-12f.
+    float m_Eps;
+    /// The data layout to be used (NCHW, NHWC).
+    DataLayout m_DataLayout;
+};
+
 /// A BatchToSpaceNdDescriptor for the BatchToSpaceNdLayer.
 struct BatchToSpaceNdDescriptor
 {
index e9624f1..2cc9582 100644 (file)
@@ -16,6 +16,7 @@ struct DepthwiseConvolution2dDescriptor;
 struct DetectionPostProcessDescriptor;
 struct FakeQuantizationDescriptor;
 struct FullyConnectedDescriptor;
+struct InstanceNormalizationDescriptor;
 struct L2NormalizationDescriptor;
 struct LstmDescriptor;
 struct MeanDescriptor;
index e18b86a..fef7595 100644 (file)
@@ -157,6 +157,12 @@ public:
     virtual bool IsInputSupported(const TensorInfo& input,
                                   Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const = 0;
 
+    virtual bool IsInstanceNormalizationSupported(
+        const TensorInfo& input,
+        const TensorInfo& output,
+        const InstanceNormalizationDescriptor& descriptor,
+        Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const = 0;
+
     virtual bool IsL2NormalizationSupported(const TensorInfo& input,
                                             const TensorInfo& output,
                                             const L2NormalizationDescriptor& descriptor,
index 486a13f..b9c96d5 100644 (file)
@@ -206,6 +206,14 @@ public:
                                  LayerBindingId id,
                                  const char* name = nullptr) = 0;
 
+    /// Function that an instance normalization layer should call back to when its Accept(ILayerVisitor&)
+    /// function is invoked.
+    /// @param layer - pointer to the layer which is calling back to this visit function.
+    /// @param desc - Parameters for the instance normalization operation.
+    /// @param name - Optional name for the layer.
+    virtual void VisitInstanceNormalizationLayer(const IConnectableLayer* layer,
+                                                 const InstanceNormalizationDescriptor& desc,
+                                                 const char* name = nullptr) = 0;
 
     /// Function that an L2 normalization layer should call back to when its Accept(ILayerVisitor&)
     /// function is invoked. Normalization is performed along dimension 1, but requires a 4d input.
index 0e0b99a..dc831db 100644 (file)
@@ -329,6 +329,13 @@ public:
     virtual IConnectableLayer* AddResizeLayer(const ResizeDescriptor& resizeDescriptor,
                                               const char* name = nullptr) = 0;
 
+    /// Adds an instance normalization layer to the network.
+    /// @param desc - Parameters for the instance normalization operation.
+    /// @param name - Optional name for the layer.
+    /// @return - Interface for configuring the layer.
+    virtual IConnectableLayer* AddInstanceNormalizationLayer(const InstanceNormalizationDescriptor& desc,
+                                                             const char* name = nullptr) = 0;
+
     /// Adds an L2 normalization layer to the network.
     /// Normalization is performed along dimension 1, but requires a 4d input.
     /// @param desc - Parameters for the L2 normalization operation.
index 65d2303..719e59d 100644 (file)
@@ -112,6 +112,10 @@ public:
                          LayerBindingId,
                          const char*) override { DefaultPolicy::Apply(__func__); }
 
+    void VisitInstanceNormalizationLayer(const IConnectableLayer*,
+                                         const InstanceNormalizationDescriptor&,
+                                         const char*) override { DefaultPolicy::Apply(__func__); }
+
     void VisitL2NormalizationLayer(const IConnectableLayer*,
                                    const L2NormalizationDescriptor&,
                                    const char*) override { DefaultPolicy::Apply(__func__); }
index e6f7367..612d00b 100644 (file)
@@ -38,6 +38,7 @@ char const* GetLayerTypeAsCString(LayerType type)
         case LayerType::Gather: return "Gather";
         case LayerType::Greater: return "Greater";
         case LayerType::Input: return "Input";
+        case LayerType::InstanceNormalization: return "InstanceNormalization";
         case LayerType::L2Normalization: return "L2Normalization";
         case LayerType::Lstm: return "Lstm";
         case LayerType::Maximum: return "Maximum";
index fbca9bc..039d0f8 100644 (file)
@@ -38,6 +38,7 @@ enum class LayerType
     Gather,
     Greater,
     Input,
+    InstanceNormalization,
     L2Normalization,
     Lstm,
     Maximum,
index 3599eac..1f539f3 100644 (file)
@@ -30,6 +30,7 @@
 #include "layers/GatherLayer.hpp"
 #include "layers/GreaterLayer.hpp"
 #include "layers/InputLayer.hpp"
+#include "layers/InstanceNormalizationLayer.hpp"
 #include "layers/L2NormalizationLayer.hpp"
 #include "layers/LstmLayer.hpp"
 #include "layers/MaximumLayer.hpp"
@@ -113,6 +114,7 @@ DECLARE_LAYER(FullyConnected)
 DECLARE_LAYER(Gather)
 DECLARE_LAYER(Greater)
 DECLARE_LAYER(Input)
+DECLARE_LAYER(InstanceNormalization)
 DECLARE_LAYER(L2Normalization)
 DECLARE_LAYER(Lstm)
 DECLARE_LAYER(Maximum)
index cf9a138..9d10b9a 100644 (file)
@@ -1224,6 +1224,12 @@ resizeDescriptor, const char* name)
     return m_Graph->AddLayer<ResizeLayer>(resizeDescriptor, name);
 }
 
+IConnectableLayer* Network::AddInstanceNormalizationLayer(const InstanceNormalizationDescriptor& desc,
+                                                          const char* name)
+{
+    return m_Graph->AddLayer<InstanceNormalizationLayer>(desc, name);
+}
+
 IConnectableLayer* Network::AddL2NormalizationLayer(const L2NormalizationDescriptor& desc,
                                                     const char* name)
 {
index 4a8bfbc..e11f3d2 100644 (file)
@@ -152,6 +152,9 @@ public:
     IConnectableLayer* AddResizeLayer(const ResizeDescriptor& resizeDescriptor,
                                       const char* name = nullptr) override;
 
+    IConnectableLayer* AddInstanceNormalizationLayer(const InstanceNormalizationDescriptor& desc,
+                                                     const char* name = nullptr) override;
+
     IConnectableLayer* AddL2NormalizationLayer(const L2NormalizationDescriptor& desc,
                                                const char* name = nullptr) override;
 
diff --git a/src/armnn/layers/InstanceNormalizationLayer.cpp b/src/armnn/layers/InstanceNormalizationLayer.cpp
new file mode 100644 (file)
index 0000000..fc3044a
--- /dev/null
@@ -0,0 +1,52 @@
+//
+// Copyright © 2019 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+#include "InstanceNormalizationLayer.hpp"
+
+#include "LayerCloneBase.hpp"
+
+#include <armnn/TypesUtils.hpp>
+#include <backendsCommon/WorkloadData.hpp>
+#include <backendsCommon/WorkloadFactory.hpp>
+
+namespace armnn
+{
+
+InstanceNormalizationLayer::InstanceNormalizationLayer(const InstanceNormalizationDescriptor& param, const char* name)
+    : LayerWithParameters(1, 1, LayerType::InstanceNormalization, param, name)
+{
+}
+
+std::unique_ptr<IWorkload> InstanceNormalizationLayer::CreateWorkload(const Graph& graph,
+    const IWorkloadFactory& factory) const
+{
+    InstanceNormalizationQueueDescriptor descriptor;
+    return factory.CreateInstanceNormalization(descriptor, PrepInfoAndDesc(descriptor, graph));
+}
+
+InstanceNormalizationLayer* InstanceNormalizationLayer::Clone(Graph& graph) const
+{
+    return CloneBase<InstanceNormalizationLayer>(graph, m_Param, GetName());
+}
+
+void InstanceNormalizationLayer::ValidateTensorShapesFromInputs()
+{
+    VerifyLayerConnections(1, CHECK_LOCATION());
+
+    auto inferredShapes = InferOutputShapes({ GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape() });
+
+    BOOST_ASSERT(inferredShapes.size() == 1);
+
+    ConditionalThrowIfNotEqual<LayerValidationException>(
+        "InstanceNormalizationLayer: TensorShape set on OutputSlot[0] does not match the inferred shape.",
+        GetOutputSlot(0).GetTensorInfo().GetShape(),
+        inferredShapes[0]);
+}
+
+void InstanceNormalizationLayer::Accept(ILayerVisitor& visitor) const
+{
+    visitor.VisitInstanceNormalizationLayer(this, GetParameters(), GetName());
+}
+
+} // namespace armnn
diff --git a/src/armnn/layers/InstanceNormalizationLayer.hpp b/src/armnn/layers/InstanceNormalizationLayer.hpp
new file mode 100644 (file)
index 0000000..9ba5673
--- /dev/null
@@ -0,0 +1,43 @@
+//
+// Copyright © 2019 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+#pragma once
+
+#include "LayerWithParameters.hpp"
+
+namespace armnn
+{
+
+/// This layer represents an instance normalization operation.
+class InstanceNormalizationLayer : public LayerWithParameters<InstanceNormalizationDescriptor>
+{
+public:
+    /// Makes a workload for the InstanceNormalization type.
+    /// @param [in] graph The graph where this layer can be found.
+    /// @param [in] factory The workload factory which will create the workload.
+    /// @return A pointer to the created workload, or nullptr if not created.
+    virtual std::unique_ptr<IWorkload> CreateWorkload(const Graph& graph,
+                                                      const IWorkloadFactory& factory) const override;
+
+    /// Creates a dynamically-allocated copy of this layer.
+    /// @param [in] graph The graph into which this layer is being cloned.
+    InstanceNormalizationLayer* Clone(Graph& graph) const override;
+
+    /// Check if the input tensor shape(s)
+    /// will lead to a valid configuration of @ref InstanceNormalizationLayer.
+    void ValidateTensorShapesFromInputs() override;
+
+    void Accept(ILayerVisitor& visitor) const override;
+
+protected:
+    /// Constructor to create a InstanceNormalizationLayer.
+    /// @param [in] param InstanceNormalizationDescriptor to configure the Instance normalization operation.
+    /// @param [in] name Optional name for the layer.
+    InstanceNormalizationLayer(const InstanceNormalizationDescriptor& param, const char* name);
+
+    /// Default destructor
+    ~InstanceNormalizationLayer() = default;
+};
+
+} // namespace
index 653612f..dcc5dc4 100644 (file)
@@ -282,6 +282,29 @@ BOOST_AUTO_TEST_CASE(CheckResizeLayerVisitorNameNullAndDescriptor)
     layer->Accept(visitor);
 }
 
+BOOST_AUTO_TEST_CASE(CheckInstanceNormalizationLayerVisitorNameAndDescriptor)
+{
+    const char* layerName = "InstanceNormalizationLayer";
+    InstanceNormalizationDescriptor descriptor;
+    descriptor.m_DataLayout = DataLayout::NHWC;
+    TestInstanceNormalizationLayerVisitor visitor(descriptor, layerName);
+    Network net;
+
+    IConnectableLayer *const layer = net.AddInstanceNormalizationLayer(descriptor, layerName);
+    layer->Accept(visitor);
+}
+
+BOOST_AUTO_TEST_CASE(CheckInstanceNormalizationLayerVisitorNameNullAndDescriptor)
+{
+    InstanceNormalizationDescriptor descriptor;
+    descriptor.m_DataLayout = DataLayout::NHWC;
+    TestInstanceNormalizationLayerVisitor visitor(descriptor);
+    Network net;
+
+    IConnectableLayer *const layer = net.AddInstanceNormalizationLayer(descriptor);
+    layer->Accept(visitor);
+}
+
 BOOST_AUTO_TEST_CASE(CheckL2NormalizationLayerVisitorNameAndDescriptor)
 {
     const char* layerName = "L2NormalizationLayer";
index f1936d6..aa0b359 100644 (file)
@@ -418,6 +418,40 @@ public:
     };
 };
 
+class TestInstanceNormalizationLayerVisitor : public TestLayerVisitor
+{
+private:
+    InstanceNormalizationDescriptor m_VisitorDescriptor;
+
+public:
+    explicit TestInstanceNormalizationLayerVisitor(const InstanceNormalizationDescriptor& desc,
+                                                   const char* name = nullptr)
+        : TestLayerVisitor(name)
+    {
+        m_VisitorDescriptor.m_Beta        = desc.m_Beta;
+        m_VisitorDescriptor.m_Gamma       = desc.m_Gamma;
+        m_VisitorDescriptor.m_Eps         = desc.m_Eps;
+        m_VisitorDescriptor.m_DataLayout  = desc.m_DataLayout;
+    };
+
+    void CheckDescriptor(const InstanceNormalizationDescriptor& desc)
+    {
+        BOOST_CHECK(desc.m_Beta       == m_VisitorDescriptor.m_Beta);
+        BOOST_CHECK(desc.m_Gamma      == m_VisitorDescriptor.m_Gamma);
+        BOOST_CHECK(desc.m_Eps        == m_VisitorDescriptor.m_Eps);
+        BOOST_CHECK(desc.m_DataLayout == m_VisitorDescriptor.m_DataLayout);
+    }
+
+    void VisitInstanceNormalizationLayer(const IConnectableLayer* layer,
+                                         const InstanceNormalizationDescriptor& desc,
+                                         const char* name = nullptr) override
+    {
+        CheckLayerPointer(layer);
+        CheckDescriptor(desc);
+        CheckLayerName(name);
+    };
+};
+
 class TestL2NormalizationLayerVisitor : public TestLayerVisitor
 {
 private:
index 99ba7e3..84a1b6b 100644 (file)
@@ -426,6 +426,14 @@ void SerializerVisitor::VisitGreaterLayer(const armnn::IConnectableLayer* layer,
     CreateAnyLayer(fbGreaterLayer.o, serializer::Layer::Layer_GreaterLayer);
 }
 
+void SerializerVisitor::VisitInstanceNormalizationLayer(
+    const armnn::IConnectableLayer* layer,
+    const armnn::InstanceNormalizationDescriptor& instanceNormalizationDescriptor,
+    const char* name)
+{
+    throw UnimplementedException("SerializerVisitor::VisitInstanceNormalizationLayer is not implemented");
+}
+
 void SerializerVisitor::VisitL2NormalizationLayer(const armnn::IConnectableLayer* layer,
                                                   const armnn::L2NormalizationDescriptor& l2NormalizationDescriptor,
                                                   const char* name)
index 429487d..f98bd17 100644 (file)
@@ -125,6 +125,10 @@ public:
                          armnn::LayerBindingId id,
                          const char* name = nullptr) override;
 
+    void VisitInstanceNormalizationLayer(const armnn::IConnectableLayer* layer,
+                                         const armnn::InstanceNormalizationDescriptor& instanceNormalizationDescriptor,
+                                         const char* name = nullptr) override;
+
     void VisitL2NormalizationLayer(const armnn::IConnectableLayer* layer,
                                    const armnn::L2NormalizationDescriptor& l2NormalizationDescriptor,
                                    const char* name = nullptr) override;
index 656407d..c41f0b1 100644 (file)
@@ -234,6 +234,14 @@ bool LayerSupportBase::IsInputSupported(const TensorInfo& input,
     return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
 }
 
+bool LayerSupportBase::IsInstanceNormalizationSupported(const TensorInfo& input,
+                                                        const TensorInfo& output,
+                                                        const InstanceNormalizationDescriptor& descriptor,
+                                                        Optional<std::string&> reasonIfUnsupported) const
+{
+    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
+}
+
 bool LayerSupportBase::IsL2NormalizationSupported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   const L2NormalizationDescriptor& descriptor,
index c3875e6..495870e 100644 (file)
@@ -141,6 +141,12 @@ public:
     bool IsInputSupported(const TensorInfo& input,
                           Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const override;
 
+    bool IsInstanceNormalizationSupported(
+        const TensorInfo& input,
+        const TensorInfo& output,
+        const InstanceNormalizationDescriptor& descriptor,
+        Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const override;
+
     bool IsL2NormalizationSupported(const TensorInfo& input,
                                     const TensorInfo& output,
                                     const L2NormalizationDescriptor& descriptor,
index e49fd09..aca5023 100644 (file)
@@ -1233,6 +1233,52 @@ void FakeQuantizationQueueDescriptor::Validate(const WorkloadInfo& workloadInfo)
     }
 }
 
+void InstanceNormalizationQueueDescriptor::Validate(const WorkloadInfo& workloadInfo) const
+{
+    const std::string descriptorName{"InstanceNormalizationQueueDescriptor"};
+
+    ValidateNumInputs(workloadInfo,  descriptorName, 1);
+    ValidateNumOutputs(workloadInfo, descriptorName, 1);
+
+    const TensorInfo& inputTensorInfo  = workloadInfo.m_InputTensorInfos[0];
+    const TensorInfo& outputTensorInfo = workloadInfo.m_OutputTensorInfos[0];
+
+    if (inputTensorInfo.GetNumDimensions() > 4)
+    {
+        throw InvalidArgumentException(descriptorName + ": Input tensors with rank greater than 4 are not supported.");
+    }
+
+    ValidateTensorShapesMatch(inputTensorInfo, outputTensorInfo, descriptorName, "input", "output");
+
+    // Check the supported data types
+    std::vector<DataType> supportedTypes =
+        {
+            DataType::Float32,
+            DataType::Float16
+        };
+
+    ValidateDataTypes(inputTensorInfo,  supportedTypes, descriptorName);
+    ValidateDataTypes(outputTensorInfo, supportedTypes, descriptorName);
+
+    ValidateTensorDataTypesMatch(inputTensorInfo, outputTensorInfo, descriptorName, "input", "output");
+
+    ValidatePointer(m_Beta,  descriptorName, "beta");
+    ValidatePointer(m_Eps,   descriptorName, "epsilon");
+    ValidatePointer(m_Gamma, descriptorName, "gamma");
+
+    const TensorInfo& beta     = m_Beta->GetTensorInfo();
+    const TensorInfo& epsilon  = m_Eps->GetTensorInfo();
+    const TensorInfo& gamma    = m_Gamma->GetTensorInfo();
+
+    ValidateTensorNumDimensions(beta,     descriptorName, 1, "beta");
+    ValidateTensorNumDimensions(epsilon,  descriptorName, 1, "epsilon");
+    ValidateTensorNumDimensions(gamma,    descriptorName, 1, "gamma");
+
+    ValidateTensorDataTypesMatch(inputTensorInfo, beta,     descriptorName, "input", "beta");
+    ValidateTensorDataTypesMatch(inputTensorInfo, epsilon,  descriptorName, "input", "epsilon");
+    ValidateTensorDataTypesMatch(inputTensorInfo, gamma,    descriptorName, "input", "gamma");
+}
+
 void L2NormalizationQueueDescriptor::Validate(const WorkloadInfo& workloadInfo) const
 {
     const std::string descriptorName{"L2NormalizationQueueDescriptor"};
index 177bfb7..14d7b58 100644 (file)
@@ -307,6 +307,21 @@ struct FakeQuantizationQueueDescriptor : QueueDescriptorWithParameters<FakeQuant
     void Validate(const WorkloadInfo& workloadInfo) const;
 };
 
+struct InstanceNormalizationQueueDescriptor : QueueDescriptorWithParameters<InstanceNormalizationDescriptor>
+{
+    InstanceNormalizationQueueDescriptor()
+        : m_Beta(nullptr)
+        , m_Eps(nullptr)
+        , m_Gamma(nullptr)
+    {
+    }
+
+    const ConstCpuTensorHandle* m_Beta;
+    const ConstCpuTensorHandle* m_Eps;
+    const ConstCpuTensorHandle* m_Gamma;
+    void Validate(const WorkloadInfo& workloadInfo) const;
+};
+
 struct L2NormalizationQueueDescriptor : QueueDescriptorWithParameters<L2NormalizationDescriptor>
 {
     void Validate(const WorkloadInfo& workloadInfo) const;
index 44888b3..98fe158 100644 (file)
@@ -371,6 +371,21 @@ bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId,
             result = layerSupportObject->IsInputSupported(OverrideDataType(input, dataType), reason);
             break;
         }
+        case LayerType::InstanceNormalization:
+        {
+            auto cLayer = boost::polymorphic_downcast<const InstanceNormalizationLayer*>(&layer);
+            const InstanceNormalizationDescriptor& descriptor = cLayer->GetParameters();
+
+            const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
+            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
+
+            result = layerSupportObject->IsInstanceNormalizationSupported(
+                OverrideDataType(input, dataType),
+                OverrideDataType(output, dataType),
+                descriptor,
+                reason);
+            break;
+        }
         case LayerType::L2Normalization:
         {
             auto cLayer = boost::polymorphic_downcast<const L2NormalizationLayer*>(&layer);
@@ -1139,6 +1154,13 @@ std::unique_ptr<IWorkload> IWorkloadFactory::CreateGreater(const GreaterQueueDes
     return std::unique_ptr<IWorkload>();
 }
 
+std::unique_ptr<IWorkload> IWorkloadFactory::CreateInstanceNormalization(
+    const InstanceNormalizationQueueDescriptor& descriptor,
+    const WorkloadInfo& info) const
+{
+    return std::unique_ptr<IWorkload>();
+}
+
 std::unique_ptr<IWorkload> IWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
                                                                    const WorkloadInfo& info) const
 {
index 2809e2f..9fa0221 100644 (file)
@@ -120,6 +120,10 @@ public:
     virtual std::unique_ptr<IWorkload> CreateGreater(const GreaterQueueDescriptor& descriptor,
                                                      const WorkloadInfo& info) const;
 
+    virtual std::unique_ptr<IWorkload> CreateInstanceNormalization(
+        const InstanceNormalizationQueueDescriptor& descriptor,
+        const WorkloadInfo& info) const;
+
     virtual std::unique_ptr<IWorkload> CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const;
 
index e492cd6..c860414 100644 (file)
@@ -435,6 +435,8 @@ DECLARE_LAYER_POLICY_1_PARAM(Greater)
 
 DECLARE_LAYER_POLICY_CUSTOM_PARAM(Input, armnn::LayerBindingId)
 
+DECLARE_LAYER_POLICY_2_PARAM(InstanceNormalization)
+
 DECLARE_LAYER_POLICY_2_PARAM(L2Normalization)
 
 DECLARE_LAYER_POLICY_2_PARAM(Lstm)