IVGCVSW-3973 Add frontend for LOG_SOFTMAX
author    Aron Virginas-Tar <Aron.Virginas-Tar@arm.com>
Fri, 11 Oct 2019 13:07:53 +0000 (14:07 +0100)
committer Áron Virginás-Tar <aron.virginas-tar@arm.com>
Fri, 11 Oct 2019 14:36:50 +0000 (14:36 +0000)
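
Add the frontend plumbing for a LogSoftmax layer: the LogSoftmaxDescriptor
alias, the LogSoftmaxLayer implementation, ILayerSupport / ILayerVisitor /
INetwork hooks, serializer and backend stubs, workload validation, and
name-and-descriptor visitor unit tests.
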
Signed-off-by: Aron Virginas-Tar <Aron.Virginas-Tar@arm.com>
Change-Id: Ic6acc7176deea3753b32ce6340f642d19dce0e9f

26 files changed:
Android.mk
CMakeLists.txt
include/armnn/Descriptors.hpp
include/armnn/DescriptorsFwd.hpp
include/armnn/ILayerSupport.hpp
include/armnn/ILayerVisitor.hpp
include/armnn/INetwork.hpp
include/armnn/LayerVisitorBase.hpp
src/armnn/InternalTypes.cpp
src/armnn/InternalTypes.hpp
src/armnn/LayersFwd.hpp
src/armnn/Network.cpp
src/armnn/Network.hpp
src/armnn/layers/LogSoftmaxLayer.cpp [new file with mode: 0644]
src/armnn/layers/LogSoftmaxLayer.hpp [new file with mode: 0644]
src/armnn/test/TestNameAndDescriptorLayerVisitor.cpp
src/armnn/test/TestNameAndDescriptorLayerVisitor.hpp
src/armnnSerializer/Serializer.cpp
src/armnnSerializer/Serializer.hpp
src/backends/backendsCommon/LayerSupportBase.cpp
src/backends/backendsCommon/LayerSupportBase.hpp
src/backends/backendsCommon/WorkloadData.cpp
src/backends/backendsCommon/WorkloadData.hpp
src/backends/backendsCommon/WorkloadFactory.cpp
src/backends/backendsCommon/WorkloadFactory.hpp
src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp

diff --git a/Android.mk b/Android.mk
index 108e01107adefd551bfea92829c2ddb9704fd881..c27b707f1211dc0703a1f0e517993fa23aacef8b 100644
@@ -140,6 +140,7 @@ LOCAL_SRC_FILES := \
         src/armnn/layers/InputLayer.cpp \
         src/armnn/layers/InstanceNormalizationLayer.cpp \
         src/armnn/layers/L2NormalizationLayer.cpp \
+        src/armnn/layers/LogSoftmaxLayer.cpp \
         src/armnn/layers/LstmLayer.cpp \
         src/armnn/layers/MaximumLayer.cpp \
         src/armnn/layers/MeanLayer.cpp \
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 043064349405ccb44afca6b7ad35abc1db42e40c..e69d29c5aa2b81ef7ae3204521eed479075a05dc 100644
@@ -284,6 +284,8 @@ list(APPEND armnn_sources
     src/armnn/layers/InstanceNormalizationLayer.cpp
     src/armnn/layers/L2NormalizationLayer.hpp
     src/armnn/layers/L2NormalizationLayer.cpp
+    src/armnn/layers/LogSoftmaxLayer.hpp
+    src/armnn/layers/LogSoftmaxLayer.cpp
     src/armnn/layers/LstmLayer.cpp
     src/armnn/layers/LstmLayer.hpp
     src/armnn/layers/MaximumLayer.cpp
diff --git a/include/armnn/Descriptors.hpp b/include/armnn/Descriptors.hpp
index 5bf4043afa62efce99a28480c914feba8c53e3a4..e2e59741a3faee62a60cd21541fa47cd59820c08 100644
@@ -74,6 +74,9 @@ struct SoftmaxDescriptor
     int m_Axis;
 };
 
+/// A LogSoftmaxDescriptor for the LogSoftmaxLayer
+using LogSoftmaxDescriptor = SoftmaxDescriptor;
+
 /// @brief An OriginsDescriptor for the ConcatLayer.
 /// Descriptor to configure the concatenation process. Number of views must be equal to the number of inputs, and
 /// their order must match - e.g. first view corresponds to the first input, second view to the second input, etc.
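
Since LogSoftmaxDescriptor is an alias of SoftmaxDescriptor, the new layer is
parameterized by the same m_Beta and m_Axis fields: it computes
log(softmax(beta * x)) along m_Axis, i.e.
log_softmax(x_i) = beta*x_i - log(sum_j exp(beta*x_j)). A minimal standalone
sketch of the numerically stable one-dimensional form (illustrative code only,
not part of this change):

    #include <algorithm>
    #include <cmath>
    #include <cstddef>
    #include <vector>

    // log_softmax(x_i) = beta*x_i - log(sum_j exp(beta*x_j)), computed
    // stably by factoring the maximum out of the log-sum-exp term.
    std::vector<float> LogSoftmax1D(const std::vector<float>& x, float beta)
    {
        const float maxVal = *std::max_element(x.begin(), x.end());

        float sum = 0.0f;
        for (float v : x)
        {
            sum += std::exp(beta * (v - maxVal));
        }
        const float logSumExp = beta * maxVal + std::log(sum);

        std::vector<float> result(x.size());
        for (std::size_t i = 0; i < x.size(); ++i)
        {
            result[i] = beta * x[i] - logSumExp;
        }
        return result;
    }

Factoring out the maximum keeps the exponentials bounded, which matters in
particular for Float16 inputs.
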
diff --git a/include/armnn/DescriptorsFwd.hpp b/include/armnn/DescriptorsFwd.hpp
index 2cc95828e60ba31e1b838f16c5dd1fd23f43e31b..6f1c0e0a6e2cb4285dd7f4b28445b4a79b4d0115 100644
@@ -7,6 +7,7 @@
 
 namespace armnn
 {
+
 struct ActivationDescriptor;
 struct ArgMinMaxDescriptor;
 struct BatchNormalizationDescriptor;
@@ -38,10 +39,11 @@ struct StridedSliceDescriptor;
 struct TransposeConvolution2dDescriptor;
 struct ViewsDescriptor;
 
+using ConcatDescriptor       = OriginsDescriptor;
 using DepthToSpaceDescriptor = SpaceToDepthDescriptor;
+using LogSoftmaxDescriptor   = SoftmaxDescriptor;
+// MergerDescriptor is deprecated, use ConcatDescriptor instead
+using MergerDescriptor       = OriginsDescriptor;
+using SplitterDescriptor     = ViewsDescriptor;
 
-// MergerDescriptor is deprecated use ConcatDescriptor instead
-using MergerDescriptor = OriginsDescriptor;
-using ConcatDescriptor = OriginsDescriptor;
-using SplitterDescriptor = ViewsDescriptor;
-}
+} // namespace armnn
diff --git a/include/armnn/ILayerSupport.hpp b/include/armnn/ILayerSupport.hpp
index fef7595b542caa9d62c0751e17e6db57ea8e35c8..31b5e134e96135047f6db05def917d598bfc9744 100644
@@ -168,6 +168,11 @@ public:
                                             const L2NormalizationDescriptor& descriptor,
                                             Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const = 0;
 
+    virtual bool IsLogSoftmaxSupported(const TensorInfo& input,
+                                       const TensorInfo& output,
+                                       const LogSoftmaxDescriptor& descriptor,
+                                       Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const = 0;
+
     virtual bool IsLstmSupported(const TensorInfo& input,
                                  const TensorInfo& outputStateIn,
                                  const TensorInfo& cellStateIn,
diff --git a/include/armnn/ILayerVisitor.hpp b/include/armnn/ILayerVisitor.hpp
index b9c96d544845441837520a070149aa6cb037c47d..e99e10f800ef929b63b4f3454e81af10f4b3d038 100644
@@ -224,6 +224,14 @@ public:
                                            const L2NormalizationDescriptor& desc,
                                            const char* name = nullptr) = 0;
 
+    /// Function that a log softmax layer should call back to when its Accept(ILayerVisitor&) function is invoked.
+    /// @param layer - pointer to the layer which is calling back to this visit function.
+    /// @param logSoftmaxDescriptor - LogSoftmaxDescriptor to configure the log softmax.
+    /// @param name - Optional name for the layer.
+    virtual void VisitLogSoftmaxLayer(const IConnectableLayer* layer,
+                                      const LogSoftmaxDescriptor& logSoftmaxDescriptor,
+                                      const char* name = nullptr) = 0;
+
     /// Function an Lstm layer should call back to when its Accept(ILayerVisitor&) function is invoked.
     /// @param layer - pointer to the layer which is calling back to this visit function.
     /// @param descriptor - Parameters controlling the operation of the Lstm operation.
diff --git a/include/armnn/INetwork.hpp b/include/armnn/INetwork.hpp
index dc831db86425282140e41438d14c56b256a2b6dc..d12f5c239c713c6a56b9d0f1e49339cba25ccbaf 100644
@@ -344,6 +344,13 @@ public:
     virtual IConnectableLayer* AddL2NormalizationLayer(const L2NormalizationDescriptor& desc,
                                                        const char* name = nullptr) = 0;
 
+    /// Adds a log softmax layer to the network.
+    /// @param logSoftmaxDescriptor - LogSoftmaxDescriptor to configure the log softmax.
+    /// @param name - Optional name for the layer.
+    /// @return - Interface for configuring the layer.
+    virtual IConnectableLayer* AddLogSoftmaxLayer(const LogSoftmaxDescriptor& logSoftmaxDescriptor,
+                                                  const char* name = nullptr) = 0;
+
     /// Adds a layer with no inputs and a single output, which always corresponds to
     /// the passed in constant tensor.
     /// @param input - Tensor to be provided as the only output of the layer. The layer will maintain
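
A minimal caller-side sketch of the new INetwork entry point, assuming the
usual network-building workflow (shapes and layer names are illustrative):

    #include <armnn/ArmNN.hpp>

    int main()
    {
        armnn::INetworkPtr network = armnn::INetwork::Create();

        armnn::LogSoftmaxDescriptor descriptor;
        descriptor.m_Beta = 1.0f; // scale applied to the input
        descriptor.m_Axis = 1;    // axis along which log softmax is computed

        armnn::IConnectableLayer* input      = network->AddInputLayer(0, "input");
        armnn::IConnectableLayer* logSoftmax = network->AddLogSoftmaxLayer(descriptor, "logSoftmax");
        armnn::IConnectableLayer* output     = network->AddOutputLayer(0, "output");

        input->GetOutputSlot(0).Connect(logSoftmax->GetInputSlot(0));
        logSoftmax->GetOutputSlot(0).Connect(output->GetInputSlot(0));

        // LogSoftmax preserves the input shape, so both slots carry the same info.
        armnn::TensorInfo info({ 1, 10 }, armnn::DataType::Float32);
        input->GetOutputSlot(0).SetTensorInfo(info);
        logSoftmax->GetOutputSlot(0).SetTensorInfo(info);

        return 0;
    }
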
diff --git a/include/armnn/LayerVisitorBase.hpp b/include/armnn/LayerVisitorBase.hpp
index 719e59d39c8cc98af493445059b6beff9307eb07..912f25500c94541ae0040f55a28068af7b86e0dc 100644
@@ -120,6 +120,10 @@ public:
                                    const L2NormalizationDescriptor&,
                                    const char*) override { DefaultPolicy::Apply(__func__); }
 
+    void VisitLogSoftmaxLayer(const IConnectableLayer*,
+                              const LogSoftmaxDescriptor&,
+                              const char*) override { DefaultPolicy::Apply(__func__); }
+
     void VisitLstmLayer(const IConnectableLayer*,
                         const LstmDescriptor&,
                         const LstmInputParams&,
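
Client visitors only need to override the layer types they care about; the
rest fall through to DefaultPolicy. A sketch of a reporting visitor, assuming
the VisitorNoThrowPolicy that accompanies LayerVisitorBase (whose Apply is a
no-op):

    #include <armnn/LayerVisitorBase.hpp>

    #include <iostream>

    // Prints every LogSoftmax layer it visits; all other layer types fall
    // through to VisitorNoThrowPolicy::Apply, which does nothing.
    class LogSoftmaxReporter : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
    {
    public:
        void VisitLogSoftmaxLayer(const armnn::IConnectableLayer* /*layer*/,
                                  const armnn::LogSoftmaxDescriptor& descriptor,
                                  const char* name = nullptr) override
        {
            std::cout << "LogSoftmax '" << (name ? name : "<unnamed>")
                      << "' beta=" << descriptor.m_Beta
                      << " axis=" << descriptor.m_Axis << std::endl;
        }
    };
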
diff --git a/src/armnn/InternalTypes.cpp b/src/armnn/InternalTypes.cpp
index 612d00be5ff3ab4efe4c00e659b16452035d935b..7c39128becf119f0d25bc2d8f1167b2c23189cf9 100644
@@ -40,6 +40,7 @@ char const* GetLayerTypeAsCString(LayerType type)
         case LayerType::Input: return "Input";
         case LayerType::InstanceNormalization: return "InstanceNormalization";
         case LayerType::L2Normalization: return "L2Normalization";
+        case LayerType::LogSoftmax: return "LogSoftmax";
         case LayerType::Lstm: return "Lstm";
         case LayerType::Maximum: return "Maximum";
         case LayerType::Mean: return "Mean";
diff --git a/src/armnn/InternalTypes.hpp b/src/armnn/InternalTypes.hpp
index 039d0f8ac812e13b61fb6634fc3a8d650bed19ba..895fe3235db993d5e4c58673fa944e40713c86b7 100644
@@ -40,6 +40,7 @@ enum class LayerType
     Input,
     InstanceNormalization,
     L2Normalization,
+    LogSoftmax,
     Lstm,
     Maximum,
     Mean,
diff --git a/src/armnn/LayersFwd.hpp b/src/armnn/LayersFwd.hpp
index 1f539f3076159d3c451c969a867102b2829200c2..7bb9c648182dc66b489ffb42986af3bfb4f1ad5b 100644
@@ -32,6 +32,7 @@
 #include "layers/InputLayer.hpp"
 #include "layers/InstanceNormalizationLayer.hpp"
 #include "layers/L2NormalizationLayer.hpp"
+#include "layers/LogSoftmaxLayer.hpp"
 #include "layers/LstmLayer.hpp"
 #include "layers/MaximumLayer.hpp"
 #include "layers/MeanLayer.hpp"
@@ -116,6 +117,7 @@ DECLARE_LAYER(Greater)
 DECLARE_LAYER(Input)
 DECLARE_LAYER(InstanceNormalization)
 DECLARE_LAYER(L2Normalization)
+DECLARE_LAYER(LogSoftmax)
 DECLARE_LAYER(Lstm)
 DECLARE_LAYER(Maximum)
 DECLARE_LAYER(Mean)
diff --git a/src/armnn/Network.cpp b/src/armnn/Network.cpp
index 9d10b9ace1794d09058818887633d7724a4d0539..b2fc1a6389da8ccaddc8a4c23121341ad2cafe93 100644
@@ -1236,6 +1236,12 @@ IConnectableLayer* Network::AddL2NormalizationLayer(const L2NormalizationDescrip
     return m_Graph->AddLayer<L2NormalizationLayer>(desc, name);
 }
 
+IConnectableLayer* Network::AddLogSoftmaxLayer(const LogSoftmaxDescriptor& desc,
+                                               const char* name)
+{
+    return m_Graph->AddLayer<LogSoftmaxLayer>(desc, name);
+}
+
 IConnectableLayer* Network::AddConstantLayer(const ConstTensor& input, const char* name)
 {
     auto layer = m_Graph->AddLayer<ConstantLayer>(name);
diff --git a/src/armnn/Network.hpp b/src/armnn/Network.hpp
index e11f3d21854a0e3b80572290cdb854adde511192..ad1e7c456e7a8623e235517545f828a9b42e7127 100644
@@ -158,6 +158,9 @@ public:
     IConnectableLayer* AddL2NormalizationLayer(const L2NormalizationDescriptor& desc,
                                                const char* name = nullptr) override;
 
+    IConnectableLayer* AddLogSoftmaxLayer(const LogSoftmaxDescriptor& logSoftmaxDescriptor,
+                                          const char* name = nullptr) override;
+
     IConnectableLayer* AddConstantLayer(const ConstTensor& input, const char* name = nullptr) override;
 
     IConnectableLayer* AddReshapeLayer(const ReshapeDescriptor& reshapeDescriptor,
diff --git a/src/armnn/layers/LogSoftmaxLayer.cpp b/src/armnn/layers/LogSoftmaxLayer.cpp
new file mode 100644
index 0000000..6ca15b2
--- /dev/null
+++ b/src/armnn/layers/LogSoftmaxLayer.cpp
@@ -0,0 +1,50 @@
+//
+// Copyright © 2019 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#include "LogSoftmaxLayer.hpp"
+
+#include "LayerCloneBase.hpp"
+
+#include <armnn/TypesUtils.hpp>
+
+#include <backendsCommon/WorkloadData.hpp>
+#include <backendsCommon/WorkloadFactory.hpp>
+
+namespace armnn
+{
+
+LogSoftmaxLayer::LogSoftmaxLayer(const LogSoftmaxDescriptor& param, const char* name)
+    : LayerWithParameters(1, 1, LayerType::LogSoftmax, param, name) {}
+
+std::unique_ptr<IWorkload> LogSoftmaxLayer::CreateWorkload(const Graph& graph, const IWorkloadFactory& factory) const
+{
+    LogSoftmaxQueueDescriptor descriptor;
+    return factory.CreateLogSoftmax(descriptor, PrepInfoAndDesc(descriptor, graph));
+}
+
+LogSoftmaxLayer* LogSoftmaxLayer::Clone(Graph& graph) const
+{
+    return CloneBase<LogSoftmaxLayer>(graph, m_Param, GetName());
+}
+
+void LogSoftmaxLayer::ValidateTensorShapesFromInputs()
+{
+    VerifyLayerConnections(1, CHECK_LOCATION());
+
+    auto inferredShapes = InferOutputShapes({ GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape() });
+    BOOST_ASSERT(inferredShapes.size() == 1);
+
+    ConditionalThrowIfNotEqual<LayerValidationException>(
+        "LogSoftmaxLayer: TensorShape set on OutputSlot[0] does not match the inferred shape.",
+        GetOutputSlot(0).GetTensorInfo().GetShape(),
+        inferredShapes[0]);
+}
+
+void LogSoftmaxLayer::Accept(ILayerVisitor& visitor) const
+{
+    visitor.VisitLogSoftmaxLayer(this, GetParameters(), GetName());
+}
+
+} // namespace armnn
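
LogSoftmax is shape preserving, so ValidateTensorShapesFromInputs above leans
on the base-class InferOutputShapes (not overridden here), which passes the
input shape through unchanged.
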
diff --git a/src/armnn/layers/LogSoftmaxLayer.hpp b/src/armnn/layers/LogSoftmaxLayer.hpp
new file mode 100644
index 0000000..13da542
--- /dev/null
+++ b/src/armnn/layers/LogSoftmaxLayer.hpp
@@ -0,0 +1,44 @@
+//
+// Copyright © 2019 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#pragma once
+
+#include "LayerWithParameters.hpp"
+
+namespace armnn
+{
+
+/// This layer represents a log softmax operation.
+class LogSoftmaxLayer : public LayerWithParameters<LogSoftmaxDescriptor>
+{
+public:
+    /// Makes a workload for the LogSoftmax type.
+    /// @param [in] graph The graph where this layer can be found.
+    /// @param [in] factory The workload factory which will create the workload.
+    /// @return A pointer to the created workload, or nullptr if not created.
+    virtual std::unique_ptr<IWorkload> CreateWorkload(const Graph&            graph,
+                                                      const IWorkloadFactory& factory) const override;
+
+    /// Creates a dynamically-allocated copy of this layer.
+    /// @param [in] graph The graph into which this layer is being cloned.
+    LogSoftmaxLayer* Clone(Graph& graph) const override;
+
+    /// Check if the input tensor shape(s)
+    /// will lead to a valid configuration of @ref LogSoftmaxLayer.
+    void ValidateTensorShapesFromInputs() override;
+
+    void Accept(ILayerVisitor& visitor) const override;
+
+protected:
+    /// Constructor to create a LogSoftmaxLayer.
+    /// @param [in] param LogSoftmaxDescriptor to configure the log softmax operation.
+    /// @param [in] name Optional name for the layer.
+    LogSoftmaxLayer(const LogSoftmaxDescriptor& param, const char* name);
+
+    /// Default destructor
+    ~LogSoftmaxLayer() = default;
+};
+
+} // namespace armnn
diff --git a/src/armnn/test/TestNameAndDescriptorLayerVisitor.cpp b/src/armnn/test/TestNameAndDescriptorLayerVisitor.cpp
index dcc5dc4cfbf26cddd58b674e2a5af4b29a2732a4..e2bfb0173377a742c347fd6384582b45f3a70ab7 100644
@@ -328,6 +328,34 @@ BOOST_AUTO_TEST_CASE(CheckL2NormalizationLayerVisitorNameNullAndDescriptor)
     layer->Accept(visitor);
 }
 
+BOOST_AUTO_TEST_CASE(CheckLogSoftmaxLayerVisitorNameAndDescriptor)
+{
+    const char* layerName = "LogSoftmaxLayer";
+
+    LogSoftmaxDescriptor descriptor;
+    descriptor.m_Beta = 2.0f;
+    descriptor.m_Axis = 1;
+
+    TestLogSoftmaxLayerVisitor visitor(descriptor, layerName);
+    Network net;
+
+    IConnectableLayer *const layer = net.AddLogSoftmaxLayer(descriptor, layerName);
+    layer->Accept(visitor);
+}
+
+BOOST_AUTO_TEST_CASE(CheckLogSoftmaxLayerVisitorNameNullAndDescriptor)
+{
+    LogSoftmaxDescriptor descriptor;
+    descriptor.m_Beta = 2.0f;
+    descriptor.m_Axis = 1;
+
+    TestLogSoftmaxLayerVisitor visitor(descriptor);
+    Network net;
+
+    IConnectableLayer *const layer = net.AddLogSoftmaxLayer(descriptor);
+    layer->Accept(visitor);
+}
+
 BOOST_AUTO_TEST_CASE(CheckReshapeLayerVisitorNameAndDescriptor)
 {
     const char* layerName = "ReshapeLayer";
diff --git a/src/armnn/test/TestNameAndDescriptorLayerVisitor.hpp b/src/armnn/test/TestNameAndDescriptorLayerVisitor.hpp
index aa0b3597fa8914e46d9aa8cc1cf91c951d454543..e46aa34e29d73489802e1cb426ce30ec33f0c986 100644
@@ -479,6 +479,32 @@ public:
     };
 };
 
+class TestLogSoftmaxLayerVisitor : public TestLayerVisitor
+{
+private:
+    LogSoftmaxDescriptor m_VisitorDescriptor;
+
+public:
+    explicit TestLogSoftmaxLayerVisitor(const LogSoftmaxDescriptor& descriptor, const char* name = nullptr)
+        : TestLayerVisitor(name)
+        , m_VisitorDescriptor(descriptor) {}
+
+    void CheckDescriptor(const LogSoftmaxDescriptor& descriptor)
+    {
+        BOOST_CHECK_EQUAL(descriptor.m_Beta, m_VisitorDescriptor.m_Beta);
+        BOOST_CHECK_EQUAL(descriptor.m_Axis, m_VisitorDescriptor.m_Axis);
+    }
+
+    void VisitLogSoftmaxLayer(const IConnectableLayer* layer,
+                              const LogSoftmaxDescriptor& descriptor,
+                              const char* name = nullptr) override
+    {
+        CheckLayerPointer(layer);
+        CheckDescriptor(descriptor);
+        CheckLayerName(name);
+    };
+};
+
 class TestReshapeLayerVisitor : public TestLayerVisitor
 {
 private:
diff --git a/src/armnnSerializer/Serializer.cpp b/src/armnnSerializer/Serializer.cpp
index 5949d1d9fa55fa83c7b856f988b95fa6b90955ff..0e8c894e46f4d08494b245a3da315690337a81f1 100644
@@ -463,8 +463,17 @@ void SerializerVisitor::VisitL2NormalizationLayer(const armnn::IConnectableLayer
     CreateAnyLayer(fbLayer.o, serializer::Layer::Layer_L2NormalizationLayer);
 }
 
-void SerializerVisitor::VisitLstmLayer(const armnn::IConnectableLayer* layer, const armnn::LstmDescriptor& descriptor,
-                                       const armnn::LstmInputParams& params, const char* name)
+void SerializerVisitor::VisitLogSoftmaxLayer(const armnn::IConnectableLayer* layer,
+                                             const armnn::LogSoftmaxDescriptor& logSoftmaxDescriptor,
+                                             const char* name)
+{
+    throw armnn::UnimplementedException("SerializerVisitor::VisitLogSoftmaxLayer() is not implemented");
+}
+
+void SerializerVisitor::VisitLstmLayer(const armnn::IConnectableLayer* layer,
+                                       const armnn::LstmDescriptor& descriptor,
+                                       const armnn::LstmInputParams& params,
+                                       const char* name)
 {
     auto fbLstmBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Lstm);
 
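The serializer keeps the new layer as an explicit stub: VisitLogSoftmaxLayer
throws UnimplementedException, so serializing a network that contains a
LogSoftmax layer fails loudly rather than silently dropping it.
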
diff --git a/src/armnnSerializer/Serializer.hpp b/src/armnnSerializer/Serializer.hpp
index f98bd178959d6f0bf24299d2194e964274caef18..8c13245aebd7e86766e03c7607e8342557928c2d 100644
@@ -133,6 +133,10 @@ public:
                                    const armnn::L2NormalizationDescriptor& l2NormalizationDescriptor,
                                    const char* name = nullptr) override;
 
+    void VisitLogSoftmaxLayer(const armnn::IConnectableLayer* layer,
+                              const armnn::LogSoftmaxDescriptor& logSoftmaxDescriptor,
+                              const char* name = nullptr) override;
+
     void VisitLstmLayer(const armnn::IConnectableLayer* layer,
                         const armnn::LstmDescriptor& descriptor,
                         const armnn::LstmInputParams& params,
diff --git a/src/backends/backendsCommon/LayerSupportBase.cpp b/src/backends/backendsCommon/LayerSupportBase.cpp
index c41f0b11eae459bfe4e5f560f775118d9c1c18e5..7d5555ce68342fead9403a99f26c5ae1edf3a61b 100644
@@ -250,6 +250,14 @@ bool LayerSupportBase::IsL2NormalizationSupported(const TensorInfo& input,
     return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
 }
 
+bool LayerSupportBase::IsLogSoftmaxSupported(const TensorInfo& input,
+                                             const TensorInfo& output,
+                                             const LogSoftmaxDescriptor& descriptor,
+                                             Optional<std::string&> reasonIfUnsupported) const
+{
+    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
+}
+
 bool LayerSupportBase::IsLstmSupported(const TensorInfo& input,
                                        const TensorInfo& outputStateIn,
                                        const TensorInfo& cellStateIn,
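
DefaultLayerSupport stores a "not implemented" reason and returns false, so
every backend rejects LogSoftmax until it overrides the new query. A
hypothetical backend override (MyLayerSupport and its data-type policy are
illustrative, not part of this change):

    #include <backendsCommon/LayerSupportBase.hpp>

    class MyLayerSupport : public armnn::LayerSupportBase
    {
    public:
        bool IsLogSoftmaxSupported(const armnn::TensorInfo& input,
                                   const armnn::TensorInfo& output,
                                   const armnn::LogSoftmaxDescriptor& /*descriptor*/,
                                   armnn::Optional<std::string&> reasonIfUnsupported) const override
        {
            // Accept only Float32 in this sketch; a real backend would mirror
            // the types its workload implementation actually handles.
            const bool supported = input.GetDataType()  == armnn::DataType::Float32 &&
                                   output.GetDataType() == armnn::DataType::Float32;
            if (!supported && reasonIfUnsupported.has_value())
            {
                reasonIfUnsupported.value() = "LogSoftmax: only Float32 supported";
            }
            return supported;
        }
    };
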
diff --git a/src/backends/backendsCommon/LayerSupportBase.hpp b/src/backends/backendsCommon/LayerSupportBase.hpp
index 495870e6451576ee70fef366c096121dde836651..cb660f5c2b839a5b4db72da731088e2258545b22 100644
@@ -152,6 +152,11 @@ public:
                                     const L2NormalizationDescriptor& descriptor,
                                     Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const override;
 
+    bool IsLogSoftmaxSupported(const TensorInfo& input,
+                               const TensorInfo& output,
+                               const LogSoftmaxDescriptor& descriptor,
+                               Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const override;
+
     bool IsLstmSupported(const TensorInfo& input,
                          const TensorInfo& outputStateIn,
                          const TensorInfo& cellStateIn,
diff --git a/src/backends/backendsCommon/WorkloadData.cpp b/src/backends/backendsCommon/WorkloadData.cpp
index ea0e5c82b803f6acc482c3bb8e74e0621c90e472..b8d4f0dfff0f5da8b12523e2b0ff7e47e1dc7f2a 100644
@@ -1294,8 +1294,6 @@ void InstanceNormalizationQueueDescriptor::Validate(const WorkloadInfo& workload
         };
 
     ValidateDataTypes(inputTensorInfo,  supportedTypes, descriptorName);
-    ValidateDataTypes(outputTensorInfo, supportedTypes, descriptorName);
-
     ValidateTensorDataTypesMatch(inputTensorInfo, outputTensorInfo, descriptorName, "input", "output");
 }
 
@@ -1326,8 +1324,28 @@ void L2NormalizationQueueDescriptor::Validate(const WorkloadInfo& workloadInfo)
     };
 
     ValidateDataTypes(inputTensorInfo,  supportedTypes, descriptorName);
-    ValidateDataTypes(outputTensorInfo, supportedTypes, descriptorName);
+    ValidateTensorDataTypesMatch(inputTensorInfo, outputTensorInfo, descriptorName, "input", "output");
+}
+
+void LogSoftmaxQueueDescriptor::Validate(const WorkloadInfo& workloadInfo) const
+{
+    const std::string descriptorName{"LogSoftmaxQueueDescriptor"};
+
+    ValidateNumInputs(workloadInfo,  descriptorName, 1);
+    ValidateNumOutputs(workloadInfo, descriptorName, 1);
+
+    const TensorInfo& inputTensorInfo  = workloadInfo.m_InputTensorInfos[0];
+    const TensorInfo& outputTensorInfo = workloadInfo.m_OutputTensorInfos[0];
+
+    ValidateTensorShapesMatch(inputTensorInfo, outputTensorInfo, descriptorName, "input", "output");
 
+    std::vector<DataType> supportedTypes =
+    {
+        DataType::Float32,
+        DataType::Float16,
+    };
+
+    ValidateDataTypes(inputTensorInfo,  supportedTypes, descriptorName);
     ValidateTensorDataTypesMatch(inputTensorInfo, outputTensorInfo, descriptorName, "input", "output");
 }
 
diff --git a/src/backends/backendsCommon/WorkloadData.hpp b/src/backends/backendsCommon/WorkloadData.hpp
index 1bf3aa750990c8e7c3bddfe4f7c1e767c64207fe..5a3600fc71ad397e553ff6555d9c0bfc9a151d19 100644
@@ -317,6 +317,11 @@ struct L2NormalizationQueueDescriptor : QueueDescriptorWithParameters<L2Normaliz
     void Validate(const WorkloadInfo& workloadInfo) const;
 };
 
+struct LogSoftmaxQueueDescriptor : QueueDescriptorWithParameters<LogSoftmaxDescriptor>
+{
+    void Validate(const WorkloadInfo& workloadInfo) const;
+};
+
 struct ConstantQueueDescriptor : QueueDescriptor
 {
     ConstantQueueDescriptor()
diff --git a/src/backends/backendsCommon/WorkloadFactory.cpp b/src/backends/backendsCommon/WorkloadFactory.cpp
index 98fe158fc5fde0a4e966b4807fd6a6dc51a4bbe2..f19b48491a89ddb9c2ee21d4c205cafa7caadc24 100644
@@ -401,6 +401,19 @@ bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId,
                                                 reason);
             break;
         }
+        case LayerType::LogSoftmax:
+        {
+            auto cLayer = boost::polymorphic_downcast<const LogSoftmaxLayer*>(&layer);
+
+            const TensorInfo& input  = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
+            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
+
+            result = layerSupportObject->IsLogSoftmaxSupported(OverrideDataType(input, dataType),
+                                                               OverrideDataType(output, dataType),
+                                                               cLayer->GetParameters(),
+                                                               reason);
+            break;
+        }
         case LayerType::Lstm:
         {
             auto cLayer = boost::polymorphic_downcast<const LstmLayer*>(&layer);
@@ -1167,6 +1180,12 @@ std::unique_ptr<IWorkload> IWorkloadFactory::CreateL2Normalization(const L2Norma
     return std::unique_ptr<IWorkload>();
 }
 
+std::unique_ptr<IWorkload> IWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& descriptor,
+                                                              const WorkloadInfo& info) const
+{
+    return std::unique_ptr<IWorkload>();
+}
+
 std::unique_ptr<IWorkload> IWorkloadFactory::CreateLstm(const LstmQueueDescriptor& descriptor,
                                                         const WorkloadInfo& info) const
 {
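
The base implementation returns an empty pointer, which callers treat as "no
workload available". A fragment of a hypothetical backend override
(MyWorkloadFactory is assumed to derive from armnn::IWorkloadFactory, and
MyLogSoftmaxWorkload to be its armnn::IWorkload implementation; neither exists
in this change):

    // Fragment only: the rest of the factory interface is elided.
    std::unique_ptr<armnn::IWorkload>
    MyWorkloadFactory::CreateLogSoftmax(const armnn::LogSoftmaxQueueDescriptor& descriptor,
                                        const armnn::WorkloadInfo& info) const
    {
        // Hand the validated queue descriptor and workload info straight to
        // the backend's own workload type.
        return std::make_unique<MyLogSoftmaxWorkload>(descriptor, info);
    }
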
diff --git a/src/backends/backendsCommon/WorkloadFactory.hpp b/src/backends/backendsCommon/WorkloadFactory.hpp
index 9fa0221f312658f7276b864e7c8c321a9e7a0bba..fa7a9d46a8106adc221919fe23d38eb9afc88e94 100644
@@ -127,6 +127,9 @@ public:
     virtual std::unique_ptr<IWorkload> CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
                                                              const WorkloadInfo& info) const;
 
+    virtual std::unique_ptr<IWorkload> CreateLogSoftmax(const LogSoftmaxQueueDescriptor& descriptor,
+                                                        const WorkloadInfo& info) const;
+
     virtual std::unique_ptr<IWorkload> CreateLstm(const LstmQueueDescriptor& descriptor,
                                                   const WorkloadInfo& info) const;
 
diff --git a/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp b/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp
index c8604140ec3d495ec6acf5ac76a098609c3552c3..907285c5cff75c2280172563ffbdf6bdff003723 100644
@@ -439,6 +439,8 @@ DECLARE_LAYER_POLICY_2_PARAM(InstanceNormalization)
 
 DECLARE_LAYER_POLICY_2_PARAM(L2Normalization)
 
+DECLARE_LAYER_POLICY_2_PARAM(LogSoftmax)
+
 DECLARE_LAYER_POLICY_2_PARAM(Lstm)
 
 DECLARE_LAYER_POLICY_1_PARAM(Maximum)