[IE CLDNN] Added LogSoftmax-5 operation (#2945)
author Roman Lyamin <Roman.Lyamin@intel.com>
Wed, 11 Nov 2020 05:53:30 +0000 (08:53 +0300)
committer GitHub <noreply@github.com>
Wed, 11 Nov 2020 05:53:30 +0000 (08:53 +0300)
inference-engine/src/cldnn_engine/cldnn_engine.cpp
inference-engine/src/cldnn_engine/cldnn_program.cpp
inference-engine/src/cldnn_engine/cldnn_program.h
inference-engine/tests/functional/plugin/gpu/shared_tests_instances/single_layer_tests/log_softmax.cpp [new file with mode: 0644]

index c1c5215..f56ebc2 100644 (file)
@@ -136,7 +136,8 @@ InferenceEngine::ICNNNetwork::Ptr clDNNEngine::CloneAndTransformNetwork(const In
                    std::dynamic_pointer_cast<const ::ngraph::opset4::HSwish>(node) ||
                    std::dynamic_pointer_cast<const ::ngraph::opset4::ReduceL1>(node) ||
                    std::dynamic_pointer_cast<const ::ngraph::opset4::ReduceL2>(node) ||
-                   std::dynamic_pointer_cast<const ::ngraph::opset4::SoftPlus>(node);
+                   std::dynamic_pointer_cast<const ::ngraph::opset4::SoftPlus>(node) ||
+                   std::dynamic_pointer_cast<const ::ngraph::opset5::LogSoftmax>(node);
         };
         auto nGraphFunc = clonedNetwork->getFunction();
         // Disable shape inference (WA for generic operations)
index e2eec46..59ed9cc 100644 (file)
@@ -761,6 +761,7 @@ Program::LayerType Program::LayerTypeFromStr(const std::string &str) {
         { "Pooling" , Pooling },
         { "FullyConnected" , FullyConnected },
         { "SoftMax" , SoftMax },
+        { "LogSoftmax", LogSoftmax },
         { "Power" , Power },
         { "Split" , Split },
         { "VariadicSplit", VariadicSplit },
@@ -1442,6 +1443,8 @@ void Program::CreateSingleLayerPrimitive(cldnn::topology& topology, InferenceEng
             break;
         case SoftMax: CreateSoftMaxPrimitive(topology, layer);
             break;
+        case LogSoftmax: CreateLogSoftmaxPrimitive(topology, layer);
+            break;
         case Power: CreatePowerPrimitive(topology, layer);
             break;
         case Split: CreateSplitPrimitive(topology, layer);
@@ -2922,6 +2925,36 @@ void Program::CreateSoftMaxPrimitive(cldnn::topology& topology, InferenceEngine:
     AddPrimitiveToProfiler(softmaxLayerName, layer);
 }
 
+void Program::CreateLogSoftmaxPrimitive(cldnn::topology& topology, InferenceEngine::CNNLayerPtr &layer) {
+    ValidateLayer(layer, 1);
+    auto inputPrimitives = GetPrevLayersPrimitives(layer);
+    auto logSoftmaxLayer = as<InferenceEngine::GenericLayer*>(layer);
+    auto sz = logSoftmaxLayer->input().get()->getTensorDesc().getDims().size();
+
+    auto axis = logSoftmaxLayer->GetParamAsInt("axis", 1);
+    if (axis < 0) axis += sz;
+    cldnn::softmax::dimension_t softmax_axis;
+
+    switch (axis) {
+        case 0: softmax_axis = cldnn::softmax::normalize_all; break;
+        case 1: softmax_axis = cldnn::softmax::normalize_f; break;
+        case 2: softmax_axis = sz > 4 ? cldnn::softmax::normalize_z : cldnn::softmax::normalize_y; break;
+        case 3: softmax_axis = sz > 4 ? cldnn::softmax::normalize_y : cldnn::softmax::normalize_x; break;
+        case 4: softmax_axis = cldnn::softmax::normalize_x; break;
+        default: THROW_CLDNN_EXCEPTION("Unsupported logsoftmax axis " << axis);
+    }
+
+    std::string softmaxLayerName = "softMax";
+    auto softmaxPrim = cldnn::softmax(softmaxLayerName, inputPrimitives[0], softmax_axis);
+    topology.add(softmaxPrim);
+    AddPrimitiveToProfiler(softmaxLayerName, layer);
+
+    std::string logSoftmaxLayerName = layer_type_name_ID(layer);
+    auto logPrim = cldnn::activation(logSoftmaxLayerName, softmaxLayerName, cldnn::activation_func::log);
+    topology.add(logPrim);
+    AddPrimitiveToProfiler(logSoftmaxLayerName, layer);
+}
+
 void Program::CreateFullyConnectedPrimitive(cldnn::topology& topology, InferenceEngine::CNNLayerPtr &layer) {
     ValidateLayer(layer, {1, 2, 3});
     auto inputPrimitives = GetPrevLayersPrimitives(layer);
index 6802fa3..8276944 100644 (file)
@@ -138,6 +138,7 @@ public:
         Pooling,
         FullyConnected,
         SoftMax,
+        LogSoftmax,
         Power,
         Split,
         VariadicSplit,
@@ -337,6 +338,7 @@ private:
     void CreateFusedSplitConvMergePrimitive(cldnn::topology& topology, InferenceEngine::CNNLayerPtr &layer, bool useGroups = true);
     void CreatePowerPrimitive(cldnn::topology& topology, InferenceEngine::CNNLayerPtr &layer);
     void CreateSoftMaxPrimitive(cldnn::topology& topology, InferenceEngine::CNNLayerPtr &layer);
+    void CreateLogSoftmaxPrimitive(cldnn::topology& topology, InferenceEngine::CNNLayerPtr &layer);
     void CreateFullyConnectedPrimitive(cldnn::topology& topology, InferenceEngine::CNNLayerPtr &layer);
     void CreatePoolingPrimitive(cldnn::topology& topology, InferenceEngine::CNNLayerPtr &layer);
     void CreateLRNPrimitive(cldnn::topology& topology, InferenceEngine::CNNLayerPtr &layer);
diff --git a/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/single_layer_tests/log_softmax.cpp b/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/single_layer_tests/log_softmax.cpp
new file mode 100644 (file)
index 0000000..f497609
--- /dev/null
@@ -0,0 +1,76 @@
+// Copyright (C) 2020 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include <vector>
+
+#include "single_layer_tests/log_softmax.hpp"
+#include "common_test_utils/test_constants.hpp"
+
+using namespace LayerTestsDefinitions;
+
+namespace {
+
+const std::vector<InferenceEngine::Precision> netPrecisions = {
+    InferenceEngine::Precision::FP32,
+};
+
+const std::vector<InferenceEngine::SizeVector> inputShapes2D = {
+    InferenceEngine::SizeVector {1, 100},
+    InferenceEngine::SizeVector {100, 1},
+    InferenceEngine::SizeVector {10, 10},
+};
+
+const std::vector<int64_t> axis2D = {
+    -1, 1
+};
+
+const auto params2D = testing::Combine(
+    testing::ValuesIn(netPrecisions),
+    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
+    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
+    testing::Values(InferenceEngine::Layout::ANY),
+    testing::Values(InferenceEngine::Layout::ANY),
+    testing::ValuesIn(inputShapes2D),
+    testing::ValuesIn(axis2D),
+    testing::Values(CommonTestUtils::DEVICE_GPU),
+    testing::Values(std::map<std::string, std::string>())
+);
+
+INSTANTIATE_TEST_CASE_P(
+        smoke_LogSoftmax2D,
+        LogSoftmaxLayerTest,
+        params2D,
+        LogSoftmaxLayerTest::getTestCaseName
+);
+
+const std::vector<InferenceEngine::SizeVector> inputShapes4D = {
+    InferenceEngine::SizeVector {1, 100, 1, 1},
+    InferenceEngine::SizeVector {1, 3, 4, 3},
+    InferenceEngine::SizeVector {2, 3, 4, 5},
+};
+
+const std::vector<int64_t> axis4D = {
+    -3, -2, -1, 1, 2, 3
+};
+
+const auto params4D = testing::Combine(
+    testing::ValuesIn(netPrecisions),
+    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
+    testing::Values(InferenceEngine::Precision::UNSPECIFIED),
+    testing::Values(InferenceEngine::Layout::ANY),
+    testing::Values(InferenceEngine::Layout::ANY),
+    testing::ValuesIn(inputShapes4D),
+    testing::ValuesIn(axis4D),
+    testing::Values(CommonTestUtils::DEVICE_GPU),
+    testing::Values(std::map<std::string, std::string>())
+);
+
+INSTANTIATE_TEST_CASE_P(
+        smoke_LogSoftmax4D,
+        LogSoftmaxLayerTest,
+        params4D,
+        LogSoftmaxLayerTest::getTestCaseName
+);
+
+}  // namespace