SoftPlus operation implementation (#1365)
author Anton Chetverikov <Anton.Chetverikov@intel.com>
Tue, 25 Aug 2020 16:36:39 +0000 (19:36 +0300)
committer GitHub <noreply@github.com>
Tue, 25 Aug 2020 16:36:39 +0000 (19:36 +0300)
35 files changed:
inference-engine/src/cldnn_engine/cldnn_engine.cpp
inference-engine/src/mkldnn_plugin/mkldnn_plugin.cpp
inference-engine/src/mkldnn_plugin/nodes/list_tbl.hpp
inference-engine/src/mkldnn_plugin/nodes/math.cpp
inference-engine/src/transformations/include/transformations/softplus_decomposition.hpp [new file with mode: 0644]
inference-engine/src/transformations/include/transformations/softplus_fusion.hpp [new file with mode: 0644]
inference-engine/src/transformations/src/transformations/common_optimizations/common_optimizations.cpp
inference-engine/src/transformations/src/transformations/convert_opset3_to_opset2/convert_opset3_to_opset2.cpp
inference-engine/src/transformations/src/transformations/softplus_decomposition.cpp [new file with mode: 0644]
inference-engine/src/transformations/src/transformations/softplus_fusion.cpp [new file with mode: 0644]
inference-engine/src/vpu/graph_transformer/src/frontend/frontend.cpp
inference-engine/tests/functional/inference_engine/transformations/softplus_decomposition_test.cpp [new file with mode: 0644]
inference-engine/tests/functional/inference_engine/transformations/softplus_fusion_test.cpp [new file with mode: 0644]
inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/activation.cpp
inference-engine/tests/functional/plugin/gpu/shared_tests_instances/single_layer_tests/activation.cpp
inference-engine/tests/functional/plugin/myriad/shared_tests_instances/single_layer_tests/activation.cpp
inference-engine/tests/functional/plugin/shared/include/single_layer_tests/activation.hpp
inference-engine/tests/ngraph_functions/include/ngraph_functions/utils/ngraph_helpers.hpp
inference-engine/tests/ngraph_functions/src/activation.cpp
inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/math_tests.cpp
model-optimizer/automation/package_BOM.txt
model-optimizer/extensions/front/softplus.py [deleted file]
model-optimizer/extensions/front/softplus_test.py [deleted file]
model-optimizer/extensions/ops/activation_ops.py
ngraph/core/include/ngraph/op/softplus.hpp [new file with mode: 0644]
ngraph/core/include/ngraph/ops.hpp
ngraph/core/include/ngraph/opsets/opset4_tbl.hpp
ngraph/core/reference/include/ngraph/runtime/reference/softplus.hpp [new file with mode: 0644]
ngraph/core/src/op/softplus.cpp [new file with mode: 0644]
ngraph/python/src/ngraph/__init__.py
ngraph/python/src/ngraph/opset4/__init__.py
ngraph/python/src/ngraph/opset4/ops.py
ngraph/test/CMakeLists.txt
ngraph/test/op_eval/softplus.cpp [new file with mode: 0644]
ngraph/test/type_prop/softplus.cpp [new file with mode: 0644]
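For reference, SoftPlus is the element-wise activation f(x) = ln(exp(x) + 1).
A minimal standalone sketch (not part of this change set) that reproduces the
expected values used by the tests in this patch:

#include <cmath>
#include <cstdio>

// Naive SoftPlus, matching the reference kernel added by this patch.
float softplus(float x) { return std::log(std::exp(x) + 1.0f); }

int main() {
    const float inputs[] = {-1.0f, 0.0f, 1.0f};
    for (float x : inputs)
        std::printf("softplus(% .1f) = %.8f\n", x, softplus(x));
    // Prints approximately 0.31326166, 0.69314718, 1.31326163: the expected
    // values in math_tests.cpp and op_eval/softplus.cpp below.
    return 0;
}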

index ecde611..bcf5770 100644 (file)
@@ -94,7 +94,8 @@ InferenceEngine::ICNNNetwork::Ptr clDNNEngine::CloneAndTransformNetwork(const In
                    std::dynamic_pointer_cast<const ::ngraph::opset3::ExtractImagePatches>(node) ||
                    std::dynamic_pointer_cast<const ::ngraph::opset4::HSwish>(node) ||
                    std::dynamic_pointer_cast<const ::ngraph::opset4::ReduceL1>(node) ||
-                   std::dynamic_pointer_cast<const ::ngraph::opset4::ReduceL2>(node);
+                   std::dynamic_pointer_cast<const ::ngraph::opset4::ReduceL2>(node) ||
+                   std::dynamic_pointer_cast<const ::ngraph::opset4::SoftPlus>(node);
         };
         auto nGraphFunc = clonedNetwork->getFunction();
         // Disable shape inference (WA for generic operations)
index 0d36510..b783470 100644 (file)
@@ -84,6 +84,7 @@ static void Transformation(ICNNNetwork::Ptr& clonedNetwork) {
                std::dynamic_pointer_cast<const ngraph::opset4::HSwish>(node) ||
                std::dynamic_pointer_cast<const ngraph::opset4::ReduceL1>(node) ||
                std::dynamic_pointer_cast<const ngraph::opset4::ReduceL2>(node) ||
+               std::dynamic_pointer_cast<const ngraph::opset4::SoftPlus>(node) ||
                std::dynamic_pointer_cast<const ngraph::opset4::Pad>(node);
     };
     auto nGraphFunc = clonedNetwork->getFunction();
index 2015c4c..5402276 100644 (file)
@@ -31,7 +31,7 @@ MKLDNN_EXTENSION_NODE(MathImpl, Selu);
 MKLDNN_EXTENSION_NODE(MathImpl, Sign);
 MKLDNN_EXTENSION_NODE(MathImpl, Sin);
 MKLDNN_EXTENSION_NODE(MathImpl, Sinh);
-MKLDNN_EXTENSION_NODE(MathImpl, Softplus);
+MKLDNN_EXTENSION_NODE(MathImpl, SoftPlus);
 MKLDNN_EXTENSION_NODE(MathImpl, Softsign);
 MKLDNN_EXTENSION_NODE(MathImpl, Tan);
 MKLDNN_EXTENSION_NODE(ExperimentalDetectronTopKROIsImpl, ExperimentalDetectronTopKROIs);
index 2920bad..e690662 100644 (file)
@@ -82,7 +82,7 @@ public:
             else if (math_func == "Sign") mathFunction = Math::Sign;
             else if (math_func == "Sin") mathFunction = Math::Sin;
             else if (math_func == "Sinh") mathFunction = Math::Sinh;
-            else if (math_func == "Softplus") mathFunction = Math::Softplus;
+            else if (math_func == "SoftPlus") mathFunction = Math::SoftPlus;
             else if (math_func == "Softsign") mathFunction = Math::Softsign;
             else if (math_func == "Tan") mathFunction = Math::Tan;
             else
@@ -212,7 +212,7 @@ public:
                 dst_data[i] = sinhf(src_data[i]);
             });
             break;
-        case Math::Softplus:
+        case Math::SoftPlus:
             parallel_for(dataSize, [&](size_t i) {
                 dst_data[i] = logf(expf(src_data[i]) + 1);
             });
@@ -260,7 +260,7 @@ private:
         Sign,
         Sin,
         Sinh,
-        Softplus,
+        SoftPlus,
         Softsign,
         Tan
     };
@@ -291,7 +291,7 @@ REG_FACTORY_FOR(MathImpl, Selu);
 REG_FACTORY_FOR(MathImpl, Sign);
 REG_FACTORY_FOR(MathImpl, Sin);
 REG_FACTORY_FOR(MathImpl, Sinh);
-REG_FACTORY_FOR(MathImpl, Softplus);
+REG_FACTORY_FOR(MathImpl, SoftPlus);
 REG_FACTORY_FOR(MathImpl, Softsign);
 REG_FACTORY_FOR(MathImpl, Tan);
 
diff --git a/inference-engine/src/transformations/include/transformations/softplus_decomposition.hpp b/inference-engine/src/transformations/include/transformations/softplus_decomposition.hpp
new file mode 100644 (file)
index 0000000..9b167c5
--- /dev/null
@@ -0,0 +1,29 @@
+// Copyright (C) 2020 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#pragma once
+
+#include <vector>
+#include <memory>
+
+#include <transformations_visibility.hpp>
+#include <ngraph/pass/graph_rewrite.hpp>
+
+namespace ngraph {
+namespace pass {
+
+class TRANSFORMATIONS_API SoftPlusDecomposition;
+
+}  // namespace pass
+}  // namespace ngraph
+
+/**
+ * @ingroup ie_transformation_common_api
+ * @brief SoftPlusDecomposition transformation replaces the SoftPlus op
+ * with the equivalent group of operations: log(exp(x) + 1).
+ */
+class ngraph::pass::SoftPlusDecomposition: public ngraph::pass::MatcherPass {
+public:
+    SoftPlusDecomposition();
+};
diff --git a/inference-engine/src/transformations/include/transformations/softplus_fusion.hpp b/inference-engine/src/transformations/include/transformations/softplus_fusion.hpp
new file mode 100644 (file)
index 0000000..3da9e88
--- /dev/null
@@ -0,0 +1,29 @@
+// Copyright (C) 2020 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#pragma once
+
+#include <vector>
+#include <memory>
+
+#include <transformations_visibility.hpp>
+#include <ngraph/pass/graph_rewrite.hpp>
+
+namespace ngraph {
+namespace pass {
+
+class TRANSFORMATIONS_API SoftPlusFusion;
+
+}  // namespace pass
+}  // namespace ngraph
+
+/**
+ * @ingroup ie_transformation_common_api
+ * @brief SoftPlusFusion transformation replaces the group of
+ * operations log(exp(x) + 1) with a single SoftPlus op.
+ */
+class ngraph::pass::SoftPlusFusion: public ngraph::pass::MatcherPass {
+public:
+    SoftPlusFusion();
+};
index 8d43916..7f988f7 100644 (file)
@@ -16,6 +16,7 @@
 #include "transformations/init_node_info.hpp"
 #include "transformations/itt.hpp"
 #include "transformations/mish_fusion.hpp"
+#include "transformations/softplus_fusion.hpp"
 #include "transformations/swish_fusion.hpp"
 #include "transformations/hswish_fusion.hpp"
 
@@ -40,6 +41,7 @@ bool ngraph::pass::CommonOptimizations::run_on_function(std::shared_ptr<ngraph::
     manager.register_pass<ngraph::pass::ConvertScatterElementsToScatter>(); // partially depends on CF
     manager.register_pass<ngraph::pass::DepthToSpaceFusion>();
     manager.register_pass<ngraph::pass::MishFusion>();
+    manager.register_pass<ngraph::pass::SoftPlusFusion>();
     manager.register_pass<ngraph::pass::SwishFusion>();
     manager.register_pass<ngraph::pass::HSwishFusion>();
     manager.register_pass<ngraph::pass::ConvertPadToGroupConvolution>();
index 8b8873d..c579244 100644 (file)
@@ -10,6 +10,7 @@
 #include "transformations/convert_opset3_to_opset2/convert_shuffle_channels3.hpp"
 #include "transformations/convert_opset3_to_opset2/convert_topk3.hpp"
 #include "transformations/convert_extract_image_patches_to_reorg_yolo.hpp"
+#include "transformations/softplus_decomposition.hpp"
 #include "transformations/itt.hpp"
 
 #include <memory>
@@ -28,6 +29,7 @@ bool ngraph::pass::ConvertOpSet3ToOpSet2::run_on_function(std::shared_ptr<ngraph
     manager.register_pass<ngraph::pass::ConvertShuffleChannels3>();
     manager.register_pass<ngraph::pass::ConvertTopK3>();
     manager.register_pass<ngraph::pass::ConvertExtractImagePatchesToReorgYolo>();
+    manager.register_pass<ngraph::pass::SoftPlusDecomposition>();
 
     manager.set_callback(m_transformation_callback);
     manager.run_passes(f);
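The decomposition is registered in the opset3-to-opset2 pipeline and honors the
transformation callback: a plugin that executes SoftPlus natively returns true
for SoftPlus nodes (as the cldnn, mkldnn and vpu predicates in this patch do),
and the matched op is then left intact. A sketch of the plugin-side wiring,
with an illustrative function name:

#include <memory>
#include <ngraph/function.hpp>
#include <ngraph/opsets/opset4.hpp>
#include <ngraph/pass/manager.hpp>
#include <transformations/convert_opset3_to_opset2/convert_opset3_to_opset2.hpp>

// Sketch only: run the opset3->opset2 pipeline but keep SoftPlus whole.
// Returning true from the callback tells SoftPlusDecomposition (through
// m_transformation_callback) to skip the matched node.
void transformKeepingSoftPlus(std::shared_ptr<ngraph::Function> f) {
    ngraph::pass::Manager manager;
    manager.register_pass<ngraph::pass::ConvertOpSet3ToOpSet2>();
    manager.set_callback([](const std::shared_ptr<const ngraph::Node> &node) -> bool {
        return std::dynamic_pointer_cast<const ngraph::opset4::SoftPlus>(node) != nullptr;
    });
    manager.run_passes(f);
}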
diff --git a/inference-engine/src/transformations/src/transformations/softplus_decomposition.cpp b/inference-engine/src/transformations/src/transformations/softplus_decomposition.cpp
new file mode 100644 (file)
index 0000000..bfac0af
--- /dev/null
@@ -0,0 +1,41 @@
+// Copyright (C) 2018-2020 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "transformations/softplus_decomposition.hpp"
+
+#include <memory>
+#include <vector>
+
+#include <ngraph/opsets/opset4.hpp>
+#include <ngraph/rt_info.hpp>
+#include <ngraph/pattern/op/wrap_type.hpp>
+
+ngraph::pass::SoftPlusDecomposition::SoftPlusDecomposition() {
+    // decomposes SoftPlus(x) operation into ln(exp(x) + 1.0)
+    auto input = ngraph::pattern::any_input();
+    auto softplus = std::make_shared<ngraph::opset4::SoftPlus>(input);
+
+    ngraph::matcher_pass_callback callback = [=](ngraph::pattern::Matcher& m) {
+        auto &pattern_to_output = m.get_pattern_value_map();
+        auto softplus_input = pattern_to_output.at(input);
+        auto softplus_node = pattern_to_output.at(softplus).get_node_shared_ptr();
+
+        if (m_transformation_callback(softplus_node)) {
+            return false;
+        }
+
+        auto exp = std::make_shared<ngraph::opset4::Exp>(softplus_input);
+        auto add = std::make_shared<ngraph::opset4::Add>(exp,
+            opset4::Constant::create(ngraph::element::f32, ngraph::Shape{1}, {1.0}));
+        auto log = std::make_shared<ngraph::opset4::Log>(add);
+
+        log->set_friendly_name(softplus_node->get_friendly_name());
+        ngraph::copy_runtime_info(softplus_node, {exp, add, log});
+        ngraph::replace_node(softplus_node, log);
+        return true;
+    };
+
+    auto m = std::make_shared<ngraph::pattern::Matcher>(softplus, "SoftPlusDecomposition");
+    register_matcher(m, callback);
+}
diff --git a/inference-engine/src/transformations/src/transformations/softplus_fusion.cpp b/inference-engine/src/transformations/src/transformations/softplus_fusion.cpp
new file mode 100644 (file)
index 0000000..a2a3056
--- /dev/null
@@ -0,0 +1,53 @@
+// Copyright (C) 2018-2020 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include "transformations/softplus_fusion.hpp"
+
+#include <memory>
+#include <vector>
+
+#include <ngraph/opsets/opset4.hpp>
+#include <ngraph/rt_info.hpp>
+#include <ngraph/pattern/op/wrap_type.hpp>
+
+ngraph::pass::SoftPlusFusion::SoftPlusFusion() {
+    // fuses ln(exp(x) + 1.0) operations into SoftPlus(x)
+    auto input = ngraph::pattern::any_input();
+    auto exp = std::make_shared<ngraph::opset4::Exp>(input);
+    auto add_constant = ngraph::pattern::wrap_type<ngraph::opset4::Constant>();
+    auto add = std::make_shared<ngraph::opset4::Add>(exp, add_constant);
+    auto log = std::make_shared<ngraph::opset4::Log>(add);
+
+    ngraph::matcher_pass_callback callback = [=](ngraph::pattern::Matcher &m) {
+        auto &pattern_to_output = m.get_pattern_value_map();
+        auto exp_input = pattern_to_output.at(input);
+
+        auto constant = std::dynamic_pointer_cast<ngraph::opset4::Constant>(pattern_to_output.at(add_constant).get_node_shared_ptr());
+
+        if (constant == nullptr) {
+            return false;
+        }
+
+        if (constant->get_element_type() == ngraph::element::f32 || constant->get_element_type() == ngraph::element::f16) {
+            auto data = constant->cast_vector<float>();
+            if (data.size() != 1 || data[0] != 1.0) {
+                return false;
+            }
+        } else {
+            return false;
+        }
+
+        auto softplus = std::make_shared<ngraph::opset4::SoftPlus>(exp_input);
+
+        softplus->set_friendly_name(m.get_match_root()->get_friendly_name());
+        ngraph::copy_runtime_info({pattern_to_output.at(log).get_node_shared_ptr(),
+                                   pattern_to_output.at(add).get_node_shared_ptr(),
+                                   pattern_to_output.at(exp).get_node_shared_ptr()}, softplus);
+        ngraph::replace_node(m.get_match_root(), softplus);
+        return true;
+    };
+
+    auto m = std::make_shared<ngraph::pattern::Matcher>(log, "SoftPlusFusion");
+    register_matcher(m, callback);
+}
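SoftPlusFusion is the inverse of SoftPlusDecomposition: it only fires when the
added constant is a scalar 1.0 of type f16 or f32, and leaves any other
sub-graph untouched (see the negative test below). A sketch, assuming the
headers introduced above, of running both passes back to back; the net effect
on a function containing a single SoftPlus should be an equivalent graph:

#include <memory>
#include <ngraph/function.hpp>
#include <ngraph/pass/manager.hpp>
#include <transformations/softplus_decomposition.hpp>
#include <transformations/softplus_fusion.hpp>

// Sketch only: decompose SoftPlus -> log(exp(x) + 1), then fuse it back.
void roundTripSoftPlus(std::shared_ptr<ngraph::Function> f) {
    ngraph::pass::Manager manager;
    manager.register_pass<ngraph::pass::SoftPlusDecomposition>();
    manager.register_pass<ngraph::pass::SoftPlusFusion>();
    manager.run_passes(f);
}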
index a504d74..b4b48be 100644 (file)
@@ -22,6 +22,7 @@
 #include <legacy/convert_function_to_cnn_network.hpp>
 #include <generic_ie.hpp>
 #include <ngraph/opsets/opset3.hpp>
+#include <ngraph/opsets/opset4.hpp>
 #include <transformations/tensor_iterator_transformations/apply_transformations_to_ti_body.hpp>
 #include <transformations/convert_opset3_to_opset2/convert_opset3_to_opset2.hpp>
 #include <transformations/convert_opset2_to_opset1/convert_opset2_to_opset1.hpp>
@@ -387,7 +388,8 @@ ModelPtr FrontEnd::runCommonPasses(ie::ICNNNetwork& network, const UnsupportedLa
             const auto transformationsPredicate = [](const std::shared_ptr<const ngraph::Node> &node) -> bool {
                 return std::dynamic_pointer_cast<const ngraph::opset3::Gelu>(node) ||
                        (std::dynamic_pointer_cast<const ngraph::opset3::MatMul>(node) &&
-                        std::dynamic_pointer_cast<const ngraph::vpu::op::DynamicShapeResolver>(node->input_value(0).get_node_shared_ptr()));
+                        std::dynamic_pointer_cast<const ngraph::vpu::op::DynamicShapeResolver>(node->input_value(0).get_node_shared_ptr())) ||
+                       std::dynamic_pointer_cast<const ngraph::opset4::SoftPlus>(node);
             };
 
             auto nGraphFunc = originalOrConvertNetwork->getFunction();
diff --git a/inference-engine/tests/functional/inference_engine/transformations/softplus_decomposition_test.cpp b/inference-engine/tests/functional/inference_engine/transformations/softplus_decomposition_test.cpp
new file mode 100644 (file)
index 0000000..ad66191
--- /dev/null
@@ -0,0 +1,48 @@
+// Copyright (C) 2020 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include <gtest/gtest.h>
+
+#include <string>
+#include <memory>
+
+#include <ngraph/function.hpp>
+#include <ngraph/opsets/opset4.hpp>
+#include <ngraph/pass/manager.hpp>
+#include <transformations/softplus_decomposition.hpp>
+#include <transformations/init_node_info.hpp>
+#include <transformations/utils/utils.hpp>
+
+#include "common_test_utils/ngraph_test_utils.hpp"
+
+using namespace testing;
+
+TEST(TransformationTests, SoftPlusDecomposition) {
+    std::shared_ptr<ngraph::Function> f(nullptr), f_ref(nullptr);
+    {
+        auto data = std::make_shared<ngraph::opset4::Parameter>(ngraph::element::f32, ngraph::Shape{3, 1, 2});
+        auto softplus = std::make_shared<ngraph::opset4::SoftPlus>(data);
+
+        f = std::make_shared<ngraph::Function>(ngraph::NodeVector{softplus}, ngraph::ParameterVector{data});
+
+        ngraph::pass::Manager manager;
+        manager.register_pass<ngraph::pass::InitNodeInfo>();
+        manager.register_pass<ngraph::pass::SoftPlusDecomposition>();
+        manager.run_passes(f);
+        ASSERT_NO_THROW(check_rt_info(f));
+    }
+
+    {
+        auto input = std::make_shared<ngraph::opset4::Parameter>(ngraph::element::f32, ngraph::Shape{3, 1, 2});
+        auto exp = std::make_shared<ngraph::opset4::Exp>(input);
+        auto add = std::make_shared<ngraph::opset4::Add>(exp,
+            ngraph::opset4::Constant::create(ngraph::element::f32, ngraph::Shape{1}, {1.0}));
+        auto log = std::make_shared<ngraph::opset4::Log>(add);
+
+        f_ref = std::make_shared<ngraph::Function>(ngraph::NodeVector{log}, ngraph::ParameterVector{input});
+    }
+
+    auto res = compare_functions(f, f_ref);
+    ASSERT_TRUE(res.first) << res.second;
+}
diff --git a/inference-engine/tests/functional/inference_engine/transformations/softplus_fusion_test.cpp b/inference-engine/tests/functional/inference_engine/transformations/softplus_fusion_test.cpp
new file mode 100644 (file)
index 0000000..d7a2f9e
--- /dev/null
@@ -0,0 +1,109 @@
+// Copyright (C) 2020 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#include <gtest/gtest.h>
+
+#include <string>
+#include <memory>
+
+#include <ngraph/function.hpp>
+#include <ngraph/opsets/opset4.hpp>
+#include <ngraph/pass/manager.hpp>
+#include <transformations/softplus_fusion.hpp>
+#include <transformations/init_node_info.hpp>
+#include <transformations/utils/utils.hpp>
+
+#include "common_test_utils/ngraph_test_utils.hpp"
+
+using namespace testing;
+
+TEST(TransformationTests, SoftPlusFusing) {
+    std::shared_ptr<ngraph::Function> f(nullptr), f_ref(nullptr);
+    {
+        auto input0 = std::make_shared<ngraph::opset4::Parameter>(ngraph::element::f32, ngraph::Shape{3, 1, 2});
+        auto exp = std::make_shared<ngraph::opset4::Exp>(input0);
+        auto input_const = ngraph::opset4::Constant::create(ngraph::element::f32, ngraph::Shape{1}, {1.0});
+        auto add = std::make_shared<ngraph::opset4::Add>(exp, input_const);
+        auto log = std::make_shared<ngraph::opset4::Log>(add);
+
+        f = std::make_shared<ngraph::Function>(ngraph::NodeVector{log}, ngraph::ParameterVector{input0});
+
+        ngraph::pass::Manager manager;
+        manager.register_pass<ngraph::pass::InitNodeInfo>();
+        manager.register_pass<ngraph::pass::SoftPlusFusion>();
+        manager.run_passes(f);
+        ASSERT_NO_THROW(check_rt_info(f));
+    }
+
+    {
+        auto data = std::make_shared<ngraph::opset4::Parameter>(ngraph::element::f32, ngraph::Shape{3, 1, 2});
+        auto softplus = std::make_shared<ngraph::opset4::SoftPlus>(data);
+
+        f_ref = std::make_shared<ngraph::Function>(ngraph::NodeVector{softplus}, ngraph::ParameterVector{data});
+    }
+
+    auto res = compare_functions(f, f_ref);
+    ASSERT_TRUE(res.first) << res.second;
+}
+
+TEST(TransformationTests, SoftPlusFusingDynamic) {
+    std::shared_ptr<ngraph::Function> f(nullptr), f_ref(nullptr);
+    {
+        auto input0 = std::make_shared<ngraph::opset4::Parameter>(ngraph::element::f32, ngraph::PartialShape::dynamic(1));
+        auto exp = std::make_shared<ngraph::opset4::Exp>(input0);
+        auto input_const = ngraph::opset4::Constant::create(ngraph::element::f32, ngraph::Shape{1}, {1.0});
+        auto add = std::make_shared<ngraph::opset4::Add>(exp, input_const);
+        auto log = std::make_shared<ngraph::opset4::Log>(add);
+
+        f = std::make_shared<ngraph::Function>(ngraph::NodeVector{log}, ngraph::ParameterVector{input0});
+
+        ngraph::pass::Manager manager;
+        manager.register_pass<ngraph::pass::InitNodeInfo>();
+        manager.register_pass<ngraph::pass::SoftPlusFusion>();
+        manager.run_passes(f);
+        ASSERT_NO_THROW(check_rt_info(f));
+    }
+
+    {
+        auto data = std::make_shared<ngraph::opset4::Parameter>(ngraph::element::f32, ngraph::PartialShape::dynamic(1));
+        auto softplus = std::make_shared<ngraph::opset4::SoftPlus>(data);
+
+        f_ref = std::make_shared<ngraph::Function>(ngraph::NodeVector{softplus}, ngraph::ParameterVector{data});
+    }
+
+    auto res = compare_functions(f, f_ref);
+    ASSERT_TRUE(res.first) << res.second;
+}
+
+TEST(TransformationTests, SoftPlusFusingNegative) {
+    std::shared_ptr<ngraph::Function> f(nullptr), f_ref(nullptr);
+    {
+        auto input0 = std::make_shared<ngraph::opset4::Parameter>(ngraph::element::f32, ngraph::PartialShape::dynamic(1));
+        auto exp = std::make_shared<ngraph::opset4::Exp>(input0);
+        auto input_const = ngraph::opset4::Constant::create(ngraph::element::f32, ngraph::Shape{1}, {-1.0});
+        auto add = std::make_shared<ngraph::opset4::Add>(exp, input_const);
+        auto log = std::make_shared<ngraph::opset4::Log>(add);
+
+        f = std::make_shared<ngraph::Function>(ngraph::NodeVector{log}, ngraph::ParameterVector{input0});
+
+        ngraph::pass::Manager manager;
+        manager.register_pass<ngraph::pass::InitNodeInfo>();
+        manager.register_pass<ngraph::pass::SoftPlusFusion>();
+        manager.run_passes(f);
+        ASSERT_NO_THROW(check_rt_info(f));
+    }
+
+    {
+        auto input0 = std::make_shared<ngraph::opset4::Parameter>(ngraph::element::f32, ngraph::PartialShape::dynamic(1));
+        auto exp = std::make_shared<ngraph::opset4::Exp>(input0);
+        auto input_const = ngraph::opset4::Constant::create(ngraph::element::f32, ngraph::Shape{1}, {-1.0});
+        auto add = std::make_shared<ngraph::opset4::Add>(exp, input_const);
+        auto log = std::make_shared<ngraph::opset4::Log>(add);
+
+        f_ref = std::make_shared<ngraph::Function>(ngraph::NodeVector{log}, ngraph::ParameterVector{input0});
+    }
+
+    auto res = compare_functions(f, f_ref);
+    ASSERT_TRUE(res.first) << res.second;
+}
index 286a708..0088bb9 100644 (file)
@@ -48,7 +48,8 @@ const std::vector<ActivationTypes> activationTypes = {
         Selu,
         Ceiling,
         Mish,
-        HSwish
+        HSwish,
+        SoftPlus
 };
 
 const std::vector<ActivationTypes> activationParamTypes = {
index eb41567..703ac25 100644 (file)
@@ -43,7 +43,8 @@ const std::vector<ActivationTypes> activationTypes = {
         Selu,
         Ceiling,
         Mish,
-        HSwish
+        HSwish,
+        SoftPlus
 };
 
 std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> basic = {
index 25fba89..cced8e4 100644 (file)
@@ -22,7 +22,8 @@ const std::vector<ActivationTypes> activationTypes = {
         Exp,
         Log,
         Gelu,
-        Mish
+        Mish,
+        SoftPlus
 };
 
 std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> basic = {
index 2744a85..6593d5a 100644 (file)
@@ -69,6 +69,7 @@ static std::map<ngraph::helpers::ActivationTypes, std::string> activationNames =
         {ngraph::helpers::ActivationTypes::PReLu,       "PReLu"},
         {ngraph::helpers::ActivationTypes::Mish,        "Mish"},
         {ngraph::helpers::ActivationTypes::HSwish,      "HSwish"},
+        {ngraph::helpers::ActivationTypes::SoftPlus,    "SoftPlus"},
 };
 
 typedef std::tuple<
index ab8c878..02c83f0 100644 (file)
@@ -95,6 +95,8 @@ std::shared_ptr<ngraph::Node> makeActivation(const ngraph::Output<Node> &in,
             return std::make_shared<ngraph::op::v4::Mish>(in);
         case ngraph::helpers::ActivationTypes::HSwish:
             return std::make_shared<ngraph::op::v4::HSwish>(in);
+        case ngraph::helpers::ActivationTypes::SoftPlus:
+            return std::make_shared<ngraph::op::v4::SoftPlus>(in);
         default:
             throw std::runtime_error("Can't create layer for this activation type");
     }
index 930fea5..6e7eb38 100644 (file)
@@ -127,7 +127,7 @@ void ref_math(
         for (i = 0; i < dst_size; i++) {
             dst_data[i] = sinhf(src_data[i]);
         }
-    } else if (math_function == "Softplus") {
+    } else if (math_function == "SoftPlus") {
         for (i = 0; i < dst_size; i++) {
             dst_data[i] = logf(expf(src_data[i]) + 1);
         }
@@ -313,7 +313,7 @@ INSTANTIATE_TEST_CASE_P(
                 math_test_params{ "Sign",{ 3 },{ -0.5f, 0.f, 0.5f },{},{},{},{-1, 0, 1} },
                 math_test_params{ "Sin",{ 3 },{ -1, 0, 1 },{},{},{},{ -0.841470957f, 0.0f, 0.841470957f } },
                 math_test_params{ "Sinh",{ 3 },{ -0.5f, 0.f, 0.5f },{},{},{},{ } },
-                math_test_params{ "Softplus",{ 3 },{ -1, 0, 1 },{},{},{},{ 0.31326166f, 0.69314718f, 1.31326163f } },
+                math_test_params{ "SoftPlus",{ 3 },{ -1, 0, 1 },{},{},{},{ 0.31326166f, 0.69314718f, 1.31326163f } },
                 math_test_params{ "Softsign",{ 3 },{ -1, 0, 1 },{},{},{},{ -0.5f, 0.f, 0.5f } },
                 math_test_params{ "Tan",{ 3 },{ -1, 0, 1 },{},{},{},{ -1.55740774f, 0.0f, 1.55740774f } }
             ));
index 569cf75..bd74604 100644 (file)
@@ -326,7 +326,6 @@ extensions/front/reshape_dim_normalizer.py
 extensions/front/restore_ports.py
 extensions/front/scatter_normalizer.py
 extensions/front/softmax.py
-extensions/front/softplus.py
 extensions/front/softsign_replacer.py
 extensions/front/split_normalizer.py
 extensions/front/SqueezeNormalize.py
diff --git a/model-optimizer/extensions/front/softplus.py b/model-optimizer/extensions/front/softplus.py
deleted file mode 100644 (file)
index d6d082e..0000000
+++ /dev/null
@@ -1,44 +0,0 @@
-"""
- Copyright (C) 2020 Intel Corporation
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-"""
-
-from extensions.ops.activation_ops import Exp, Log
-from extensions.ops.elementwise import Add
-from mo.front.common.partial_infer.utils import float_array
-from mo.front.common.replacement import FrontReplacementOp
-from mo.front.tf.graph_utils import create_op_node_with_second_input
-from mo.graph.graph import Graph, rename_nodes
-
-
-class SoftPlus(FrontReplacementOp):
-    """
-    The transformation replaces SoftPlus(x) with log(1.0 + exp(x)).
-    """
-    op = 'SoftPlus'
-    enabled = True
-
-    def replace_sub_graph(self, graph: Graph, match: dict):
-        softplus = match['op']
-
-        name = softplus.soft_get('name', softplus.id)
-        exp_node = Exp(graph, {'name': name + '/Exp'}).create_node()
-        add_node = create_op_node_with_second_input(graph, Add, float_array([1.0]), {'name': name + '/Add'})
-        log_node = Log(graph, {'name': name + '/Log'}).create_node()
-        rename_nodes([(softplus, name + '/Log'), (log_node, name)])
-
-        softplus.in_port(0).get_connection().set_destination(exp_node.in_port(0))
-        add_node.in_port(0).connect(exp_node.out_port(0))
-        log_node.in_port(0).connect(add_node.out_port(0))
-        softplus.out_port(0).get_connection().set_source(log_node.out_port(0))
diff --git a/model-optimizer/extensions/front/softplus_test.py b/model-optimizer/extensions/front/softplus_test.py
deleted file mode 100644 (file)
index 5020095..0000000
+++ /dev/null
@@ -1,50 +0,0 @@
-"""
- Copyright (C) 2020 Intel Corporation
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-"""
-
-import unittest
-
-from extensions.front.softplus import SoftPlus
-from mo.front.common.partial_infer.utils import int64_array
-from mo.utils.ir_engine.compare_graphs import compare_graphs
-from mo.utils.unittest.graph import build_graph
-
-
-class TestSoftPlus(unittest.TestCase):
-    nodes = {
-        'node_1': {'shape': int64_array([1, 2, 3, 4]), 'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},
-        'softplus': {'value': None, 'kind': 'op', 'op': 'SoftPlus'},
-        'exp': {'value': None, 'kind': 'op', 'op': 'Exp'},
-        'add': {'value': None, 'kind': 'op', 'op': 'Add'},
-        'add_const': {'value': None, 'kind': 'op', 'op': 'Const'},
-        'log': {'value': None, 'kind': 'op', 'op': 'Log'},
-        'last': {'type': None, 'value': None, 'kind': 'op', 'op': 'Result'}
-    }
-
-    def test_softplus_1(self):
-        graph = build_graph(self.nodes, [('node_1', 'softplus'),
-                                         ('softplus', 'last')], nodes_with_edges_only=True)
-
-        graph_ref = build_graph(self.nodes, [('node_1', 'exp'),
-                                             ('exp', 'add'),
-                                             ('add_const', 'add'),
-                                             ('add', 'log'),
-                                             ('log', 'last')], nodes_with_edges_only=True)
-
-        graph.stage = 'front'
-        SoftPlus().find_and_replace_pattern(graph)
-
-        (flag, resp) = compare_graphs(graph, graph_ref, 'last', check_op_attrs=True)
-        self.assertTrue(flag, resp)
index a05dba3..c6c3f3e 100644 (file)
@@ -28,13 +28,14 @@ class Activation(Op):
     enabled = False
     operation = None
     op = None
+    version = 'opset1'
 
     def __init__(self, graph: Graph, attrs: dict):
         super().__init__(graph, {
             'type': self.op,
             'op': self.op,
             'operation': self.operation,
-            'version': 'opset1',
+            'version': self.version,
             'infer': self.infer,
             'in_ports_count': 1,
             'out_ports_count': 1,
@@ -67,13 +68,9 @@ class Asin(Activation):
 
 class Asinh(Activation):
     op = 'Asinh'
+    version = 'opset4'
     operation = staticmethod(lambda x: np.arcsinh(x))
 
-    def __init__(self, graph: Graph, attrs: dict):
-        sp_attrs = {'version': 'opset4'}
-        sp_attrs.update(attrs)
-        super().__init__(graph, sp_attrs)
-
 
 class Cos(Activation):
     op = 'Cos'
@@ -92,13 +89,9 @@ class Acos(Activation):
 
 class Acosh(Activation):
     op = 'Acosh'
+    version = 'opset4'
     operation = staticmethod(lambda x: np.arccosh(x))
 
-    def __init__(self, graph: Graph, attrs: dict):
-        sp_attrs = {'version': 'opset4'}
-        sp_attrs.update(attrs)
-        super().__init__(graph, sp_attrs)
-
 
 class Tan(Activation):
     op = 'Tan'
@@ -117,13 +110,9 @@ class Atan(Activation):
 
 class Atanh(Activation):
     op = 'Atanh'
+    version = 'opset4'
     operation = staticmethod(lambda x: np.arctanh(x))
 
-    def __init__(self, graph: Graph, attrs: dict):
-        sp_attrs = {'version': 'opset4'}
-        sp_attrs.update(attrs)
-        super().__init__(graph, sp_attrs)
-
 
 class ReLU6(AttributedClamp):
     op = 'ReLU6'
@@ -243,29 +232,17 @@ class Log(Activation):
     operation = staticmethod(lambda x: np.log(x))
 
 
-class SoftPlus(Op):
+class SoftPlus(Activation):
     op = 'SoftPlus'
-
-    def __init__(self, graph: Graph, attrs: dict):
-        mandatory_props = {
-            'op': self.op,
-            'type': None,
-            'in_ports_count': 1,
-            'out_ports_count': 1,
-            'infer': None
-        }
-        super().__init__(graph, mandatory_props, attrs)
+    version = 'opset4'
+    operation = staticmethod(lambda x: np.log(np.exp(x) + 1.0))
 
 
 class Mish(Activation):
     op = 'Mish'
+    version = 'opset4'
+    operation = staticmethod(lambda x: x * np.tanh(np.log(np.exp(x) + 1.0)))
 
-    def __init__(self, graph: Graph, attrs: dict):
-        sp_attrs = {'version': 'opset4'}
-        sp_attrs.update(attrs)
-        super().__init__(graph, sp_attrs)
-
 
 class Swish(Op):
     op = 'Swish'
diff --git a/ngraph/core/include/ngraph/op/softplus.hpp b/ngraph/core/include/ngraph/op/softplus.hpp
new file mode 100644 (file)
index 0000000..5bec1fa
--- /dev/null
@@ -0,0 +1,52 @@
+//*****************************************************************************
+// Copyright 2017-2020 Intel Corporation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//*****************************************************************************
+
+#pragma once
+
+#include "ngraph/node.hpp"
+#include "ngraph/op/op.hpp"
+
+namespace ngraph
+{
+    namespace op
+    {
+        namespace v4
+        {
+            /// \brief SoftPlus operation, a smooth approximation of ReLU
+            /// f(x) = ln(exp(x) + 1.)
+            ///
+            class NGRAPH_API SoftPlus : public ngraph::op::Op
+            {
+            public:
+                NGRAPH_RTTI_DECLARATION;
+
+                SoftPlus() = default;
+                /// \brief Constructs a SoftPlus operation.
+                ///
+                /// \param data Input tensor
+                SoftPlus(const Output<Node>& arg);
+                bool visit_attributes(AttributeVisitor& visitor) override;
+                void validate_and_infer_types() override;
+
+                virtual std::shared_ptr<Node>
+                    clone_with_new_inputs(const OutputVector& new_args) const override;
+
+                bool evaluate(const HostTensorVector& outputs,
+                              const HostTensorVector& inputs) const override;
+            };
+        }
+    }
+}
index 1f7bac8..f070bac 100644 (file)
 #include "ngraph/op/sinh.hpp"
 #include "ngraph/op/slice.hpp"
 #include "ngraph/op/softmax.hpp"
+#include "ngraph/op/softplus.hpp"
 #include "ngraph/op/space_to_batch.hpp"
 #include "ngraph/op/space_to_depth.hpp"
 #include "ngraph/op/split.hpp"
index 61aae11..e5e4428 100644 (file)
@@ -161,4 +161,5 @@ NGRAPH_OP(NonMaxSuppression, ngraph::op::v4)
 NGRAPH_OP(Mish, ngraph::op::v4)
 NGRAPH_OP(ReduceL1, ngraph::op::v4)
 NGRAPH_OP(ReduceL2, ngraph::op::v4)
+NGRAPH_OP(SoftPlus, ngraph::op::v4)
 NGRAPH_OP(Swish, ngraph::op::v4)
diff --git a/ngraph/core/reference/include/ngraph/runtime/reference/softplus.hpp b/ngraph/core/reference/include/ngraph/runtime/reference/softplus.hpp
new file mode 100644 (file)
index 0000000..fbbbba4
--- /dev/null
@@ -0,0 +1,38 @@
+//*****************************************************************************
+// Copyright 2017-2020 Intel Corporation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//*****************************************************************************
+
+#pragma once
+
+#include <cmath>
+#include <cstddef>
+
+namespace ngraph
+{
+    namespace runtime
+    {
+        namespace reference
+        {
+            template <typename T>
+            void softplus(const T* arg, T* out, size_t count)
+            {
+                for (size_t i = 0; i < count; i++)
+                {
+                    out[i] = std::log(std::exp(arg[i]) + 1.0);
+                }
+            }
+        }
+    }
+}
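The reference kernel follows the literal definition, so std::exp overflows
float32 for inputs above roughly 88. A numerically stable reformulation, shown
only as a sketch and not part of this patch, uses the identity
softplus(x) = max(x, 0) + log1p(exp(-|x|)), which is finite for all x:

#include <algorithm>
#include <cmath>

// Sketch only: overflow-free SoftPlus for float/double. For x >= 0 this is
// x + log1p(exp(-x)); for x < 0 it is log1p(exp(x)); both equal log(exp(x) + 1).
template <typename T>
T stable_softplus(T x)
{
    return std::max(x, T(0)) + std::log1p(std::exp(-std::abs(x)));
}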
diff --git a/ngraph/core/src/op/softplus.cpp b/ngraph/core/src/op/softplus.cpp
new file mode 100644 (file)
index 0000000..fc63ed4
--- /dev/null
@@ -0,0 +1,85 @@
+//*****************************************************************************
+// Copyright 2017-2020 Intel Corporation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//*****************************************************************************
+
+#include "ngraph/op/softplus.hpp"
+#include "itt.hpp"
+#include "ngraph/attribute_visitor.hpp"
+#include "ngraph/runtime/host_tensor.hpp"
+#include "ngraph/runtime/reference/softplus.hpp"
+
+using namespace std;
+using namespace ngraph;
+
+NGRAPH_RTTI_DEFINITION(op::v4::SoftPlus, "SoftPlus", 4);
+
+op::v4::SoftPlus::SoftPlus(const Output<Node>& arg)
+    : Op({arg})
+{
+    constructor_validate_and_infer_types();
+}
+
+bool op::v4::SoftPlus::visit_attributes(AttributeVisitor& visitor)
+{
+    return true;
+}
+
+void op::v4::SoftPlus::validate_and_infer_types()
+{
+    set_output_size(1);
+    set_output_type(0, get_input_element_type(0), get_input_partial_shape(0));
+}
+
+shared_ptr<Node> op::v4::SoftPlus::clone_with_new_inputs(const OutputVector& new_args) const
+{
+    check_new_args_count(this, new_args);
+    return make_shared<op::v4::SoftPlus>(new_args.at(0));
+}
+
+namespace
+{
+    template <element::Type_t ET>
+    inline bool evaluate(const HostTensorPtr& arg, const HostTensorPtr& out, const size_t count)
+    {
+        using T = typename element_type_traits<ET>::value_type;
+        runtime::reference::softplus<T>(arg->get_data_ptr<ET>(), out->get_data_ptr<ET>(), count);
+        return true;
+    }
+
+    bool evaluate_softplus(const HostTensorPtr& arg, const HostTensorPtr& out, const size_t count)
+    {
+        bool rc = true;
+        out->set_unary(arg);
+
+        switch (arg->get_element_type())
+        {
+            TYPE_CASE(bf16)(arg, out, count);
+            break;
+            TYPE_CASE(f16)(arg, out, count);
+            break;
+            TYPE_CASE(f32)(arg, out, count);
+            break;
+        default: rc = false; break;
+        }
+        return rc;
+    }
+}
+
+bool op::v4::SoftPlus::evaluate(const HostTensorVector& outputs,
+                                const HostTensorVector& inputs) const
+{
+    OV_ITT_SCOPED_TASK(itt::domains::nGraphOp, "op::SoftPlus::evaluate");
+    return evaluate_softplus(inputs[0], outputs[0], shape_size(get_output_shape(0)));
+}
index 19e5b36..f8ed499 100644 (file)
@@ -149,6 +149,7 @@ from ngraph.opset4 import sign
 from ngraph.opset4 import sin
 from ngraph.opset4 import sinh
 from ngraph.opset4 import softmax
+from ngraph.opset4 import softplus
 from ngraph.opset4 import space_to_batch
 from ngraph.opset4 import space_to_depth
 from ngraph.opset4 import split
index 2980f88..07d2c07 100644 (file)
@@ -137,6 +137,7 @@ from ngraph.opset1.ops import sign
 from ngraph.opset1.ops import sin
 from ngraph.opset1.ops import sinh
 from ngraph.opset1.ops import softmax
+from ngraph.opset4.ops import softplus
 from ngraph.opset2.ops import space_to_batch
 from ngraph.opset1.ops import space_to_depth
 from ngraph.opset1.ops import split
index 00e31b0..8149a32 100644 (file)
@@ -140,6 +140,16 @@ def non_max_suppression(
 
 
 @nameable_op
+def softplus(data: NodeInput, name: Optional[str] = None) -> Node:
+    """Apply SoftPlus operation on each element of input tensor.
+
+    :param data: The tensor providing input data.
+    :return: The new node with SoftPlus operation applied on each element.
+    """
+    return _get_node_factory_opset4().create("SoftPlus", as_nodes(data), {})
+
+
+@nameable_op
 def mish(data: NodeInput, name: Optional[str] = None,) -> Node:
     """Return a node which performs Mish.
 
index cd5a2d0..c1e6536 100644 (file)
@@ -75,6 +75,7 @@ set(SRC
     op_eval/non_zero.cpp
     op_eval/reduce_l1.cpp
     op_eval/reduce_l2.cpp
+    op_eval/softplus.cpp
     op_eval/split.cpp
     op_eval/strided_slice.cpp
     op_eval/variadic_split.cpp
@@ -163,6 +164,7 @@ set(SRC
     type_prop/shape_of.cpp
     type_prop/shuffle_channels.cpp
     type_prop/slice.cpp
+    type_prop/softplus.cpp
     type_prop/space_to_batch.cpp
     type_prop/space_to_depth.cpp
     type_prop/split.cpp
diff --git a/ngraph/test/op_eval/softplus.cpp b/ngraph/test/op_eval/softplus.cpp
new file mode 100644 (file)
index 0000000..5404e74
--- /dev/null
@@ -0,0 +1,48 @@
+//*****************************************************************************
+// Copyright 2017-2020 Intel Corporation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//*****************************************************************************
+
+#include <string>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "ngraph/op/softplus.hpp"
+#include "ngraph/runtime/host_tensor.hpp"
+#include "ngraph/validation_util.hpp"
+#include "runtime/backend.hpp"
+#include "util/test_tools.hpp"
+
+using namespace std;
+using namespace ngraph;
+
+TEST(op_eval, softplus_1D)
+{
+    auto p = make_shared<op::Parameter>(element::f32, Shape{4});
+    auto softplus = make_shared<op::v4::SoftPlus>(p);
+    auto fun = make_shared<Function>(OutputVector{softplus}, ParameterVector{p});
+
+    std::vector<float> inputs{-1.0, 0.0, 1.0, 20.0};
+    std::vector<float> expected_result{0.31326166, 0.69314718, 1.3132616, 20.0};
+
+    auto result = make_shared<HostTensor>();
+    ASSERT_TRUE(
+        fun->evaluate({result}, {make_host_tensor<element::Type_t::f32>(Shape{4}, inputs)}));
+    EXPECT_EQ(result->get_element_type(), element::f32);
+    EXPECT_EQ(result->get_shape(), Shape{4});
+    auto result_data = read_vector<float>(result);
+    for (size_t i = 0; i < inputs.size(); i++)
+        EXPECT_NEAR(result_data[i], expected_result[i], 0.000001);
+}
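A note on the last test value: softplus(20) = ln(exp(20) + 1)
= 20 + log1p(exp(-20)) ~= 20 + 2.1e-9, which rounds to exactly 20.0f in
float32, so the expected result of 20.0 holds well inside the 1e-6 tolerance.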
diff --git a/ngraph/test/type_prop/softplus.cpp b/ngraph/test/type_prop/softplus.cpp
new file mode 100644 (file)
index 0000000..7e40369
--- /dev/null
@@ -0,0 +1,54 @@
+//*****************************************************************************
+// Copyright 2017-2020 Intel Corporation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//*****************************************************************************
+
+#include "gtest/gtest.h"
+#include "ngraph/ngraph.hpp"
+#include "util/type_prop.hpp"
+
+using namespace std;
+using namespace ngraph;
+
+TEST(type_prop, softplus)
+{
+    auto data = make_shared<op::Parameter>(element::f32, Shape{1, 3, 6});
+    auto softplus_func = make_shared<op::v4::SoftPlus>(data);
+    EXPECT_EQ(softplus_func->get_element_type(), element::f32);
+    EXPECT_EQ(softplus_func->get_shape(), (Shape{1, 3, 6}));
+}
+
+TEST(type_prop, softplus_partial)
+{
+    auto data = make_shared<op::Parameter>(element::f32, PartialShape{1, Dimension::dynamic(), 6});
+    auto softplus_func = make_shared<op::v4::SoftPlus>(data);
+    EXPECT_EQ(softplus_func->get_element_type(), element::f32);
+    ASSERT_TRUE(softplus_func->get_output_partial_shape(0).same_scheme(
+        (PartialShape{1, Dimension::dynamic(), 6})));
+
+    // rank unknown
+    auto softplus_partial = make_shared<op::v4::SoftPlus>(
+        make_shared<op::Parameter>(element::f32, PartialShape::dynamic()));
+    ASSERT_TRUE(softplus_partial->get_output_partial_shape(0).same_scheme(PartialShape::dynamic()));
+}
+
+TEST(type_prop, softplus_partial_static_rank)
+{
+    auto data = make_shared<op::Parameter>(element::f32, PartialShape{1, Dimension::dynamic(), 6});
+    auto softplus_func = make_shared<op::v4::SoftPlus>(data);
+    EXPECT_EQ(softplus_func->get_element_type(), element::f32);
+    ASSERT_TRUE(softplus_func->get_output_partial_shape(0).same_scheme(
+        (PartialShape{1, Dimension::dynamic(), 6})));
+    ASSERT_TRUE(softplus_func->get_output_partial_shape(0).rank().is_static());
+}