From 84c7b048dba423f6ba3c93ddcb9a4761519ba267 Mon Sep 17 00:00:00 2001 From: Anton Chetverikov Date: Tue, 25 Aug 2020 19:36:39 +0300 Subject: [PATCH] SoftPlus operation implementation (#1365) --- inference-engine/src/cldnn_engine/cldnn_engine.cpp | 3 +- .../src/mkldnn_plugin/mkldnn_plugin.cpp | 1 + .../src/mkldnn_plugin/nodes/list_tbl.hpp | 2 +- inference-engine/src/mkldnn_plugin/nodes/math.cpp | 8 +- .../transformations/softplus_decomposition.hpp | 29 ++++++ .../include/transformations/softplus_fusion.hpp | 29 ++++++ .../common_optimizations/common_optimizations.cpp | 2 + .../convert_opset3_to_opset2.cpp | 2 + .../src/transformations/softplus_decomposition.cpp | 41 ++++++++ .../src/transformations/softplus_fusion.cpp | 53 ++++++++++ .../graph_transformer/src/frontend/frontend.cpp | 4 +- .../softplus_decomposition_test.cpp | 48 +++++++++ .../transformations/softplus_fusion_test.cpp | 109 +++++++++++++++++++++ .../single_layer_tests/activation.cpp | 3 +- .../single_layer_tests/activation.cpp | 3 +- .../single_layer_tests/activation.cpp | 3 +- .../include/single_layer_tests/activation.hpp | 1 + .../ngraph_functions/utils/ngraph_helpers.hpp | 3 +- .../tests/ngraph_functions/src/activation.cpp | 2 + .../mkldnn/graph/layers/extensions/math_tests.cpp | 4 +- model-optimizer/automation/package_BOM.txt | 1 - model-optimizer/extensions/front/softplus.py | 44 --------- model-optimizer/extensions/front/softplus_test.py | 50 ---------- model-optimizer/extensions/ops/activation_ops.py | 41 ++------ ngraph/core/include/ngraph/op/softplus.hpp | 52 ++++++++++ ngraph/core/include/ngraph/ops.hpp | 1 + ngraph/core/include/ngraph/opsets/opset4_tbl.hpp | 1 + .../include/ngraph/runtime/reference/softplus.hpp | 38 +++++++ ngraph/core/src/op/softplus.cpp | 85 ++++++++++++++++ ngraph/python/src/ngraph/__init__.py | 1 + ngraph/python/src/ngraph/opset4/__init__.py | 1 + ngraph/python/src/ngraph/opset4/ops.py | 10 ++ ngraph/test/CMakeLists.txt | 2 + ngraph/test/op_eval/softplus.cpp | 48 
+++++++++ ngraph/test/type_prop/softplus.cpp | 54 ++++++++++ 35 files changed, 639 insertions(+), 140 deletions(-) create mode 100644 inference-engine/src/transformations/include/transformations/softplus_decomposition.hpp create mode 100644 inference-engine/src/transformations/include/transformations/softplus_fusion.hpp create mode 100644 inference-engine/src/transformations/src/transformations/softplus_decomposition.cpp create mode 100644 inference-engine/src/transformations/src/transformations/softplus_fusion.cpp create mode 100644 inference-engine/tests/functional/inference_engine/transformations/softplus_decomposition_test.cpp create mode 100644 inference-engine/tests/functional/inference_engine/transformations/softplus_fusion_test.cpp delete mode 100644 model-optimizer/extensions/front/softplus.py delete mode 100644 model-optimizer/extensions/front/softplus_test.py create mode 100644 ngraph/core/include/ngraph/op/softplus.hpp create mode 100644 ngraph/core/reference/include/ngraph/runtime/reference/softplus.hpp create mode 100644 ngraph/core/src/op/softplus.cpp create mode 100644 ngraph/test/op_eval/softplus.cpp create mode 100644 ngraph/test/type_prop/softplus.cpp diff --git a/inference-engine/src/cldnn_engine/cldnn_engine.cpp b/inference-engine/src/cldnn_engine/cldnn_engine.cpp index ecde611..bcf5770 100644 --- a/inference-engine/src/cldnn_engine/cldnn_engine.cpp +++ b/inference-engine/src/cldnn_engine/cldnn_engine.cpp @@ -94,7 +94,8 @@ InferenceEngine::ICNNNetwork::Ptr clDNNEngine::CloneAndTransformNetwork(const In std::dynamic_pointer_cast(node) || std::dynamic_pointer_cast(node) || std::dynamic_pointer_cast(node) || - std::dynamic_pointer_cast(node); + std::dynamic_pointer_cast(node) || + std::dynamic_pointer_cast(node); }; auto nGraphFunc = clonedNetwork->getFunction(); // Disable shape inference (WA for generic operations) diff --git a/inference-engine/src/mkldnn_plugin/mkldnn_plugin.cpp b/inference-engine/src/mkldnn_plugin/mkldnn_plugin.cpp index 
0d36510..b783470 100644 --- a/inference-engine/src/mkldnn_plugin/mkldnn_plugin.cpp +++ b/inference-engine/src/mkldnn_plugin/mkldnn_plugin.cpp @@ -84,6 +84,7 @@ static void Transformation(ICNNNetwork::Ptr& clonedNetwork) { std::dynamic_pointer_cast(node) || std::dynamic_pointer_cast(node) || std::dynamic_pointer_cast(node) || + std::dynamic_pointer_cast(node) || std::dynamic_pointer_cast(node); }; auto nGraphFunc = clonedNetwork->getFunction(); diff --git a/inference-engine/src/mkldnn_plugin/nodes/list_tbl.hpp b/inference-engine/src/mkldnn_plugin/nodes/list_tbl.hpp index 2015c4c..5402276 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/list_tbl.hpp +++ b/inference-engine/src/mkldnn_plugin/nodes/list_tbl.hpp @@ -31,7 +31,7 @@ MKLDNN_EXTENSION_NODE(MathImpl, Selu); MKLDNN_EXTENSION_NODE(MathImpl, Sign); MKLDNN_EXTENSION_NODE(MathImpl, Sin); MKLDNN_EXTENSION_NODE(MathImpl, Sinh); -MKLDNN_EXTENSION_NODE(MathImpl, Softplus); +MKLDNN_EXTENSION_NODE(MathImpl, SoftPlus); MKLDNN_EXTENSION_NODE(MathImpl, Softsign); MKLDNN_EXTENSION_NODE(MathImpl, Tan); MKLDNN_EXTENSION_NODE(ExperimentalDetectronTopKROIsImpl, ExperimentalDetectronTopKROIs); diff --git a/inference-engine/src/mkldnn_plugin/nodes/math.cpp b/inference-engine/src/mkldnn_plugin/nodes/math.cpp index 2920bad..e690662 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/math.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/math.cpp @@ -82,7 +82,7 @@ public: else if (math_func == "Sign") mathFunction = Math::Sign; else if (math_func == "Sin") mathFunction = Math::Sin; else if (math_func == "Sinh") mathFunction = Math::Sinh; - else if (math_func == "Softplus") mathFunction = Math::Softplus; + else if (math_func == "SoftPlus") mathFunction = Math::SoftPlus; else if (math_func == "Softsign") mathFunction = Math::Softsign; else if (math_func == "Tan") mathFunction = Math::Tan; else @@ -212,7 +212,7 @@ public: dst_data[i] = sinhf(src_data[i]); }); break; - case Math::Softplus: + case Math::SoftPlus: 
parallel_for(dataSize, [&](size_t i) { dst_data[i] = logf(expf(src_data[i]) + 1); }); @@ -260,7 +260,7 @@ private: Sign, Sin, Sinh, - Softplus, + SoftPlus, Softsign, Tan }; @@ -291,7 +291,7 @@ REG_FACTORY_FOR(MathImpl, Selu); REG_FACTORY_FOR(MathImpl, Sign); REG_FACTORY_FOR(MathImpl, Sin); REG_FACTORY_FOR(MathImpl, Sinh); -REG_FACTORY_FOR(MathImpl, Softplus); +REG_FACTORY_FOR(MathImpl, SoftPlus); REG_FACTORY_FOR(MathImpl, Softsign); REG_FACTORY_FOR(MathImpl, Tan); diff --git a/inference-engine/src/transformations/include/transformations/softplus_decomposition.hpp b/inference-engine/src/transformations/include/transformations/softplus_decomposition.hpp new file mode 100644 index 0000000..9b167c5 --- /dev/null +++ b/inference-engine/src/transformations/include/transformations/softplus_decomposition.hpp @@ -0,0 +1,29 @@ +// Copyright (C) 2020 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include +#include + +#include +#include + +namespace ngraph { +namespace pass { + +class TRANSFORMATIONS_API SoftPlusDecomposition; + +} // namespace pass +} // namespace ngraph + +/** + * @ingroup ie_transformation_common_api + * @brief SoftPlusDecomposition transformation replaces SoftPlus op to + * group of operations: log(exp(x) + 1). 
+ */ +class ngraph::pass::SoftPlusDecomposition: public ngraph::pass::MatcherPass { +public: + SoftPlusDecomposition(); +}; diff --git a/inference-engine/src/transformations/include/transformations/softplus_fusion.hpp b/inference-engine/src/transformations/include/transformations/softplus_fusion.hpp new file mode 100644 index 0000000..3da9e88 --- /dev/null +++ b/inference-engine/src/transformations/include/transformations/softplus_fusion.hpp @@ -0,0 +1,29 @@ +// Copyright (C) 2020 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include +#include + +#include +#include + +namespace ngraph { +namespace pass { + +class TRANSFORMATIONS_API SoftPlusFusion; + +} // namespace pass +} // namespace ngraph + +/** + * @ingroup ie_transformation_common_api + * @brief SoftPlusFusion transformation replaces group of + * operations: log(exp(x) + 1) to SoftPlus op. + */ +class ngraph::pass::SoftPlusFusion: public ngraph::pass::MatcherPass { +public: + SoftPlusFusion(); +}; diff --git a/inference-engine/src/transformations/src/transformations/common_optimizations/common_optimizations.cpp b/inference-engine/src/transformations/src/transformations/common_optimizations/common_optimizations.cpp index 8d43916..7f988f7 100644 --- a/inference-engine/src/transformations/src/transformations/common_optimizations/common_optimizations.cpp +++ b/inference-engine/src/transformations/src/transformations/common_optimizations/common_optimizations.cpp @@ -16,6 +16,7 @@ #include "transformations/init_node_info.hpp" #include "transformations/itt.hpp" #include "transformations/mish_fusion.hpp" +#include "transformations/softplus_fusion.hpp" #include "transformations/swish_fusion.hpp" #include "transformations/hswish_fusion.hpp" @@ -40,6 +41,7 @@ bool ngraph::pass::CommonOptimizations::run_on_function(std::shared_ptr(); // partially depends on CF manager.register_pass(); manager.register_pass(); + manager.register_pass(); manager.register_pass(); manager.register_pass(); 
manager.register_pass(); diff --git a/inference-engine/src/transformations/src/transformations/convert_opset3_to_opset2/convert_opset3_to_opset2.cpp b/inference-engine/src/transformations/src/transformations/convert_opset3_to_opset2/convert_opset3_to_opset2.cpp index 8b8873d..c579244 100644 --- a/inference-engine/src/transformations/src/transformations/convert_opset3_to_opset2/convert_opset3_to_opset2.cpp +++ b/inference-engine/src/transformations/src/transformations/convert_opset3_to_opset2/convert_opset3_to_opset2.cpp @@ -10,6 +10,7 @@ #include "transformations/convert_opset3_to_opset2/convert_shuffle_channels3.hpp" #include "transformations/convert_opset3_to_opset2/convert_topk3.hpp" #include "transformations/convert_extract_image_patches_to_reorg_yolo.hpp" +#include "transformations/softplus_decomposition.hpp" #include "transformations/itt.hpp" #include @@ -28,6 +29,7 @@ bool ngraph::pass::ConvertOpSet3ToOpSet2::run_on_function(std::shared_ptr(); manager.register_pass(); manager.register_pass(); + manager.register_pass(); manager.set_callback(m_transformation_callback); manager.run_passes(f); diff --git a/inference-engine/src/transformations/src/transformations/softplus_decomposition.cpp b/inference-engine/src/transformations/src/transformations/softplus_decomposition.cpp new file mode 100644 index 0000000..bfac0af --- /dev/null +++ b/inference-engine/src/transformations/src/transformations/softplus_decomposition.cpp @@ -0,0 +1,41 @@ +// Copyright (C) 2018-2020 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "transformations/softplus_decomposition.hpp" + +#include +#include + +#include +#include +#include + +ngraph::pass::SoftPlusDecomposition::SoftPlusDecomposition() { + // decomposes SoftPlus(x) operation into ln(exp(x) + 1.0) + auto input = ngraph::pattern::any_input(); + auto softplus = std::make_shared(input); + + ngraph::matcher_pass_callback callback = [=](ngraph::pattern::Matcher& m) { + auto &pattern_to_output = 
m.get_pattern_value_map(); + auto softplus_input = pattern_to_output.at(input); + auto softplus_node = pattern_to_output.at(softplus).get_node_shared_ptr(); + + if (m_transformation_callback(softplus_node)) { + return false; + } + + auto exp = std::make_shared(softplus_input); + auto add = std::make_shared(exp, + opset4::Constant::create(ngraph::element::f32, ngraph::Shape{1}, {1.0})); + auto log = std::make_shared(add); + + log->set_friendly_name(softplus_node->get_friendly_name()); + ngraph::copy_runtime_info(softplus_node, {exp, add, log}); + ngraph::replace_node(softplus_node, log); + return true; + }; + + auto m = std::make_shared(softplus, "SoftPlusDecomposition"); + register_matcher(m, callback); +} diff --git a/inference-engine/src/transformations/src/transformations/softplus_fusion.cpp b/inference-engine/src/transformations/src/transformations/softplus_fusion.cpp new file mode 100644 index 0000000..a2a3056 --- /dev/null +++ b/inference-engine/src/transformations/src/transformations/softplus_fusion.cpp @@ -0,0 +1,53 @@ +// Copyright (C) 2018-2020 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "transformations/softplus_fusion.hpp" + +#include +#include + +#include +#include +#include + +ngraph::pass::SoftPlusFusion::SoftPlusFusion() { + // fuses ln(exp(x) + 1.0) operations into SoftPlus(x) + auto input = ngraph::pattern::any_input(); + auto exp = std::make_shared(input); + auto add_constant = ngraph::pattern::wrap_type(); + auto add = std::make_shared(exp, add_constant); + auto log = std::make_shared(add); + + ngraph::matcher_pass_callback callback = [=](ngraph::pattern::Matcher &m) { + auto &pattern_to_output = m.get_pattern_value_map(); + auto exp_input = pattern_to_output.at(input); + + auto constant = std::dynamic_pointer_cast(pattern_to_output.at(add_constant).get_node_shared_ptr()); + + if (constant == nullptr) { + return false; + } + + if (constant->get_element_type() == ngraph::element::f32 || constant->get_element_type() 
== ngraph::element::f16) { + auto data = constant->cast_vector(); + if (data.size() != 1 || data[0] != 1.0) { + return false; + } + } else { + return false; + } + + auto softplus = std::make_shared(exp_input); + + softplus->set_friendly_name(m.get_match_root()->get_friendly_name()); + ngraph::copy_runtime_info({pattern_to_output.at(log).get_node_shared_ptr(), + pattern_to_output.at(add).get_node_shared_ptr(), + pattern_to_output.at(exp).get_node_shared_ptr()}, softplus); + ngraph::replace_node(m.get_match_root(), softplus); + return true; + }; + + auto m = std::make_shared(log, "SoftPlusFusion"); + register_matcher(m, callback); +} diff --git a/inference-engine/src/vpu/graph_transformer/src/frontend/frontend.cpp b/inference-engine/src/vpu/graph_transformer/src/frontend/frontend.cpp index a504d74..b4b48be 100644 --- a/inference-engine/src/vpu/graph_transformer/src/frontend/frontend.cpp +++ b/inference-engine/src/vpu/graph_transformer/src/frontend/frontend.cpp @@ -22,6 +22,7 @@ #include #include #include +#include #include #include #include @@ -387,7 +388,8 @@ ModelPtr FrontEnd::runCommonPasses(ie::ICNNNetwork& network, const UnsupportedLa const auto transformationsPredicate = [](const std::shared_ptr &node) -> bool { return std::dynamic_pointer_cast(node) || (std::dynamic_pointer_cast(node) && - std::dynamic_pointer_cast(node->input_value(0).get_node_shared_ptr())); + std::dynamic_pointer_cast(node->input_value(0).get_node_shared_ptr())) || + std::dynamic_pointer_cast(node); }; auto nGraphFunc = originalOrConvertNetwork->getFunction(); diff --git a/inference-engine/tests/functional/inference_engine/transformations/softplus_decomposition_test.cpp b/inference-engine/tests/functional/inference_engine/transformations/softplus_decomposition_test.cpp new file mode 100644 index 0000000..ad66191 --- /dev/null +++ b/inference-engine/tests/functional/inference_engine/transformations/softplus_decomposition_test.cpp @@ -0,0 +1,48 @@ +// Copyright (C) 2020 Intel Corporation +// 
SPDX-License-Identifier: Apache-2.0 +// + +#include + +#include +#include + +#include +#include +#include +#include +#include +#include + +#include "common_test_utils/ngraph_test_utils.hpp" + +using namespace testing; + +TEST(TransformationTests, SoftPlusDecomposition) { + std::shared_ptr f(nullptr), f_ref(nullptr); + { + auto data = std::make_shared(ngraph::element::f32, ngraph::Shape{3, 1, 2}); + auto softplus = std::make_shared(data); + + f = std::make_shared(ngraph::NodeVector{softplus}, ngraph::ParameterVector{data}); + + ngraph::pass::Manager manager; + manager.register_pass(); + manager.register_pass(); + manager.run_passes(f); + ASSERT_NO_THROW(check_rt_info(f)); + } + + { + auto input = std::make_shared(ngraph::element::f32, ngraph::Shape{3, 1, 2}); + auto exp = std::make_shared(input); + auto add = std::make_shared(exp, + ngraph::opset4::Constant::create(ngraph::element::f32, ngraph::Shape{1}, {1.0})); + auto log = std::make_shared(add); + + f_ref = std::make_shared(ngraph::NodeVector{log}, ngraph::ParameterVector{input}); + } + + auto res = compare_functions(f, f_ref); + ASSERT_TRUE(res.first) << res.second; +} diff --git a/inference-engine/tests/functional/inference_engine/transformations/softplus_fusion_test.cpp b/inference-engine/tests/functional/inference_engine/transformations/softplus_fusion_test.cpp new file mode 100644 index 0000000..d7a2f9e --- /dev/null +++ b/inference-engine/tests/functional/inference_engine/transformations/softplus_fusion_test.cpp @@ -0,0 +1,109 @@ +// Copyright (C) 2020 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include + +#include +#include + +#include +#include +#include +#include +#include +#include + +#include "common_test_utils/ngraph_test_utils.hpp" + +using namespace testing; + +TEST(TransformationTests, SoftPlusFusing) { + std::shared_ptr f(nullptr), f_ref(nullptr); + { + auto input0 = std::make_shared(ngraph::element::f32, ngraph::Shape{3, 1, 2}); + auto exp = std::make_shared(input0); + auto 
input_const = ngraph::opset4::Constant::create(ngraph::element::f32, ngraph::Shape{1}, {1.0}); + auto add = std::make_shared(exp, input_const); + auto log = std::make_shared(add); + + f = std::make_shared(ngraph::NodeVector{log}, ngraph::ParameterVector{input0}); + + ngraph::pass::Manager manager; + manager.register_pass(); + manager.register_pass(); + manager.run_passes(f); + ASSERT_NO_THROW(check_rt_info(f)); + } + + { + auto data = std::make_shared(ngraph::element::f32, ngraph::Shape{3, 1, 2}); + auto softplus = std::make_shared(data); + + f_ref = std::make_shared(ngraph::NodeVector{softplus}, ngraph::ParameterVector{data}); + } + + auto res = compare_functions(f, f_ref); + ASSERT_TRUE(res.first) << res.second; +} + +TEST(TransformationTests, SoftPlusFusingDynamic) { + std::shared_ptr f(nullptr), f_ref(nullptr); + { + auto input0 = std::make_shared(ngraph::element::f32, ngraph::PartialShape::dynamic(1)); + auto exp = std::make_shared(input0); + auto input_const = ngraph::opset4::Constant::create(ngraph::element::f32, ngraph::Shape{1}, {1.0}); + auto add = std::make_shared(exp, input_const); + auto log = std::make_shared(add); + + f = std::make_shared(ngraph::NodeVector{log}, ngraph::ParameterVector{input0}); + + ngraph::pass::Manager manager; + manager.register_pass(); + manager.register_pass(); + manager.run_passes(f); + ASSERT_NO_THROW(check_rt_info(f)); + } + + { + auto data = std::make_shared(ngraph::element::f32, ngraph::PartialShape::dynamic(1)); + auto softplus = std::make_shared(data); + + f_ref = std::make_shared(ngraph::NodeVector{softplus}, ngraph::ParameterVector{data}); + } + + auto res = compare_functions(f, f_ref); + ASSERT_TRUE(res.first) << res.second; +} + +TEST(TransformationTests, SoftPlusFusingNegative) { + std::shared_ptr f(nullptr), f_ref(nullptr); + { + auto input0 = std::make_shared(ngraph::element::f32, ngraph::PartialShape::dynamic(1)); + auto exp = std::make_shared(input0); + auto input_const = 
ngraph::opset4::Constant::create(ngraph::element::f32, ngraph::Shape{1}, {-1.0}); + auto add = std::make_shared(exp, input_const); + auto log = std::make_shared(add); + + f = std::make_shared(ngraph::NodeVector{log}, ngraph::ParameterVector{input0}); + + ngraph::pass::Manager manager; + manager.register_pass(); + manager.register_pass(); + manager.run_passes(f); + ASSERT_NO_THROW(check_rt_info(f)); + } + + { + auto input0 = std::make_shared(ngraph::element::f32, ngraph::PartialShape::dynamic(1)); + auto exp = std::make_shared(input0); + auto input_const = ngraph::opset4::Constant::create(ngraph::element::f32, ngraph::Shape{1}, {-1.0}); + auto add = std::make_shared(exp, input_const); + auto log = std::make_shared(add); + + f_ref = std::make_shared(ngraph::NodeVector{log}, ngraph::ParameterVector{input0}); + } + + auto res = compare_functions(f, f_ref); + ASSERT_TRUE(res.first) << res.second; +} diff --git a/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/activation.cpp b/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/activation.cpp index 286a708..0088bb9 100644 --- a/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/activation.cpp +++ b/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/activation.cpp @@ -48,7 +48,8 @@ const std::vector activationTypes = { Selu, Ceiling, Mish, - HSwish + HSwish, + SoftPlus }; const std::vector activationParamTypes = { diff --git a/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/single_layer_tests/activation.cpp b/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/single_layer_tests/activation.cpp index eb41567..703ac25 100644 --- a/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/single_layer_tests/activation.cpp +++ b/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/single_layer_tests/activation.cpp @@ -43,7 
+43,8 @@ const std::vector activationTypes = { Selu, Ceiling, Mish, - HSwish + HSwish, + SoftPlus }; std::map, std::vector>> basic = { diff --git a/inference-engine/tests/functional/plugin/myriad/shared_tests_instances/single_layer_tests/activation.cpp b/inference-engine/tests/functional/plugin/myriad/shared_tests_instances/single_layer_tests/activation.cpp index 25fba89..cced8e4 100644 --- a/inference-engine/tests/functional/plugin/myriad/shared_tests_instances/single_layer_tests/activation.cpp +++ b/inference-engine/tests/functional/plugin/myriad/shared_tests_instances/single_layer_tests/activation.cpp @@ -22,7 +22,8 @@ const std::vector activationTypes = { Exp, Log, Gelu, - Mish + Mish, + SoftPlus }; std::map, std::vector>> basic = { diff --git a/inference-engine/tests/functional/plugin/shared/include/single_layer_tests/activation.hpp b/inference-engine/tests/functional/plugin/shared/include/single_layer_tests/activation.hpp index 2744a85..6593d5a 100644 --- a/inference-engine/tests/functional/plugin/shared/include/single_layer_tests/activation.hpp +++ b/inference-engine/tests/functional/plugin/shared/include/single_layer_tests/activation.hpp @@ -69,6 +69,7 @@ static std::map activationNames = {ngraph::helpers::ActivationTypes::PReLu, "PReLu"}, {ngraph::helpers::ActivationTypes::Mish, "Mish"}, {ngraph::helpers::ActivationTypes::HSwish, "HSwish"}, + {ngraph::helpers::ActivationTypes::SoftPlus, "SoftPlus"}, }; typedef std::tuple< diff --git a/inference-engine/tests/ngraph_functions/include/ngraph_functions/utils/ngraph_helpers.hpp b/inference-engine/tests/ngraph_functions/include/ngraph_functions/utils/ngraph_helpers.hpp index 11baba4..e1d3b56 100644 --- a/inference-engine/tests/ngraph_functions/include/ngraph_functions/utils/ngraph_helpers.hpp +++ b/inference-engine/tests/ngraph_functions/include/ngraph_functions/utils/ngraph_helpers.hpp @@ -109,7 +109,8 @@ enum ActivationTypes { Ceiling, PReLu, Mish, - HSwish + HSwish, + SoftPlus }; enum EltwiseTypes { diff 
--git a/inference-engine/tests/ngraph_functions/src/activation.cpp b/inference-engine/tests/ngraph_functions/src/activation.cpp index ab8c878..02c83f0 100644 --- a/inference-engine/tests/ngraph_functions/src/activation.cpp +++ b/inference-engine/tests/ngraph_functions/src/activation.cpp @@ -95,6 +95,8 @@ std::shared_ptr makeActivation(const ngraph::Output &in, return std::make_shared(in); case ngraph::helpers::ActivationTypes::HSwish: return std::make_shared(in); + case ngraph::helpers::ActivationTypes::SoftPlus: + return std::make_shared(in); default: throw std::runtime_error("Can't create layer for this activation type"); } diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/math_tests.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/math_tests.cpp index 930fea5..6e7eb38 100644 --- a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/math_tests.cpp +++ b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/extensions/math_tests.cpp @@ -127,7 +127,7 @@ void ref_math( for (i = 0; i < dst_size; i++) { dst_data[i] = sinhf(src_data[i]); } - } else if (math_function == "Softplus") { + } else if (math_function == "SoftPlus") { for (i = 0; i < dst_size; i++) { dst_data[i] = logf(expf(src_data[i]) + 1); } @@ -313,7 +313,7 @@ INSTANTIATE_TEST_CASE_P( math_test_params{ "Sign",{ 3 },{ -0.5f, 0.f, 0.5f },{},{},{},{-1, 0, 1} }, math_test_params{ "Sin",{ 3 },{ -1, 0, 1 },{},{},{},{ -0.841470957f, 0.0f, 0.841470957f } }, math_test_params{ "Sinh",{ 3 },{ -0.5f, 0.f, 0.5f },{},{},{},{ } }, - math_test_params{ "Softplus",{ 3 },{ -1, 0, 1 },{},{},{},{ 0.31326166f, 0.69314718f, 1.31326163f } }, + math_test_params{ "SoftPlus",{ 3 },{ -1, 0, 1 },{},{},{},{ 0.31326166f, 0.69314718f, 1.31326163f } }, math_test_params{ "Softsign",{ 3 },{ -1, 0, 1 },{},{},{},{ -0.5f, 0.f, 0.5f } }, math_test_params{ "Tan",{ 3 },{ -1, 0, 1 },{},{},{},{ -1.55740774f, 0.0f, 1.55740774f } } )); 
diff --git a/model-optimizer/automation/package_BOM.txt b/model-optimizer/automation/package_BOM.txt index 569cf75..bd74604 100644 --- a/model-optimizer/automation/package_BOM.txt +++ b/model-optimizer/automation/package_BOM.txt @@ -326,7 +326,6 @@ extensions/front/reshape_dim_normalizer.py extensions/front/restore_ports.py extensions/front/scatter_normalizer.py extensions/front/softmax.py -extensions/front/softplus.py extensions/front/softsign_replacer.py extensions/front/split_normalizer.py extensions/front/SqueezeNormalize.py diff --git a/model-optimizer/extensions/front/softplus.py b/model-optimizer/extensions/front/softplus.py deleted file mode 100644 index d6d082e..0000000 --- a/model-optimizer/extensions/front/softplus.py +++ /dev/null @@ -1,44 +0,0 @@ -""" - Copyright (C) 2020 Intel Corporation - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -""" - -from extensions.ops.activation_ops import Exp, Log -from extensions.ops.elementwise import Add -from mo.front.common.partial_infer.utils import float_array -from mo.front.common.replacement import FrontReplacementOp -from mo.front.tf.graph_utils import create_op_node_with_second_input -from mo.graph.graph import Graph, rename_nodes - - -class SoftPlus(FrontReplacementOp): - """ - The transformation replaces SoftPlus(x) with log(1.0 + exp(x)). 
- """ - op = 'SoftPlus' - enabled = True - - def replace_sub_graph(self, graph: Graph, match: dict): - softplus = match['op'] - - name = softplus.soft_get('name', softplus.id) - exp_node = Exp(graph, {'name': name + '/Exp'}).create_node() - add_node = create_op_node_with_second_input(graph, Add, float_array([1.0]), {'name': name + '/Add'}) - log_node = Log(graph, {'name': name + '/Log'}).create_node() - rename_nodes([(softplus, name + '/Log'), (log_node, name)]) - - softplus.in_port(0).get_connection().set_destination(exp_node.in_port(0)) - add_node.in_port(0).connect(exp_node.out_port(0)) - log_node.in_port(0).connect(add_node.out_port(0)) - softplus.out_port(0).get_connection().set_source(log_node.out_port(0)) diff --git a/model-optimizer/extensions/front/softplus_test.py b/model-optimizer/extensions/front/softplus_test.py deleted file mode 100644 index 5020095..0000000 --- a/model-optimizer/extensions/front/softplus_test.py +++ /dev/null @@ -1,50 +0,0 @@ -""" - Copyright (C) 2020 Intel Corporation - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
-""" - -import unittest - -from extensions.front.softplus import SoftPlus -from mo.front.common.partial_infer.utils import int64_array -from mo.utils.ir_engine.compare_graphs import compare_graphs -from mo.utils.unittest.graph import build_graph - - -class TestSoftPlus(unittest.TestCase): - nodes = { - 'node_1': {'shape': int64_array([1, 2, 3, 4]), 'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'}, - 'softplus': {'value': None, 'kind': 'op', 'op': 'SoftPlus'}, - 'exp': {'value': None, 'kind': 'op', 'op': 'Exp'}, - 'add': {'value': None, 'kind': 'op', 'op': 'Add'}, - 'add_const': {'value': None, 'kind': 'op', 'op': 'Const'}, - 'log': {'value': None, 'kind': 'op', 'op': 'Log'}, - 'last': {'type': None, 'value': None, 'kind': 'op', 'op': 'Result'} - } - - def test_softplus_1(self): - graph = build_graph(self.nodes, [('node_1', 'softplus'), - ('softplus', 'last')], nodes_with_edges_only=True) - - graph_ref = build_graph(self.nodes, [('node_1', 'exp'), - ('exp', 'add'), - ('add_const', 'add'), - ('add', 'log'), - ('log', 'last')], nodes_with_edges_only=True) - - graph.stage = 'front' - SoftPlus().find_and_replace_pattern(graph) - - (flag, resp) = compare_graphs(graph, graph_ref, 'last', check_op_attrs=True) - self.assertTrue(flag, resp) diff --git a/model-optimizer/extensions/ops/activation_ops.py b/model-optimizer/extensions/ops/activation_ops.py index a05dba3..c6c3f3e 100644 --- a/model-optimizer/extensions/ops/activation_ops.py +++ b/model-optimizer/extensions/ops/activation_ops.py @@ -28,13 +28,14 @@ class Activation(Op): enabled = False operation = None op = None + version = 'opset1' def __init__(self, graph: Graph, attrs: dict): super().__init__(graph, { 'type': self.op, 'op': self.op, 'operation': self.operation, - 'version': 'opset1', + 'version': self.version, 'infer': self.infer, 'in_ports_count': 1, 'out_ports_count': 1, @@ -67,13 +68,9 @@ class Asin(Activation): class Asinh(Activation): op = 'Asinh' + version = 'opset4' operation = staticmethod(lambda x: 
np.arcsinh(x)) - def __init__(self, graph: Graph, attrs: dict): - sp_attrs = {'version': 'opset4'} - sp_attrs.update(attrs) - super().__init__(graph, sp_attrs) - class Cos(Activation): op = 'Cos' @@ -92,13 +89,9 @@ class Acos(Activation): class Acosh(Activation): op = 'Acosh' + version = 'opset4' operation = staticmethod(lambda x: np.arccosh(x)) - def __init__(self, graph: Graph, attrs: dict): - sp_attrs = {'version': 'opset4'} - sp_attrs.update(attrs) - super().__init__(graph, sp_attrs) - class Tan(Activation): op = 'Tan' @@ -117,13 +110,9 @@ class Atan(Activation): class Atanh(Activation): op = 'Atanh' + version = 'opset4' operation = staticmethod(lambda x: np.arctanh(x)) - def __init__(self, graph: Graph, attrs: dict): - sp_attrs = {'version': 'opset4'} - sp_attrs.update(attrs) - super().__init__(graph, sp_attrs) - class ReLU6(AttributedClamp): op = 'ReLU6' @@ -243,29 +232,17 @@ class Log(Activation): operation = staticmethod(lambda x: np.log(x)) -class SoftPlus(Op): +class SoftPlus(Activation): op = 'SoftPlus' - - def __init__(self, graph: Graph, attrs: dict): - mandatory_props = { - 'op': self.op, - 'type': None, - 'in_ports_count': 1, - 'out_ports_count': 1, - 'infer': None - } - super().__init__(graph, mandatory_props, attrs) + version = 'opset4' + operation = staticmethod(lambda x: np.ln(np.exp(x) + 1.0)) class Mish(Activation): op = 'Mish' + version = 'opset4' operation = staticmethod(lambda x: x * np.tanh(np.ln(np.exp(x) + 1.0))) - def __init__(self, graph: Graph, attrs: dict): - sp_attrs = {'version': 'opset4'} - sp_attrs.update(attrs) - super().__init__(graph, sp_attrs) - class Swish(Op): op = 'Swish' diff --git a/ngraph/core/include/ngraph/op/softplus.hpp b/ngraph/core/include/ngraph/op/softplus.hpp new file mode 100644 index 0000000..5bec1fa --- /dev/null +++ b/ngraph/core/include/ngraph/op/softplus.hpp @@ -0,0 +1,52 @@ +//***************************************************************************** +// Copyright 2017-2020 Intel Corporation +// +// 
Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +//***************************************************************************** + +#pragma once + +#include "ngraph/node.hpp" +#include "ngraph/op/op.hpp" + +namespace ngraph +{ + namespace op + { + namespace v4 + { + /// \brief SoftPlus element-wise activation function + /// f(x) = ln(exp(x) + 1.) + /// + class NGRAPH_API SoftPlus : public ngraph::op::Op + { + public: + NGRAPH_RTTI_DECLARATION; + + SoftPlus() = default; + /// \brief Constructs a SoftPlus operation. 
+ /// + /// \param data Input tensor + SoftPlus(const Output& arg); + bool visit_attributes(AttributeVisitor& visitor) override; + void validate_and_infer_types() override; + + virtual std::shared_ptr + clone_with_new_inputs(const OutputVector& new_args) const override; + + bool evaluate(const HostTensorVector& outputs, + const HostTensorVector& inputs) const override; + }; + } + } +} diff --git a/ngraph/core/include/ngraph/ops.hpp b/ngraph/core/include/ngraph/ops.hpp index 1f7bac8..f070bac 100644 --- a/ngraph/core/include/ngraph/ops.hpp +++ b/ngraph/core/include/ngraph/ops.hpp @@ -147,6 +147,7 @@ #include "ngraph/op/sinh.hpp" #include "ngraph/op/slice.hpp" #include "ngraph/op/softmax.hpp" +#include "ngraph/op/softplus.hpp" #include "ngraph/op/space_to_batch.hpp" #include "ngraph/op/space_to_depth.hpp" #include "ngraph/op/split.hpp" diff --git a/ngraph/core/include/ngraph/opsets/opset4_tbl.hpp b/ngraph/core/include/ngraph/opsets/opset4_tbl.hpp index 61aae11..e5e4428 100644 --- a/ngraph/core/include/ngraph/opsets/opset4_tbl.hpp +++ b/ngraph/core/include/ngraph/opsets/opset4_tbl.hpp @@ -161,4 +161,5 @@ NGRAPH_OP(NonMaxSuppression, ngraph::op::v4) NGRAPH_OP(Mish, ngraph::op::v4) NGRAPH_OP(ReduceL1, ngraph::op::v4) NGRAPH_OP(ReduceL2, ngraph::op::v4) +NGRAPH_OP(SoftPlus, ngraph::op::v4) NGRAPH_OP(Swish, ngraph::op::v4) diff --git a/ngraph/core/reference/include/ngraph/runtime/reference/softplus.hpp b/ngraph/core/reference/include/ngraph/runtime/reference/softplus.hpp new file mode 100644 index 0000000..fbbbba4 --- /dev/null +++ b/ngraph/core/reference/include/ngraph/runtime/reference/softplus.hpp @@ -0,0 +1,38 @@ +//***************************************************************************** +// Copyright 2017-2020 Intel Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +//***************************************************************************** + +#pragma once + +#include +#include + +namespace ngraph +{ + namespace runtime + { + namespace reference + { + template + void softplus(const T* arg, T* out, size_t count) + { + for (size_t i = 0; i < count; i++) + { + out[i] = std::log(std::exp(arg[i]) + 1.0); + } + } + } + } +} diff --git a/ngraph/core/src/op/softplus.cpp b/ngraph/core/src/op/softplus.cpp new file mode 100644 index 0000000..fc63ed4 --- /dev/null +++ b/ngraph/core/src/op/softplus.cpp @@ -0,0 +1,85 @@ +//***************************************************************************** +// Copyright 2017-2020 Intel Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+//***************************************************************************** + +#include "ngraph/op/softplus.hpp" +#include "itt.hpp" +#include "ngraph/attribute_visitor.hpp" +#include "ngraph/runtime/host_tensor.hpp" +#include "ngraph/runtime/reference/softplus.hpp" + +using namespace std; +using namespace ngraph; + +NGRAPH_RTTI_DEFINITION(op::v4::SoftPlus, "SoftPlus", 4); + +op::v4::SoftPlus::SoftPlus(const Output& arg) + : Op({arg}) +{ + constructor_validate_and_infer_types(); +} + +bool op::v4::SoftPlus::visit_attributes(AttributeVisitor& visitor) +{ + return true; +} + +void op::v4::SoftPlus::validate_and_infer_types() +{ + set_output_size(1); + set_output_type(0, get_input_element_type(0), get_input_partial_shape(0)); +} + +shared_ptr op::v4::SoftPlus::clone_with_new_inputs(const OutputVector& new_args) const +{ + check_new_args_count(this, new_args); + return make_shared(new_args.at(0)); +} + +namespace +{ + template + inline bool evaluate(const HostTensorPtr& arg, const HostTensorPtr& out, const size_t count) + { + using T = typename element_type_traits::value_type; + runtime::reference::softplus(arg->get_data_ptr(), out->get_data_ptr(), count); + return true; + } + + bool evaluate_softplus(const HostTensorPtr& arg, const HostTensorPtr& out, const size_t count) + { + bool rc = true; + out->set_unary(arg); + + switch (arg->get_element_type()) + { + TYPE_CASE(bf16)(arg, out, count); + break; + TYPE_CASE(f16)(arg, out, count); + break; + TYPE_CASE(f32)(arg, out, count); + break; + default: rc = false; break; + } + return rc; + } +} + +bool op::v4::SoftPlus::evaluate(const HostTensorVector& outputs, + const HostTensorVector& inputs) const +{ + OV_ITT_SCOPED_TASK(itt::domains::nGraphOp, "op::SoftPlus::evaluate"); + return evaluate_softplus(inputs[0], outputs[0], shape_size(get_output_shape(0))); +} diff --git a/ngraph/python/src/ngraph/__init__.py b/ngraph/python/src/ngraph/__init__.py index 19e5b36..f8ed499 100644 --- a/ngraph/python/src/ngraph/__init__.py 
+++ b/ngraph/python/src/ngraph/__init__.py @@ -149,6 +149,7 @@ from ngraph.opset4 import sign from ngraph.opset4 import sin from ngraph.opset4 import sinh from ngraph.opset4 import softmax +from ngraph.opset4 import softplus from ngraph.opset4 import space_to_batch from ngraph.opset4 import space_to_depth from ngraph.opset4 import split diff --git a/ngraph/python/src/ngraph/opset4/__init__.py b/ngraph/python/src/ngraph/opset4/__init__.py index 2980f88..07d2c07 100644 --- a/ngraph/python/src/ngraph/opset4/__init__.py +++ b/ngraph/python/src/ngraph/opset4/__init__.py @@ -137,6 +137,7 @@ from ngraph.opset1.ops import sign from ngraph.opset1.ops import sin from ngraph.opset1.ops import sinh from ngraph.opset1.ops import softmax +from ngraph.opset4.ops import softplus from ngraph.opset2.ops import space_to_batch from ngraph.opset1.ops import space_to_depth from ngraph.opset1.ops import split diff --git a/ngraph/python/src/ngraph/opset4/ops.py b/ngraph/python/src/ngraph/opset4/ops.py index 00e31b0..8149a32 100644 --- a/ngraph/python/src/ngraph/opset4/ops.py +++ b/ngraph/python/src/ngraph/opset4/ops.py @@ -140,6 +140,16 @@ def non_max_suppression( @nameable_op +def softplus(data: NodeInput, name: Optional[str] = None) -> Node: + """Apply SoftPlus operation on each element of input tensor. + + :param data: The tensor providing input data. + :return: The new node with SoftPlus operation applied on each element. + """ + return _get_node_factory_opset4().create("SoftPlus", as_nodes(data), {}) + + +@nameable_op def mish(data: NodeInput, name: Optional[str] = None,) -> Node: """Return a node which performs Mish. 
diff --git a/ngraph/test/CMakeLists.txt b/ngraph/test/CMakeLists.txt index cd5a2d0..c1e6536 100644 --- a/ngraph/test/CMakeLists.txt +++ b/ngraph/test/CMakeLists.txt @@ -75,6 +75,7 @@ set(SRC op_eval/non_zero.cpp op_eval/reduce_l1.cpp op_eval/reduce_l2.cpp + op_eval/softplus.cpp op_eval/split.cpp op_eval/strided_slice.cpp op_eval/variadic_split.cpp @@ -163,6 +164,7 @@ set(SRC type_prop/shape_of.cpp type_prop/shuffle_channels.cpp type_prop/slice.cpp + type_prop/softplus.cpp type_prop/space_to_batch.cpp type_prop/space_to_depth.cpp type_prop/split.cpp diff --git a/ngraph/test/op_eval/softplus.cpp b/ngraph/test/op_eval/softplus.cpp new file mode 100644 index 0000000..5404e74 --- /dev/null +++ b/ngraph/test/op_eval/softplus.cpp @@ -0,0 +1,48 @@ +//***************************************************************************** +// Copyright 2017-2020 Intel Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+//***************************************************************************** + +#include +#include + +#include "gtest/gtest.h" + +#include "ngraph/op/softplus.hpp" +#include "ngraph/runtime/host_tensor.hpp" +#include "ngraph/validation_util.hpp" +#include "runtime/backend.hpp" +#include "util/test_tools.hpp" + +using namespace std; +using namespace ngraph; + +TEST(op_eval, softplus_4D) +{ + auto p = make_shared(element::f32, Shape{4}); + auto softplus = make_shared(p); + auto fun = make_shared(OutputVector{softplus}, ParameterVector{p}); + + std::vector inputs{-1.0, 0.0, 1.0, 20.0}; + std::vector expected_result{0.31326166, 0.69314718, 1.3132616, 20.0}; + + auto result = make_shared(); + ASSERT_TRUE( + fun->evaluate({result}, {make_host_tensor(Shape{4}, inputs)})); + EXPECT_EQ(result->get_element_type(), element::f32); + EXPECT_EQ(result->get_shape(), Shape{4}); + auto result_data = read_vector(result); + for (size_t i = 0; i < inputs.size(); i++) + EXPECT_NEAR(result_data[i], expected_result[i], 0.000001); +} diff --git a/ngraph/test/type_prop/softplus.cpp b/ngraph/test/type_prop/softplus.cpp new file mode 100644 index 0000000..7e40369 --- /dev/null +++ b/ngraph/test/type_prop/softplus.cpp @@ -0,0 +1,54 @@ +//***************************************************************************** +// Copyright 2017-2020 Intel Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+//***************************************************************************** + +#include "gtest/gtest.h" +#include "ngraph/ngraph.hpp" +#include "util/type_prop.hpp" + +using namespace std; +using namespace ngraph; + +TEST(type_prop, softplus) +{ + auto data = make_shared(element::f32, Shape{1, 3, 6}); + auto softplus_func = make_shared(data); + EXPECT_EQ(softplus_func->get_element_type(), element::f32); + EXPECT_EQ(softplus_func->get_shape(), (Shape{1, 3, 6})); +} + +TEST(type_prop, softplus_partial) +{ + auto data = make_shared(element::f32, PartialShape{1, Dimension::dynamic(), 6}); + auto softplus_func = make_shared(data); + EXPECT_EQ(softplus_func->get_element_type(), element::f32); + ASSERT_TRUE(softplus_func->get_output_partial_shape(0).same_scheme( + (PartialShape{1, Dimension::dynamic(), 6}))); + + // rank unknown + auto softplus_partial = make_shared( + make_shared(element::f32, PartialShape::dynamic())); + ASSERT_TRUE(softplus_partial->get_output_partial_shape(0).same_scheme(PartialShape::dynamic())); +} + +TEST(type_prop, softplus_partial_static_rank) +{ + auto data = make_shared(element::f32, PartialShape{1, Dimension::dynamic(), 6}); + auto softplus_func = make_shared(data); + EXPECT_EQ(softplus_func->get_element_type(), element::f32); + ASSERT_TRUE(softplus_func->get_output_partial_shape(0).same_scheme( + (PartialShape{1, Dimension::dynamic(), 6}))); + ASSERT_TRUE(softplus_func->get_output_partial_shape(0).rank().is_static()); +} -- 2.7.4