[IE TEST] LRN tests fixed params (#743)
author Liubov Batanina <liubov.batanina@intel.com>
Wed, 1 Jul 2020 19:35:28 +0000 (22:35 +0300)
committer GitHub <noreply@github.com>
Wed, 1 Jul 2020 19:35:28 +0000 (22:35 +0300)
* LRN tests fixed params

* Fix comment

* Switched to opset3
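
A minimal sketch of the API change (illustrative only; input, lrn_v1, lrn_v3 and axes_node are placeholder names, and the ngraph opset headers are assumed to be included): the old test built the LRN node through opset1 from the activation alone, while the updated test passes the reduction axes to opset3::LRN as an explicit i64 Constant. Axes {1} select the channel dimension of the NCHW input, i.e. the usual across-channel LRN, and since beta and bias are floating-point attributes of the op, the test-level constants and tuple fields move from size_t to double to match.

    // Sketch of the before/after construction (assumes <ngraph/ngraph.hpp> and the opset headers)
    auto input = std::make_shared<ngraph::op::Parameter>(ngraph::element::f32,
                                                         ngraph::Shape{10, 10, 3, 2});
    const double alpha = 9.9e-05, beta = 2.0, bias = 1.0;
    const size_t size = 5;

    // before: opset1 LRN built without an explicit axes input
    auto lrn_v1 = std::make_shared<ngraph::opset1::LRN>(input, alpha, beta, bias, size);

    // after: the reduction axes go in as an i64 Constant; {1} is the channel
    // axis of the NCHW input, reproducing the classic across-channel LRN
    std::vector<size_t> axes = {1};
    auto axes_node = std::make_shared<ngraph::op::Constant>(
        ngraph::element::i64, ngraph::Shape{axes.size()}, axes.data());
    auto lrn_v3 = std::make_shared<ngraph::opset3::LRN>(input, axes_node, alpha, beta, bias, size);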

inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/lrn.cpp
inference-engine/tests/functional/plugin/gpu/shared_tests_instances/single_layer_tests/lrn.cpp
inference-engine/tests/functional/plugin/shared/include/single_layer_tests/lrn.hpp
inference-engine/tests/functional/plugin/shared/src/single_layer_tests/lrn.cpp

index 8ba02c8..6ab0256 100644 (file)
--- a/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/lrn.cpp
+++ b/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/single_layer_tests/lrn.cpp
@@ -18,8 +18,8 @@ const std::vector<InferenceEngine::Precision> netPrecisions = {InferenceEngine::
                                                                InferenceEngine::Precision::FP16};
 
 const double alpha = 9.9e-05;
-const size_t beta = 2;
-const size_t bias = 1.0f;
+const double beta = 2;
+const double bias = 1.0;
 const size_t size = 5;
 
 INSTANTIATE_TEST_CASE_P(LrnCheck, LrnLayerTest,
@@ -27,6 +27,7 @@ INSTANTIATE_TEST_CASE_P(LrnCheck, LrnLayerTest,
                                            ::testing::Values(beta),
                                            ::testing::Values(bias),
                                            ::testing::Values(size),
+                                           ::testing::Values(std::vector<size_t>({1})),
                                            ::testing::ValuesIn(netPrecisions),
                                            ::testing::Values(std::vector<size_t>({10, 10, 3, 2})),
                                            ::testing::Values(CommonTestUtils::DEVICE_CPU)),
index 3d08b0c..d49a323 100644 (file)
--- a/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/single_layer_tests/lrn.cpp
+++ b/inference-engine/tests/functional/plugin/gpu/shared_tests_instances/single_layer_tests/lrn.cpp
@@ -16,8 +16,8 @@ const std::vector<InferenceEngine::Precision> netPrecisions = {InferenceEngine::
                                                                InferenceEngine::Precision::FP16};
 
 const double alpha = 9.9e-05;
-const size_t beta = 2;
-const size_t bias = 1.0f;
+const double beta = 2;
+const double bias = 1.0;
 const size_t size = 5;
 
 INSTANTIATE_TEST_CASE_P(LrnCheck, LrnLayerTest,
@@ -25,6 +25,7 @@ INSTANTIATE_TEST_CASE_P(LrnCheck, LrnLayerTest,
                                            ::testing::Values(beta),
                                            ::testing::Values(bias),
                                            ::testing::Values(size),
+                                           ::testing::Values(std::vector<size_t>({1})),
                                            ::testing::ValuesIn(netPrecisions),
                                            ::testing::Values(std::vector<size_t>({10, 10, 3, 2})),
                                            ::testing::Values(CommonTestUtils::DEVICE_GPU)),
index cb785b2..3494bad 100644 (file)
--- a/inference-engine/tests/functional/plugin/shared/include/single_layer_tests/lrn.hpp
+++ b/inference-engine/tests/functional/plugin/shared/include/single_layer_tests/lrn.hpp
@@ -19,9 +19,10 @@ namespace LayerTestsDefinitions {
 
 typedef std::tuple<
         double,                        // Alpha
-        size_t,                        // Beta
-        size_t,                        // Bias
-        size_t,                        // Size,
+        double,                        // Beta
+        double,                        // Bias
+        size_t,                        // Size
+        std::vector<size_t>,           // Reduction axes
         InferenceEngine::Precision,    // Network precision
         InferenceEngine::SizeVector,   // Input shapes
         std::string                    // Device name
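
For reference, a hypothetical parameter set built in the new field order (values mirror the CPU instantiation above; assumes single_layer_tests/lrn.hpp and the common test utilities are included):

    // Hypothetical instance of the updated 8-element tuple
    LayerTestsDefinitions::lrnLayerTestParamsSet params = std::make_tuple(
        9.9e-05,                                    // Alpha
        2.0,                                        // Beta
        1.0,                                        // Bias
        size_t(5),                                  // Size
        std::vector<size_t>{1},                     // Reduction axes
        InferenceEngine::Precision::FP32,           // Network precision
        InferenceEngine::SizeVector{10, 10, 3, 2},  // Input shape (NCHW)
        std::string(CommonTestUtils::DEVICE_CPU));  // Device name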
index 746ce10..c2dc0a9 100644 (file)
--- a/inference-engine/tests/functional/plugin/shared/src/single_layer_tests/lrn.cpp
+++ b/inference-engine/tests/functional/plugin/shared/src/single_layer_tests/lrn.cpp
 namespace LayerTestsDefinitions {
 
 std::string LrnLayerTest::getTestCaseName(testing::TestParamInfo<lrnLayerTestParamsSet> obj) {
-    double alpha;
-    size_t beta, bias, size;
+    double alpha, beta, bias;
+    size_t size;
+    std::vector<size_t> axes;
     InferenceEngine::Precision  netPrecision;
     std::vector<size_t> inputShapes;
     std::string targetDevice;
-    std::tie(alpha, beta, bias, size, netPrecision, inputShapes, targetDevice) = obj.param;
+    std::tie(alpha, beta, bias, size, axes, netPrecision, inputShapes, targetDevice) = obj.param;
 
     std::ostringstream result;
     const char separator = '_';
@@ -27,6 +28,7 @@ std::string LrnLayerTest::getTestCaseName(testing::TestParamInfo<lrnLayerTestPar
     result << "Beta=" << beta << separator;
     result << "Bias=" << bias << separator;
     result << "Size=" << size << separator;
+    result << "Axes=" << CommonTestUtils::vec2str(axes) << separator;
     result << "netPRC=" << netPrecision.name() << separator;
     result << "targetDevice=" << targetDevice;
 
@@ -36,16 +38,19 @@ std::string LrnLayerTest::getTestCaseName(testing::TestParamInfo<lrnLayerTestPar
 void LrnLayerTest::SetUp() {
     std::vector<size_t> inputShapes;
     auto netPrecision   = InferenceEngine::Precision::UNSPECIFIED;
-    size_t alpha, beta, bias, size;
-    std::tie(alpha, beta, bias, size, netPrecision, inputShapes, targetDevice) = GetParam();
+    double alpha, beta, bias;
+    size_t size;
+    std::vector<size_t> axes;
+    std::tie(alpha, beta, bias, size, axes, netPrecision, inputShapes, targetDevice) = GetParam();
 
     auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
     auto params = ngraph::builder::makeParams(ngPrc, {inputShapes});
     auto paramIn =
         ngraph::helpers::convert2OutputVector(ngraph::helpers::castOps2Nodes<ngraph::op::Parameter>(params));
 
-    auto lrn = std::make_shared<ngraph::opset1::LRN>(paramIn[0], alpha, beta, bias, size);
-    ngraph::ResultVector results {std::make_shared<ngraph::opset1::Result>(lrn)};
+    auto axes_node = std::make_shared<ngraph::op::Constant>(ngraph::element::i64, ngraph::Shape{axes.size()}, axes.data());
+    auto lrn = std::make_shared<ngraph::opset3::LRN>(paramIn[0], axes_node, alpha, beta, bias, size);
+    ngraph::ResultVector results {std::make_shared<ngraph::opset3::Result>(lrn)};
     function = std::make_shared<ngraph::Function>(results, params, "lrn");
 }