test/machine_learning/inference: add test cases for legacy path 59/264859/2
authorInki Dae <inki.dae@samsung.com>
Thu, 30 Sep 2021 10:45:34 +0000 (19:45 +0900)
committerInki Dae <inki.dae@samsung.com>
Tue, 5 Oct 2021 07:22:11 +0000 (16:22 +0900)
Added test cases for legacy path of inference engine, which uses
user-given model information instead of ones from json file.

To support this, this patch includes a bit of code refactoring which
uses google test's parameterized tests instead of fixture-based ones
to decide the API path at runtime - json or legacy.

This patch enlarges existing test coverage from 119 to 132 test cases.

[==========] 132 tests from 6 test suites ran. (49021 ms total)
[  PASSED  ] 132 tests.

Change-Id: I9829725aad8037cbe5a82d50e7790a3e7a6bfe6b
Signed-off-by: Inki Dae <inki.dae@samsung.com>
test/testsuites/machine_learning/inference/test_face_detection.cpp
test/testsuites/machine_learning/inference/test_face_landmark_detection.cpp
test/testsuites/machine_learning/inference/test_image_classification.cpp
test/testsuites/machine_learning/inference/test_inference_helper.cpp
test/testsuites/machine_learning/inference/test_inference_helper.hpp
test/testsuites/machine_learning/inference/test_object_detection.cpp
test/testsuites/machine_learning/inference/test_pose_landmark_detection.cpp

index ebf37e5..376a717 100644 (file)
@@ -27,6 +27,7 @@ public:
        {
                ASSERT_EQ(mv_inference_configure(infer, engine_cfg),
                                  MEDIA_VISION_ERROR_NONE);
+
                ASSERT_EQ(mv_inference_prepare(infer), MEDIA_VISION_ERROR_NONE);
                ASSERT_EQ(MediaVision::Common::ImageHelper::loadImageToSource(
                                                  IMG_FACE, mv_source),
@@ -37,9 +38,42 @@ public:
        }
 };
 
-TEST_F(TestFaceDetection, CPU_TFLITE_MobilenetV1_SSD)
+TEST_P(TestFaceDetection, CPU_TFLITE_MobilenetV1_SSD)
 {
        engine_config_hosted_cpu_tflite(engine_cfg,
-                                                                       FD_TFLITE_WEIGHT_MOBILENET_V1_SSD_300_PATH);
+                                                                       FD_TFLITE_WEIGHT_MOBILENET_V1_SSD_300_PATH, _use_json_parser);
+       if (!_use_json_parser) {
+               const char *inputNodeName = "normalized_input_image_tensor";
+               const char *outputNodeName[] = { "TFLite_Detection_PostProcess",
+                                                                                       "TFLite_Detection_PostProcess:1",
+                                                                                       "TFLite_Detection_PostProcess:2",
+                                                                                       "TFLite_Detection_PostProcess:3" };
+
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_MODEL_MEAN_VALUE, 127.5),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_MODEL_STD_VALUE, 127.5),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.3),
+                                       MEDIA_VISION_ERROR_NONE);
+
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_WIDTH, 300),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 300),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_string_attribute(engine_cfg, MV_INFERENCE_INPUT_NODE_NAME,
+                                                                                                               inputNodeName), MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_array_string_attribute(engine_cfg, MV_INFERENCE_OUTPUT_NODE_NAMES,
+                                                                                                                               outputNodeName, 4), MEDIA_VISION_ERROR_NONE);
+       }
+
        inferenceFace();
-}
\ No newline at end of file
+}
+
+INSTANTIATE_TEST_CASE_P(Prefix, TestFaceDetection,
+                                               ::testing::Values(
+                                                       ParamTypeOne(false),
+                                                       ParamTypeOne(true)
+                                               )
+);
\ No newline at end of file
index 6d4ada9..e186c6f 100644 (file)
@@ -37,7 +37,7 @@ public:
        }
 };
 
-TEST_F(TestFaceLandmarkDetection, CPU_OPENCV_CAFFE_CNNCASCADE)
+TEST_P(TestFaceLandmarkDetection, CPU_OPENCV_CAFFE_CNNCASCADE)
 {
        const char *inputNodeName = "data";
        const char *outputNodeName[] = { "Sigmoid_fc2" };
@@ -85,4 +85,11 @@ TEST_F(TestFaceLandmarkDetection, CPU_OPENCV_CAFFE_CNNCASCADE)
                                          outputNodeName, 1),
                          MEDIA_VISION_ERROR_NONE);
        inferenceFaceLandmark();
-}
\ No newline at end of file
+}
+
+INSTANTIATE_TEST_CASE_P(Prefix, TestFaceLandmarkDetection,
+                                               ::testing::Values(
+                                                       ParamTypeOne(false),
+                                                       ParamTypeOne(true)
+                                               )
+);
\ No newline at end of file
index 0aab594..cdda3f6 100644 (file)
@@ -63,58 +63,232 @@ public:
        }
 };
 
-TEST_F(TestImageClassification, CPU_TFLITE_MobilenetV1)
+TEST_P(TestImageClassification, CPU_TFLITE_MobilenetV1)
 {
        engine_config_hosted_cpu_tflite_user_model(
                        engine_cfg, IC_TFLITE_WEIGHT_MOBILENET_V1_224_PATH,
-                       IC_LABEL_MOBILENET_V1_224_PATH);
+                       IC_LABEL_MOBILENET_V1_224_PATH, _use_json_parser);
+
+       if (!_use_json_parser) {
+               const char *inputNodeName = "input";
+               const char *outputNodeName[] = { "MobilenetV1/Predictions/Reshape_1" };
+
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_MODEL_MEAN_VALUE, 127.5),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_MODEL_STD_VALUE, 127.5),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.0),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_WIDTH, 224),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 224),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_string_attribute(engine_cfg, MV_INFERENCE_INPUT_NODE_NAME,
+                                                                                                               inputNodeName), MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_array_string_attribute(engine_cfg, MV_INFERENCE_OUTPUT_NODE_NAMES,
+                                                                                                                         outputNodeName, 1), MEDIA_VISION_ERROR_NONE);
+       }
+
        inferenceBanana();
 }
 
-TEST_F(TestImageClassification, CPU_TFLITE_MobilenetV2)
+TEST_P(TestImageClassification, CPU_TFLITE_MobilenetV2)
 {
        engine_config_hosted_cpu_tflite_user_model(
                        engine_cfg, IC_TFLITE_WEIGHT_MOBILENET_V2_224_PATH,
-                       IC_LABEL_MOBILENET_V1_224_PATH);
+                       IC_LABEL_MOBILENET_V1_224_PATH, _use_json_parser);
+
+       if (!_use_json_parser) {
+               const char *inputNodeName = "input";
+               const char *outputNodeName[] = { "MobilenetV2/Predictions/Reshape_1" };
+
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_MODEL_MEAN_VALUE, 127.5),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_MODEL_STD_VALUE, 127.5),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.01),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_WIDTH, 224),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 224),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_string_attribute(engine_cfg, MV_INFERENCE_INPUT_NODE_NAME,
+                                                                                                               inputNodeName), MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_array_string_attribute(engine_cfg, MV_INFERENCE_OUTPUT_NODE_NAMES,
+                                                                                                                         outputNodeName, 1), MEDIA_VISION_ERROR_NONE);
+       }
+
        inferenceBanana();
 }
 
-TEST_F(TestImageClassification, CPU_TFLITE_Densenet)
+TEST_P(TestImageClassification, CPU_TFLITE_Densenet)
 {
        engine_config_hosted_cpu_tflite_user_model(
                        engine_cfg, IC_TFLITE_WEIGHT_DENSENET_224_PATH,
-                       IC_LABEL_MOBILENET_V1_224_PATH);
+                       IC_LABEL_MOBILENET_V1_224_PATH, _use_json_parser);
+
+       if (!_use_json_parser) {
+               const char *inputNodeName = "Placeholder";
+               const char *outputNodeName[] = { "softmax_tensor" };
+
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_MODEL_MEAN_VALUE, 127.5),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_MODEL_STD_VALUE, 255.0),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.0),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_WIDTH, 224),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 224),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_string_attribute(engine_cfg, MV_INFERENCE_INPUT_NODE_NAME,
+                                                                                                               inputNodeName), MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_array_string_attribute(engine_cfg, MV_INFERENCE_OUTPUT_NODE_NAMES,
+                                                                                                                         outputNodeName, 1), MEDIA_VISION_ERROR_NONE);
+       }
+
        inferenceBanana();
 }
 
-TEST_F(TestImageClassification, CPU_TFLITE_Nasnet)
+TEST_P(TestImageClassification, CPU_TFLITE_Nasnet)
 {
        engine_config_hosted_cpu_tflite_user_model(engine_cfg,
                                                                                           IC_TFLITE_WEIGHT_NASNET_224_PATH,
-                                                                                          IC_LABEL_MOBILENET_V1_224_PATH);
+                                                                                          IC_LABEL_MOBILENET_V1_224_PATH,
+                                                                                          _use_json_parser);
+
+       if (!_use_json_parser) {
+               const char *inputNodeName = "input";
+               const char *outputNodeName[] = { "final_layer/predictions" };
+
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_MODEL_MEAN_VALUE, 127.5),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_MODEL_STD_VALUE, 127.5),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.0),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_WIDTH, 224),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 224),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_string_attribute(engine_cfg, MV_INFERENCE_INPUT_NODE_NAME,
+                                                                                                               inputNodeName), MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_array_string_attribute(engine_cfg, MV_INFERENCE_OUTPUT_NODE_NAMES,
+                                                                                                                         outputNodeName, 1), MEDIA_VISION_ERROR_NONE);
+       }
+
        inferenceBanana();
 }
 
-TEST_F(TestImageClassification, CPU_TFLITE_MNasnet)
+TEST_P(TestImageClassification, CPU_TFLITE_MNasnet)
 {
        engine_config_hosted_cpu_tflite_user_model(
                        engine_cfg, IC_TFLITE_WEIGHT_MNASNET_224_PATH,
-                       IC_LABEL_MOBILENET_V1_224_PATH);
+                       IC_LABEL_MOBILENET_V1_224_PATH,
+                       _use_json_parser);
+
+       if (!_use_json_parser) {
+               const char *inputNodeName = "input";
+               const char *outputNodeName[] = { "output" };
+
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_MODEL_MEAN_VALUE, 127.5),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_MODEL_STD_VALUE, 57.5),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.0),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_WIDTH, 224),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 224),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_string_attribute(engine_cfg, MV_INFERENCE_INPUT_NODE_NAME,
+                                                                                                               inputNodeName), MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_array_string_attribute(engine_cfg, MV_INFERENCE_OUTPUT_NODE_NAMES,
+                                                                                                                         outputNodeName, 1), MEDIA_VISION_ERROR_NONE);
+       }
+
        inferenceBanana();
 }
 
-TEST_F(TestImageClassification, CPU_TFLITE_Squeezenet)
+TEST_P(TestImageClassification, CPU_TFLITE_Squeezenet)
 {
        engine_config_hosted_cpu_tflite_user_model(
                        engine_cfg, IC_TFLITE_WEIGHT_SQUEEZENET_224_PATH,
-                       IC_LABEL_MOBILENET_V1_224_PATH);
+                       IC_LABEL_MOBILENET_V1_224_PATH,
+                       _use_json_parser);
+
+       if (!_use_json_parser) {
+               const char *inputNodeName = "Placeholder";
+               const char *outputNodeName[] = { "softmax_tensor" };
+
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_MODEL_MEAN_VALUE, 127.5),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_MODEL_STD_VALUE, 127.5),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.0),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_WIDTH, 224),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 224),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_string_attribute(engine_cfg, MV_INFERENCE_INPUT_NODE_NAME,
+                                                                                                               inputNodeName), MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_array_string_attribute(engine_cfg, MV_INFERENCE_OUTPUT_NODE_NAMES,
+                                                                                                                         outputNodeName, 1), MEDIA_VISION_ERROR_NONE);
+       }
+
        inferenceBanana();
 }
 
-TEST_F(TestImageClassification, CPU_TFLITE_QUANT_MobilenetV1)
+TEST_P(TestImageClassification, CPU_TFLITE_QUANT_MobilenetV1)
 {
        engine_config_hosted_cpu_tflite_user_model(
                        engine_cfg, IC_TFLITE_WEIGHT_QUANT_MOBILENET_V1_224_PATH,
-                       IC_LABEL_MOBILENET_V1_224_PATH);
+                       IC_LABEL_MOBILENET_V1_224_PATH,
+                       _use_json_parser);
+
+       if (!_use_json_parser) {
+               const char *inputNodeName = "input";
+               const char *outputNodeName[] = { "MobilenetV1/Predictions/Reshape_1" };
+
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_DATA_TYPE, MV_INFERENCE_DATA_UINT8),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_MODEL_MEAN_VALUE, 0.0),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_MODEL_STD_VALUE, 1.0),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.0),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_WIDTH, 224),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 224),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_string_attribute(engine_cfg, MV_INFERENCE_INPUT_NODE_NAME,
+                                                                                                               inputNodeName), MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_array_string_attribute(engine_cfg, MV_INFERENCE_OUTPUT_NODE_NAMES,
+                                                                                                                         outputNodeName, 1), MEDIA_VISION_ERROR_NONE);
+       }
+
        inferenceBanana();
-}
\ No newline at end of file
+}
+
+INSTANTIATE_TEST_CASE_P(Prefix, TestImageClassification,
+                                               ::testing::Values(
+                                                       ParamTypeOne(false),
+                                                       ParamTypeOne(true)
+                                               )
+);
\ No newline at end of file
index 1c88689..81a0380 100644 (file)
@@ -16,19 +16,23 @@ TestInference::~TestInference()
 }
 
 void engine_config_hosted_cpu_tflite(mv_engine_config_h handle,
-                                                                        const char *tf_weight)
+                                                                        const char *tf_weight,
+                                                                        const bool use_json_parser)
 {
        EXPECT_EQ(mv_engine_config_set_string_attribute(
                                          handle, MV_INFERENCE_MODEL_WEIGHT_FILE_PATH, tf_weight),
                          MEDIA_VISION_ERROR_NONE);
 
-       std::string meta_file_path = tf_weight;
-       meta_file_path = meta_file_path.substr(0, meta_file_path.find('.'));
-       meta_file_path += std::string(".json");
+       if (use_json_parser) {
+               std::string meta_file_path = tf_weight;
+               meta_file_path = meta_file_path.substr(0, meta_file_path.find('.'));
+               meta_file_path += std::string(".json");
+
+               EXPECT_EQ(mv_engine_config_set_string_attribute(
+                                               handle, MV_INFERENCE_MODEL_META_FILE_PATH , meta_file_path.c_str()),
+                               MEDIA_VISION_ERROR_NONE);
+       }
 
-       EXPECT_EQ(mv_engine_config_set_string_attribute(
-                                         handle, MV_INFERENCE_MODEL_META_FILE_PATH , meta_file_path.c_str()),
-                         MEDIA_VISION_ERROR_NONE);
        EXPECT_EQ(mv_engine_config_set_int_attribute(handle,
                                                                                                 MV_INFERENCE_BACKEND_TYPE,
                                                                                                 MV_INFERENCE_BACKEND_TFLITE),
@@ -41,10 +45,11 @@ void engine_config_hosted_cpu_tflite(mv_engine_config_h handle,
 
 void engine_config_hosted_cpu_tflite_user_model(mv_engine_config_h handle,
                                                                                                const char *tf_weight,
-                                                                                               const char *user_file)
+                                                                                               const char *user_file,
+                                                                                               const bool use_json_parser)
 {
-       engine_config_hosted_cpu_tflite(handle, tf_weight);
+       engine_config_hosted_cpu_tflite(handle, tf_weight, use_json_parser);
        EXPECT_EQ(mv_engine_config_set_string_attribute(
                                          handle, MV_INFERENCE_MODEL_USER_FILE_PATH, user_file),
                          MEDIA_VISION_ERROR_NONE);
-}
+}
\ No newline at end of file
index 16bb4c6..a04fb00 100644 (file)
@@ -3,8 +3,18 @@
 
 #include <mv_inference.h>
 
-class TestInference : public ::testing::Test
+typedef std::tuple<int> ParamTypeOne;
+
+class TestInference : public ::testing::TestWithParam<ParamTypeOne>
 {
+protected:
+       void SetUp() final
+       {
+               std::tie(_use_json_parser) = GetParam();
+       }
+
+       bool _use_json_parser;
+
 public:
        TestInference();
        virtual ~TestInference();
@@ -14,10 +24,12 @@ public:
 };
 
 void engine_config_hosted_cpu_tflite(mv_engine_config_h handle,
-                                                                        const char *tf_weight);
+                                                                        const char *tf_weight,
+                                                                        const bool use_json_parser);
 
 void engine_config_hosted_cpu_tflite_user_model(mv_engine_config_h handle,
                                                                                                const char *tf_weight,
-                                                                                               const char *user_file);
+                                                                                               const char *user_file,
+                                                                                               const bool use_json_parser);
 
 #endif //__TEST_INFERENCE_HELPER_HPP__
index 8cea9f7..b849c20 100644 (file)
@@ -46,10 +46,45 @@ public:
        }
 };
 
-TEST_F(TestObjectDetection, CPU_TFLITE_MobilenetV1_SSD)
+TEST_P(TestObjectDetection, CPU_TFLITE_MobilenetV1_SSD)
 {
        engine_config_hosted_cpu_tflite_user_model(
                        engine_cfg, OD_TFLITE_WEIGHT_MOBILENET_V1_SSD_300_PATH,
-                       OD_LABEL_MOBILENET_V1_SSD_300_PATH);
+                       OD_LABEL_MOBILENET_V1_SSD_300_PATH,
+                       _use_json_parser);
+
+       if (!_use_json_parser) {
+               const char *inputNodeName = "normalized_input_image_tensor";
+               const char *outputNodeName[] = { "TFLite_Detection_PostProcess",
+                                                                                       "TFLite_Detection_PostProcess:1",
+                                                                                       "TFLite_Detection_PostProcess:2",
+                                                                                       "TFLite_Detection_PostProcess:3" };
+
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_MODEL_MEAN_VALUE, 127.5),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_MODEL_STD_VALUE, 127.5),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.3),
+                                       MEDIA_VISION_ERROR_NONE);
+
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_WIDTH, 300),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 300),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_string_attribute(engine_cfg, MV_INFERENCE_INPUT_NODE_NAME,
+                                                                                                               inputNodeName), MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_array_string_attribute(engine_cfg, MV_INFERENCE_OUTPUT_NODE_NAMES,
+                                                                                                                               outputNodeName, 4), MEDIA_VISION_ERROR_NONE);
+       }
+
        inferenceDog();
-}
\ No newline at end of file
+}
+
+INSTANTIATE_TEST_CASE_P(Prefix, TestObjectDetection,
+                                               ::testing::Values(
+                                                       ParamTypeOne(false),
+                                                       ParamTypeOne(true)
+                                               )
+);
\ No newline at end of file
index 033488c..58c4b43 100644 (file)
@@ -37,9 +37,44 @@ public:
        }
 };
 
-TEST_F(TestPoseLandmarkDetection, CPU_TFLITE_MobilenetV1)
+TEST_P(TestPoseLandmarkDetection, CPU_TFLITE_MobilenetV1)
 {
        engine_config_hosted_cpu_tflite(
-                       engine_cfg, PLD_TFLITE_WEIGHT_MOBILENET_V1_POSENET_257_PATH);
+                       engine_cfg, PLD_TFLITE_WEIGHT_MOBILENET_V1_POSENET_257_PATH, _use_json_parser);
+
+       if (!_use_json_parser) {
+               const char *inputNodeName = "sub_2";
+               const char *outputNodeName[] = { "MobilenetV1/heatmap_2/BiasAdd",
+                                                                                       "MobilenetV1/offset_2/BiasAdd",
+                                                                                       "MobilenetV1/displacement_fwd_2/BiasAdd",
+                                                                                       "MobilenetV1/displacement_bwd_2/BiasAdd" };
+
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_MODEL_MEAN_VALUE, 127.5),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_MODEL_STD_VALUE, 127.5),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.3),
+                                       MEDIA_VISION_ERROR_NONE);
+
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_WIDTH, 300),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 300),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3),
+                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_string_attribute(engine_cfg, MV_INFERENCE_INPUT_NODE_NAME,
+                                                                                                               inputNodeName), MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_array_string_attribute(engine_cfg, MV_INFERENCE_OUTPUT_NODE_NAMES,
+                                                                                                                               outputNodeName, 4), MEDIA_VISION_ERROR_NONE);
+       }
+
+
        inferencePoseLandmark();
-}
\ No newline at end of file
+}
+
+INSTANTIATE_TEST_CASE_P(Prefix, TestPoseLandmarkDetection,
+                                               ::testing::Values(
+                                                       ParamTypeOne(false),
+                                                       ParamTypeOne(true)
+                                               )
+);
\ No newline at end of file