Apply clang-format rule
author	Kwanghoon Son <k.son@samsung.com>
Thu, 1 Sep 2022 07:07:38 +0000 (03:07 -0400)
committer	Inki Dae <inki.dae@samsung.com>
Fri, 2 Sep 2022 08:09:25 +0000 (17:09 +0900)
[Issue type] refactoring

Change-Id: Ib947e0660c3b1057859dbc1c96650a8de4370a0a
Signed-off-by: Kwanghoon Son <k.son@samsung.com>
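
Note: the project's actual .clang-format file is not part of this commit, so the following is only a hypothetical sketch of the kind of options that could produce the wrapping seen in the hunk below (tab-based continuation indentation, a fixed column limit, and breaking after the opening parenthesis instead of aligning arguments to it). Every value shown is an assumption, not the repository's real configuration.

	# hypothetical .clang-format sketch -- assumed values, not the real config
	BasedOnStyle: LLVM                   # assumed base style
	UseTab: Always                       # continuations in the hunk are tab-indented
	IndentWidth: 8                       # assumed tab-sized indentation
	TabWidth: 8
	ColumnLimit: 80                      # assumed limit that forces the rewrapping
	AlignAfterOpenBracket: AlwaysBreak   # break after '(' rather than align to it
	ContinuationIndentWidth: 8           # assumed continuation indent

With settings along these lines, running clang-format -i over test_object_detection.cpp would rewrap the long ASSERT_EQ(...) calls much like the added lines in the diff.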
test/testsuites/machine_learning/inference/test_object_detection.cpp

index 75b54b0..7e685d6 100644
@@ -47,42 +47,51 @@ public:
 
 TEST_P(TestObjectDetectionTflite, MobilenetV1_SSD)
 {
-       engine_config_hosted_tflite_model(engine_cfg, OD_TFLITE_WEIGHT_MOBILENET_V1_SSD_300_PATH,
-                                                                         OD_LABEL_MOBILENET_V1_SSD_300_PATH, _use_json_parser,
-                                                                         _target_device_type);
+       engine_config_hosted_tflite_model(
+                       engine_cfg, OD_TFLITE_WEIGHT_MOBILENET_V1_SSD_300_PATH,
+                       OD_LABEL_MOBILENET_V1_SSD_300_PATH, _use_json_parser,
+                       _target_device_type);
 
        if (!_use_json_parser) {
                const char *inputNodeName = "normalized_input_image_tensor";
                const char *outputNodeName[] = { "TFLite_Detection_PostProcess",
-                                                                                       "TFLite_Detection_PostProcess:1",
-                                                                                       "TFLite_Detection_PostProcess:2",
-                                                                                       "TFLite_Detection_PostProcess:3" };
+                                                                                "TFLite_Detection_PostProcess:1",
+                                                                                "TFLite_Detection_PostProcess:2",
+                                                                                "TFLite_Detection_PostProcess:3" };
 
-               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_MODEL_MEAN_VALUE, 127.5),
-                                       MEDIA_VISION_ERROR_NONE);
-               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_MODEL_STD_VALUE, 127.5),
-                                       MEDIA_VISION_ERROR_NONE);
-               ASSERT_EQ(mv_engine_config_set_double_attribute(engine_cfg, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.3),
-                                       MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(
+                                                 engine_cfg, MV_INFERENCE_MODEL_MEAN_VALUE, 127.5),
+                                 MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(
+                                                 engine_cfg, MV_INFERENCE_MODEL_STD_VALUE, 127.5),
+                                 MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_double_attribute(
+                                                 engine_cfg, MV_INFERENCE_CONFIDENCE_THRESHOLD, 0.3),
+                                 MEDIA_VISION_ERROR_NONE);
 
-               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_WIDTH, 300),
-                                       MEDIA_VISION_ERROR_NONE);
-               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 300),
-                                       MEDIA_VISION_ERROR_NONE);
-               ASSERT_EQ(mv_engine_config_set_int_attribute(engine_cfg, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3),
-                                       MEDIA_VISION_ERROR_NONE);
-               ASSERT_EQ(mv_engine_config_set_string_attribute(engine_cfg, MV_INFERENCE_INPUT_NODE_NAME,
-                                                                                                               inputNodeName), MEDIA_VISION_ERROR_NONE);
-               ASSERT_EQ(mv_engine_config_set_array_string_attribute(engine_cfg, MV_INFERENCE_OUTPUT_NODE_NAMES,
-                                                                                                                               outputNodeName, 4), MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(
+                                                 engine_cfg, MV_INFERENCE_INPUT_TENSOR_WIDTH, 300),
+                                 MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(
+                                                 engine_cfg, MV_INFERENCE_INPUT_TENSOR_HEIGHT, 300),
+                                 MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_int_attribute(
+                                                 engine_cfg, MV_INFERENCE_INPUT_TENSOR_CHANNELS, 3),
+                                 MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_string_attribute(
+                                                 engine_cfg, MV_INFERENCE_INPUT_NODE_NAME,
+                                                 inputNodeName),
+                                 MEDIA_VISION_ERROR_NONE);
+               ASSERT_EQ(mv_engine_config_set_array_string_attribute(
+                                                 engine_cfg, MV_INFERENCE_OUTPUT_NODE_NAMES,
+                                                 outputNodeName, 4),
+                                 MEDIA_VISION_ERROR_NONE);
        }
 
        inferenceDog();
 }
 
-INSTANTIATE_TEST_CASE_P(Prefix, TestObjectDetectionTflite,
-                                               ::testing::Values(
-                                                       ParamTypes(false, MV_INFERENCE_TARGET_DEVICE_CPU),
-                                                       ParamTypes(true, MV_INFERENCE_TARGET_DEVICE_CPU)
-                                               )
-);
+INSTANTIATE_TEST_CASE_P(
+               Prefix, TestObjectDetectionTflite,
+               ::testing::Values(ParamTypes(false, MV_INFERENCE_TARGET_DEVICE_CPU),
+                                                 ParamTypes(true, MV_INFERENCE_TARGET_DEVICE_CPU)));