test: Do not return in error case
author Inki Dae <inki.dae@samsung.com>
Thu, 4 Feb 2021 03:58:00 +0000 (12:58 +0900)
committer Inki Dae <inki.dae@samsung.com>
Thu, 25 Mar 2021 02:11:40 +0000 (11:11 +0900)
The Google Test framework's ASSERT_* macros already abort the current
test function on failure, so an explicit return statement after the
assertion is redundant. Drop the surrounding if-blocks and the returns,
and rely on the assertion macros alone.

Change-Id: I9a8f1375e5ddfbe06a2803567a3eea8f11f1e74d
Signed-off-by: Inki Dae <inki.dae@samsung.com>
test/src/inference_engine_profiler.cpp
test/src/inference_engine_tc.cpp

index 96cb21f..41c83ec 100644 (file)
@@ -136,16 +136,10 @@ TEST_P(InferenceEngineTfliteTest, Inference)
                                                                           .target_devices = target_devices };
 
        auto engine = std::make_unique<InferenceEngineCommon>();
-       if (engine == nullptr) {
-               ASSERT_TRUE(engine);
-               return;
-       }
+       ASSERT_TRUE(engine);
 
        int ret = engine->EnableProfiler(true);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        if (backend_type == INFERENCE_BACKEND_ONE)
                backend_name = "one";
@@ -153,19 +147,13 @@ TEST_P(InferenceEngineTfliteTest, Inference)
        ret = engine->DumpProfileToFile("profile_data_" + backend_name +
                                                                        "_" + Target_Formats[target_devices] +
                                                                        "_tflite_model.txt");
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        ret = engine->LoadConfigFile();
        ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        ret = engine->BindBackend(&config);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        inference_engine_capacity capacity;
        ret = engine->GetBackendCapacity(&capacity);
@@ -176,10 +164,7 @@ TEST_P(InferenceEngineTfliteTest, Inference)
 
        std::vector<std::string> models;
        int model_type = GetModelInfo(model_paths, models);
-       if (model_type == -1) {
-               ASSERT_NE(model_type, -1);
-               return;
-       }
+       ASSERT_NE(model_type, -1);
 
        inference_engine_layer_property input_property;
        std::vector<std::string>::iterator iter;
@@ -197,10 +182,7 @@ TEST_P(InferenceEngineTfliteTest, Inference)
        }
 
        ret = engine->SetInputLayerProperty(input_property);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        inference_engine_layer_property output_property;
 
@@ -209,23 +191,14 @@ TEST_P(InferenceEngineTfliteTest, Inference)
        }
 
        ret = engine->SetOutputLayerProperty(output_property);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        ret = engine->Load(models, (inference_model_format_e) model_type);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        std::vector<inference_engine_tensor_buffer> inputs, outputs;
        ret = PrepareTensorBuffers(engine.get(), inputs, outputs);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        // Copy input image tensor data from a given file to input tensor buffer.
        for (int i = 0; i < (int) image_paths.size(); ++i) {
@@ -556,32 +529,20 @@ TEST_P(InferenceEngineCaffeTest, Inference)
                                                                           .target_devices = target_devices };
 
        auto engine = std::make_unique<InferenceEngineCommon>();
-       if (engine == nullptr) {
-               ASSERT_TRUE(engine);
-               return;
-       }
+       ASSERT_TRUE(engine);
 
        int ret = engine->EnableProfiler(true);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        ret = engine->DumpProfileToFile("profile_data_" + backend_name +
                                                                        "_caffe_model.txt");
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        ret = engine->LoadConfigFile();
        ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        ret = engine->BindBackend(&config);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        inference_engine_capacity capacity;
        ret = engine->GetBackendCapacity(&capacity);
@@ -592,10 +553,7 @@ TEST_P(InferenceEngineCaffeTest, Inference)
 
        std::vector<std::string> models;
        int model_type = GetModelInfo(model_paths, models);
-       if (model_type == -1) {
-               ASSERT_NE(model_type, -1);
-               return;
-       }
+       ASSERT_NE(model_type, -1);
 
        inference_engine_layer_property input_property;
        std::vector<std::string>::iterator iter;
@@ -613,10 +571,7 @@ TEST_P(InferenceEngineCaffeTest, Inference)
        }
 
        ret = engine->SetInputLayerProperty(input_property);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        inference_engine_layer_property output_property;
 
@@ -625,23 +580,14 @@ TEST_P(InferenceEngineCaffeTest, Inference)
        }
 
        ret = engine->SetOutputLayerProperty(output_property);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        ret = engine->Load(models, (inference_model_format_e) model_type);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        std::vector<inference_engine_tensor_buffer> inputs, outputs;
        ret = PrepareTensorBuffers(engine.get(), inputs, outputs);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        // Copy input image tensor data from a given file to input tensor buffer.
        for (int i = 0; i < (int) image_paths.size(); ++i) {
@@ -756,32 +702,20 @@ TEST_P(InferenceEngineDldtTest, Inference)
                                                                           .target_devices = target_devices };
 
        auto engine = std::make_unique<InferenceEngineCommon>();
-       if (engine == nullptr) {
-               ASSERT_TRUE(engine);
-               return;
-       }
+       ASSERT_TRUE(engine);
 
        int ret = engine->EnableProfiler(true);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        ret = engine->DumpProfileToFile("profile_data_" + backend_name +
                                                                        "_dldt_model.txt");
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        ret = engine->LoadConfigFile();
        ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        ret = engine->BindBackend(&config);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        inference_engine_capacity capacity;
        ret = engine->GetBackendCapacity(&capacity);
@@ -792,10 +726,7 @@ TEST_P(InferenceEngineDldtTest, Inference)
 
        std::vector<std::string> models;
        int model_type = GetModelInfo(model_paths, models);
-       if (model_type == -1) {
-               ASSERT_NE(model_type, -1);
-               return;
-       }
+       ASSERT_NE(model_type, -1);
 
        inference_engine_layer_property input_property;
        std::vector<std::string>::iterator iter;
@@ -813,10 +744,7 @@ TEST_P(InferenceEngineDldtTest, Inference)
        }
 
        ret = engine->SetInputLayerProperty(input_property);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        inference_engine_layer_property output_property;
 
@@ -825,23 +753,14 @@ TEST_P(InferenceEngineDldtTest, Inference)
        }
 
        ret = engine->SetOutputLayerProperty(output_property);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        ret = engine->Load(models, (inference_model_format_e) model_type);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        std::vector<inference_engine_tensor_buffer> inputs, outputs;
        ret = PrepareTensorBuffers(engine.get(), inputs, outputs);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        // Copy input image tensor data from a given file to input tensor buffer.
        for (int i = 0; i < (int) image_paths.size(); ++i) {
index b46b4e8..d5b5b92 100644 (file)
@@ -244,10 +244,7 @@ TEST_P(InferenceEngineTestCase_G3, Load_P)
 
        std::vector<std::string> models;
        int model_type = GetModelInfo(model_paths, models);
-       if (model_type == -1) {
-               ASSERT_NE(model_type, -1);
-               return;
-       }
+       ASSERT_NE(model_type, -1);
 
        ret = engine->Load(models, (inference_model_format_e) model_type);
        EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
@@ -548,10 +545,7 @@ TEST_P(InferenceEngineTestCase_G6, Inference_P)
        std::cout << "backend = " << backend_name << std::endl;
 
        auto engine = std::make_unique<InferenceEngineCommon>();
-       if (engine == nullptr) {
-               ASSERT_TRUE(engine);
-               return;
-       }
+       ASSERT_TRUE(engine);
 
        if (profiler > INFERENCE_ENGINE_PROFILER_OFF &&
                profiler < INFERENCE_ENGINE_PROFILER_MAX) {
@@ -574,10 +568,7 @@ TEST_P(InferenceEngineTestCase_G6, Inference_P)
 
        std::vector<std::string> models;
        int model_type = GetModelInfo(model_paths, models);
-       if (model_type == -1) {
-               ASSERT_NE(model_type, -1);
-               return;
-       }
+       ASSERT_NE(model_type, -1);
 
        inference_engine_layer_property input_property;
        std::vector<std::string>::iterator iter;
@@ -595,10 +586,7 @@ TEST_P(InferenceEngineTestCase_G6, Inference_P)
        }
 
        ret = engine->SetInputLayerProperty(input_property);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        inference_engine_layer_property output_property;
 
@@ -607,23 +595,14 @@ TEST_P(InferenceEngineTestCase_G6, Inference_P)
        }
 
        ret = engine->SetOutputLayerProperty(output_property);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        ret = engine->Load(models, (inference_model_format_e) model_type);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        std::vector<inference_engine_tensor_buffer> inputs, outputs;
        ret = PrepareTensorBuffers(engine.get(), inputs, outputs);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-               return;
-       }
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        // Copy input image tensor data from a given file to input tensor buffer.
        for (int i = 0; i < (int) image_paths.size(); ++i) {