test: use smart pointer instead of raw one 89/232889/2
author Inki Dae <inki.dae@samsung.com>
Fri, 8 May 2020 06:46:40 +0000 (15:46 +0900)
committer Inki Dae <inki.dae@samsung.com>
Fri, 8 May 2020 08:00:21 +0000 (17:00 +0900)
Use std::unique_ptr for the InferenceEngineCommon objects in the test
code so that the engine is released automatically on every early-return
path, instead of pairing each return with a manual delete.

Change-Id: I04e4f4e2f887d1850b4e2440a6d2f06a6029b546
Signed-off-by: Inki Dae <inki.dae@samsung.com>
test/src/inference_engine_profiler.cpp
test/src/inference_engine_tc.cpp
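
Both files receive the same mechanical treatment: each test body that allocated an InferenceEngineCommon with new, and paired every early return with a "delete engine;", now holds the object in a std::unique_ptr so the destructor runs on every exit path. Note that the retained "if (engine == nullptr)" check is effectively dead code after this change, since std::make_unique reports allocation failure by throwing std::bad_alloc rather than returning null. A minimal sketch of the pattern, with stand-in types so it compiles on its own:

#include <memory>

// Stand-ins for the real test types; only here to make the sketch self-contained.
enum { INFERENCE_ENGINE_ERROR_NONE = 0 };

struct InferenceEngineCommon {
	int EnableProfiler(bool) { return INFERENCE_ENGINE_ERROR_NONE; }
	int BindBackend(void *) { return INFERENCE_ENGINE_ERROR_NONE; }
	void UnbindBackend() {}
};

void inference_sketch()
{
	// Before: InferenceEngineCommon *engine = new InferenceEngineCommon();
	// with a matching "delete engine;" before every early return below.
	auto engine = std::make_unique<InferenceEngineCommon>();

	if (engine->EnableProfiler(true) != INFERENCE_ENGINE_ERROR_NONE)
		return; // no manual delete needed on this path

	if (engine->BindBackend(nullptr) != INFERENCE_ENGINE_ERROR_NONE)
		return; // ...nor on this one

	engine->UnbindBackend();
} // engine destroyed here as well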

diff --git a/test/src/inference_engine_profiler.cpp b/test/src/inference_engine_profiler.cpp
index 58324e5..d46ea1b 100644
--- a/test/src/inference_engine_profiler.cpp
+++ b/test/src/inference_engine_profiler.cpp
@@ -81,7 +81,7 @@ TEST_P(InferenceEngineTfliteTest, Inference)
                .target_devices = target_devices
        };
 
-       InferenceEngineCommon *engine = new InferenceEngineCommon();
+       auto engine = std::make_unique<InferenceEngineCommon>();
        if (engine == nullptr) {
                ASSERT_TRUE(engine);
                return;
@@ -89,21 +89,18 @@ TEST_P(InferenceEngineTfliteTest, Inference)
 
        int ret = engine->EnableProfiler(true);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
 
        ret = engine->DumpProfileToFile("profile_data_" + backend_name + "_tflite_model.txt");
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
 
        ret = engine->BindBackend(&config);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
@@ -118,7 +115,6 @@ TEST_P(InferenceEngineTfliteTest, Inference)
        std::vector <std::string> models;
        int model_type = GetModelInfo(model_paths, models);
        if (model_type == -1) {
-               delete engine;
                ASSERT_NE(model_type, -1);
                return;
        }
@@ -140,7 +136,6 @@ TEST_P(InferenceEngineTfliteTest, Inference)
 
        ret = engine->SetInputLayerProperty(input_property);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
@@ -153,22 +148,19 @@ TEST_P(InferenceEngineTfliteTest, Inference)
 
        ret = engine->SetOutputLayerProperty(output_property);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
 
        ret = engine->Load(models, (inference_model_format_e)model_type);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
 
        std::vector<inference_engine_tensor_buffer> inputs, outputs;
-       ret = PrepareTensorBuffers(engine, inputs, outputs);
+       ret = PrepareTensorBuffers(engine.get(), inputs, outputs);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
@@ -184,7 +176,7 @@ TEST_P(InferenceEngineTfliteTest, Inference)
        }
 
        tensor_t result;
-       FillOutputResult(engine, outputs, result);
+       FillOutputResult(engine.get(), outputs, result);
 
        switch (test_type) {
        case TEST_IMAGE_CLASSIFICATION:
@@ -215,8 +207,6 @@ TEST_P(InferenceEngineTfliteTest, Inference)
 
        engine->UnbindBackend();
        models.clear();
-
-       delete engine;
 }
 
 TEST_P(InferenceEngineCaffeTest, Inference)
@@ -266,7 +256,7 @@ TEST_P(InferenceEngineCaffeTest, Inference)
                .target_devices = target_devices
        };
 
-       InferenceEngineCommon *engine = new InferenceEngineCommon();
+       auto engine = std::make_unique<InferenceEngineCommon>();
        if (engine == nullptr) {
                ASSERT_TRUE(engine);
                return;
@@ -274,21 +264,18 @@ TEST_P(InferenceEngineCaffeTest, Inference)
 
        int ret = engine->EnableProfiler(true);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
 
        ret = engine->DumpProfileToFile("profile_data_" + backend_name + "_caffe_model.txt");
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
 
        ret = engine->BindBackend(&config);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
@@ -303,7 +290,6 @@ TEST_P(InferenceEngineCaffeTest, Inference)
        std::vector <std::string> models;
        int model_type = GetModelInfo(model_paths, models);
        if (model_type == -1) {
-               delete engine;
                ASSERT_NE(model_type, -1);
                return;
        }
@@ -325,7 +311,6 @@ TEST_P(InferenceEngineCaffeTest, Inference)
 
        ret = engine->SetInputLayerProperty(input_property);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
@@ -338,22 +323,19 @@ TEST_P(InferenceEngineCaffeTest, Inference)
 
        ret = engine->SetOutputLayerProperty(output_property);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
 
        ret = engine->Load(models, (inference_model_format_e)model_type);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
 
        std::vector<inference_engine_tensor_buffer> inputs, outputs;
-       ret = PrepareTensorBuffers(engine, inputs, outputs);
+       ret = PrepareTensorBuffers(engine.get(), inputs, outputs);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
@@ -369,7 +351,7 @@ TEST_P(InferenceEngineCaffeTest, Inference)
        }
 
        tensor_t result;
-       FillOutputResult(engine, outputs, result);
+       FillOutputResult(engine.get(), outputs, result);
 
        switch (test_type) {
        case TEST_IMAGE_CLASSIFICATION:
@@ -402,8 +384,6 @@ TEST_P(InferenceEngineCaffeTest, Inference)
 
        engine->UnbindBackend();
        models.clear();
-
-       delete engine;
 }
 
 TEST_P(InferenceEngineDldtTest, Inference)
@@ -453,7 +433,7 @@ TEST_P(InferenceEngineDldtTest, Inference)
                .target_devices = target_devices
        };
 
-       InferenceEngineCommon *engine = new InferenceEngineCommon();
+       auto engine = std::make_unique<InferenceEngineCommon>();
        if (engine == nullptr) {
                ASSERT_TRUE(engine);
                return;
@@ -461,21 +441,18 @@ TEST_P(InferenceEngineDldtTest, Inference)
 
        int ret = engine->EnableProfiler(true);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
 
        ret = engine->DumpProfileToFile("profile_data_" + backend_name + "_dldt_model.txt");
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
 
        ret = engine->BindBackend(&config);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
@@ -490,7 +467,6 @@ TEST_P(InferenceEngineDldtTest, Inference)
        std::vector <std::string> models;
        int model_type = GetModelInfo(model_paths, models);
        if (model_type == -1) {
-               delete engine;
                ASSERT_NE(model_type, -1);
                return;
        }
@@ -512,7 +488,6 @@ TEST_P(InferenceEngineDldtTest, Inference)
 
        ret = engine->SetInputLayerProperty(input_property);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
@@ -525,22 +500,19 @@ TEST_P(InferenceEngineDldtTest, Inference)
 
        ret = engine->SetOutputLayerProperty(output_property);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
 
        ret = engine->Load(models, (inference_model_format_e)model_type);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
 
        std::vector<inference_engine_tensor_buffer> inputs, outputs;
-       ret = PrepareTensorBuffers(engine, inputs, outputs);
+       ret = PrepareTensorBuffers(engine.get(), inputs, outputs);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
@@ -556,7 +528,7 @@ TEST_P(InferenceEngineDldtTest, Inference)
        }
 
        tensor_t result;
-       FillOutputResult(engine, outputs, result);
+       FillOutputResult(engine.get(), outputs, result);
 
        switch (test_type) {
        case TEST_IMAGE_CLASSIFICATION:
@@ -589,8 +561,6 @@ TEST_P(InferenceEngineDldtTest, Inference)
 
        engine->UnbindBackend();
        models.clear();
-
-       delete engine;
 }
 
 INSTANTIATE_TEST_CASE_P(Prefix, InferenceEngineTfliteTest,
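
Helpers such as PrepareTensorBuffers() and FillOutputResult() keep their raw-pointer parameters, so the call sites now pass engine.get(): a non-owning pointer whose lifetime is still controlled by the unique_ptr in the test body. A sketch of that owner/user split (the helper's signature is inferred from the call sites above, not copied from a header):

#include <memory>
#include <vector>

struct InferenceEngineCommon {};
struct inference_engine_tensor_buffer {};

// Assumed shape of the helper: it only uses the engine for the duration
// of the call, so a raw, non-owning pointer parameter is appropriate.
static int PrepareTensorBuffers(InferenceEngineCommon *engine,
				std::vector<inference_engine_tensor_buffer> &inputs,
				std::vector<inference_engine_tensor_buffer> &outputs)
{
	(void)engine; (void)inputs; (void)outputs;
	return 0;
}

void call_site()
{
	auto engine = std::make_unique<InferenceEngineCommon>();
	std::vector<inference_engine_tensor_buffer> inputs, outputs;

	// get() yields the raw pointer without transferring ownership;
	// the unique_ptr still frees the engine when call_site() returns.
	PrepareTensorBuffers(engine.get(), inputs, outputs);
}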
diff --git a/test/src/inference_engine_tc.cpp b/test/src/inference_engine_tc.cpp
index 27c5004..2c1441e 100644
--- a/test/src/inference_engine_tc.cpp
+++ b/test/src/inference_engine_tc.cpp
@@ -50,12 +50,11 @@ TEST_P(InferenceEngineTestCase_G1, Bind_P)
                .target_devices = target_devices
        };
 
-       InferenceEngineCommon *engine = new InferenceEngineCommon();
+       auto engine = std::make_unique<InferenceEngineCommon>();
        ASSERT_TRUE(engine);
 
        int ret = engine->BindBackend(&config);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
@@ -65,8 +64,6 @@ TEST_P(InferenceEngineTestCase_G1, Bind_P)
        EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        engine->UnbindBackend();
-
-       delete engine;
 }
 
 TEST_P(InferenceEngineTestCase_G2, Load_P)
@@ -84,12 +81,11 @@ TEST_P(InferenceEngineTestCase_G2, Load_P)
                .target_devices = target_devices
        };
 
-       InferenceEngineCommon *engine = new InferenceEngineCommon();
+       auto engine = std::make_unique<InferenceEngineCommon>();
        ASSERT_TRUE(engine);
 
        int ret = engine->BindBackend(&config);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
@@ -104,7 +100,6 @@ TEST_P(InferenceEngineTestCase_G2, Load_P)
        std::vector <std::string> models;
        int model_type = GetModelInfo(model_paths, models);
        if (model_type == -1) {
-               delete engine;
                ASSERT_NE(model_type, -1);
                return;
        }
@@ -113,8 +108,6 @@ TEST_P(InferenceEngineTestCase_G2, Load_P)
        EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
        engine->UnbindBackend();
-
-       delete engine;
 }
 
 TEST_P(InferenceEngineTestCase_G3, Inference)
@@ -154,7 +147,7 @@ TEST_P(InferenceEngineTestCase_G3, Inference)
                .target_devices = target_devices
        };
 
-       InferenceEngineCommon *engine = new InferenceEngineCommon();
+       auto engine = std::make_unique<InferenceEngineCommon>();
        if (engine == nullptr) {
                ASSERT_TRUE(engine);
                return;
@@ -162,7 +155,6 @@ TEST_P(InferenceEngineTestCase_G3, Inference)
 
        int ret = engine->BindBackend(&config);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
@@ -177,7 +169,6 @@ TEST_P(InferenceEngineTestCase_G3, Inference)
        std::vector <std::string> models;
        int model_type = GetModelInfo(model_paths, models);
        if (model_type == -1) {
-               delete engine;
                ASSERT_NE(model_type, -1);
                return;
        }
@@ -199,7 +190,6 @@ TEST_P(InferenceEngineTestCase_G3, Inference)
 
        ret = engine->SetInputLayerProperty(input_property);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
@@ -212,22 +202,19 @@ TEST_P(InferenceEngineTestCase_G3, Inference)
 
        ret = engine->SetOutputLayerProperty(output_property);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
 
        ret = engine->Load(models, (inference_model_format_e)model_type);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
 
        std::vector<inference_engine_tensor_buffer> inputs, outputs;
-       ret = PrepareTensorBuffers(engine, inputs, outputs);
+       ret = PrepareTensorBuffers(engine.get(), inputs, outputs);
        if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               delete engine;
                ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
                return;
        }
@@ -243,7 +230,7 @@ TEST_P(InferenceEngineTestCase_G3, Inference)
        }
 
        tensor_t result;
-       FillOutputResult(engine, outputs, result);
+       FillOutputResult(engine.get(), outputs, result);
 
        ret = VerifyImageClassificationResults(result, answers[0]);
        EXPECT_EQ(ret, 1);
@@ -252,8 +239,6 @@ TEST_P(InferenceEngineTestCase_G3, Inference)
 
        engine->UnbindBackend();
        models.clear();
-
-       delete engine;
 }
 
 INSTANTIATE_TEST_CASE_P(Prefix, InferenceEngineTestCase_G1,
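
With ownership handled by the smart pointer, the remaining "if (ret != ...) { ASSERT_EQ(...); return; }" wrappers are candidates for a follow-up cleanup: a failing ASSERT_EQ already returns from the current test function, and the unique_ptr destructor now runs on that exit path too. A hypothetical shape for such a follow-up (not part of this change; config stands for the test's backend configuration as in the hunks above):

TEST_P(InferenceEngineTestCase_G1, Bind_P)
{
	auto engine = std::make_unique<InferenceEngineCommon>();
	ASSERT_TRUE(engine);

	// ASSERT_EQ aborts this test function on failure; the unique_ptr
	// cleans up on that path, so no if/return wrapper is required.
	int ret = engine->BindBackend(&config);
	ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);

	engine->UnbindBackend();
}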