Fix SVACE and Coverity issues
author	Inki Dae <inki.dae@samsung.com>
Fri, 27 Mar 2020 04:57:39 +0000 (13:57 +0900)
committer	Inki Dae <inki.dae@samsung.com>
Tue, 14 Apr 2020 00:42:53 +0000 (09:42 +0900)
Change-Id: I525cd68275dcbedade157dd3e494150762ed1b95
Signed-off-by: Inki Dae <inki.dae@samsung.com>
test/src/inference_engine_test.cpp

index 0979aee..31f648b 100644 (file)
@@ -73,7 +73,11 @@ TEST_P(InferenceEngineCommonTest, Bind)
        ASSERT_TRUE(engine);
 
        int ret = engine->BindBackend(&config);
-       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+               delete engine;
+               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+               return;
+       }
 
        inference_engine_capacity capacity;
        ret = engine->GetBackendCapacity(&capacity);
@@ -228,10 +232,17 @@ void CleanupTensorBuffers(std::vector<inference_engine_tensor_buffer> &inputs, s
 void CopyFileToMemory(const char *file_name, inference_engine_tensor_buffer &buffer, unsigned int size)
 {
        int fd = open(file_name, O_RDONLY);
-       ASSERT_NE(fd, -1);
+       if (fd == -1) {
+               ASSERT_NE(fd, -1);
+               return;
+       }
 
        int num = read(fd, buffer.buffer, size);
-       ASSERT_NE(num, -1);
+       if (num == -1) {
+               close(fd);
+               ASSERT_NE(num, -1);
+               return;
+       }
 
        close(fd);
 }
@@ -255,7 +266,11 @@ TEST_P(InferenceEngineCommonTest_2, Load)
        ASSERT_TRUE(engine);
 
        int ret = engine->BindBackend(&config);
-       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+               delete engine;
+               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+               return;
+       }
 
        inference_engine_capacity capacity;
        ret = engine->GetBackendCapacity(&capacity);
@@ -266,7 +281,11 @@ TEST_P(InferenceEngineCommonTest_2, Load)
 
        std::vector <std::string> models;
        int model_type = GetModelInfo(model_paths, models);
-       ASSERT_NE(model_type, -1);
+       if (model_type == -1) {
+               delete engine;
+               ASSERT_NE(model_type, -1);
+               return;
+       }
 
        ret = engine->Load(models, (inference_model_format_e)model_type);
        EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
@@ -444,10 +463,17 @@ TEST_P(InferenceEngineCommonTest_3, Inference)
        };
 
        InferenceEngineCommon *engine = new InferenceEngineCommon(&config);
-       ASSERT_TRUE(engine);
+       if (engine == nullptr) {
+               ASSERT_TRUE(engine);
+               return;
+       }
 
        int ret = engine->BindBackend(&config);
-       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+               delete engine;
+               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+               return;
+       }
 
        inference_engine_capacity capacity;
        ret = engine->GetBackendCapacity(&capacity);
@@ -458,7 +484,11 @@ TEST_P(InferenceEngineCommonTest_3, Inference)
 
        std::vector <std::string> models;
        int model_type = GetModelInfo(model_paths, models);
-       ASSERT_NE(model_type, -1);
+       if (model_type == -1) {
+               delete engine;
+               ASSERT_NE(model_type, -1);
+               return;
+       }
 
        inference_engine_layer_property input_property;
        std::vector<std::string>::iterator iter;
@@ -476,7 +506,11 @@ TEST_P(InferenceEngineCommonTest_3, Inference)
     }
 
        ret = engine->SetInputLayerProperty(input_property);
-       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+               delete engine;
+               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+               return;
+       }
 
        inference_engine_layer_property output_property;
 
@@ -485,14 +519,26 @@ TEST_P(InferenceEngineCommonTest_3, Inference)
        }
 
        ret = engine->SetOutputLayerProperty(output_property);
-       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+               delete engine;
+               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+               return;
+       }
 
        ret = engine->Load(models, (inference_model_format_e)model_type);
-       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+               delete engine;
+               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+               return;
+       }
 
        std::vector<inference_engine_tensor_buffer> inputs, outputs;
        ret = PrepareTensorBuffers(engine, inputs, outputs);
-       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+               delete engine;
+               ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+               return;
+       }
 
        // Copy input image tensor data from a given file to input tensor buffer.
        for (int i = 0; i < (int)image_paths.size(); ++i) {
@@ -533,6 +579,7 @@ TEST_P(InferenceEngineCommonTest_3, Inference)
        CleanupTensorBuffers(inputs, outputs);
 
        engine->UnbindBackend();
+       models.clear();
 
        delete engine;
 }