fix coverity issue 87/279587/3
author     Inki Dae <inki.dae@samsung.com>
           Fri, 12 Aug 2022 03:49:12 +0000 (12:49 +0900)
committer  Inki Dae <inki.dae@samsung.com>
           Fri, 12 Aug 2022 06:08:06 +0000 (15:08 +0900)
[Version] : 0.4.4
[Issue type] : bug fix

Fixed resource leak issues reported by Coverity (CID 1249891 and CID 1249898):
ml_tensors_info_h and ml_tensors_data_h handles were not destroyed on error
paths. The ml_tensors_info_* setup is factored out of CreateMLAPITensorInfo()
into a new SetTensorInfo() helper, the handles are destroyed before every
error return, and in_info/out_info are released once ml_single_open_full()
has consumed them.
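
Both CIDs share the same pattern: a handle is created and an early error
return leaves it undestroyed. A minimal before/after sketch using the
ml_tensors_info_* calls touched by this patch; the layer_count variable and
the chosen error codes are illustrative:

    /* before: the early return on failure leaks tensor_info */
    ml_tensors_info_h tensor_info = nullptr;
    int err = ml_tensors_info_create(&tensor_info);
    if (err != ML_ERROR_NONE)
            return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;

    err = ml_tensors_info_set_count(tensor_info, layer_count);
    if (err != ML_ERROR_NONE)
            return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER; /* handle leaked */

    /* after: destroy the handle on the error path before returning */
    err = ml_tensors_info_set_count(tensor_info, layer_count);
    if (err != ML_ERROR_NONE) {
            ml_tensors_info_destroy(tensor_info);
            return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
    }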

Change-Id: I6791684bcad416a5a7a7c36cf82d8d522c9ce59f
Signed-off-by: Inki Dae <inki.dae@samsung.com>
packaging/inference-engine-mlapi.spec
src/inference_engine_mlapi.cpp
src/inference_engine_mlapi_private.h

diff --git a/packaging/inference-engine-mlapi.spec b/packaging/inference-engine-mlapi.spec
index b278fbd..1bf2368 100644
@@ -1,6 +1,6 @@
 Name:       inference-engine-mlapi
 Summary:    ML Single API backend of NNStreamer for MediaVision
-Version:    0.4.3
+Version:    0.4.4
 Release:    0
 Group:      Multimedia/Libraries
 License:    Apache-2.0
diff --git a/src/inference_engine_mlapi.cpp b/src/inference_engine_mlapi.cpp
index bf403f2..b0ee2f5 100644
@@ -138,21 +138,10 @@ namespace MLAPIImpl
                return INFERENCE_ENGINE_ERROR_NONE;
        }
 
-       int InferenceMLAPI::CreateMLAPITensorInfo(ml_tensors_info_h& tensor_info,
-                                                                                         inference_engine_layer_property& layer_property)
+       int InferenceMLAPI::SetTensorInfo(ml_tensors_info_h& tensor_info,
+                                                                         inference_engine_layer_property& layer_property)
        {
-               if (layer_property.layers.empty()) {
-                       LOGE("input or output property is empty.");
-                       return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
-               }
-
-               int err = ml_tensors_info_create(&tensor_info);
-               if (err != ML_ERROR_NONE) {
-                       LOGE("Failed to create tensor info(%d).", err);
-                       return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
-               }
-
-               err = ml_tensors_info_set_count(tensor_info, layer_property.layers.size());
+               int err = ml_tensors_info_set_count(tensor_info, layer_property.layers.size());
                if (err != ML_ERROR_NONE) {
                        LOGE("Failed to set tensor count(%d).", err);
                        return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
@@ -200,6 +189,27 @@ namespace MLAPIImpl
                return INFERENCE_ENGINE_ERROR_NONE;
        }
 
+       int InferenceMLAPI::CreateMLAPITensorInfo(ml_tensors_info_h& tensor_info,
+                                                                                         inference_engine_layer_property& layer_property)
+       {
+               if (layer_property.layers.empty()) {
+                       LOGE("input or output property is empty.");
+                       return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
+               }
+
+               int err = ml_tensors_info_create(&tensor_info);
+               if (err != ML_ERROR_NONE) {
+                       LOGE("Failed to create tensor info(%d).", err);
+                       return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
+               }
+
+               err = SetTensorInfo(tensor_info, layer_property);
+               if (err != INFERENCE_ENGINE_ERROR_NONE)
+                       ml_tensors_info_destroy(tensor_info);
+
+               return err;
+       }
+
        std::tuple<ml_nnfw_type_e, ml_nnfw_hw_e> InferenceMLAPI::GetNNFWInfo()
        {
                switch (mPluginType) {
@@ -325,17 +335,24 @@ namespace MLAPIImpl
                                return ret;
 
                        ret = CreateMLAPITensorInfo(out_info, mOutputProperty);
-                       if (ret != INFERENCE_ENGINE_ERROR_NONE)
+                       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+                               ml_tensors_info_destroy(in_info);
                                return ret;
+                       }
                }
 
                int err = ml_single_open_full(&mSingle, model_str.c_str(), in_info, out_info,
                                                                 nnfw_type, nnfw_hw, GetCustomProp());
                if (err != ML_ERROR_NONE) {
                        LOGE("Failed to request ml_single_open_full(%d).", err);
+                       ml_tensors_info_destroy(in_info);
+                       ml_tensors_info_destroy(out_info);
                        return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
                }
 
+               ml_tensors_info_destroy(in_info);
+               ml_tensors_info_destroy(out_info);
+
                if (mInputInfoHandle) {
                        ml_tensors_info_destroy(mInputInfoHandle);
                        mInputInfoHandle = NULL;
@@ -355,6 +372,7 @@ namespace MLAPIImpl
                err = ml_single_get_output_info(mSingle, &mOutputInfoHandle);
                if (err != ML_ERROR_NONE) {
                        LOGE("Failed to request ml_single_get_output_info(%d).", err);
+                       ml_tensors_info_destroy(mOutputInfoHandle);
                        return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
                }
 
@@ -401,6 +419,8 @@ namespace MLAPIImpl
                        ret = ml_tensors_data_get_tensor_data(mInputDataHandle, input.second, &in_buffer.buffer, &in_buffer.size);
                        if (ret != ML_ERROR_NONE) {
                                LOGE("Failed to request ml_tensors_data_get_tensor_data(%d).", ret);
+                               ml_tensors_data_destroy(mInputDataHandle);
+
                                return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
                        }
 
@@ -409,6 +429,8 @@ namespace MLAPIImpl
                        ret = ml_tensors_info_get_tensor_type(mInputInfoHandle, input.second, &in_type);
                        if (ret != ML_ERROR_NONE) {
                                LOGE("Failed to request ml_tensors_info_get_tensor_type(%d).", ret);
+                               ml_tensors_data_destroy(mInputDataHandle);
+
                                return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
                        }
 
@@ -420,6 +442,8 @@ namespace MLAPIImpl
                                type = ConvertTensorTypeToInternal(in_type);
                        } catch (const std::invalid_argument& ex) {
                                LOGE("Error (%s) (%d)", ex.what(), in_type);
+                               ml_tensors_data_destroy(mInputDataHandle);
+
                                return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
                        }
 
@@ -466,6 +490,8 @@ namespace MLAPIImpl
                        ret = ml_tensors_data_get_tensor_data(mOutputDataHandle, output.second, &out_buffer.buffer, &out_buffer.size);
                        if (ret != ML_ERROR_NONE) {
                                LOGE("Failed to request ml_tensors_data_get_tensor_data(%d).", ret);
+                               ml_tensors_data_destroy(mOutputDataHandle);
+
                                return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
                        }
 
@@ -474,6 +500,8 @@ namespace MLAPIImpl
                        ret = ml_tensors_info_get_tensor_type(mOutputInfoHandle, output.second, &out_type);
                        if (ret != ML_ERROR_NONE) {
                                LOGE("Failed to request ml_tensors_info_get_tensor_type(%d).", ret);
+                               ml_tensors_data_destroy(mOutputDataHandle);
+
                                return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
                        }
 
@@ -485,6 +513,8 @@ namespace MLAPIImpl
                                type = ConvertTensorTypeToInternal(out_type);
                        } catch (const std::invalid_argument& ex) {
                                LOGE("Error (%s) (%d)", ex.what(), out_type);
+                               ml_tensors_data_destroy(mOutputDataHandle);
+
                                return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
                        }
 
diff --git a/src/inference_engine_mlapi_private.h b/src/inference_engine_mlapi_private.h
index 52dfaff..be74a6c 100644
@@ -79,6 +79,8 @@ namespace MLAPIImpl
                int CheckTensorBuffers(
                                std::map<std::string, inference_engine_tensor_buffer> &input_buffers,
                                std::map<std::string, inference_engine_tensor_buffer> &output_buffers);
+               int SetTensorInfo(ml_tensors_info_h& tensor_info,
+                                                 inference_engine_layer_property& layer_property);
                int ConvertTensorTypeToInternal(int tensor_type);
                int ConvertTensorTypeToMLAPI(int tensor_type);
                int UpdateTensorsInfo();
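
Design note: per-path destroy calls fix the leaks, but the same guarantee can
be had wholesale with an RAII wrapper. A minimal sketch, not part of this
patch, assuming ml_tensors_info_h is the usual void* handle typedef from the
NNStreamer single-shot API; CreateTensorInfoScoped() is a hypothetical helper:

    #include <memory>

    /* frees the handle when the owning unique_ptr goes out of scope */
    struct TensorsInfoDeleter {
            using pointer = ml_tensors_info_h; /* assumed: typedef void* */
            void operator()(ml_tensors_info_h h) const
            {
                    if (h)
                            ml_tensors_info_destroy(h);
            }
    };

    using ScopedTensorsInfo = std::unique_ptr<void, TensorsInfoDeleter>;

    int CreateTensorInfoScoped(ScopedTensorsInfo &out)
    {
            ml_tensors_info_h raw = nullptr;

            if (ml_tensors_info_create(&raw) != ML_ERROR_NONE)
                    return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;

            /* ownership transferred; destroyed on any later early return */
            out.reset(raw);
            return INFERENCE_ENGINE_ERROR_NONE;
    }

Callers would pass out.get() to ml_single_open_full() and could then drop
every explicit ml_tensors_info_destroy() on the error paths.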