Fix Coverity resource-leak issues: destroy ml_tensors_info / ml_tensors_data handles on error paths
[platform/core/multimedia/inference-engine-mlapi.git] / src / inference_engine_mlapi.cpp
index bf403f2..b0ee2f5 100644 (file)
@@ -138,21 +138,10 @@ namespace MLAPIImpl
                return INFERENCE_ENGINE_ERROR_NONE;
        }
 
-       int InferenceMLAPI::CreateMLAPITensorInfo(ml_tensors_info_h& tensor_info,
-                                                                                         inference_engine_layer_property& layer_property)
+       int InferenceMLAPI::SetTensorInfo(ml_tensors_info_h& tensor_info,
+                                                                         inference_engine_layer_property& layer_property)
        {
-               if (layer_property.layers.empty()) {
-                       LOGE("input or output property is empty.");
-                       return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
-               }
-
-               int err = ml_tensors_info_create(&tensor_info);
-               if (err != ML_ERROR_NONE) {
-                       LOGE("Failed to create tensor info(%d).", err);
-                       return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
-               }
-
-               err = ml_tensors_info_set_count(tensor_info, layer_property.layers.size());
+               int     err = ml_tensors_info_set_count(tensor_info, layer_property.layers.size());
                if (err != ML_ERROR_NONE) {
                        LOGE("Failed to set tensor count(%d).", err);
                        return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
@@ -200,6 +189,27 @@ namespace MLAPIImpl
                return INFERENCE_ENGINE_ERROR_NONE;
        }
 
+       int InferenceMLAPI::CreateMLAPITensorInfo(ml_tensors_info_h& tensor_info,
+                                                                                         inference_engine_layer_property& layer_property)
+       {
+               if (layer_property.layers.empty()) {
+                       LOGE("input or output property is empty.");
+                       return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
+               }
+
+               int err = ml_tensors_info_create(&tensor_info);
+               if (err != ML_ERROR_NONE) {
+                       LOGE("Failed to create tensor info(%d).", err);
+                       return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
+               }
+
+               err = SetTensorInfo(tensor_info, layer_property);
+               if (err != INFERENCE_ENGINE_ERROR_NONE)
+                       ml_tensors_info_destroy(tensor_info);
+
+               return err;
+       }
+
        std::tuple<ml_nnfw_type_e, ml_nnfw_hw_e> InferenceMLAPI::GetNNFWInfo()
        {
                switch (mPluginType) {
@@ -325,17 +335,24 @@ namespace MLAPIImpl
                                return ret;
 
                        ret = CreateMLAPITensorInfo(out_info, mOutputProperty);
-                       if (ret != INFERENCE_ENGINE_ERROR_NONE)
+                       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+                               ml_tensors_info_destroy(in_info);
                                return ret;
+                       }
                }
 
                int err = ml_single_open_full(&mSingle, model_str.c_str(), in_info, out_info,
                                                                 nnfw_type, nnfw_hw, GetCustomProp());
                if (err != ML_ERROR_NONE) {
                        LOGE("Failed to request ml_single_open_full(%d).", err);
+                       ml_tensors_info_destroy(in_info);
+                       ml_tensors_info_destroy(out_info);
                        return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
                }
 
+               ml_tensors_info_destroy(in_info);
+               ml_tensors_info_destroy(out_info);
+
                if (mInputInfoHandle) {
                        ml_tensors_info_destroy(mInputInfoHandle);
                        mInputInfoHandle = NULL;
@@ -355,6 +372,7 @@ namespace MLAPIImpl
                err = ml_single_get_output_info(mSingle, &mOutputInfoHandle);
                if (err != ML_ERROR_NONE) {
                        LOGE("Failed to request ml_single_get_output_info(%d).", err);
+                       ml_tensors_info_destroy(mOutputInfoHandle);
                        return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
                }
 
@@ -401,6 +419,8 @@ namespace MLAPIImpl
                        ret = ml_tensors_data_get_tensor_data(mInputDataHandle, input.second, &in_buffer.buffer, &in_buffer.size);
                        if (ret != ML_ERROR_NONE) {
                                LOGE("Failed to request ml_tensors_data_get_tensor_data(%d).", ret);
+                               ml_tensors_data_destroy(mInputDataHandle);
+
                                return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
                        }
 
@@ -409,6 +429,8 @@ namespace MLAPIImpl
                        ret = ml_tensors_info_get_tensor_type(mInputInfoHandle, input.second, &in_type);
                        if (ret != ML_ERROR_NONE) {
                                LOGE("Failed to request ml_tensors_info_get_tensor_type(%d).", ret);
+                               ml_tensors_data_destroy(mInputDataHandle);
+
                                return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
                        }
 
@@ -420,6 +442,8 @@ namespace MLAPIImpl
                                type = ConvertTensorTypeToInternal(in_type);
                        } catch (const std::invalid_argument& ex) {
                                LOGE("Error (%s) (%d)", ex.what(), in_type);
+                               ml_tensors_data_destroy(mInputDataHandle);
+
                                return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
                        }
 
@@ -466,6 +490,8 @@ namespace MLAPIImpl
                        ret = ml_tensors_data_get_tensor_data(mOutputDataHandle, output.second, &out_buffer.buffer, &out_buffer.size);
                        if (ret != ML_ERROR_NONE) {
                                LOGE("Failed to request ml_tensors_data_get_tensor_data(%d).", ret);
+                               ml_tensors_data_destroy(mOutputDataHandle);
+
                                return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
                        }
 
@@ -474,6 +500,8 @@ namespace MLAPIImpl
                        ret = ml_tensors_info_get_tensor_type(mOutputInfoHandle, output.second, &out_type);
                        if (ret != ML_ERROR_NONE) {
                                LOGE("Failed to request ml_tensors_info_get_tensor_type(%d).", ret);
+                               ml_tensors_data_destroy(mOutputDataHandle);
+
                                return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
                        }
 
@@ -485,6 +513,8 @@ namespace MLAPIImpl
                                type = ConvertTensorTypeToInternal(out_type);
                        } catch (const std::invalid_argument& ex) {
                                LOGE("Error (%s) (%d)", ex.what(), out_type);
+                               ml_tensors_data_destroy(mOutputDataHandle);
+
                                return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
                        }