[fix] Fix memory leak in pipeline API
author Yongjoo Ahn <yongjoo1.ahn@samsung.com>
Tue, 13 Jun 2023 12:35:46 +0000 (21:35 +0900)
committer jaeyun-jung <39614140+jaeyun-jung@users.noreply.github.com>
Wed, 14 Jun 2023 06:54:52 +0000 (15:54 +0900)
- Free GstTensorsConfig using gst_tensors_config_free
- Initialize ml_tensors_info_s before setting a new one

Signed-off-by: Yongjoo Ahn <yongjoo1.ahn@samsung.com>
c/src/ml-api-inference-pipeline.c

index a480933..5433a22 100644 (file)
@@ -271,8 +271,11 @@ get_tensors_info_from_caps (GstCaps * caps, ml_tensors_info_s * info,
     if (found) {
       _ml_tensors_info_copy_from_gst (info, &config.info);
       *is_flexible = gst_tensors_config_is_flexible (&config);
-      break;
     }
+
+    gst_tensors_config_free (&config);
+    if (found)
+      break;
   }
 
   return found;
@@ -1547,6 +1550,7 @@ ml_pipeline_src_parse_tensors_info (ml_pipeline_element * elem)
     return ML_ERROR_TRY_AGAIN;
   }
 
+  _ml_tensors_info_free (_info);
   found = get_tensors_info_from_caps (caps, _info, &flexible);
 
   if (found) {