From 4ee3239b829b4865475eeca23801609631312935 Mon Sep 17 00:00:00 2001 From: Yongjoo Ahn Date: Tue, 13 Jun 2023 21:35:46 +0900 Subject: [PATCH] [fix] Fix memory leak in pipeline API - Free GstTensorsConfig using gst_tensors_config_free - Initialize ml_tensors_info_s before setting a new one Signed-off-by: Yongjoo Ahn --- c/src/ml-api-inference-pipeline.c | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/c/src/ml-api-inference-pipeline.c b/c/src/ml-api-inference-pipeline.c index a480933..5433a22 100644 --- a/c/src/ml-api-inference-pipeline.c +++ b/c/src/ml-api-inference-pipeline.c @@ -271,8 +271,11 @@ get_tensors_info_from_caps (GstCaps * caps, ml_tensors_info_s * info, if (found) { _ml_tensors_info_copy_from_gst (info, &config.info); *is_flexible = gst_tensors_config_is_flexible (&config); - break; } + + gst_tensors_config_free (&config); + if (found) + break; } return found; @@ -1547,6 +1550,7 @@ ml_pipeline_src_parse_tensors_info (ml_pipeline_element * elem) return ML_ERROR_TRY_AGAIN; } + _ml_tensors_info_free (_info); found = get_tensors_info_from_caps (caps, _info, &flexible); if (found) { -- 2.7.4