webrtc_source: Apply GENERATE_DOT() macro to loopback and filesrc 50/265450/5
authorSangchul Lee <sc11.lee@samsung.com>
Tue, 19 Oct 2021 11:09:00 +0000 (20:09 +0900)
committerSangchul Lee <sc11.lee@samsung.com>
Wed, 20 Oct 2021 05:52:52 +0000 (14:52 +0900)
The filesrc pipeline name and the loopback render pipeline name are
changed so that the source each pipeline belongs to can be easily
identified by its name.

The dot file names are changed accordingly.

[Version] 0.2.135
[Issue Type] Debug feature

Change-Id: I843eb7b4e8f42df20c5cd04a89c42a773e79af8c
Signed-off-by: Sangchul Lee <sc11.lee@samsung.com>
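For reference, below is a minimal sketch of what a GENERATE_DOT()-style macro
can look like; the project's actual definition is not part of this diff and
may differ (e.g. it may be gated by a build or ini option).

    #include <gst/gst.h>

    /* Hypothetical sketch of a dot-dump helper wrapping GStreamer's
     * GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(). The printf-style arguments become
     * the dot file name, which is why this change prefixes them with
     * GST_ELEMENT_NAME(pipeline), yielding names such as
     * "main-pipeline.state_PLAYING.dot" (with a timestamp prefix added by
     * GStreamer). The 'webrtc' argument is unused in this sketch; the real
     * macro presumably uses it to decide whether dumping is enabled. */
    #define GENERATE_DOT(webrtc, bin, fmt, ...) \
    do { \
            gchar *__dot_name = g_strdup_printf(fmt, ##__VA_ARGS__); \
            GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(bin), \
                    GST_DEBUG_GRAPH_SHOW_ALL, __dot_name); \
            g_free(__dot_name); \
    } while (0)

Note that GStreamer only writes these dot files when the GST_DEBUG_DUMP_DOT_DIR
environment variable points to an existing directory.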
packaging/capi-media-webrtc.spec
src/webrtc_private.c
src/webrtc_sink.c
src/webrtc_source.c

diff --git a/packaging/capi-media-webrtc.spec b/packaging/capi-media-webrtc.spec
index 8e9b29afae278edd9c36541a565934f9648a9c12..ac77e418e84c6736b2e6859cea1b20d100c294cf 100644 (file)
@@ -1,6 +1,6 @@
 Name:       capi-media-webrtc
 Summary:    A WebRTC library in Tizen Native API
-Version:    0.2.134
+Version:    0.2.135
 Release:    0
 Group:      Multimedia/API
 License:    Apache-2.0
diff --git a/src/webrtc_private.c b/src/webrtc_private.c
index 362b83f50bec8955c0a085909524ea33c0727f3a..f53c714c79958663f226df7a7b2c3e1bd70da8ca 100644 (file)
@@ -229,7 +229,7 @@ void _invoke_state_changed_cb(webrtc_s *webrtc, webrtc_state_e old, webrtc_state
        if (new == WEBRTC_STATE_PLAYING)
                _set_stats_timer(webrtc);
 
-       GENERATE_DOT(webrtc, webrtc->gst.pipeline, "STATE_%s", __state_str[webrtc->state]);
+       GENERATE_DOT(webrtc, webrtc->gst.pipeline, "%s.state_%s", GST_ELEMENT_NAME(webrtc->gst.pipeline), __state_str[webrtc->state]);
 }
 
 static void __invoke_error_cb(webrtc_s *webrtc, webrtc_error_e error)
@@ -1116,7 +1116,7 @@ static void __webrtcbin_pad_added_cb(GstElement *webrtcbin, GstPad *new_pad, gpo
                RET_IF(ret != WEBRTC_ERROR_NONE, "failed to _add_rendering_sink_bin()");
        }
 
-       GENERATE_DOT(webrtc, webrtc->gst.pipeline, "webrtcbin_%s", GST_PAD_NAME(new_pad));
+       GENERATE_DOT(webrtc, webrtc->gst.pipeline, "%s.webrtcbin-%s", GST_ELEMENT_NAME(webrtc->gst.pipeline), GST_PAD_NAME(new_pad));
 }
 
 static void __webrtcbin_no_more_pads_cb(GstElement *webrtcbin, gpointer user_data)
@@ -1185,7 +1185,7 @@ int _gst_build_pipeline(webrtc_s *webrtc)
 {
        RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
 
-       webrtc->gst.pipeline = gst_pipeline_new("webrtc-pipeline");
+       webrtc->gst.pipeline = gst_pipeline_new("main-pipeline");
        RET_VAL_IF(webrtc->gst.pipeline == NULL, WEBRTC_ERROR_INVALID_OPERATION, "pipeline is NULL");
 
        if (!(webrtc->gst.bus = gst_pipeline_get_bus(GST_PIPELINE(webrtc->gst.pipeline)))) {
diff --git a/src/webrtc_sink.c b/src/webrtc_sink.c
index 11b988d8aed334c08424d9587b8f63ed4d775a3e..828ade24c74852027cd32aef88df80fd2169e59d 100644 (file)
@@ -394,7 +394,7 @@ static void __decodebin_pad_added_cb(GstElement *decodebin, GstPad *new_pad, gpo
                _post_error_cb_in_idle(webrtc, ret);
        }
 
-       GENERATE_DOT(webrtc, webrtc->gst.pipeline, "%s", GST_ELEMENT_NAME(decodebin));
+       GENERATE_DOT(webrtc, webrtc->gst.pipeline, "%s.%s", GST_ELEMENT_NAME(webrtc->gst.pipeline), GST_ELEMENT_NAME(decodebin));
 }
 
 static bool __is_factory_name_for_hw(gchar *factory_name)
diff --git a/src/webrtc_source.c b/src/webrtc_source.c
index e2dbcca6d8a56e0aff8fc5a4a293910ef99bab61..a85583474806a1f97adde1e976b650e177542d5e 100644 (file)
@@ -2026,6 +2026,9 @@ static void __filesrc_pipeline_decodebin_pad_added_cb(GstElement *element, GstPa
        source->av[av_idx].render.need_decoding = true;
        source->av[av_idx].render.appsrc_caps = gst_pad_get_current_caps(pad);
        __add_probe_to_pad_for_render(source, av_idx, gst_element_get_static_pad(queue, "src"), __source_data_probe_cb);
+
+       GENERATE_DOT(source->webrtc, source->filesrc_pipeline, "%s.%s-%s",
+               GST_ELEMENT_NAME(source->filesrc_pipeline), GST_ELEMENT_NAME(element), GST_PAD_NAME(pad));
 }
 
 static GstAutoplugSelectResult __filesrc_pipeline_decodebin_autoplug_select_cb(GstElement *bin, GstPad *pad, GstCaps *caps, GstElementFactory* factory, gpointer udata)
@@ -2128,12 +2131,15 @@ static int __build_filesrc_pipeline(webrtc_s *webrtc, webrtc_gst_slot_s *source)
 {
        GstElement *filesrc = NULL;
        GstElement *decodebin = NULL;
+       gchar *pipeline_name;
 
        RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
        RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
        RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
 
-       source->filesrc_pipeline = gst_pipeline_new("filesrc-pipeline");
+       pipeline_name = g_strdup_printf("filesrc-pipeline-for-source_%u", source->id);
+       source->filesrc_pipeline = gst_pipeline_new(pipeline_name);
+       g_free(pipeline_name);
        RET_VAL_IF(source->filesrc_pipeline == NULL, WEBRTC_ERROR_INVALID_OPERATION, "pipeline is NULL");
 
        if (!(source->filesrc_bus = gst_pipeline_get_bus(GST_PIPELINE(source->filesrc_pipeline)))) {
@@ -3858,11 +3864,17 @@ static void __loopback_decodebin_pad_added_cb(GstElement *decodebin, GstPad *new
                ret = __build_loopback_audiosink(source, decodebin);
                if (ret != WEBRTC_ERROR_NONE)
                        SAFE_GST_OBJECT_UNREF(source->av[AV_IDX_AUDIO].render.pipeline);
+               else
+                       GENERATE_DOT(source->webrtc, source->av[AV_IDX_AUDIO].render.pipeline, "%s.%s-%s",
+                               GST_ELEMENT_NAME(source->av[AV_IDX_AUDIO].render.pipeline), GST_ELEMENT_NAME(decodebin), GST_PAD_NAME(new_pad));
 
        } else if (g_strrstr(media_type, "video")) {
                ret = __build_loopback_videosink(source, decodebin);
                if (ret != WEBRTC_ERROR_NONE)
                        SAFE_GST_OBJECT_UNREF(source->av[AV_IDX_VIDEO].render.pipeline);
+               else
+                       GENERATE_DOT(source->webrtc, source->av[AV_IDX_VIDEO].render.pipeline, "%s.%s-%s",
+                               GST_ELEMENT_NAME(source->av[AV_IDX_VIDEO].render.pipeline), GST_ELEMENT_NAME(decodebin), GST_PAD_NAME(new_pad));
 
        } else {
                LOG_ERROR("not supported media type[%s]", media_type);
@@ -3888,7 +3900,7 @@ static int __build_loopback_render_pipeline(webrtc_s *webrtc, webrtc_gst_slot_s
                RET_VAL_IF(source->display->surface == NULL, WEBRTC_ERROR_INVALID_OPERATION, "display->surface is NULL");
        }
 
-       pipeline_name = g_strdup_printf("webrtc-source_%u-%s-render-pipeline", source->id, GET_MEDIA_TYPE_NAME(type == MEDIA_TYPE_AUDIO));
+       pipeline_name = g_strdup_printf("loopback-pipeline-for-source_%u", source->id);
        source->av[idx].render.pipeline = gst_pipeline_new(pipeline_name);
        g_free(pipeline_name);
        RET_VAL_IF(source->av[idx].render.pipeline == NULL, WEBRTC_ERROR_INVALID_OPERATION, "render_pipeline is NULL");
@@ -3950,6 +3962,9 @@ static int __build_loopback_render_pipeline(webrtc_s *webrtc, webrtc_gst_slot_s
        if (track_id)
                *track_id = source->av[idx].render.track_id;
 
+       if (!source->av[idx].render.need_decoding)
+               GENERATE_DOT(webrtc, source->av[idx].render.pipeline, "%s", GST_ELEMENT_NAME(source->av[idx].render.pipeline));
+
        return WEBRTC_ERROR_NONE;
 
 error: