From: Sangchul Lee Date: Mon, 18 Mar 2024 06:15:46 +0000 (+0900) Subject: webrtc_private: Add probe callback to the newly added source pad of webrtcbin X-Git-Tag: accepted/tizen/unified/20240327.141705~2 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=c9e37c732d8c1a7f6bb4565501d22c43ba52a9b3;p=platform%2Fcore%2Fapi%2Fwebrtc.git webrtc_private: Add probe callback to the newly added source pad of webrtcbin _add_probe_to_pad_for_render() and _remove_probe_from_pad_for_render() are moved from webrtc_source_loopback.c. It has been fixed to use gst_object_ref() before assigning it to slot's variable. This probe callback will be used to decide whether or not to render the RTP data. [Version] 0.4.53 [Issue Type] Improvement Change-Id: I9b539c842e754e4d6d93cc9f95f28f1b79184f00 Signed-off-by: Sangchul Lee (cherry picked from commit ba84ea6e3b92131c67206f7545f328c8915d9066) --- diff --git a/include/webrtc_private.h b/include/webrtc_private.h index f8f0a077..2afdf1fe 100644 --- a/include/webrtc_private.h +++ b/include/webrtc_private.h @@ -793,8 +793,6 @@ int _get_display_mode_from_loopback(webrtc_s *webrtc, unsigned int track_id, web int _set_display_visible_to_loopback(webrtc_s *webrtc, unsigned int track_id, bool visible); int _get_display_visible_from_loopback(webrtc_s *webrtc, unsigned int track_id, bool *visible); GstPadProbeReturn _source_data_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data); -void _add_probe_to_pad_for_render(webrtc_gst_slot_s *source, unsigned int idx, GstPad *pad, void *probe_cb); -void _remove_probe_from_pad_for_render(webrtc_gst_slot_s *source, unsigned int idx); void _set_caps_for_render(webrtc_gst_slot_s *source, GstCaps *caps, int av_idx); void _unset_caps_for_render(webrtc_gst_slot_s *source, int av_idx); int _update_caps_for_render_with_resolution(webrtc_gst_slot_s *source, int width, int height); @@ -841,8 +839,9 @@ bool _is_owner_of_track_build_context(webrtc_s *webrtc, unsigned int track_id); int 
_decodebin_autoplug_select_cb(GstElement *decodebin, GstPad *pad, GstCaps *caps, GstElementFactory *factory, gpointer user_data); void _track_build_context_destroy_cb(gpointer data); void _sink_slot_destroy_cb(gpointer data); -int _add_rendering_sink_bin(webrtc_s *webrtc, GstPad *src_pad, bool is_audio); -int _add_forwarding_sink_bin(webrtc_s *webrtc, GstPad *src_pad, bool is_audio); +GstPadProbeReturn _webrtcbin_payloaded_data_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data); +int _add_rendering_sink_bin(webrtc_s *webrtc, GstPad *src_pad, bool is_audio, webrtc_gst_slot_s **sink_slot); +int _add_forwarding_sink_bin(webrtc_s *webrtc, GstPad *src_pad, bool is_audio, webrtc_gst_slot_s **sink_slot); int _set_stream_info_to_sink(webrtc_s *webrtc, unsigned int track_id, sound_stream_info_h stream_info); int _set_display_to_sink(webrtc_s *webrtc, unsigned int track_id, unsigned int type, void *display); int _set_display_surface_id_to_sink(webrtc_s *webrtc, unsigned int track_id, int surface_id, int x, int y, int width, int height); @@ -932,6 +931,8 @@ int _check_feature(const char *feature); /* private */ int _stop(webrtc_s *webrtc); int _gst_init(webrtc_s *webrtc); +void _add_probe_to_pad_for_render(webrtc_gst_slot_s *source, unsigned int idx, GstPad *pad, GstPadProbeCallback probe_cb); +void _remove_probe_from_pad_for_render(webrtc_gst_slot_s *source, unsigned int idx); int _gst_build_pipeline(webrtc_s *webrtc); void _gst_destroy_pipeline(webrtc_s *webrtc); int _gst_pipeline_set_state(webrtc_s *webrtc, GstState state); diff --git a/packaging/capi-media-webrtc.spec b/packaging/capi-media-webrtc.spec index 1cc92c08..9a260a06 100644 --- a/packaging/capi-media-webrtc.spec +++ b/packaging/capi-media-webrtc.spec @@ -1,6 +1,6 @@ Name: capi-media-webrtc Summary: A WebRTC library in Tizen Native API -Version: 0.4.52 +Version: 0.4.53 Release: 0 Group: Multimedia/API License: Apache-2.0 diff --git a/src/webrtc_private.c b/src/webrtc_private.c index 
1d1859b9..c6df867c 100644 --- a/src/webrtc_private.c +++ b/src/webrtc_private.c @@ -1457,15 +1457,56 @@ bool _is_audio_media_type(const char *media_type) return (bool)g_strrstr(media_type, "audio"); } +void _add_probe_to_pad_for_render(webrtc_gst_slot_s *slot, unsigned int idx, GstPad *pad, GstPadProbeCallback probe_cb) +{ + probe_userdata_s *probe_userdata; + + ASSERT(slot); + ASSERT(idx < AV_IDX_MAX); + ASSERT(pad); + ASSERT(probe_cb); + + probe_userdata = g_new0(probe_userdata_s, 1); + probe_userdata->slot = slot; + probe_userdata->av_idx = idx; + slot->av[idx].render.src_pad = gst_object_ref(pad); + slot->av[idx].render.src_pad_probe_id = gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, + probe_cb, probe_userdata, g_free); + + LOG_DEBUG("slot[id:%u, av_idx:%u] pad[%p] probe[id:%lu, callback:%p]", + slot->id, idx, pad, slot->av[idx].render.src_pad_probe_id, probe_cb); +} + +void _remove_probe_from_pad_for_render(webrtc_gst_slot_s *slot, unsigned int idx) +{ + ASSERT(slot); + ASSERT(idx < AV_IDX_MAX); + + if (slot->av[idx].render.src_pad_probe_id == 0) + return; + + LOG_DEBUG("slot[id:%u, av_idx:%s] pad[%p] probe_id[%lu]", + slot->id, GET_MEDIA_TYPE_NAME(idx == AV_IDX_AUDIO), slot->av[idx].render.src_pad, slot->av[idx].render.src_pad_probe_id); + + gst_pad_remove_probe(slot->av[idx].render.src_pad, slot->av[idx].render.src_pad_probe_id); + slot->av[idx].render.src_pad_probe_id = 0; + + gst_object_unref(slot->av[idx].render.src_pad); + slot->av[idx].render.src_pad = NULL; + + _unset_caps_for_render(slot, idx); +} + static void __webrtcbin_pad_added_cb(GstElement *webrtcbin, GstPad *new_pad, gpointer user_data) { int ret = WEBRTC_ERROR_NONE; webrtc_s *webrtc = (webrtc_s *)user_data; gchar *media_type = NULL; bool is_audio; + webrtc_gst_slot_s *sink; g_autoptr(GMutexLocker) locker = NULL; - RET_IF(webrtc == NULL, "webrtc is NULL"); + ASSERT(webrtc); if (GST_PAD_DIRECTION(new_pad) != GST_PAD_SRC) return; @@ -1492,13 +1533,15 @@ static void 
__webrtcbin_pad_added_cb(GstElement *webrtcbin, GstPad *new_pad, gpo if ((!is_audio && webrtc->encoded_video_frame_cb.callback) || (is_audio && webrtc->encoded_audio_frame_cb.callback)) { - ret = _add_forwarding_sink_bin(webrtc, new_pad, is_audio); + ret = _add_forwarding_sink_bin(webrtc, new_pad, is_audio, &sink); RET_IF(ret != WEBRTC_ERROR_NONE, "failed to _add_forwarding_sink_bin()"); } else { - ret = _add_rendering_sink_bin(webrtc, new_pad, is_audio); + ret = _add_rendering_sink_bin(webrtc, new_pad, is_audio, &sink); RET_IF(ret != WEBRTC_ERROR_NONE, "failed to _add_rendering_sink_bin()"); } + _add_probe_to_pad_for_render(sink, is_audio ? AV_IDX_AUDIO : AV_IDX_VIDEO, new_pad, _webrtcbin_payloaded_data_probe_cb); + GENERATE_DOT(webrtc, webrtc->gst.pipeline, "%s.webrtcbin-%s", GST_ELEMENT_NAME(webrtc->gst.pipeline), GST_PAD_NAME(new_pad)); } diff --git a/src/webrtc_sink.c b/src/webrtc_sink.c index 4e164e90..64c892e7 100644 --- a/src/webrtc_sink.c +++ b/src/webrtc_sink.c @@ -632,6 +632,8 @@ void _sink_slot_destroy_cb(gpointer data) if (sink->sound_stream_info.type) free(sink->sound_stream_info.type); + _remove_probe_from_pad_for_render(sink, AV_IDX_AUDIO); + _remove_probe_from_pad_for_render(sink, AV_IDX_VIDEO); _remove_probe_from_pad_for_dump(sink); #ifdef TIZEN_FEATURE_SNAPSHOT @@ -704,15 +706,16 @@ error: return WEBRTC_ERROR_INVALID_OPERATION; } -int _add_rendering_sink_bin(webrtc_s *webrtc, GstPad *src_pad, bool is_audio) +int _add_rendering_sink_bin(webrtc_s *webrtc, GstPad *src_pad, bool is_audio, webrtc_gst_slot_s **sink_slot) { gchar *bin_name; gchar *track_name; webrtc_gst_slot_s *sink; GstElement *decodebin; - RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL"); - RET_VAL_IF(src_pad == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "src_pad is NULL"); + ASSERT(webrtc); + ASSERT(src_pad); + ASSERT(sink_slot); if (__alloc_sink_slot(webrtc, GST_PAD_NAME(src_pad), false, &sink, &bin_name) != WEBRTC_ERROR_NONE) return 
WEBRTC_ERROR_INVALID_OPERATION; @@ -745,6 +748,8 @@ int _add_rendering_sink_bin(webrtc_s *webrtc, GstPad *src_pad, bool is_audio) LOG_INFO("added a sink slot[%p, id:%u]", sink, sink->id); + *sink_slot = sink; + return WEBRTC_ERROR_NONE; error_before_insert: @@ -1034,7 +1039,22 @@ static GstCaps* __make_caps_if_h264_or_h265(GstPad *pad) return new_caps; } -int _add_forwarding_sink_bin(webrtc_s *webrtc, GstPad *src_pad, bool is_audio) +GstPadProbeReturn _webrtcbin_payloaded_data_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data) +{ + probe_userdata_s *probe_data = (probe_userdata_s *)user_data; + g_autofree gchar *media_type = _get_mime_type_from_pad(pad); + webrtc_gst_slot_s *sink; + + ASSERT(probe_data); + + sink = probe_data->slot; + LOG_VERBOSE("pad[%p] media_type[%s] av_idx[%d] sink[%p, id:%u]", + pad, media_type, probe_data->av_idx, sink, sink->id); + + return GST_PAD_PROBE_OK; +} + +int _add_forwarding_sink_bin(webrtc_s *webrtc, GstPad *src_pad, bool is_audio, webrtc_gst_slot_s **sink_slot) { gchar *bin_name; gchar *track_name; @@ -1046,8 +1066,9 @@ int _add_forwarding_sink_bin(webrtc_s *webrtc, GstPad *src_pad, bool is_audio) GstCaps *sink_caps; webrtc_gst_slot_s *sink; - RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL"); - RET_VAL_IF(src_pad == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "src_pad is NULL"); + ASSERT(webrtc); + ASSERT(src_pad); + ASSERT(sink_slot); if (__alloc_sink_slot(webrtc, GST_PAD_NAME(src_pad), true, &sink, &bin_name) != WEBRTC_ERROR_NONE) return WEBRTC_ERROR_INVALID_OPERATION; @@ -1101,6 +1122,8 @@ int _add_forwarding_sink_bin(webrtc_s *webrtc, GstPad *src_pad, bool is_audio) LOG_INFO("added a sink slot[%p, id:%u] to webrtc[%p]", sink, sink->id, webrtc); + *sink_slot = sink; + return WEBRTC_ERROR_NONE; error_before_insert: diff --git a/src/webrtc_source_loopback.c b/src/webrtc_source_loopback.c index 7455f6e6..70999a11 100644 --- a/src/webrtc_source_loopback.c +++ b/src/webrtc_source_loopback.c @@ 
-588,46 +588,6 @@ GstPadProbeReturn _source_data_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpoi return GST_PAD_PROBE_OK; } -void _add_probe_to_pad_for_render(webrtc_gst_slot_s *source, unsigned int idx, GstPad *pad, void *probe_cb) -{ - probe_userdata_s *probe_userdata; - - RET_IF(source == NULL, "source is NULL"); - RET_IF(idx >= AV_IDX_MAX, "invalid idx(%u)", idx); - RET_IF(pad == NULL, "pad is NULL"); - RET_IF(probe_cb == NULL, "probe_cb is NULL"); - - probe_userdata = g_new0(probe_userdata_s, 1); - probe_userdata->slot = source; - probe_userdata->av_idx = idx; - source->av[idx].render.src_pad = pad; - source->av[idx].render.src_pad_probe_id = gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, - probe_cb, probe_userdata, g_free); - - LOG_DEBUG("source[id:%u, av_idx:%u] pad[%p] probe[id:%lu, callback:%p]", - source->id, idx, pad, source->av[idx].render.src_pad_probe_id, probe_cb); -} - -void _remove_probe_from_pad_for_render(webrtc_gst_slot_s *source, unsigned int idx) -{ - RET_IF(source == NULL, "source is NULL"); - RET_IF(idx >= AV_IDX_MAX, "invalid idx(%u)", idx); - - if (source->av[idx].render.src_pad_probe_id == 0) - return; - - LOG_DEBUG("source[id:%u, av_idx:%s] pad[%p] probe_id[%lu]", - source->id, GET_MEDIA_TYPE_NAME(idx == AV_IDX_AUDIO), source->av[idx].render.src_pad, source->av[idx].render.src_pad_probe_id); - - gst_pad_remove_probe(source->av[idx].render.src_pad, source->av[idx].render.src_pad_probe_id); - source->av[idx].render.src_pad_probe_id = 0; - - gst_object_unref(source->av[idx].render.src_pad); - source->av[idx].render.src_pad = NULL; - - _unset_caps_for_render(source, idx); -} - void _set_caps_for_render(webrtc_gst_slot_s *source, GstCaps *caps, int av_idx) { RET_IF(source == NULL, "source is NULL");