From: backto.kim
Date: Thu, 9 Dec 2021 04:22:47 +0000 (+0900)
Subject: Improve the code to check media type
X-Git-Tag: submit/tizen/20211210.074454^0
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=3ac909bb3d0223ea4c3e382382d3f15457d63a1d;p=platform%2Fcore%2Fapi%2Fwebrtc.git

Improve the code to check media type

[Version] 0.3.17
[Issue Type] Refactoring

Change-Id: Icc49b8e30ab2744d6d08a64f59dcc2b7c4b93c30
---

diff --git a/include/webrtc_private.h b/include/webrtc_private.h
index bf2022df..0d82d364 100644
--- a/include/webrtc_private.h
+++ b/include/webrtc_private.h
@@ -689,6 +689,8 @@ int _set_filesrc_looping(webrtc_s *webrtc, unsigned int source_id, bool looping)
 int _get_filesrc_looping(webrtc_s *webrtc, unsigned int source_id, bool *looping);
 int _remove_filesrc_pad_block_probe(webrtc_s *webrtc);
+bool _is_supported_media_type(const char *media_type);
+bool _is_audio_media_type(const char *media_type);
 
 #ifdef __cplusplus
 }
 #endif /* __cplusplus */
diff --git a/packaging/capi-media-webrtc.spec b/packaging/capi-media-webrtc.spec
index 84b4fde4..cc9a9205 100644
--- a/packaging/capi-media-webrtc.spec
+++ b/packaging/capi-media-webrtc.spec
@@ -1,6 +1,6 @@
 Name:       capi-media-webrtc
 Summary:    A WebRTC library in Tizen Native API
-Version:    0.3.16
+Version:    0.3.17
 Release:    0
 Group:      Multimedia/API
 License:    Apache-2.0
diff --git a/src/webrtc_private.c b/src/webrtc_private.c
index 27141582..c3442bfe 100644
--- a/src/webrtc_private.c
+++ b/src/webrtc_private.c
@@ -1119,6 +1119,25 @@ static int __get_media_type_from_pad(GstPad *pad, bool *is_video)
 }
 //LCOV_EXCL_STOP
 
+bool _is_supported_media_type(const char *media_type)
+{
+	RET_VAL_IF(media_type == NULL, false, "media_type is NULL");
+
+	if (!g_strrstr(media_type, "audio") && !g_strrstr(media_type, "video")) {
+		LOG_ERROR("not supported media type [%s]", media_type);
+		return false;
+	}
+
+	return true;
+}
+
+bool _is_audio_media_type(const char *media_type)
+{
+	RET_VAL_IF(media_type == NULL, false, "media_type is NULL");
+
+	return (bool)g_strrstr(media_type, "audio");
+}
+
 static void __webrtcbin_pad_added_cb(GstElement *webrtcbin, GstPad *new_pad, gpointer user_data)
 {
 	int ret = WEBRTC_ERROR_NONE;
diff --git a/src/webrtc_sink.c b/src/webrtc_sink.c
index 71348767..8745e8f9 100644
--- a/src/webrtc_sink.c
+++ b/src/webrtc_sink.c
@@ -370,24 +370,23 @@ static void __decodebin_pad_added_cb(GstElement *decodebin, GstPad *new_pad, gpo
 		return;
 
 	media_type = gst_structure_get_name(gst_caps_get_structure(gst_pad_get_current_caps(new_pad), 0));
+
+	if(!_is_supported_media_type(media_type))
+		return;
+
 	LOG_INFO("decodebin[%p, name:%s] new_pad[%s] media_type[%s]", decodebin, GST_ELEMENT_NAME(decodebin), GST_PAD_NAME(new_pad), media_type);
 
 	sink = __find_sink_slot(webrtc, GST_ELEMENT_NAME(decodebin));
 	RET_IF(sink == NULL, "could not find an item by [%s] in sink slots", GST_ELEMENT_NAME(decodebin));
 
-	if (g_strrstr(media_type, "video")) {
-		sink->media_types |= MEDIA_TYPE_VIDEO;
-		__invoke_track_added_cb(webrtc, GST_ELEMENT_NAME(decodebin), true, true);
-		ret = __build_videosink(webrtc, decodebin, new_pad);
-
-	} else if (g_strrstr(media_type, "audio")) {
+	if (_is_audio_media_type(media_type)) {
 		sink->media_types |= MEDIA_TYPE_AUDIO;
 		__invoke_track_added_cb(webrtc, GST_ELEMENT_NAME(decodebin), false, true);
 		ret = __build_audiosink(webrtc, decodebin, new_pad);
-
 	} else {
-		LOG_ERROR("not supported media type[%s]", media_type);
-		return;
+		sink->media_types |= MEDIA_TYPE_VIDEO;
+		__invoke_track_added_cb(webrtc, GST_ELEMENT_NAME(decodebin), true, true);
+		ret = __build_videosink(webrtc, decodebin, new_pad);
 	}
 
 	if (ret != WEBRTC_ERROR_NONE) {
diff --git a/src/webrtc_source.c b/src/webrtc_source.c
index e82712f3..c047325d 100644
--- a/src/webrtc_source.c
+++ b/src/webrtc_source.c
@@ -1899,25 +1899,23 @@ static GstPadProbeReturn __fakesink_probe_cb(GstPad *pad, GstPadProbeInfo *info
 	webrtc_gst_slot_s *source = u_data;
 	GstCaps *new_cap = NULL;
 	GstElement *appsrc = NULL;
-	gchar *media = NULL;
+	const gchar *media_type = NULL;
 	int av_idx;
 
-	gst_structure_get(gst_caps_get_structure(gst_pad_get_current_caps(pad), 0), "media", G_TYPE_STRING, &media, NULL);
+	gst_structure_get(gst_caps_get_structure(gst_pad_get_current_caps(pad), 0), "media", G_TYPE_STRING, &media_type, NULL);
 
-	if (!g_strrstr(media, "audio") && !g_strrstr(media, "video")) {
-		LOG_ERROR("not supported media type [%s]", media);
+	if(!_is_supported_media_type(media_type))
 		return GST_PAD_PROBE_OK;
-	}
 
-	av_idx = GET_AV_IDX(g_strrstr(media, "audio"));
+	av_idx = GET_AV_IDX(_is_audio_media_type(media_type));
 
 	appsrc = gst_bin_get_by_name(source->bin, _av_tbl[av_idx].appsrc_name);
-	RET_VAL_IF(appsrc == NULL, GST_PAD_PROBE_OK, "There is no appsrc for [%s]", media);
+	RET_VAL_IF(appsrc == NULL, GST_PAD_PROBE_OK, "There is no appsrc for [%s]", media_type);
 
 	new_cap = gst_caps_copy(gst_pad_get_current_caps(pad));
 	g_object_set(G_OBJECT(appsrc), "caps", new_cap, NULL);
 
-	LOG_INFO("setting caps for [%s appsrc] successfully", media);
+	LOG_INFO("setting caps for [%s appsrc] successfully", media_type);
 	PRINT_CAPS(new_cap, "appsrc");
 
 	source->filesrc_av[av_idx].sink_pad = pad;
@@ -2101,7 +2099,7 @@ static void __filesrc_pipeline_decodebin_pad_added_cb(GstElement *element, GstPa
 	int ret = WEBRTC_ERROR_NONE;
 	webrtc_gst_slot_s *source = data;
 	const gchar *media_type = NULL;
-	gboolean is_audio;
+	bool is_audio;
 	int av_idx;
 	GstElement *queue = NULL;
 
@@ -2111,14 +2109,12 @@ static void __filesrc_pipeline_decodebin_pad_added_cb(GstElement *element, GstPa
 	media_type = gst_structure_get_name(gst_caps_get_structure(gst_pad_get_current_caps(pad), 0));
 	RET_IF(media_type == NULL, "media_type is NULL");
 
-	LOG_INFO("[%s] new_pad[%s] media_type[%s]", GST_ELEMENT_NAME(element), GST_PAD_NAME(pad), media_type);
-
-	if (!g_strrstr(media_type, "audio") && !g_strrstr(media_type, "video")) {
-		LOG_ERROR("not supported media type [%s]", media_type);
+	if(!_is_supported_media_type(media_type))
 		return;
-	}
 
-	is_audio = (g_strrstr(media_type, "audio")) ? TRUE : FALSE;
+	LOG_INFO("[%s] new_pad[%s] media_type[%s]", GST_ELEMENT_NAME(element), GST_PAD_NAME(pad), media_type);
+
+	is_audio = _is_audio_media_type(media_type);
 	av_idx = GET_AV_IDX(is_audio);
 
 	if (source->av[av_idx].src_pad_probe_id > 0) {
@@ -4052,7 +4048,10 @@ static void __loopback_decodebin_pad_added_cb(GstElement *decodebin, GstPad *new
 	media_type = gst_structure_get_name(gst_caps_get_structure(gst_pad_get_current_caps(new_pad), 0));
 	LOG_INFO("source_id[%u], media_type[%s], new_pad[%s]", source->id, media_type, GST_PAD_NAME(new_pad));
 
-	if (g_strrstr(media_type, "audio")) {
+	if(!_is_supported_media_type(media_type))
+		return;
+
+	if (_is_audio_media_type(media_type)) {
 		ret = __build_loopback_audiosink(source, decodebin);
 		if (ret != WEBRTC_ERROR_NONE)
 			SAFE_GST_OBJECT_UNREF(source->av[AV_IDX_AUDIO].render.pipeline);
@@ -4060,7 +4059,7 @@ static void __loopback_decodebin_pad_added_cb(GstElement *decodebin, GstPad *new
 		GENERATE_DOT(source->webrtc, source->av[AV_IDX_AUDIO].render.pipeline, "%s.%s-%s", GST_ELEMENT_NAME(source->av[AV_IDX_AUDIO].render.pipeline),
 			GST_ELEMENT_NAME(decodebin), GST_PAD_NAME(new_pad));
 
-	} else if (g_strrstr(media_type, "video")) {
+	} else {
 		ret = __build_loopback_videosink(source, decodebin);
 		if (ret != WEBRTC_ERROR_NONE)
 			SAFE_GST_OBJECT_UNREF(source->av[AV_IDX_VIDEO].render.pipeline);
@@ -4068,9 +4067,6 @@ static void __loopback_decodebin_pad_added_cb(GstElement *decodebin, GstPad *new
 		GENERATE_DOT(source->webrtc, source->av[AV_IDX_VIDEO].render.pipeline, "%s.%s-%s", GST_ELEMENT_NAME(source->av[AV_IDX_VIDEO].render.pipeline),
 			GST_ELEMENT_NAME(decodebin), GST_PAD_NAME(new_pad));
 
-	} else {
-		LOG_ERROR("not supported media type[%s]", media_type);
-		return;
 	}
 
 	if (ret != WEBRTC_ERROR_NONE) {
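
For reference, below is a minimal standalone sketch of how the two helpers introduced by this change are meant to be used by the pad-added callbacks. The helper bodies are copied from the src/webrtc_private.c hunk above; the RET_VAL_IF/LOG_ERROR macros here are simplified stand-ins for the project's own logging/validation macros, and demo_route(), main(), and the sample caps strings are illustrative only, not part of the commit.

/* Minimal usage sketch (not part of the commit).
 * Build example: gcc sketch.c $(pkg-config --cflags --libs glib-2.0)
 */
#include <glib.h>
#include <stdbool.h>
#include <stdio.h>

/* Simplified stand-ins for the project's logging/validation macros. */
#define LOG_ERROR(fmt, ...) g_printerr(fmt "\n", ##__VA_ARGS__)
#define RET_VAL_IF(expr, val, fmt, ...) \
	do { if (expr) { LOG_ERROR(fmt, ##__VA_ARGS__); return (val); } } while (0)

/* Same logic as the helpers added in src/webrtc_private.c. */
bool _is_supported_media_type(const char *media_type)
{
	RET_VAL_IF(media_type == NULL, false, "media_type is NULL");

	if (!g_strrstr(media_type, "audio") && !g_strrstr(media_type, "video")) {
		LOG_ERROR("not supported media type [%s]", media_type);
		return false;
	}

	return true;
}

bool _is_audio_media_type(const char *media_type)
{
	RET_VAL_IF(media_type == NULL, false, "media_type is NULL");

	return (bool)g_strrstr(media_type, "audio");
}

/* Illustrative caller mirroring the refactored callbacks:
 * reject unsupported caps once, then branch audio vs. video. */
static void demo_route(const char *media_type)
{
	if (!_is_supported_media_type(media_type))
		return;

	if (_is_audio_media_type(media_type))
		printf("[%s] -> build audio sink\n", media_type);
	else
		printf("[%s] -> build video sink\n", media_type);
}

int main(void)
{
	demo_route("audio/x-raw");   /* audio branch */
	demo_route("video/x-raw");   /* video branch */
	demo_route("text/x-raw");    /* rejected as unsupported */
	return 0;
}

With the check centralized this way, each callback rejects unknown caps in one line and then only needs a single audio/video branch, which is what the webrtc_sink.c and webrtc_source.c hunks above switch to.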