int _remove_filesrc_pad_block_probe(webrtc_s *webrtc);
gchar * _get_media_type_from_pad(GstPad *pad);
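+/* Returned string is newly allocated; release it with g_free(). */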
+gchar * _get_mime_type_from_pad(GstPad *pad);
bool _is_supported_media_type(const char *media_type);
bool _is_audio_media_type(const char *media_type);
Name: capi-media-webrtc
Summary: A WebRTC library in Tizen Native API
-Version: 0.3.29
+Version: 0.3.30
Release: 0
Group: Multimedia/API
License: Apache-2.0
return media_type;
}
+
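+/* Returns the MIME type of the pad's current caps as a newly-allocated string,
+ * or NULL if the pad has no caps; the caller must release it with g_free(). */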
+gchar * _get_mime_type_from_pad(GstPad *pad)
+{
+ GstCaps *caps = NULL;
+ gchar *mime_type = NULL;
+
+ RET_VAL_IF(pad == NULL, NULL, "pad is NULL");
+
+ caps = gst_pad_get_current_caps(pad);
+ RET_VAL_IF(caps == NULL, NULL, "caps is NULL");
+
+ mime_type = g_strdup(gst_structure_get_name(gst_caps_get_structure(caps, 0)));
+ LOG_DEBUG("mime_type [%s]", mime_type);
+
+ gst_caps_unref(caps);
+
+ return mime_type;
+}
+
//LCOV_EXCL_STOP
bool _is_supported_media_type(const char *media_type)
{
int ret = WEBRTC_ERROR_NONE;
webrtc_s *webrtc = (webrtc_s *)user_data;
- const gchar *media_type;
+ gchar *media_type;
webrtc_gst_slot_s *sink;
RET_IF(webrtc == NULL, "webrtc is NULL");
if (GST_PAD_DIRECTION(new_pad) != GST_PAD_SRC)
return;
- media_type = gst_structure_get_name(gst_caps_get_structure(gst_pad_get_current_caps(new_pad), 0));
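+ /* _get_mime_type_from_pad() returns an allocated string; free it on every return path below */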
+ media_type = _get_mime_type_from_pad(new_pad);
- if(!_is_supported_media_type(media_type))
+ if (!_is_supported_media_type(media_type)) {
+ g_free(media_type);
return;
+ }
LOG_INFO("decodebin[%p, name:%s] new_pad[%s] media_type[%s]", decodebin, GST_ELEMENT_NAME(decodebin), GST_PAD_NAME(new_pad), media_type);
sink = __find_sink_slot(webrtc, GST_ELEMENT_NAME(decodebin));
- RET_IF(sink == NULL, "could not find an item by [%s] in sink slots", GST_ELEMENT_NAME(decodebin));
+ if (!sink) {
+ g_free(media_type);
+ LOG_ERROR("could not find an item by [%s] in sink slots", GST_ELEMENT_NAME(decodebin));
+ return;
+ }
if (_is_audio_media_type(media_type)) {
sink->media_types |= MEDIA_TYPE_AUDIO;
_post_error_cb_in_idle(webrtc, ret);
}
+ g_free(media_type);
+
GENERATE_DOT(webrtc, webrtc->gst.pipeline, "%s.%s", GST_ELEMENT_NAME(webrtc->gst.pipeline), GST_ELEMENT_NAME(decodebin));
}
{
int ret = WEBRTC_ERROR_NONE;
webrtc_gst_slot_s *source = data;
- const gchar *media_type = NULL;
+ gchar *media_type = NULL;
bool is_audio;
int av_idx;
GstElement *queue = NULL;
RET_IF(source == NULL, "source is NULL");
RET_IF(source->filesrc_pipeline == NULL, "filesrc_pipeline is NULL");
- media_type = gst_structure_get_name(gst_caps_get_structure(gst_pad_get_current_caps(pad), 0));
+ media_type = _get_mime_type_from_pad(pad);
RET_IF(media_type == NULL, "media_type is NULL");
- if(!_is_supported_media_type(media_type))
+ if (!_is_supported_media_type(media_type)) {
+ g_free(media_type);
return;
+ }
LOG_INFO("[%s] new_pad[%s] media_type[%s]", GST_ELEMENT_NAME(element), GST_PAD_NAME(pad), media_type);
is_audio = _is_audio_media_type(media_type);
av_idx = GET_AV_IDX(is_audio);
+ g_free(media_type);
+
if (source->av[av_idx].src_pad_probe_id > 0) {
LOG_INFO("Pipeline already built");
ret = __link_decodebin_with_queue(pad, source, is_audio);
{
int ret = WEBRTC_ERROR_NONE;
webrtc_gst_slot_s *source = (webrtc_gst_slot_s *)user_data;
- const gchar *media_type;
+ gchar *media_type;
RET_IF(source == NULL, "source is NULL");
if (GST_PAD_DIRECTION(new_pad) != GST_PAD_SRC)
return;
- media_type = gst_structure_get_name(gst_caps_get_structure(gst_pad_get_current_caps(new_pad), 0));
+ media_type = _get_mime_type_from_pad(new_pad);
LOG_INFO("source_id[%u], media_type[%s], new_pad[%s]", source->id, media_type, GST_PAD_NAME(new_pad));
- if(!_is_supported_media_type(media_type))
+ if (!_is_supported_media_type(media_type)) {
+ g_free(media_type);
return;
+ }
if (_is_audio_media_type(media_type)) {
ret = __build_loopback_audiosink(source, decodebin);
}
+ g_free(media_type);
+
if (ret != WEBRTC_ERROR_NONE) {
LOG_ERROR("failed to build loopback rendering sink, ret[0x%x]", ret);
_post_error_cb_in_idle(source->webrtc, ret);
{
media_packet_source_s *packet_source = (media_packet_source_s *)data;
GstPad *sinkpad;
- const gchar *media_type;
+ const gchar *mime_type;
GstElement *h264parse;
GstElement *capsfilter;
GstCaps *caps;
g_print("__demux_pad_added_cb(), object[%s] pad[%s] packet_source[%p]\n",
GST_ELEMENT_NAME(object), GST_PAD_NAME(pad), packet_source);
- media_type = gst_structure_get_name(gst_caps_get_structure(gst_pad_get_current_caps(pad), 0));
+ caps = gst_pad_get_current_caps(pad);
+ mime_type = gst_structure_get_name(gst_caps_get_structure(caps, 0));
+ gst_caps_unref(caps);
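+ /* the name string is owned by the caps structure (the pad still holds its own ref), so do not free it */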
- if (g_strrstr(media_type, "audio")) {
- g_print("skip pad for audio\n");
+ if (g_strrstr(mime_type, "audio")) {
+ g_print("skip pad for audio stream\n");
return;
}