+ __mmplayer_gst_handle_element_message(player, msg);
+ break;
+
+ case GST_MESSAGE_DURATION_CHANGED:
+ {
+ LOGD("GST_MESSAGE_DURATION_CHANGED");
+ if (!__mmplayer_gst_handle_duration(player, msg))
+ LOGW("failed to update duration");
+ }
+ break;
+
+ case GST_MESSAGE_ASYNC_START:
+ LOGD("GST_MESSAGE_ASYNC_START : %s", GST_ELEMENT_NAME(GST_MESSAGE_SRC(msg)));
+ break;
+
+ case GST_MESSAGE_ASYNC_DONE:
+ __mmplayer_gst_handle_async_done_message(player, msg);
+ break;
+ case GST_MESSAGE_STREAM_COLLECTION:
+ {
+ GstStreamCollection *collection = NULL;
+ LOGD("GST_MESSAGE_STREAM_COLLECTION : %s", GST_ELEMENT_NAME(GST_MESSAGE_SRC(msg)));
+
+ gst_message_parse_stream_collection(msg, &collection);
+ if (collection) {
+ __mmplayer_dump_collection(collection);
+ if (player->collection && player->stream_notify_id) {
+ g_signal_handler_disconnect(player->collection, player->stream_notify_id);
+ player->stream_notify_id = 0;
+ }
+ gst_object_replace((GstObject **)&player->collection, (GstObject *)collection);
+ if (player->collection) {
+ player->stream_notify_id = g_signal_connect(player->collection, "stream-notify",
+ (GCallback)__mmplayer_stream_notify_cb, player);
+ }
+ gst_object_unref(collection);
+ }
+ } break;
+ case GST_MESSAGE_STREAMS_SELECTED:
+ {
+ GstStreamCollection *collection = NULL;
+ LOGD("GST_MESSAGE_STREAMS_SELECTED : %s", GST_ELEMENT_NAME(GST_MESSAGE_SRC(msg)));
+
+ gst_message_parse_streams_selected(msg, &collection);
+ if (collection) {
+ guint i = 0, len = 0;
+ len = gst_message_streams_selected_get_size(msg);
+ for (i = 0; i < len; i++) {
+ GstStream *stream = gst_message_streams_selected_get_stream(msg, i);
+ LOGD (" Stream #%d : %s\n", i, gst_stream_get_stream_id(stream));
+ gst_object_unref(stream);
+ }
+ gst_object_unref (collection);
+ }
+ } break;
+
+#ifdef __DEBUG__
+ case GST_MESSAGE_REQUEST_STATE: LOGD("GST_MESSAGE_REQUEST_STATE"); break;
+ case GST_MESSAGE_STEP_START: LOGD("GST_MESSAGE_STEP_START"); break;
+ case GST_MESSAGE_QOS: LOGD("GST_MESSAGE_QOS"); break;
+ case GST_MESSAGE_PROGRESS: LOGD("GST_MESSAGE_PROGRESS"); break;
+ case GST_MESSAGE_ANY: LOGD("GST_MESSAGE_ANY"); break;
+ case GST_MESSAGE_INFO: LOGD("GST_MESSAGE_STATE_DIRTY"); break;
+ case GST_MESSAGE_STATE_DIRTY: LOGD("GST_MESSAGE_STATE_DIRTY"); break;
+ case GST_MESSAGE_STEP_DONE: LOGD("GST_MESSAGE_STEP_DONE"); break;
+ case GST_MESSAGE_CLOCK_PROVIDE: LOGD("GST_MESSAGE_CLOCK_PROVIDE"); break;
+ case GST_MESSAGE_STRUCTURE_CHANGE: LOGD("GST_MESSAGE_STRUCTURE_CHANGE"); break;
+ case GST_MESSAGE_STREAM_STATUS: LOGD("GST_MESSAGE_STREAM_STATUS"); break;
+ case GST_MESSAGE_APPLICATION: LOGD("GST_MESSAGE_APPLICATION"); break;
+ case GST_MESSAGE_SEGMENT_START: LOGD("GST_MESSAGE_SEGMENT_START"); break;
+ case GST_MESSAGE_SEGMENT_DONE: LOGD("GST_MESSAGE_SEGMENT_DONE"); break;
+ case GST_MESSAGE_LATENCY: LOGD("GST_MESSAGE_LATENCY"); break;
+#endif
+
+ default:
+ break;
+ }
+
+ /* should not call 'gst_message_unref(msg)' */
+ return;
+}
+
+/* Sync handler installed on the pipeline bus; runs in the posting
+ * (streaming) thread context. Returns GST_BUS_DROP for messages that are
+ * fully handled here and GST_BUS_PASS for messages that must also reach
+ * the async bus watch.
+ * NOTE: any message answered with GST_BUS_DROP must be unreffed here. */
+static GstBusSyncReply
+__mmplayer_gst_bus_sync_callback(GstBus *bus, GstMessage *message, gpointer data)
+{
+	mmplayer_t *player = (mmplayer_t *)data;
+	GstBusSyncReply reply = GST_BUS_DROP;
+
+	/* no pipeline yet: nothing can be handled synchronously, pass it on */
+	if (!(player->pipeline && player->pipeline->mainbin)) {
+		LOGE("player pipeline handle is null");
+		return GST_BUS_PASS;
+	}
+
+	/* messages the player does not care about are consumed (and freed) here */
+	if (!__mmplayer_gst_check_useful_message(player, message)) {
+		gst_message_unref(message);
+		return GST_BUS_DROP;
+	}
+
+	switch (GST_MESSAGE_TYPE(message)) {
+	case GST_MESSAGE_TAG:
+		/* extract tags synchronously so content attributes update early */
+		__mmplayer_gst_extract_tag_from_msg(player, message);
+
+#ifdef __DEBUG__
+		{
+			GstTagList *tags = NULL;
+
+			gst_message_parse_tag(message, &tags);
+			if (tags) {
+				LOGE("TAGS received from element \"%s\".",
+					GST_STR_NULL(GST_ELEMENT_NAME(GST_MESSAGE_SRC(message))));
+
+				gst_tag_list_foreach(tags, print_tag, NULL);
+				gst_tag_list_unref(tags);
+				tags = NULL;
+			}
+			break;
+		}
+#endif
+		break;
+
+	case GST_MESSAGE_DURATION_CHANGED:
+		__mmplayer_gst_handle_duration(player, message);
+		break;
+	case GST_MESSAGE_ELEMENT:
+		{
+			const gchar *klass = NULL;
+			/* only element messages coming from decoder elements are consumed
+			 * here; everything else goes to the async watch untouched */
+			klass = gst_element_factory_get_metadata
+				(gst_element_get_factory((GstElement *)message->src), GST_ELEMENT_METADATA_KLASS);
+			if (!klass || !g_strrstr(klass, "Codec/Decoder")) {
+				reply = GST_BUS_PASS;
+				break;
+			}
+			__mmplayer_gst_handle_element_message(player, message);
+		}
+		break;
+	case GST_MESSAGE_ASYNC_DONE:
+		/* NOTE:Don't call gst_callback directly
+		 * because previous frame can be showed even though this message is received for seek.
+		 */
+	default:
+		reply = GST_BUS_PASS;
+		break;
+	}
+
+	/* dropped messages never reach the async watch, so release them here */
+	if (reply == GST_BUS_DROP)
+		gst_message_unref(message);
+
+	return reply;
+}
+
+/* "need-data" callback for memory playback: wraps the next chunk of the
+ * in-memory input buffer and pushes it into the appsrc.
+ * @param element   the appsrc requesting data
+ * @param size      number of bytes requested by the appsrc
+ * @param user_data mmplayer_input_buffer_t describing the memory source
+ * Emits "end-of-stream" once the whole buffer has been consumed. */
+static void
+__mmplayer_gst_appsrc_feed_data_mem(GstElement *element, guint size, gpointer user_data)
+{
+	GstElement *appsrc = element;
+	mmplayer_input_buffer_t *buf = (mmplayer_input_buffer_t *)user_data;
+	GstBuffer *buffer = NULL;
+	GstFlowReturn ret = GST_FLOW_OK;
+	gint len = size;
+
+	MMPLAYER_RETURN_IF_FAIL(element);
+	MMPLAYER_RETURN_IF_FAIL(buf);
+
+	if (buf->offset < 0 || buf->len < 0) {
+		LOGE("invalid buf info %d %d", buf->offset, buf->len);
+		return;
+	}
+
+	/* everything consumed: signal EOS instead of pushing */
+	if (buf->offset >= buf->len) {
+		LOGD("call eos appsrc");
+		g_signal_emit_by_name(appsrc, "end-of-stream", &ret);
+		return;
+	}
+
+	/* clamp the last chunk to the remaining bytes */
+	if (buf->len - buf->offset < size)
+		len = buf->len - buf->offset;
+
+	/* allocate the buffer only after validation so the early-return paths
+	 * above cannot leak it */
+	buffer = gst_buffer_new();
+	gst_buffer_insert_memory(buffer, -1, gst_memory_new_wrapped(0, (guint8 *)(buf->buf + buf->offset), len, 0, len, NULL, NULL));
+	GST_BUFFER_OFFSET(buffer) = (guint64)buf->offset;
+	GST_BUFFER_OFFSET_END(buffer) = (guint64)(buf->offset + len);
+
+#ifdef __DEBUG__
+	LOGD("feed buffer %p, offset %u-%u length %u", buffer, buf->offset, (buf->offset+len), len);
+#endif
+	g_signal_emit_by_name(appsrc, "push-buffer", buffer, &ret);
+
+	/* the "push-buffer" action signal does not take ownership of the
+	 * buffer, so drop our reference to avoid leaking one per chunk */
+	gst_buffer_unref(buffer);
+
+	buf->offset += len;
+}
+
+/* "seek-data" callback for memory playback: repositions the read offset
+ * of the in-memory input buffer. Always reports success. */
+static gboolean
+__mmplayer_gst_appsrc_seek_data_mem(GstElement *element, guint64 size, gpointer user_data)
+{
+	mmplayer_input_buffer_t *input_buf = (mmplayer_input_buffer_t *)user_data;
+
+	MMPLAYER_RETURN_VAL_IF_FAIL(input_buf, FALSE);
+
+	/* the next feed-data call will read from this offset */
+	input_buf->offset = (int)size;
+
+	return TRUE;
+}
+
+/* "need-data" (buffer underrun) callback from a media-stream appsrc.
+ * Determines the stream type from the element name and posts an underrun
+ * buffer-status message to the application together with the current
+ * queue level in bytes. */
+void
+__mmplayer_gst_appsrc_feed_data(GstElement *element, guint size, gpointer user_data)
+{
+	mmplayer_t *player = (mmplayer_t *)user_data;
+	mmplayer_stream_type_e stream_type = MM_PLAYER_STREAM_TYPE_DEFAULT;
+	MMMessageParamType msg_param = {0,};
+	guint64 current_level_bytes = 0;
+
+	MMPLAYER_RETURN_IF_FAIL(player);
+
+	if (g_strrstr(GST_ELEMENT_NAME(element), "audio")) {
+		stream_type = MM_PLAYER_STREAM_TYPE_AUDIO;
+	} else if (g_strrstr(GST_ELEMENT_NAME(element), "video")) {
+		stream_type = MM_PLAYER_STREAM_TYPE_VIDEO;
+	} else {
+		LOGW("invalid feed-data signal from %s", GST_ELEMENT_NAME(element));
+		return;
+	}
+
+	/* fixed: address-of operator had been garbled by a bad encoding pass */
+	g_object_get(G_OBJECT(element), "current-level-bytes", &current_level_bytes, NULL);
+
+	LOGI("stream type: %d, level: %"G_GUINT64_FORMAT, stream_type, current_level_bytes);
+
+	msg_param.union_type = MM_MSG_UNION_BUFFER_STATUS;
+	msg_param.buffer_status.stream_type = stream_type;
+	msg_param.buffer_status.status = MM_PLAYER_MEDIA_STREAM_BUFFER_UNDERRUN;
+	msg_param.buffer_status.bytes = current_level_bytes;
+
+	MMPLAYER_POST_MSG(player, MM_MESSAGE_PUSH_BUFFER_STATUS, &msg_param);
+}
+
+/* "enough-data" (buffer overflow) callback from a media-stream appsrc.
+ * Determines the stream type from the element name and posts an overflow
+ * buffer-status message to the application together with the current
+ * queue level in bytes. */
+void
+__mmplayer_gst_appsrc_enough_data(GstElement *element, gpointer user_data)
+{
+	mmplayer_t *player = (mmplayer_t *)user_data;
+	mmplayer_stream_type_e stream_type = MM_PLAYER_STREAM_TYPE_DEFAULT;
+	MMMessageParamType msg_param = {0,};
+	guint64 current_level_bytes = 0;
+
+	MMPLAYER_RETURN_IF_FAIL(player);
+
+	if (g_strrstr(GST_ELEMENT_NAME(element), "audio")) {
+		stream_type = MM_PLAYER_STREAM_TYPE_AUDIO;
+	} else if (g_strrstr(GST_ELEMENT_NAME(element), "video")) {
+		stream_type = MM_PLAYER_STREAM_TYPE_VIDEO;
+	} else {
+		LOGW("invalid enough-data signal from %s", GST_ELEMENT_NAME(element));
+		return;
+	}
+
+	/* fixed: address-of operator had been garbled by a bad encoding pass */
+	g_object_get(G_OBJECT(element), "current-level-bytes", &current_level_bytes, NULL);
+
+	LOGI("stream type: %d, level: %"G_GUINT64_FORMAT, stream_type, current_level_bytes);
+
+	msg_param.union_type = MM_MSG_UNION_BUFFER_STATUS;
+	msg_param.buffer_status.stream_type = stream_type;
+	msg_param.buffer_status.status = MM_PLAYER_MEDIA_STREAM_BUFFER_OVERFLOW;
+	msg_param.buffer_status.bytes = current_level_bytes;
+
+	MMPLAYER_POST_MSG(player, MM_MESSAGE_PUSH_BUFFER_STATUS, &msg_param);
+}
+
+/* "seek-data" callback from a media-stream appsrc.
+ * Forwards the requested byte offset to the application via message so
+ * the media-stream owner can reposition its feed. Always returns TRUE. */
+gboolean
+__mmplayer_gst_appsrc_seek_data(GstElement *element, guint64 position, gpointer user_data)
+{
+	mmplayer_t *player = (mmplayer_t *)user_data;
+	MMMessageParamType msg_param = {0,};
+	mmplayer_stream_type_e stream_type = MM_PLAYER_STREAM_TYPE_DEFAULT;
+	const gchar *elem_name = NULL;
+
+	MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE);
+
+	/* the appsrc element name encodes which stream it carries */
+	elem_name = GST_ELEMENT_NAME(element);
+	if (g_strrstr(elem_name, "audio"))
+		stream_type = MM_PLAYER_STREAM_TYPE_AUDIO;
+	else if (g_strrstr(elem_name, "video"))
+		stream_type = MM_PLAYER_STREAM_TYPE_VIDEO;
+
+	if (stream_type == MM_PLAYER_STREAM_TYPE_DEFAULT) {
+		LOGW("invalid seek-data signal from %s", elem_name);
+		return TRUE;
+	}
+
+	LOGD("stream type: %d, pos: %"G_GUINT64_FORMAT, stream_type, position);
+
+	msg_param.union_type = MM_MSG_UNION_SEEK_DATA;
+	msg_param.seek_data.stream_type = stream_type;
+	msg_param.seek_data.offset = position;
+
+	MMPLAYER_POST_MSG(player, MM_MESSAGE_PUSH_BUFFER_SEEK_DATA, &msg_param);
+
+	return TRUE;
+}
+
+/* Creates a decodebin for one elementary stream, connects the autoplug
+ * signals, links @srcpad into it and registers it in the mainbin table.
+ * @param player player handle (pipeline/mainbin must exist)
+ * @param type   MM_PLAYER_STREAM_TYPE_AUDIO or MM_PLAYER_STREAM_TYPE_VIDEO
+ * @param srcpad upstream pad (queue src) to link into the decodebin
+ * @return TRUE on success; FALSE on failure with partial objects released */
+static gboolean
+__mmplayer_gst_create_es_decoder(mmplayer_t *player, mmplayer_stream_type_e type, GstPad *srcpad)
+{
+#define MAX_LEN_NAME 20
+
+	GstPad *sinkpad = NULL;
+	gchar *prefix = NULL;
+	gchar dec_name[MAX_LEN_NAME] = {0, };
+	main_element_id_e elem_id = MMPLAYER_M_NUM;
+
+	mmplayer_gst_element_t *mainbin = NULL;
+	GstElement *decodebin = NULL;
+	GstCaps *dec_caps = NULL;
+
+	MMPLAYER_FENTER();
+
+	MMPLAYER_RETURN_VAL_IF_FAIL(player &&
+		player->pipeline &&
+		player->pipeline->mainbin, FALSE);
+	MMPLAYER_RETURN_VAL_IF_FAIL(srcpad, FALSE);
+
+	mainbin = player->pipeline->mainbin;
+	switch (type) {
+	case MM_PLAYER_STREAM_TYPE_AUDIO:
+		prefix = "audio";
+		elem_id = MMPLAYER_M_AUTOPLUG_A_DEC;
+		break;
+	case MM_PLAYER_STREAM_TYPE_VIDEO:
+		prefix = "video";
+		elem_id = MMPLAYER_M_AUTOPLUG_V_DEC;
+		break;
+	default:
+		LOGE("invalid type %d", type);
+		return FALSE;
+	}
+
+	if (mainbin[elem_id].gst) {
+		LOGE("elem(%d) is already created", elem_id);
+		return FALSE;
+	}
+
+	snprintf(dec_name, sizeof(dec_name), "%s_decodebin", prefix);
+
+	/* create decodebin */
+	decodebin = gst_element_factory_make("decodebin", dec_name);
+	if (!decodebin) {
+		LOGE("failed to create %s", dec_name);
+		return FALSE;
+	}
+
+	/* raw pad handling signal */
+	_mmplayer_add_signal_connection(player, G_OBJECT(decodebin), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "pad-added",
+		G_CALLBACK(_mmplayer_gst_decode_pad_added), (gpointer)player);
+
+	/* This signal is emitted whenever decodebin finds a new stream. It is emitted
+	 * before looking for any elements that can handle that stream. */
+	_mmplayer_add_signal_connection(player, G_OBJECT(decodebin), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "autoplug-select",
+		G_CALLBACK(_mmplayer_gst_decode_autoplug_select), (gpointer)player);
+
+	if (player->need_video_dec_sorting || player->need_audio_dec_sorting)
+		_mmplayer_add_signal_connection(player, G_OBJECT(decodebin), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "autoplug-sort",
+			G_CALLBACK(_mmplayer_gst_decode_autoplug_sort), (gpointer)player);
+
+	/* This signal is emitted when a element is added to the bin. */
+	_mmplayer_add_signal_connection(player, G_OBJECT(decodebin), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "element-added",
+		G_CALLBACK(_mmplayer_gst_element_added), (gpointer)player);
+
+	if (!gst_bin_add(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), decodebin)) {
+		LOGE("failed to add new decodebin");
+		/* the element never entered the bin, so its (floating) reference
+		 * is still ours: release it instead of leaking it */
+		gst_object_unref(decodebin);
+		return FALSE;
+	}
+
+	dec_caps = gst_pad_query_caps(srcpad, NULL);
+	if (dec_caps) {
+#ifdef __DEBUG__
+		LOGD("got pad %s:%s , dec_caps %" GST_PTR_FORMAT, GST_DEBUG_PAD_NAME(srcpad), dec_caps);
+#endif
+		g_object_set(G_OBJECT(decodebin), "sink-caps", dec_caps, NULL);
+		gst_caps_unref(dec_caps);
+	}
+
+	sinkpad = gst_element_get_static_pad(decodebin, "sink");
+
+	if (!sinkpad || gst_pad_link(srcpad, sinkpad) != GST_PAD_LINK_OK) {
+		LOGE("failed to link [%s:%s] to decoder", GST_DEBUG_PAD_NAME(srcpad));
+		goto ERROR;
+	}
+	gst_object_unref(GST_OBJECT(sinkpad));
+
+	gst_element_sync_state_with_parent(decodebin);
+
+	mainbin[elem_id].id = elem_id;
+	mainbin[elem_id].gst = decodebin;
+
+	MMPLAYER_FLEAVE();
+	return TRUE;
+
+ERROR:
+	if (sinkpad)
+		gst_object_unref(GST_OBJECT(sinkpad));
+
+	if (decodebin) {
+		/* take the element down and detach it; if removal fails the ref
+		 * is still ours and must be released explicitly */
+		gst_element_set_state(decodebin, GST_STATE_NULL);
+		if (!gst_bin_remove(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), decodebin))
+			gst_object_unref(decodebin);
+	}
+
+	MMPLAYER_FLEAVE();
+	return FALSE;
+}
+
+/* Builds the appsrc -> queue2 (-> decoder) chain for one elementary stream
+ * pushed by the application (external-demuxer mode).
+ * @param player player handle (pipeline/mainbin must exist)
+ * @param type   stream type (audio/video/text) selecting element ids/names
+ * @param caps   caps applied to the appsrc (also handed to the text decoder)
+ * @return TRUE on success; FALSE after rolling back partially created elements */
+static gboolean
+__mmplayer_gst_create_es_path(mmplayer_t *player, mmplayer_stream_type_e type, GstCaps *caps)
+{
+#define MAX_LEN_NAME 20
+	mmplayer_gst_element_t *mainbin = NULL;
+	gchar *prefix = NULL;
+	main_element_id_e src_id = MMPLAYER_M_NUM, queue_id = MMPLAYER_M_NUM;
+
+	gchar src_name[MAX_LEN_NAME] = {0, }, queue_name[MAX_LEN_NAME] = {0, };
+	GstElement *src = NULL, *queue = NULL;
+	GstPad *srcpad = NULL;
+
+	MMPLAYER_FENTER();
+	MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline &&
+		player->pipeline->mainbin, FALSE);
+
+	mainbin = player->pipeline->mainbin;
+
+	LOGD("type(%d) path is creating", type);
+	switch (type) {
+	case MM_PLAYER_STREAM_TYPE_AUDIO:
+		prefix = "audio";
+		/* audio uses the secondary source slot when the main one is taken */
+		if (mainbin[MMPLAYER_M_SRC].gst)
+			src_id = MMPLAYER_M_2ND_SRC;
+		else
+			src_id = MMPLAYER_M_SRC;
+		queue_id = MMPLAYER_M_A_BUFFER;
+		break;
+	case MM_PLAYER_STREAM_TYPE_VIDEO:
+		prefix = "video";
+		src_id = MMPLAYER_M_SRC;
+		queue_id = MMPLAYER_M_V_BUFFER;
+		break;
+	case MM_PLAYER_STREAM_TYPE_TEXT:
+		prefix = "subtitle";
+		src_id = MMPLAYER_M_SUBSRC;
+		queue_id = MMPLAYER_M_S_BUFFER;
+		break;
+	default:
+		LOGE("invalid type %d", type);
+		return FALSE;
+	}
+
+	snprintf(src_name, sizeof(src_name), "%s_appsrc", prefix);
+	snprintf(queue_name, sizeof(queue_name), "%s_queue", prefix);
+
+	/* create source */
+	src = gst_element_factory_make("appsrc", src_name);
+	if (!src) {
+		LOGF("failed to create %s", src_name);
+		goto ERROR;
+	}
+
+	mainbin[src_id].id = src_id;
+	mainbin[src_id].gst = src;
+
+	g_object_set(G_OBJECT(src), "format", GST_FORMAT_TIME,
+		"caps", caps, NULL);
+
+	/* size of many video frames are larger than default blocksize as 4096 */
+	if (type == MM_PLAYER_STREAM_TYPE_VIDEO)
+		g_object_set(G_OBJECT(src), "blocksize", (guint)1048576, NULL);
+
+	/* optional application-provided buffering limits */
+	if (player->media_stream_buffer_max_size[type] > 0)
+		g_object_set(G_OBJECT(src), "max-bytes", player->media_stream_buffer_max_size[type], NULL);
+
+	if (player->media_stream_buffer_min_percent[type] > 0)
+		g_object_set(G_OBJECT(src), "min-percent", player->media_stream_buffer_min_percent[type], NULL);
+
+	/*Fix Seek External Demuxer: set audio and video appsrc as seekable */
+	gst_app_src_set_stream_type((GstAppSrc*)G_OBJECT(src), GST_APP_STREAM_TYPE_SEEKABLE);
+
+	/* flow-control callbacks forwarded to the application as messages */
+	_mmplayer_add_signal_connection(player, G_OBJECT(src), MM_PLAYER_SIGNAL_TYPE_OTHERS, "seek-data",
+		G_CALLBACK(__mmplayer_gst_appsrc_seek_data), (gpointer)player);
+	_mmplayer_add_signal_connection(player, G_OBJECT(src), MM_PLAYER_SIGNAL_TYPE_OTHERS, "need-data",
+		G_CALLBACK(__mmplayer_gst_appsrc_feed_data), (gpointer)player);
+	_mmplayer_add_signal_connection(player, G_OBJECT(src), MM_PLAYER_SIGNAL_TYPE_OTHERS, "enough-data",
+		G_CALLBACK(__mmplayer_gst_appsrc_enough_data), (gpointer)player);
+
+	/* create queue */
+	queue = gst_element_factory_make("queue2", queue_name);
+	if (!queue) {
+		LOGE("failed to create %s", queue_name);
+		goto ERROR;
+	}
+	g_object_set(G_OBJECT(queue), "max-size-buffers", 2, NULL);
+
+	mainbin[queue_id].id = queue_id;
+	mainbin[queue_id].gst = queue;
+
+	if (!gst_bin_add(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), mainbin[src_id].gst)) {
+		LOGE("failed to add src");
+		goto ERROR;
+	}
+
+	if (!gst_bin_add(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), mainbin[queue_id].gst)) {
+		LOGE("failed to add queue");
+		goto ERROR;
+	}
+
+	if (!gst_element_link(mainbin[src_id].gst, mainbin[queue_id].gst)) {
+		LOGE("failed to link src and queue");
+		goto ERROR;
+	}
+
+	/* create decoder */
+	srcpad = gst_element_get_static_pad(mainbin[queue_id].gst, "src");
+	if (!srcpad) {
+		LOGE("failed to get srcpad of queue");
+		goto ERROR;
+	}
+
+	if (type == MM_PLAYER_STREAM_TYPE_TEXT) {
+		/* NOTE(review): the return value of _mmplayer_gst_create_decoder is
+		 * not checked here — presumably a text decoder failure is tolerated;
+		 * confirm against the callee */
+		_mmplayer_gst_create_decoder(player, srcpad, caps);
+	} else {
+		if (!__mmplayer_gst_create_es_decoder(player, type, srcpad)) {
+			LOGE("failed to create decoder");
+			gst_object_unref(GST_OBJECT(srcpad));
+			goto ERROR;
+		}
+	}
+	gst_object_unref(GST_OBJECT(srcpad));
+	return TRUE;
+
+ERROR:
+	/* roll back: detach whatever was created; if the element was never added
+	 * to the bin, gst_bin_remove fails and the reference is dropped manually */
+	if (mainbin[src_id].gst) {
+		gst_element_set_state(mainbin[src_id].gst, GST_STATE_NULL);
+		if (!gst_bin_remove(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), mainbin[src_id].gst))
+			gst_object_unref(mainbin[src_id].gst);
+		mainbin[src_id].gst = NULL;
+	}
+
+	if (mainbin[queue_id].gst) {
+		gst_element_set_state(mainbin[queue_id].gst, GST_STATE_NULL);
+		if (!gst_bin_remove(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), mainbin[queue_id].gst))
+			gst_object_unref(mainbin[queue_id].gst);
+		mainbin[queue_id].gst = NULL;
+	}
+
+	return FALSE;
+}
+
+/* "pad-added" callback from rtspsrc.
+ * For each new RTP stream pad: bumps num_dynamic_pad, then either plugs a
+ * decoder chain for the pad, or — when video is not required (no display
+ * surface and no video export) — links the pad into a fakesink instead. */
+static void
+__mmplayer_gst_rtp_dynamic_pad(GstElement *element, GstPad *pad, gpointer data)
+{
+	GstPad *sinkpad = NULL;
+	GstCaps *caps = NULL;
+	GstElement *new_element = NULL;
+	GstStructure *str = NULL;
+	const gchar *name = NULL;
+
+	mmplayer_t *player = (mmplayer_t *)data;
+
+	MMPLAYER_FENTER();
+
+	MMPLAYER_RETURN_IF_FAIL(element && pad);
+	MMPLAYER_RETURN_IF_FAIL(player &&
+		player->pipeline &&
+		player->pipeline->mainbin);
+
+	/* payload type is recognizable. increase num_dynamic and wait for sinkbin creation.
+	 * num_dynamic_pad will decreased after creating a sinkbin.
+	 */
+	player->num_dynamic_pad++;
+	LOGD("stream count inc : %d", player->num_dynamic_pad);
+
+	caps = gst_pad_query_caps(pad, NULL);
+	MMPLAYER_CHECK_NULL(caps);
+
+	/* the "media" field of the first caps structure tells audio/video apart */
+	str = gst_caps_get_structure(caps, 0);
+	name = gst_structure_get_string(str, "media");
+	if (!name) {
+		LOGE("cannot get mimetype from structure.");
+		goto ERROR;
+	}
+
+	if (strstr(name, "video")) {
+		gint stype = 0;
+		mm_attrs_get_int_by_name(player->attrs, "display_surface_type", &stype);
+
+		/* video decoding is not needed: route the stream into a fakesink and
+		 * undo the dynamic-pad count bump since no sinkbin will be created */
+		if ((stype == MM_DISPLAY_SURFACE_NULL) && (!player->set_mode.video_export)) {
+			if (player->v_stream_caps) {
+				gst_caps_unref(player->v_stream_caps);
+				player->v_stream_caps = NULL;
+			}
+
+			new_element = gst_element_factory_make("fakesink", NULL);
+			player->num_dynamic_pad--;
+			goto NEW_ELEMENT;
+		}
+	}
+
+	if (!_mmplayer_gst_create_decoder(player, pad, caps)) {
+		LOGE("failed to autoplug for caps");
+		goto ERROR;
+	}
+
+	gst_caps_unref(caps);
+	caps = NULL;
+
+NEW_ELEMENT:
+
+	/* execute new_element if created*/
+	if (new_element) {
+		LOGD("adding new element to pipeline");
+
+		/* set state to READY before add to bin */
+		MMPLAYER_ELEMENT_SET_STATE(new_element, GST_STATE_READY);
+
+		/* add new element to the pipeline */
+		if (FALSE == gst_bin_add(GST_BIN(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst), new_element)) {
+			LOGE("failed to add autoplug element to bin");
+			goto ERROR;
+		}
+
+		/* get pad from element */
+		sinkpad = gst_element_get_static_pad(GST_ELEMENT(new_element), "sink");
+		if (!sinkpad) {
+			LOGE("failed to get sinkpad from autoplug element");
+			goto ERROR;
+		}
+
+		/* link it */
+		if (GST_PAD_LINK_OK != gst_pad_link(pad, sinkpad)) {
+			LOGE("failed to link autoplug element");
+			goto ERROR;
+		}
+
+		gst_object_unref(sinkpad);
+		sinkpad = NULL;
+
+		/* run. setting PLAYING here since streaming source is live source */
+		MMPLAYER_ELEMENT_SET_STATE(new_element, GST_STATE_PLAYING);
+	}
+
+	if (caps)
+		gst_caps_unref(caps);
+
+	MMPLAYER_FLEAVE();
+
+	return;
+
+/* NOTE(review): STATE_CHANGE_FAILED appears to be the jump target used by
+ * MMPLAYER_ELEMENT_SET_STATE on failure — confirm against the macro */
+STATE_CHANGE_FAILED:
+ERROR:
+	/* FIXIT : take care if new_element has already added to pipeline */
+	if (new_element)
+		gst_object_unref(GST_OBJECT(new_element));
+
+	if (sinkpad)
+		gst_object_unref(GST_OBJECT(sinkpad));
+
+	if (caps)
+		gst_caps_unref(caps);
+
+	/* FIXIT : how to inform this error to MSL ????? */
+	/* FIXIT : I think we'd better to use g_idle_add() to destroy pipeline and
+	 * then post an error to application
+	 */
+}
+
+/* "no-more-pads" callback from rtspsrc: removes the temporary fakesink once
+ * autoplugging is known to be finished and marks the player so later pad
+ * handling knows no more pads will arrive. */
+static void
+__mmplayer_gst_rtp_no_more_pads(GstElement *element, gpointer data)
+{
+	mmplayer_t *player = (mmplayer_t *)data;
+
+	MMPLAYER_FENTER();
+	/* guard: the callback fires from the streaming thread; validate the
+	 * handle before the pipeline dereference below */
+	MMPLAYER_RETURN_IF_FAIL(player && player->pipeline && player->pipeline->mainbin);
+
+	/* NOTE : we can remove fakesink here if there's no rtp_dynamic_pad. because whenever
+	 * we connect autoplugging element to the pad which is just added to rtspsrc, we increase
+	 * num_dynamic_pad. and this is no-more-pad situation which means no more pad will be added.
+	 * So we can say this. if num_dynamic_pad is zero, it must be one of followings
+
+	 * [1] audio and video will be dumped with filesink.
+	 * [2] autoplugging is done by just using pad caps.
+	 * [3] typefinding has happened in audio but audiosink is created already before no-more-pad signal
+	 * and the video will be dumped via filesink.
+	 */
+	if (player->num_dynamic_pad == 0) {
+		LOGD("it seems pad caps is directly used for autoplugging. removing fakesink now");
+
+		/* braces added: the lone 'return' previously dangled after a long
+		 * comment, making the control flow easy to misread */
+		if (!_mmplayer_gst_remove_fakesink(player,
+				&player->pipeline->mainbin[MMPLAYER_M_SRC_FAKESINK])) {
+			/* NOTE : _mmplayer_pipeline_complete() can be called several time. because
+			 * signaling mechanism(pad-added, no-more-pad, new-decoded-pad) from various
+			 * source element are not same. To overcome this situation, this function will called
+			 * several places and several times. Therefore, this is not an error case.
+			 */
+			return;
+		}
+	}
+
+	/* create dot before error-return. for debugging */
+	MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-no-more-pad");
+
+	player->no_more_pad = TRUE;
+
+	MMPLAYER_FLEAVE();
+}
+
+/* Creates and configures an rtspsrc element for RTSP playback: sets the
+ * location/user-agent from the player attributes and connects the dynamic
+ * pad callbacks. Returns the new element or NULL on failure. */
+static GstElement *
+__mmplayer_gst_make_rtsp_src(mmplayer_t *player)
+{
+	GstElement *rtsp_elem = NULL;
+	gchar *user_agent = NULL;
+	MMHandleType attrs = 0;
+
+	MMPLAYER_FENTER();
+	MMPLAYER_RETURN_VAL_IF_FAIL(player, NULL);
+
+	/* profile attributes are needed for the location / user-agent setup */
+	attrs = MMPLAYER_GET_ATTRS(player);
+	if (!attrs) {
+		LOGE("failed to get content attribute");
+		return NULL;
+	}
+
+	rtsp_elem = gst_element_factory_make("rtspsrc", "rtsp source");
+	if (!rtsp_elem) {
+		LOGE("failed to create rtspsrc element");
+		return NULL;
+	}
+
+	mm_attrs_get_string_by_name(attrs, "streaming_user_agent", &user_agent);
+	SECURE_LOGD("user_agent : %s", user_agent);
+
+	/* point the source at the requested uri; user-agent only if provided */
+	g_object_set(G_OBJECT(rtsp_elem), "location", player->profile.uri, NULL);
+	if (user_agent)
+		g_object_set(G_OBJECT(rtsp_elem), "user-agent", user_agent, NULL);
+
+	/* one pad per RTP stream will appear; track them until no-more-pads */
+	_mmplayer_add_signal_connection(player, G_OBJECT(rtsp_elem), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "pad-added",
+		G_CALLBACK(__mmplayer_gst_rtp_dynamic_pad), (gpointer)player);
+	_mmplayer_add_signal_connection(player, G_OBJECT(rtsp_elem), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "no-more-pads",
+		G_CALLBACK(__mmplayer_gst_rtp_no_more_pads), (gpointer)player);
+
+	MMPLAYER_FLEAVE();
+	return rtsp_elem;
+}
+
+/* Source-setup helper for http(s) sources: applies the timeout, blocksize,
+ * cookies and user-agent taken from the player attributes and ini. */
+static void __mmplayer_http_src_setup(GstElement *source, gpointer data)
+{
+#define HTTP_SOURCE_BLOCK_SIZE (64 * 1024)
+
+	mmplayer_t *player = (mmplayer_t *)data;
+	MMHandleType attrs = 0;
+	gchar *user_agent = NULL;
+	gchar *cookies = NULL;
+	gchar **cookie_list = NULL;
+	gint http_timeout = DEFAULT_HTTP_TIMEOUT;
+
+	MMPLAYER_FENTER();
+	MMPLAYER_RETURN_IF_FAIL(player);
+
+	LOGD("source element %s", GST_ELEMENT_NAME(source));
+
+	attrs = MMPLAYER_GET_ATTRS(player);
+	if (!attrs) {
+		LOGE("failed to get content attribute");
+		return;
+	}
+
+	mm_attrs_get_string_by_name(attrs, "streaming_cookie", &cookies);
+	mm_attrs_get_string_by_name(attrs, "streaming_user_agent", &user_agent);
+
+	/* the ini value overrides the default only when it was customized */
+	if (player->ini.http_timeout != DEFAULT_HTTP_TIMEOUT)
+		http_timeout = player->ini.http_timeout;
+
+	SECURE_LOGD("cookies : %s", cookies);
+	SECURE_LOGD("user_agent : %s", user_agent);
+	LOGD("timeout : %d", http_timeout);
+
+	g_object_set(G_OBJECT(source), "timeout", http_timeout, "blocksize", (unsigned long)(HTTP_SOURCE_BLOCK_SIZE), NULL);
+
+	cookie_list = _mmplayer_get_cookie_list((const char *)cookies);
+	if (cookie_list) {
+		g_object_set(G_OBJECT(source), "cookies", cookie_list, NULL);
+		g_strfreev(cookie_list);
+	}
+
+	if (user_agent)
+		g_object_set(G_OBJECT(source), "user-agent", user_agent, NULL);
+
+	MMPLAYER_FLEAVE();
+}
+
+/* Source-setup helper for rtsp sources: applies the user-agent attribute
+ * from the player's content attributes, if one is set. */
+static void __mmplayer_rtsp_src_setup(GstElement *source, gpointer data)
+{
+	mmplayer_t *player = (mmplayer_t *)data;
+	MMHandleType attrs = 0;
+	gchar *user_agent = NULL;
+
+	MMPLAYER_FENTER();
+	MMPLAYER_RETURN_IF_FAIL(player);
+
+	attrs = MMPLAYER_GET_ATTRS(player);
+	if (!attrs) {
+		LOGE("failed to get content attribute");
+		return;
+	}
+
+	mm_attrs_get_string_by_name(attrs, "streaming_user_agent", &user_agent);
+	SECURE_LOGD("user_agent : %s", user_agent);
+
+	if (user_agent)
+		g_object_set(G_OBJECT(source), "user-agent", user_agent, NULL);
+
+	MMPLAYER_FLEAVE();
+}
+
+/* "notify::source"-style callback: caches the source element created by the
+ * decodebin/playback element and applies per-scheme configuration. */
+static void
+__mmplayer_gst_found_source(GObject *object, GObject *orig, GParamSpec *pspec, gpointer data)
+{
+	mmplayer_t *player = (mmplayer_t *)data;
+	GstElement *source = NULL;
+
+	MMPLAYER_FENTER();
+	LOGD("%s >> %s", GST_ELEMENT_NAME(object), pspec->name);
+
+	/* g_object_get() returns a new reference to the source element */
+	g_object_get(orig, pspec->name, &source, NULL);
+
+	/* NOTE(review): mainbin keeps only a borrowed pointer — the reference
+	 * obtained above is released at the end of this function, so the element
+	 * lifetime relies on the pipeline owning it; confirm */
+	player->pipeline->mainbin[MMPLAYER_M_SRC].id = MMPLAYER_M_SRC;
+	player->pipeline->mainbin[MMPLAYER_M_SRC].gst = source;
+
+	if (MMPLAYER_IS_HTTP_STREAMING(player)) {
+		__mmplayer_http_src_setup(source, data);
+	} else if (MMPLAYER_IS_RTSP_STREAMING(player)) {
+		__mmplayer_rtsp_src_setup(source, data);
+	} else if (MMPLAYER_IS_SMOOTH_STREAMING(player)) {
+		g_object_set(G_OBJECT(source), "timeout", DEFAULT_HTTP_TIMEOUT, NULL);
+	} else if (player->profile.uri_type == MM_PLAYER_URI_TYPE_MEM) {
+		/* memory playback: appsrc in random-access mode, fed from the
+		 * in-memory input buffer via the *_mem callbacks */
+		g_object_set(source, "stream-type", GST_APP_STREAM_TYPE_RANDOM_ACCESS,
+			"size", (gint64)player->profile.input_mem.len, "blocksize", 20480, NULL);
+
+		_mmplayer_add_signal_connection(player, G_OBJECT(source), MM_PLAYER_SIGNAL_TYPE_OTHERS, "seek-data",
+			G_CALLBACK(__mmplayer_gst_appsrc_seek_data_mem), (gpointer)&player->profile.input_mem);
+		_mmplayer_add_signal_connection(player, G_OBJECT(source), MM_PLAYER_SIGNAL_TYPE_OTHERS, "need-data",
+			G_CALLBACK(__mmplayer_gst_appsrc_feed_data_mem), (gpointer)&player->profile.input_mem);
+	}
+	gst_object_unref (source);
+
+	MMPLAYER_FLEAVE();
+}
+
+/* "select-stream" callback from uridecodebin.
+ * Inspects one stream of the collection, updates the player's track list,
+ * and decides whether the stream should be selected.
+ * @return 1 to select the stream, 0 to skip it (-1 would defer to decodebin) */
+static gint
+__mmplayer_gst_select_stream (GstElement * uridecodebin, GstStreamCollection * collection,
+	GstStream * stream, gpointer data)
+{
+	gint ret = 0; /* 1: select, 0: skip, -1: depends on decodebin */
+	GstStreamType stype = gst_stream_get_stream_type(stream);
+	mmplayer_t *player = (mmplayer_t *)data;
+	mmplayer_track_type_e type = MM_PLAYER_TRACK_TYPE_MAX;
+	GstCaps *caps = gst_stream_get_caps(stream);
+	gchar *caps_str = NULL;
+
+	LOGD("Stream type %s flags 0x%x",
+		gst_stream_type_get_name(stype),
+		gst_stream_get_stream_flags(stream));
+	LOGD(" ID: %s", gst_stream_get_stream_id(stream));
+
+	if (caps) {
+		caps_str = gst_caps_to_string(caps);
+		LOGD(" caps: %s", caps_str);
+	}
+
+	switch (stype) {
+	case GST_STREAM_TYPE_AUDIO:
+	{
+		GstStructure *caps_structure = NULL;
+		gint samplerate = 0;
+		gint channels = 0;
+
+		type = MM_PLAYER_TRACK_TYPE_AUDIO;
+
+		if (caps) {
+			caps_structure = gst_caps_get_structure(caps, 0);
+			gst_structure_get_int(caps_structure, "rate", &samplerate);
+			gst_structure_get_int(caps_structure, "channels", &channels);
+
+			/* channels present but no sample rate: treat as corrupted */
+			if (channels > 0 && samplerate == 0) {
+				LOGW("Skip corrupted audio stream");
+				goto EXIT;
+			}
+
+			if (g_strrstr(caps_str, "mobile-xmf"))
+				mm_player_set_attribute((MMHandleType)player, NULL,
+					"content_audio_codec", "mobile-xmf", strlen("mobile-xmf"), NULL);
+		}
+		break;
+	}
+	case GST_STREAM_TYPE_VIDEO:
+	{
+		GstStructure *caps_structure = NULL;
+		/* NOTE(review): this 'stype' (display surface type) shadows the
+		 * outer GstStreamType 'stype' — easy to misread */
+		gint stype = 0;
+		gint width = 0;
+
+		type = MM_PLAYER_TRACK_TYPE_VIDEO;
+
+		/* do not support multi track video */
+		if (player->track[MM_PLAYER_TRACK_TYPE_VIDEO].total_track_num >= 1)
+			goto EXIT;
+
+		mm_attrs_get_int_by_name(player->attrs, "display_surface_type", &stype);
+
+		/* don't make video because of not required */
+		if ((stype == MM_DISPLAY_SURFACE_NULL) &&
+			(!player->set_mode.video_export)) {
+			LOGD("no need video decoding, skip video stream");
+			goto EXIT;
+		}
+
+		if (caps) {
+			caps_structure = gst_caps_get_structure(caps, 0);
+			gst_structure_get_int(caps_structure, "width", &width);
+
+			if (width != 0) {
+				/* keep a copy of the video caps for later use,
+				 * replacing any previously stored copy */
+				if (player->v_stream_caps) {
+					gst_caps_unref(player->v_stream_caps);
+					player->v_stream_caps = NULL;
+				}
+
+				player->v_stream_caps = gst_caps_copy(caps);
+				MMPLAYER_LOG_GST_CAPS_TYPE(player->v_stream_caps);
+			}
+		}
+		break;
+	}
+	case GST_STREAM_TYPE_TEXT:
+		type = MM_PLAYER_TRACK_TYPE_TEXT;
+		break;
+	default:
+		LOGW("Skip not supported stream type");
+		goto EXIT;
+	}
+
+	_mmplayer_track_update_stream(player, type, stream);
+
+	/* select the stream when the active track index points at the track
+	 * that was just appended (the last one) */
+	if (player->track[type].active_track_index == (player->track[type].total_track_num - 1)) {
+		LOGD("select this stream, active idx : %d", player->track[type].active_track_index);
+		if (type == MM_PLAYER_TRACK_TYPE_AUDIO)
+			_mmplayer_set_audio_attrs(player, caps);
+		ret = 1;
+	}
+
+EXIT:
+	g_free(caps_str);
+	if (caps)
+		gst_caps_unref(caps);
+
+	LOGD("ret %d", ret);
+	return ret;
+}
+
+/* "request-resource" callback from uridecodebin: acquires the HW video
+ * decoder resource before decoding starts.
+ * @return TRUE when the resource is (or already was) acquired,
+ *         FALSE when acquisition fails. */
+static gboolean
+__mmplayer_gst_decode_request_resource(GstElement * uridecodebin, GstStreamCollection * collection,
+	GstStream * stream, gpointer data)
+{
+	mmplayer_t *player = (mmplayer_t *)data;
+
+	MMPLAYER_FENTER();
+	MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE);
+
+	LOGD("stream type %s", gst_stream_type_get_name(gst_stream_get_stream_type(stream)));
+
+	/* public does not support audio hw decoder at the moment */
+
+	if (player->hw_resource[MMPLAYER_RESOURCE_TYPE_VIDEO_DECODER] != NULL) {
+		LOGW("video decoder resource is already acquired, skip it.");
+		return TRUE;
+	}
+
+	if (_mmplayer_acquire_hw_resource(player, MMPLAYER_RESOURCE_TYPE_VIDEO_DECODER) != MM_ERROR_NONE) {
+		LOGE("failed to acquire video decoder resource");
+		return FALSE;
+	}
+
+	/* a fresh acquisition clears any prior resource interruption */
+	player->interrupted_by_resource = FALSE;
+
+	MMPLAYER_FLEAVE();
+	return TRUE;
+}
+
+/* Recursively searches @bin for a child element whose factory name contains
+ * @element_name.
+ * @return a borrowed pointer to the first match (no extra ref is taken; the
+ *         element remains owned by the bin), or NULL when not found. */
+static GstElement *
+__mmplayer_gst_find_child_element(GstBin *bin, const gchar *element_name)
+{
+	GstIterator *iter = NULL;
+	GValue item = {0, };
+	GstElement *ch_element = NULL;
+	GstElementFactory *ch_factory = NULL;
+
+	MMPLAYER_FENTER();
+	MMPLAYER_RETURN_VAL_IF_FAIL(bin && element_name, NULL);
+
+	iter = gst_bin_iterate_recurse(bin);
+	if (iter != NULL) {
+		while (gst_iterator_next(iter, &item) == GST_ITERATOR_OK) {
+			ch_element = g_value_get_object(&item);
+			ch_factory = gst_element_get_factory(ch_element);
+			/* gst_element_get_factory() may return NULL for elements that
+			 * were not created via a registered factory: guard before using
+			 * the factory name */
+			if (ch_factory) {
+				LOGD("children factory %s", GST_OBJECT_NAME(ch_factory));
+				if (g_strrstr(GST_OBJECT_NAME(ch_factory), element_name)) {
+					LOGD("Find %s element", element_name);
+					break;
+				}
+			}
+			ch_element = NULL;
+			g_value_reset(&item);
+		}
+		gst_iterator_free(iter);
+	}
+
+	MMPLAYER_FLEAVE();
+	return ch_element;
+}
+
+/* "deep-element-added" callback: invoked for every element created anywhere
+ * inside the decodebin hierarchy. Caches references to the elements the
+ * player needs later (typefind, queue2, multiqueue, parsebin), applies
+ * buffering configuration for streaming sources, and wires the autoplug
+ * signal handlers on parsebin.
+ *
+ * @bin     : the top-level bin that emitted the signal
+ * @child   : the (sub)bin the new element was added to
+ * @element : the newly added element
+ * @data    : the mmplayer_t handle */
+static void
+__mmplayer_gst_deep_element_added(GstElement *bin, GstBin *child, GstElement *element, gpointer data)
+{
+	gchar *factory_name = NULL;
+	mmplayer_t *player = (mmplayer_t *)data;
+	mmplayer_gst_element_t *mainbin = NULL;
+
+	MMPLAYER_FENTER();
+	MMPLAYER_RETURN_IF_FAIL(player && player->pipeline && player->pipeline->mainbin);
+
+	/* NOTE(review): gst_element_get_factory() may return NULL for elements
+	 * without a factory; GST_OBJECT_NAME would then deref NULL - confirm
+	 * whether that can happen on this path. */
+	factory_name = GST_OBJECT_NAME(gst_element_get_factory(element));
+	mainbin = player->pipeline->mainbin;
+
+	LOGD("%s > %s > %s : %s", GST_ELEMENT_NAME(bin), GST_ELEMENT_NAME(child),
+			factory_name, GST_ELEMENT_NAME(element));
+
+	/* keep the first typefind reference only */
+	if (!mainbin[MMPLAYER_M_TYPEFIND].gst && g_strrstr(factory_name, "typefind")) { // FIXME : not required for local playback
+		mainbin[MMPLAYER_M_TYPEFIND].id = MMPLAYER_M_TYPEFIND;
+		mainbin[MMPLAYER_M_TYPEFIND].gst = element;
+
+		/* get notified once the stream type has been detected */
+		_mmplayer_add_signal_connection(player, G_OBJECT(element),
+				MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "have-type", G_CALLBACK(_mmplayer_typefind_have_type), (gpointer)player);
+		LOGD("typefind reference is added");
+		return;
+	}
+
+	/* buffering setup applies to http-style streaming only, not RTSP */
+	if ((MMPLAYER_IS_STREAMING(player)) && (!MMPLAYER_IS_RTSP_STREAMING(player))) {
+		/* update queue2 setting */
+		if (g_strrstr(factory_name, "queue2") && (!mainbin[MMPLAYER_M_MUXED_S_BUFFER].gst)) {
+			gint64 dur_bytes = 0L;
+			muxed_buffer_type_e type = MUXED_BUFFER_TYPE_MEM_QUEUE;
+
+			/* keep the first queue2 as the muxed-stream buffer */
+			mainbin[MMPLAYER_M_MUXED_S_BUFFER].id = MMPLAYER_M_MUXED_S_BUFFER;
+			mainbin[MMPLAYER_M_MUXED_S_BUFFER].gst = element;
+
+			if (!gst_element_query_duration(mainbin[MMPLAYER_M_SRC].gst, GST_FORMAT_BYTES, &dur_bytes))
+				LOGW("failed to get duration from source %s", GST_ELEMENT_NAME(mainbin[MMPLAYER_M_SRC].gst));
+
+			LOGD("type %s, dur_bytes = %"G_GINT64_FORMAT, player->type, dur_bytes);
+			/* NOTE : in case of ts streaming, player could not get the correct duration info *
+			 *      skip the pull mode(file or ring buffering) setting. */
+			if (dur_bytes > 0) {
+				/* a known byte duration allows ring buffering, except
+				 * for mpeg-ts and HLS where the value is unreliable */
+				if ((!g_strrstr(player->type, "video/mpegts")) && (!g_strrstr(player->type, "application/x-hls"))) {
+					type = MUXED_BUFFER_TYPE_MEM_RING_BUFFER;
+					player->streamer->ring_buffer_size = player->ini.http_ring_buffer_size;
+				}
+			} else {
+				/* normalize a failed/negative query result */
+				dur_bytes = 0;
+			}
+
+			_mm_player_streaming_set_queue2(player->streamer,
+					element,
+					FALSE,
+					type,
+					(guint64)dur_bytes); /* no meaning at the moment */
+			return;
+		}
+	}
+
+	if (g_strrstr(factory_name, "parsebin")) {
+		/* cache parsebin's internal multiqueue for demuxed-stream buffering */
+		if ((!MMPLAYER_IS_RTSP_STREAMING(player)) && (!mainbin[MMPLAYER_M_DEMUXED_S_BUFFER].gst)) {
+			GstElement *ch_element = __mmplayer_gst_find_child_element(child, "multiqueue");
+			if (ch_element) {
+				player->pipeline->mainbin[MMPLAYER_M_DEMUXED_S_BUFFER].id = MMPLAYER_M_DEMUXED_S_BUFFER;
+				player->pipeline->mainbin[MMPLAYER_M_DEMUXED_S_BUFFER].gst = ch_element;
+
+				/* in case of multiqueue, max bytes size is defined with fixed value in mm_player_streaming.h */
+				if (MMPLAYER_IS_STREAMING(player)) {
+					_mm_player_streaming_set_multiqueue(player->streamer, ch_element);
+				} else {
+					/* local playback: fixed 5s window, no interleave handling */
+					g_object_set(G_OBJECT(ch_element),
+							"max-size-time", (guint64)(5 * GST_SECOND),
+							"use-interleave", FALSE, NULL);
+				}
+			}
+		}
+		int video_codec_type = 0;
+		int audio_codec_type = 0;
+
+		/* forward element messages so the player's bus watch sees them */
+		g_object_set(G_OBJECT(child), "message-forward", TRUE, NULL);
+		g_object_set(G_OBJECT(element), "message-forward", TRUE, NULL);
+		/* pin the typefound caps on parsebin's sink, except for adaptive
+		 * streaming where the container caps change per fragment */
+		if (player->type_caps &&
+			!MMPLAYER_IS_HTTP_LIVE_STREAMING(player) &&
+			!MMPLAYER_IS_DASH_STREAMING(player))
+			g_object_set(G_OBJECT(element), "sink-caps", player->type_caps, NULL);
+
+		mm_attrs_get_int_by_name(player->attrs, MM_PLAYER_VIDEO_CODEC_TYPE, &video_codec_type);
+		mm_attrs_get_int_by_name(player->attrs, MM_PLAYER_AUDIO_CODEC_TYPE, &audio_codec_type);
+
+		/* CAUTION: if there is hw decoder, the rank value has to be higher than sw decoder
+		   and codec default type in ini has to be hw.
+		 */
+		LOGD("set codec type v(%d) a(%d)", video_codec_type, audio_codec_type);
+		if (video_codec_type == MM_PLAYER_CODEC_TYPE_SW)
+			g_object_set(G_OBJECT(child), "force-sw-decoders-for-video", TRUE, NULL);
+		if (audio_codec_type == MM_PLAYER_CODEC_TYPE_SW)
+			g_object_set(G_OBJECT(child), "force-sw-decoders-for-audio", TRUE, NULL);
+
+		/* remember parsebin and hook the autoplug decision signals */
+		mainbin[MMPLAYER_M_AUTOPLUG_PARSEBIN].id = MMPLAYER_M_AUTOPLUG_PARSEBIN;
+		mainbin[MMPLAYER_M_AUTOPLUG_PARSEBIN].gst = element;
+		_mmplayer_add_signal_connection(player, G_OBJECT(element),
+				MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "unknown-type", G_CALLBACK(_mmplayer_gst_decode_unknown_type), (gpointer)player);
+
+		_mmplayer_add_signal_connection(player, G_OBJECT(element),
+				MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "autoplug-continue", G_CALLBACK(_mmplayer_gst_decode_autoplug_continue), (gpointer)player);
+
+		_mmplayer_add_signal_connection(player, G_OBJECT(element),
+				MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "autoplug-select", G_CALLBACK(_mmplayer_gst_decode_autoplug_select), (gpointer)player);
+
+		_mmplayer_add_signal_connection(player, G_OBJECT(child),
+				MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "request-resource", G_CALLBACK(__mmplayer_gst_decode_request_resource), (gpointer)player);
+
+	} else {
+		/* generic bookkeeping for every other element type */
+		_mmplayer_gst_element_added((GstElement *)child, element, data);
+	}
+	return;
+}
+
+/* "deep-element-removed" callback: trace-logs which element left which
+ * (sub)bin of the pipeline. No state is touched. */
+void
+__mmplayer_gst_deep_element_removed(GstElement *bin, GstBin *child, GstElement *element, gpointer data)
+{
+	const gchar *top_name = GST_ELEMENT_NAME(bin);
+	const gchar *child_name = GST_ELEMENT_NAME(child);
+	const gchar *elem_name = GST_ELEMENT_NAME(element);
+
+	(void)data; /* unused */
+
+	LOGD("%s > %s > %s", top_name, child_name, elem_name);
+}
+
+static GstElement *
+__mmplayer_gst_make_uridecodebin(mmplayer_t *player)
+{
+ GstElement *uridecodebin3 = NULL;
+
+ MMPLAYER_FENTER();
+ MMPLAYER_RETURN_VAL_IF_FAIL(player, NULL);
+
+ uridecodebin3 = gst_element_factory_make("uridecodebin3", "uridecodebin3");
+ if (!uridecodebin3) {
+ LOGE("failed to create uridecodebin3");
+ return NULL;
+ }
+
+ /* get attribute */
+ SECURE_LOGD("uri : %s", player->profile.uri);
+
+ /* setting property to streaming source */
+ g_object_set(G_OBJECT(uridecodebin3), "uri", player->profile.uri,
+ "message-forward", TRUE,
+ "buffer-size", DEFAULT_BUFFER_SIZE_BYTES, NULL);
+
+ _mmplayer_add_signal_connection(player, G_OBJECT(uridecodebin3),
+ MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "deep-notify::source", G_CALLBACK(__mmplayer_gst_found_source), (gpointer)player);
+
+ _mmplayer_add_signal_connection(player, G_OBJECT(uridecodebin3),
+ MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "pad-added", G_CALLBACK(_mmplayer_gst_decode_pad_added), (gpointer)player);
+
+ _mmplayer_add_signal_connection(player, G_OBJECT(uridecodebin3),
+ MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "pad-removed", G_CALLBACK(_mmplayer_gst_decode_pad_removed), (gpointer)player);