| FUNCTION DEFINITIONS |
| |
========================================================================================== */
-#ifdef __DEBUG__
-static void
-print_tag(const GstTagList *list, const gchar *tag, gpointer unused)
-{
- gint i, count;
-
- count = gst_tag_list_get_tag_size(list, tag);
-
- LOGD("count = %d", count);
-
- for (i = 0; i < count; i++) {
- gchar *str;
-
- if (gst_tag_get_type(tag) == G_TYPE_STRING) {
- if (!gst_tag_list_get_string_index(list, tag, i, &str))
- g_assert_not_reached();
- } else {
- str = g_strdup_value_contents(gst_tag_list_get_value_index(list, tag, i));
- }
-
- if (i == 0)
- g_print(" %15s: %s", gst_tag_get_nick(tag), str);
- else
- g_print(" : %s", str);
-
- g_free(str);
- }
-}
-#endif
-
-static gboolean
-__mmplayer_is_hls_type(gchar *type) {
- if (g_strrstr(type, "application/x-hls"))
- return TRUE;
- return FALSE;
-}
-
-static gboolean
-__mmplayer_is_mpegts_type(gchar *type) {
- if (g_strrstr(type, "video/mpegts"))
- return TRUE;
- return FALSE;
-}
-
-static gboolean
-__mmplayer_is_mp3_type(gchar *type) {
- if (g_strrstr(type, "application/x-id3") ||
- (g_strrstr(type, "audio/mpeg") && g_strrstr(type, "mpegversion=(int)1")))
- return TRUE;
- return FALSE;
-}
static gboolean
__mmplayer_check_error_posted_from_activated_track(mmplayer_t *player, gchar *src_element_name)
}
static gboolean
-__mmplayer_handle_streaming_error(mmplayer_t *player, GstMessage *message)
+__mmplayer_handle_streaming_error(mmplayer_t *player, GstMessage *message, GError *error)
{
LOGD("\n");
MMMessageParamType msg_param = {0, };
gchar *msg_src_element = NULL;
- GstStructure *s = NULL;
- guint error_id = 0;
- gchar *error_string = NULL;
MMPLAYER_FENTER();
MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE);
MMPLAYER_RETURN_VAL_IF_FAIL(message, FALSE);
+ MMPLAYER_RETURN_VAL_IF_FAIL(error, FALSE);
- s = gst_structure_copy(gst_message_get_structure(message));
-
-
- if (!gst_structure_get_uint(s, "error_id", &error_id))
- error_id = MMPLAYER_STREAMING_ERROR_NONE;
-
- switch (error_id) {
+ switch (error->code) {
case MMPLAYER_STREAMING_ERROR_UNSUPPORTED_AUDIO:
msg_param.code = MM_ERROR_PLAYER_STREAMING_UNSUPPORTED_AUDIO;
break;
break;
default:
{
- gst_structure_free(s);
return MM_ERROR_PLAYER_STREAMING_FAIL;
}
}
- error_string = g_strdup(gst_structure_get_string(s, "error_string"));
- if (error_string)
- msg_param.data = (void *)error_string;
+ if (error->message)
+ msg_param.data = (void *)(error->message);
if (message->src) {
msg_src_element = GST_ELEMENT_NAME(GST_ELEMENT_CAST(message->src));
LOGD("skip error post because it's sent already.");
}
- gst_structure_free(s);
- MMPLAYER_FREEIF(error_string);
-
MMPLAYER_FLEAVE();
return TRUE;
if (gst_tag_list_get_date(tag_list, gsttag, &date)) {\
if (date != NULL) {\
string = g_strdup_printf("%d", g_date_get_year(date));\
+ if (string == NULL) {\
+ LOGD("failed to get date/time from tag");\
+ g_date_free(date);\
+ break;\
+ } \
mm_player_set_attribute((MMHandleType)player, NULL,\
playertag, string, strlen(string), NULL); \
SECURE_LOGD("metainfo year : %s", string);\
return TRUE;
}
+
+static mmplayer_track_type_e
+__mmplayer_convert_gst_stream_type_to_track_type (GstStreamType stype)
+{
+ switch (stype) {
+ case GST_STREAM_TYPE_AUDIO:
+ return MM_PLAYER_TRACK_TYPE_AUDIO;
+ case GST_STREAM_TYPE_VIDEO:
+ return MM_PLAYER_TRACK_TYPE_VIDEO;
+ case GST_STREAM_TYPE_TEXT:
+ return MM_PLAYER_TRACK_TYPE_TEXT;
+ default:
+ LOGD("not supported stream stype");
+ return MM_PLAYER_TRACK_TYPE_MAX;
+ }
+}
+
/* if retval is FALSE, it will be dropped for performance. */
static gboolean
__mmplayer_gst_check_useful_message(mmplayer_t *player, GstMessage *message)
case GST_MESSAGE_ASYNC_DONE:
case GST_MESSAGE_STATE_CHANGED:
/* we only handle messages from pipeline */
+ MMPLAYER_RECONFIGURE_LOCK(player);
if ((message->src == (GstObject *)player->pipeline->mainbin[MMPLAYER_M_PIPE].gst) && (!player->gapless.reconfigure))
retval = TRUE;
else
retval = FALSE;
+ MMPLAYER_RECONFIGURE_UNLOCK(player);
break;
case GST_MESSAGE_BUFFERING:
{
LOGD("GST_MESSAGE_STREAMS_SELECTED");
player->no_more_pad = TRUE;
_mmplayer_set_reconfigure_state(player, FALSE);
- _mmplayer_pipeline_complete(NULL, player);
+ if (!MMPLAYER_IS_ADAPTIVE_STREAMING(player))
+ _mmplayer_pipeline_complete(NULL, player);
retval = TRUE;
break;
}
_mmplayer_gst_get_position(player, &pos_nsec); /* to update player->last_position */
- if (MMPLAYER_IS_HTTP_STREAMING(player)) {
+ if (MMPLAYER_IS_HTTP_STREAMING(player))
data_size = player->http_content_size;
- }
_mm_player_streaming_buffering(player->streamer, buffering_msg, data_size, player->last_position, player->duration);
- _mm_player_streaming_sync_property(player->streamer, player->pipeline->mainbin[MMPLAYER_M_AUTOPLUG].gst);
+ if (!player->streamer->is_adaptive_streaming) {
+ _mm_player_streaming_sync_property(player->streamer, player->pipeline->mainbin[MMPLAYER_M_AUTOPLUG].gst);
+ return;
+ }
- return;
+ /* adaptivedemux2 is used for buffering in uridecodebin3 */
+ if (!player->streamer->buffering_req.is_pre_buffering) {
+ LOGD("adaptive> set rebuffer time : %d ms", player->streamer->buffering_req.rebuffer_time);
+ g_object_set(player->pipeline->mainbin[MMPLAYER_M_ADAPTIVE_DEMUX].gst,
+ "low-watermark-time", (guint64)(player->streamer->buffering_req.rebuffer_time * GST_MSECOND),
+ NULL);
+ }
}
static int
/* Note : the streaming error from the streaming source is handled
* using __mmplayer_handle_streaming_error.
*/
- __mmplayer_handle_streaming_error(player, msg);
+ __mmplayer_handle_streaming_error(player, msg, error);
/* dump state of all element */
_mmplayer_dump_pipeline_state(player);
__mmplayer_gst_handle_state_message(mmplayer_t *player, GstMessage *msg)
{
mmplayer_gst_element_t *mainbin;
- const GValue *voldstate, *vnewstate, *vpending;
GstState oldstate = GST_STATE_NULL;
GstState newstate = GST_STATE_NULL;
GstState pending = GST_STATE_NULL;
if (msg->src != (GstObject *)mainbin[MMPLAYER_M_PIPE].gst)
return;
- /* get state info from msg */
- voldstate = gst_structure_get_value(gst_message_get_structure(msg), "old-state");
- vnewstate = gst_structure_get_value(gst_message_get_structure(msg), "new-state");
- vpending = gst_structure_get_value(gst_message_get_structure(msg), "pending-state");
-
- if (!voldstate || !vnewstate) {
- LOGE("received msg has wrong format.");
- return;
- }
-
- oldstate = (GstState)voldstate->data[0].v_int;
- newstate = (GstState)vnewstate->data[0].v_int;
- if (vpending)
- pending = (GstState)vpending->data[0].v_int;
+ gst_message_parse_state_changed(msg, &oldstate, &newstate, &pending);
LOGD("state changed [%s] : %s ---> %s final : %s",
GST_OBJECT_NAME(GST_MESSAGE_SRC(msg)),
sdp which has contents info is received when rtsp connection is opened.
extract duration ,codec info , resolution from sdp and get it by GstMessage */
if (!strcmp(structure_name, "rtspsrc_properties")) {
- gchar *audio_codec = NULL;
- gchar *video_codec = NULL;
- gchar *video_frame_size = NULL;
+ g_autofree gchar *audio_codec = NULL;
+ g_autofree gchar *video_codec = NULL;
+ g_autofree gchar *video_frame_size = NULL;
gst_structure_get(gst_message_get_structure(msg),
"rtsp_duration", G_TYPE_UINT64, &player->duration, NULL);
return;
if (player->seek_state == MMPLAYER_SEEK_IN_PROGRESS) {
+ if (player->is_external_subtitle_present)
+ _mmplayer_sync_subtitle_pipeline(player);
+
if (MMPLAYER_TARGET_STATE(player) == MM_PLAYER_STATE_PAUSED) {
player->seek_state = MMPLAYER_SEEK_NONE;
MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL);
return;
}
+#ifdef __DEBUG__
static void
__mmplayer_print_tag_foreach(const GstTagList *tags, const gchar *tag, gpointer user_data)
{
g_free(str);
g_value_unset(&val);
}
+#endif
static void
__mmplayer_dump_collection(GstStreamCollection * collection)
{
guint i = 0;
+#ifdef __DEBUG__
GstTagList *tags = NULL;
+#endif
GstCaps *caps = NULL;
for (i = 0; i < gst_stream_collection_get_size(collection); i++) {
GstStream *stream = gst_stream_collection_get_stream(collection, i);
- LOGD ("collection: Stream %u type %s flags 0x%x\n", i,
+ LOGD ("collection: [%u] Stream, type: %s, flags 0x%x\n", i,
gst_stream_type_get_name(gst_stream_get_stream_type(stream)),
gst_stream_get_stream_flags(stream));
LOGD (" ID: %s\n", gst_stream_get_stream_id(stream));
caps = gst_stream_get_caps(stream);
if (caps) {
- gchar *caps_str = gst_caps_to_string(caps);
- LOGD (" caps: %s\n", caps_str);
- g_free(caps_str);
+ MMPLAYER_LOG_GST_CAPS_TYPE(caps);
gst_caps_unref(caps);
}
+#ifdef __DEBUG__
tags = gst_stream_get_tags(stream);
if (tags) {
LOGD (" tags:\n");
gst_tag_list_foreach(tags, __mmplayer_print_tag_foreach, GUINT_TO_POINTER(MMPLAYER_TAG_INDENT));
gst_tag_list_unref(tags);
}
+#endif
}
}
gst_stream_get_stream_id(stream), pspec->name, collection);
if (g_str_equal(pspec->name, "caps")) {
GstCaps *caps = gst_stream_get_caps(stream);
- gchar *caps_str = gst_caps_to_string(caps);
- LOGD (" New caps: %s\n", caps_str);
- g_free(caps_str);
+ MMPLAYER_LOG_GST_CAPS_TYPE(caps);
gst_caps_unref(caps);
}
+#ifdef __DEBUG__
if (g_str_equal (pspec->name, "tags")) {
GstTagList *tags = gst_stream_get_tags(stream);
if (tags) {
gst_tag_list_unref(tags);
}
}
+#endif
}
static void
gst_message_parse_streams_selected(msg, &collection);
if (collection) {
- guint i = 0, len = 0;
- len = gst_message_streams_selected_get_size(msg);
- for (i = 0; i < len; i++) {
+ guint len = gst_message_streams_selected_get_size(msg);
+ for (guint i = 0; i < len; i++) {
GstStream *stream = gst_message_streams_selected_get_stream(msg, i);
+ mmplayer_track_type_e type = __mmplayer_convert_gst_stream_type_to_track_type(
+ gst_stream_get_stream_type(stream));
+ if (type == MM_PLAYER_TRACK_TYPE_MAX) {
+ LOGD("not supported track type");
+ gst_object_unref(stream);
+ break;
+ }
LOGD (" Stream #%d : %s\n", i, gst_stream_get_stream_id(stream));
+ if (player->track[type].active_track_index == INVALID_TRACK_INDEX) {
+ int stream_index = INVALID_TRACK_INDEX;
+ if (_mmplayer_get_track_index(player, type, stream, &stream_index) == MM_ERROR_NONE) {
+ player->track[type].active_track_index = stream_index;
+ LOGD("selected this stream, update active idx : %d",
+ player->track[type].active_track_index);
+ }
+ }
gst_object_unref(stream);
}
- gst_object_unref (collection);
+ gst_object_unref(collection);
}
} break;
LOGE("TAGS received from element \"%s\".",
GST_STR_NULL(GST_ELEMENT_NAME(GST_MESSAGE_SRC(message))));
- gst_tag_list_foreach(tags, print_tag, NULL);
+ gst_tag_list_foreach(tags, __mmplayer_print_tag_foreach, GUINT_TO_POINTER(MMPLAYER_TAG_INDENT));
gst_tag_list_unref(tags);
tags = NULL;
}
return FALSE;
}
- mainbin[elem_id].id = elem_id;
- mainbin[elem_id].gst = decodebin;
-
/* raw pad handling signal */
_mmplayer_add_signal_connection(player, G_OBJECT(decodebin), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "pad-added",
G_CALLBACK(_mmplayer_gst_decode_pad_added), (gpointer)player);
gst_object_unref(GST_OBJECT(sinkpad));
gst_element_sync_state_with_parent(decodebin);
+
+ mainbin[elem_id].id = elem_id;
+ mainbin[elem_id].gst = decodebin;
+
MMPLAYER_FLEAVE();
return TRUE;
if (sinkpad)
gst_object_unref(GST_OBJECT(sinkpad));
- if (mainbin[elem_id].gst) {
- gst_element_set_state(mainbin[elem_id].gst, GST_STATE_NULL);
- gst_bin_remove(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), mainbin[elem_id].gst);
- gst_object_unref(mainbin[elem_id].gst);
- mainbin[elem_id].gst = NULL;
+ if (decodebin) {
+ gst_element_set_state(decodebin, GST_STATE_NULL);
+ if (!gst_bin_remove(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), decodebin))
+ gst_object_unref(decodebin);
}
MMPLAYER_FLEAVE();
ERROR:
if (mainbin[src_id].gst) {
gst_element_set_state(mainbin[src_id].gst, GST_STATE_NULL);
- gst_bin_remove(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), mainbin[src_id].gst);
- gst_object_unref(mainbin[src_id].gst);
+ if (!gst_bin_remove(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), mainbin[src_id].gst))
+ gst_object_unref(mainbin[src_id].gst);
mainbin[src_id].gst = NULL;
}
if (mainbin[queue_id].gst) {
gst_element_set_state(mainbin[queue_id].gst, GST_STATE_NULL);
- gst_bin_remove(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), mainbin[queue_id].gst);
- gst_object_unref(mainbin[queue_id].gst);
+ if (!gst_bin_remove(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), mainbin[queue_id].gst))
+ gst_object_unref(mainbin[queue_id].gst);
mainbin[queue_id].gst = NULL;
}
__mmplayer_gst_select_stream (GstElement * uridecodebin, GstStreamCollection * collection,
GstStream * stream, gpointer data)
{
- gint ret = 0; /* 1: select, 0: skip, -1: depends on decodebin */
+#define RET_SELECT 1
+#define RET_SKIP 0
+#define RET_DEPENDS_ON_DECODEBIN -1
+
GstStreamType stype = gst_stream_get_stream_type(stream);
mmplayer_t *player = (mmplayer_t *)data;
mmplayer_track_type_e type = MM_PLAYER_TRACK_TYPE_MAX;
- GstCaps *caps = gst_stream_get_caps(stream);
- gchar *caps_str = NULL;
+ g_autoptr(GstCaps) caps = gst_stream_get_caps(stream);
+ g_autofree gchar *caps_str = NULL;
+ GstStructure *caps_structure = NULL;
+ int stream_index = INVALID_TRACK_INDEX;
+ int ret = MM_ERROR_NONE;
LOGD("Stream type %s flags 0x%x",
gst_stream_type_get_name(stype),
gst_stream_get_stream_flags(stream));
LOGD(" ID: %s", gst_stream_get_stream_id(stream));
+ type = __mmplayer_convert_gst_stream_type_to_track_type(stype);
+
if (caps) {
caps_str = gst_caps_to_string(caps);
+ caps_structure = gst_caps_get_structure(caps, 0);
+ const gchar *mime = gst_structure_get_name(caps_structure);
+
LOGD(" caps: %s", caps_str);
+
+ for (int idx = 0; player->ini.unsupported_codec_keyword[idx][0] != '\0'; idx++) {
+ if (caps_str && strstr(caps_str, player->ini.unsupported_codec_keyword[idx])) {
+ LOGW("skip [%s] by unsupported codec keyword [%s]",
+ mime, player->ini.unsupported_codec_keyword[idx]);
+
+ _mmplayer_update_not_supported_codec_info(player, NULL, mime);
+ return RET_SKIP;
+ }
+ }
+ } else if (type == MM_PLAYER_TRACK_TYPE_AUDIO || type == MM_PLAYER_TRACK_TYPE_VIDEO) {
+ if (MMPLAYER_IS_HTTP_LIVE_STREAMING(player) || MMPLAYER_IS_DASH_STREAMING(player)) {
+ LOGD("No caps info, depends on decodebin");
+ _mmplayer_track_update_stream(player, type, stream);
+ return RET_DEPENDS_ON_DECODEBIN;
+ }
+
+ LOGD("No caps info, skip it");
+ return RET_SKIP;
}
switch (stype) {
case GST_STREAM_TYPE_AUDIO:
{
- GstStructure *caps_structure = NULL;
- gint samplerate = 0;
- gint channels = 0;
-
- type = MM_PLAYER_TRACK_TYPE_AUDIO;
+ if (caps_structure) {
+ gint samplerate = 0;
+ gint channels = 0;
- if (caps) {
- caps_structure = gst_caps_get_structure(caps, 0);
gst_structure_get_int(caps_structure, "rate", &samplerate);
gst_structure_get_int(caps_structure, "channels", &channels);
-
- if (channels > 0 && samplerate == 0) {
+ if (samplerate == 0 && channels > 0) {
LOGW("Skip corrupted audio stream");
- goto EXIT;
+ return RET_SKIP;
}
if (g_strrstr(caps_str, "mobile-xmf"))
}
case GST_STREAM_TYPE_VIDEO:
{
- GstStructure *caps_structure = NULL;
- gint stype = 0;
- gint width = 0;
-
- type = MM_PLAYER_TRACK_TYPE_VIDEO;
-
- /* do not support multi track video */
- if (player->track[MM_PLAYER_TRACK_TYPE_VIDEO].total_track_num >= 1)
- goto EXIT;
+ if (player->track[MM_PLAYER_TRACK_TYPE_VIDEO].total_track_num >= 1) {
+ LOGD("do not support muti track video");
+ break;
+ }
- mm_attrs_get_int_by_name(player->attrs, "display_surface_type", &stype);
+ // FIXME: it cause block during preparing
+ if ((!MMPLAYER_IS_HTTP_LIVE_STREAMING(player)) && (!MMPLAYER_IS_DASH_STREAMING(player))) {
+ gint stype = 0;
- /* don't make video because of not required */
- if ((stype == MM_DISPLAY_SURFACE_NULL) &&
- (!player->set_mode.video_export)) {
- LOGD("no need video decoding, skip video stream");
- goto EXIT;
+ mm_attrs_get_int_by_name(player->attrs, "display_surface_type", &stype);
+ /* don't make video because of not required */
+ if ((stype == MM_DISPLAY_SURFACE_NULL) &&
+ (!player->set_mode.video_export)) {
+ LOGD("no need video decoding, skip video stream");
+ return RET_SKIP;
+ }
}
- if (caps) {
- caps_structure = gst_caps_get_structure(caps, 0);
- gst_structure_get_int(caps_structure, "width", &width);
+ if (caps_structure) {
+ gint width = 0;
+ gst_structure_get_int(caps_structure, "width", &width);
if (width != 0) {
if (player->v_stream_caps) {
gst_caps_unref(player->v_stream_caps);
break;
}
case GST_STREAM_TYPE_TEXT:
- type = MM_PLAYER_TRACK_TYPE_TEXT;
break;
default:
LOGW("Skip not supported stream type");
- goto EXIT;
+ return RET_SKIP;
}
_mmplayer_track_update_stream(player, type, stream);
- if (player->track[type].active_track_index == (player->track[type].total_track_num - 1)) {
- LOGD("select this stream, active idx : %d", player->track[type].active_track_index);
+ ret = _mmplayer_get_track_index(player, type, stream, &stream_index);
+
+ if ((player->track[type].active_track_index == INVALID_TRACK_INDEX) &&
+ (ret == MM_ERROR_NONE)) {
+ player->track[type].active_track_index = stream_index;
+ LOGD("select this stream, active track idx : %d", player->track[type].active_track_index);
if (type == MM_PLAYER_TRACK_TYPE_AUDIO)
_mmplayer_set_audio_attrs(player, caps);
- ret = 1;
+ return RET_SELECT;
}
-EXIT:
- g_free(caps_str);
- if (caps)
- gst_caps_unref(caps);
+ if (player->track[type].active_track_index == stream_index) {
+ LOGD("already activate track idx : %d", player->track[type].active_track_index);
+ return RET_SELECT;
+ }
- LOGD("ret %d", ret);
- return ret;
+ LOGD("Skip stream");
+ return RET_SKIP;
}
static gboolean
return TRUE;
}
-static void
-__mmplayer_gst_deep_element_added(GstElement *bin, GstBin *child, GstElement *element, gpointer data)
+static GstElement *
+__mmplayer_gst_find_child_element(GstBin *bin, const gchar *element_name)
{
- gchar *factory_name = NULL;
- mmplayer_t *player = (mmplayer_t *)data;
- mmplayer_gst_element_t *mainbin = NULL;
+ GstIterator *iter = NULL;
+ GValue item = {0, };
+ GstElement *ch_element = NULL;
+ GstElementFactory *ch_factory = NULL;
MMPLAYER_FENTER();
- MMPLAYER_RETURN_IF_FAIL(player && player->pipeline && player->pipeline->mainbin);
+ MMPLAYER_RETURN_VAL_IF_FAIL(bin && element_name, NULL);
- factory_name = GST_OBJECT_NAME(gst_element_get_factory(element));
- mainbin = player->pipeline->mainbin;
+ iter = gst_bin_iterate_recurse(bin);
+ MMPLAYER_RETURN_VAL_IF_FAIL(iter, NULL);
+
+ while (gst_iterator_next(iter, &item) == GST_ITERATOR_OK) {
+ ch_element = g_value_get_object(&item);
+ ch_factory = gst_element_get_factory(ch_element);
+ LOGD("children factory %s", GST_OBJECT_NAME(ch_factory));
+ if (g_strrstr(GST_OBJECT_NAME(ch_factory), element_name)) {
+ LOGD("Find %s element", element_name);
+ break;
+ }
+ ch_element = NULL;
+ g_value_reset(&item);
+ }
+ gst_iterator_free(iter);
- LOGD("%s > %s > %s : %s", GST_ELEMENT_NAME(bin), GST_ELEMENT_NAME(child),
- factory_name, GST_ELEMENT_NAME(element));
+ MMPLAYER_FLEAVE();
+ return ch_element;
+}
- /* keep the first typefind reference only */
- if (!mainbin[MMPLAYER_M_TYPEFIND].gst && g_strrstr(factory_name, "typefind")) { // FIXME : not required for local playback+
- mainbin[MMPLAYER_M_TYPEFIND].id = MMPLAYER_M_TYPEFIND;
- mainbin[MMPLAYER_M_TYPEFIND].gst = element;
+static void __mmplayer_parsebin_setup(GstBin *bin, gpointer data)
+{
+ mmplayer_t *player = (mmplayer_t *)data;
- _mmplayer_add_signal_connection(player, G_OBJECT(element),
- MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "have-type", G_CALLBACK(_mmplayer_typefind_have_type), (gpointer)player);
- LOGD("typefind reference is added");
- return;
- }
+ g_object_set(G_OBJECT(bin), "message-forward", TRUE, NULL);
- if ((MMPLAYER_IS_STREAMING(player)) && (!MMPLAYER_IS_RTSP_STREAMING(player))) {
- /* update queue2 setting */
- if (g_strrstr(factory_name, "queue2") && (!mainbin[MMPLAYER_M_MUXED_S_BUFFER].gst)) {
- gint64 dur_bytes = 0L;
- muxed_buffer_type_e type = MUXED_BUFFER_TYPE_MEM_QUEUE;
-
- mainbin[MMPLAYER_M_MUXED_S_BUFFER].id = MMPLAYER_M_MUXED_S_BUFFER;
- mainbin[MMPLAYER_M_MUXED_S_BUFFER].gst = element;
-
- if (!gst_element_query_duration(mainbin[MMPLAYER_M_SRC].gst, GST_FORMAT_BYTES, &dur_bytes))
- LOGW("failed to get duration from source %s", GST_ELEMENT_NAME(mainbin[MMPLAYER_M_SRC].gst));
-
- LOGD("type %s, dur_bytes = %"G_GINT64_FORMAT, player->type, dur_bytes);
- /* NOTE : in case of ts streaming, player could not get the correct duration info *
- * skip the pull mode(file or ring buffering) setting. */
- if (dur_bytes > 0) {
- if (!(__mmplayer_is_mpegts_type(player->type) || __mmplayer_is_hls_type(player->type)
- || __mmplayer_is_mp3_type(player->type))) {
- type = MUXED_BUFFER_TYPE_MEM_RING_BUFFER;
- player->streamer->ring_buffer_size = player->ini.http_ring_buffer_size;
- }
- } else {
- dur_bytes = 0;
- }
+ _mmplayer_add_signal_connection(player, G_OBJECT(bin),
+ MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "unknown-type",
+ G_CALLBACK(_mmplayer_gst_decode_unknown_type), (gpointer)player);
- _mm_player_streaming_set_queue2(player->streamer,
- element,
- FALSE,
- type,
- (guint64)dur_bytes); /* no meaning at the moment */
- return;
- }
+ _mmplayer_add_signal_connection(player, G_OBJECT(bin),
+ MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "autoplug-select",
+ G_CALLBACK(_mmplayer_gst_decode_autoplug_select), (gpointer)player);
+}
- /* update mq setting */
- if (g_strrstr(factory_name, "parsebin") && (!mainbin[MMPLAYER_M_DEMUXED_S_BUFFER].gst)) {
- GstIterator *iter = NULL;
- GValue item = {0, };
- GstElement *ch_element = NULL;
- GstElementFactory *ch_factory = NULL;
-
- iter = gst_bin_iterate_recurse(child);
- if (iter != NULL) {
- while (gst_iterator_next(iter, &item) == GST_ITERATOR_OK) {
- ch_element = g_value_get_object(&item);
- ch_factory = gst_element_get_factory(ch_element);
- LOGD("children factory %s", GST_OBJECT_NAME(ch_factory));
- if (g_strrstr(GST_OBJECT_NAME(ch_factory), "multiqueue")) {
- LOGD("get multiqueue");
- player->pipeline->mainbin[MMPLAYER_M_DEMUXED_S_BUFFER].id = MMPLAYER_M_DEMUXED_S_BUFFER;
- player->pipeline->mainbin[MMPLAYER_M_DEMUXED_S_BUFFER].gst = ch_element;
-
- /* in case of multiqueue, max bytes size is defined with fixed value in mm_player_streaming.h */
- _mm_player_streaming_set_multiqueue(player->streamer, ch_element);
- g_value_reset(&item);
- break;
- }
- g_value_reset(&item);
- }
- gst_iterator_free(iter);
- }
- }
- }
+static void __mmplayer_decodebin3_setup(GstBin *bin, gpointer data)
+{
+ mmplayer_t *player = (mmplayer_t *)data;
+ int video_codec_type = 0;
+ int audio_codec_type = 0;
- if (g_strrstr(factory_name, "parsebin")) {
- int video_codec_type = 0;
- int audio_codec_type = 0;
+ g_object_set(G_OBJECT(bin), "message-forward", TRUE, NULL);
- g_object_set(G_OBJECT(child), "message-forward", TRUE, NULL);
- g_object_set(G_OBJECT(element), "message-forward", TRUE, NULL);
- if (player->type_caps &&
- !MMPLAYER_IS_HTTP_LIVE_STREAMING(player) &&
- !MMPLAYER_IS_DASH_STREAMING(player))
- g_object_set(G_OBJECT(element), "sink-caps", player->type_caps, NULL);
+ mm_attrs_get_int_by_name(player->attrs, MM_PLAYER_VIDEO_CODEC_TYPE, &video_codec_type);
+ mm_attrs_get_int_by_name(player->attrs, MM_PLAYER_AUDIO_CODEC_TYPE, &audio_codec_type);
- mm_attrs_get_int_by_name(player->attrs, MM_PLAYER_VIDEO_CODEC_TYPE, &video_codec_type);
- mm_attrs_get_int_by_name(player->attrs, MM_PLAYER_AUDIO_CODEC_TYPE, &audio_codec_type);
+ LOGD("set codec type v(%d) a(%d)", video_codec_type, audio_codec_type);
- /* CAUTION: if there is hw decoder, the rank value has to be higher than sw decoder
- and codec default type in ini has to be hw.
- */
- LOGD("set codec type v(%d) a(%d)", video_codec_type, audio_codec_type);
- if (video_codec_type == MM_PLAYER_CODEC_TYPE_SW)
- g_object_set(G_OBJECT(child), "force-sw-decoders-for-video", TRUE, NULL);
- if (audio_codec_type == MM_PLAYER_CODEC_TYPE_SW)
- g_object_set(G_OBJECT(child), "force-sw-decoders-for-audio", TRUE, NULL);
-
- mainbin[MMPLAYER_M_AUTOPLUG_PARSEBIN].id = MMPLAYER_M_AUTOPLUG_PARSEBIN;
- mainbin[MMPLAYER_M_AUTOPLUG_PARSEBIN].gst = element;
- _mmplayer_add_signal_connection(player, G_OBJECT(element),
- MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "unknown-type", G_CALLBACK(_mmplayer_gst_decode_unknown_type), (gpointer)player);
+ if (video_codec_type == MM_PLAYER_CODEC_TYPE_SW)
+ g_object_set(G_OBJECT(bin), "force-sw-decoders-for-video", TRUE, NULL);
+ if (audio_codec_type == MM_PLAYER_CODEC_TYPE_SW)
+ g_object_set(G_OBJECT(bin), "force-sw-decoders-for-audio", TRUE, NULL);
- _mmplayer_add_signal_connection(player, G_OBJECT(element),
- MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "autoplug-continue", G_CALLBACK(_mmplayer_gst_decode_autoplug_continue), (gpointer)player);
+ _mmplayer_add_signal_connection(player, G_OBJECT(bin), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG,
+ "request-resource", G_CALLBACK(__mmplayer_gst_decode_request_resource), (gpointer)player);
+}
- _mmplayer_add_signal_connection(player, G_OBJECT(element),
- MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "autoplug-select", G_CALLBACK(_mmplayer_gst_decode_autoplug_select), (gpointer)player);
+static void
+__mmplayer_gst_deep_element_added(GstElement *bin, GstBin *child, GstElement *element, gpointer data)
+{
+ gchar *factory_name = NULL;
+ mmplayer_t *player = (mmplayer_t *)data;
+
+ MMPLAYER_FENTER();
+ MMPLAYER_RETURN_IF_FAIL(player && player->pipeline && player->pipeline->mainbin);
+
+ factory_name = GST_OBJECT_NAME(gst_element_get_factory(element));
+
+ LOGD("child: %s, elem: %s (%s)", GST_ELEMENT_NAME(child), factory_name, GST_ELEMENT_NAME(element));
- _mmplayer_add_signal_connection(player, G_OBJECT(child),
- MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "request-resource", G_CALLBACK(__mmplayer_gst_decode_request_resource), (gpointer)player);
+ if (g_strrstr(factory_name, "urisourcebin")) {
+ GstElement *dbin3 = __mmplayer_gst_find_child_element(child, "decodebin3");
+ if (dbin3) {
+ GstElement *mq = __mmplayer_gst_find_child_element(child, "multiqueue");
+ if (mq)
+ g_object_set(G_OBJECT(mq), "use-interleave", FALSE, NULL);
+ __mmplayer_decodebin3_setup(GST_BIN(dbin3), data);
+ } else {
+ LOGW("failed to find decodebin3");
+ }
+ } else if (g_strrstr(factory_name, "parsebin")) {
+ g_object_set(G_OBJECT(child), "message-forward", TRUE, NULL); /* urisourcebin */
+ __mmplayer_parsebin_setup(GST_BIN(element), data);
} else {
- _mmplayer_gst_element_added((GstElement *)child, element, data);
+ _mmplayer_gst_element_added(child, element, data);
}
- return;
+}
+
+static void
+__mmplayer_delete_signal_connection(mmplayer_t *player, GstElement *removed_element)
+{
+ MMPLAYER_FENTER();
+
+ MMPLAYER_RETURN_IF_FAIL(player);
+ MMPLAYER_RETURN_IF_FAIL(removed_element);
+
+ LOGD("delete signal on %s", GST_ELEMENT_NAME(removed_element));
+
+ for (int type = MM_PLAYER_SIGNAL_TYPE_AUTOPLUG; type < MM_PLAYER_SIGNAL_TYPE_ALL; type++) {
+ GList *node = player->signals[type];
+ while (node) {
+ GList *next_node = node->next;
+ mmplayer_signal_item_t *item = node->data;
+ if (item && item->obj == G_OBJECT(removed_element)) {
+ player->signals[type] = g_list_delete_link(player->signals[type], node);
+ MMPLAYER_FREEIF(item);
+ }
+ node = next_node;
+ }
+ }
+
+ MMPLAYER_FLEAVE();
}
/* deep-element-removed callback: drop any signal connections that were
 * registered on the element leaving the pipeline. */
void
__mmplayer_gst_deep_element_removed(GstElement *bin, GstBin *child, GstElement *element, gpointer data)
{
	mmplayer_t *player = data;

	MMPLAYER_FENTER();

	MMPLAYER_RETURN_IF_FAIL(player);

	LOGD("%s > %s > %s", GST_ELEMENT_NAME(bin), GST_ELEMENT_NAME(child), GST_ELEMENT_NAME(element));

	__mmplayer_delete_signal_connection(player, element);

	MMPLAYER_FLEAVE();
}
static GstElement *
/* setting property to streaming source */
g_object_set(G_OBJECT(uridecodebin3), "uri", player->profile.uri,
"message-forward", TRUE,
- "buffer-size", DEFAULT_BUFFER_SIZE_BYTES, NULL);
+ "buffer-size", DEFAULT_BUFFER_SIZE_BYTES,
+ "use-buffering", TRUE, NULL);
_mmplayer_add_signal_connection(player, G_OBJECT(uridecodebin3),
MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "deep-notify::source", G_CALLBACK(__mmplayer_gst_found_source), (gpointer)player);
}
static int
-__mmplayer_gst_check_duration(mmplayer_t *player, gint64 position)
+__mmplayer_gst_check_position(mmplayer_t *player, gint64 position)
{
gint64 dur_nsec = 0;
+ gint64 pos_nsec = 0;
MMPLAYER_FENTER();
MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED);
return MM_ERROR_INVALID_ARGUMENT;
}
+ if (gst_element_query_position(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_FORMAT_TIME, &pos_nsec)) {
+ if ((pos_nsec == player->duration) && /* current pos is end of stream */
+ ((position / GST_MSECOND) == (player->duration / GST_MSECOND))) {
+ MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL);
+ player->seek_state = MMPLAYER_SEEK_NONE;
+ return MM_ERROR_PLAYER_NO_OP;
+ }
+ }
+
MMPLAYER_FLEAVE();
return MM_ERROR_NONE;
}
/* Note : the streaming error from the streaming source is handled
* using __mmplayer_handle_streaming_error.
*/
- __mmplayer_handle_streaming_error(player, msg);
+ __mmplayer_handle_streaming_error(player, msg, error);
} else if (error) {
LOGE("paring error posted from bus, domain : %s, code : %d", g_quark_to_string(error->domain), error->code);
}
/* Note : Textbin is not linked to the video or audio bin.
- * It needs to send the event to the text sink seperately.
+ * It needs to send the event to the text sink separately.
*/
if (player->play_subtitle && player->pipeline) {
GstElement *text_sink = GST_ELEMENT_CAST(player->pipeline->textbin[MMPLAYER_T_FAKE_SINK].gst);
&& (MMPLAYER_CURRENT_STATE(player) != MM_PLAYER_STATE_PAUSED))
goto PENDING;
- ret = __mmplayer_gst_check_duration(player, position);
+ ret = __mmplayer_gst_check_position(player, position);
if (ret != MM_ERROR_NONE) {
- LOGE("failed to check duration 0x%X", ret);
+ LOGW("result of check position info 0x%X", ret);
return (ret == MM_ERROR_PLAYER_NO_OP) ? MM_ERROR_NONE : ret;
}
g_object_get(G_OBJECT(mainbin[MMPLAYER_M_DEMUXED_S_BUFFER].gst),
"curr-size-bytes", &curr_size_bytes, NULL);
LOGD("curr_size_bytes of multiqueue = %d", curr_size_bytes);
- buffered_total += curr_size_bytes;
+ buffered_total += (gint64)curr_size_bytes;
}
if (avg_byterate > 0)
goto ERROR;
}
- /* FIXME: required ?*/
/* create fakesink element for keeping the pipeline state PAUSED. if needed */
mainbin[MMPLAYER_M_SRC_FAKESINK].id = MMPLAYER_M_SRC_FAKESINK;
mainbin[MMPLAYER_M_SRC_FAKESINK].gst = gst_element_factory_make("fakesink", "state-holder");