X-Git-Url: http://review.tizen.org/git/?a=blobdiff_plain;f=src%2Fmm_player_gst.c;h=269a8d1b5594f99c12e9a0f8c4c42c6adb0b5e74;hb=88fc0772baa9c68adb75c3c22faeb11a27d9a621;hp=2de93e6e374b7898847314483c030ba65f9e4c07;hpb=fb45999c73fb6ce294096c59e91c24ff4720f4e7;p=platform%2Fcore%2Fmultimedia%2Flibmm-player.git diff --git a/src/mm_player_gst.c b/src/mm_player_gst.c index 2de93e6..269a8d1 100644 --- a/src/mm_player_gst.c +++ b/src/mm_player_gst.c @@ -27,12 +27,13 @@ ========================================================================================== */ #include #include -#include +#include #include "mm_player_gst.h" #include "mm_player_priv.h" #include "mm_player_attrs.h" #include "mm_player_utils.h" +#include "mm_player_tracks.h" /*=========================================================================================== | | @@ -41,55 +42,165 @@ ========================================================================================== */ /*--------------------------------------------------------------------------- -| GLOBAL CONSTANT DEFINITIONS: | +| LOCAL CONSTANT DEFINITIONS: | ---------------------------------------------------------------------------*/ +#define MMPLAYER_TAG_INDENT 3 -/*--------------------------------------------------------------------------- -| IMPORTED VARIABLE DECLARATIONS: | ----------------------------------------------------------------------------*/ +/*=========================================================================================== +| | +| FUNCTION DEFINITIONS | +| | +========================================================================================== */ +#ifdef __DEBUG__ +static void +print_tag(const GstTagList *list, const gchar *tag, gpointer unused) +{ + gint i, count; -/*--------------------------------------------------------------------------- -| IMPORTED FUNCTION DECLARATIONS: | ----------------------------------------------------------------------------*/ + count = gst_tag_list_get_tag_size(list, tag); -/*--------------------------------------------------------------------------- -| LOCAL #defines: | ----------------------------------------------------------------------------*/ + LOGD("count = %d", count); -/*--------------------------------------------------------------------------- -| LOCAL CONSTANT DEFINITIONS: | ----------------------------------------------------------------------------*/ + for (i = 0; i < count; i++) { + gchar *str; -/*--------------------------------------------------------------------------- -| LOCAL DATA TYPE DEFINITIONS: | ----------------------------------------------------------------------------*/ + if (gst_tag_get_type(tag) == G_TYPE_STRING) { + if (!gst_tag_list_get_string_index(list, tag, i, &str)) + g_assert_not_reached(); + } else { + str = g_strdup_value_contents(gst_tag_list_get_value_index(list, tag, i)); + } -/*--------------------------------------------------------------------------- -| GLOBAL VARIABLE DEFINITIONS: | ----------------------------------------------------------------------------*/ + if (i == 0) + g_print(" %15s: %s", gst_tag_get_nick(tag), str); + else + g_print(" : %s", str); -/*--------------------------------------------------------------------------- -| LOCAL VARIABLE DEFINITIONS: | ----------------------------------------------------------------------------*/ + g_free(str); + } +} +#endif -/*--------------------------------------------------------------------------- -| LOCAL FUNCTION PROTOTYPES: | ----------------------------------------------------------------------------*/ 
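/* Illustrative sketch, not part of this patch: print_tag() above matches the
 * GstTagForeachFunc signature, so under __DEBUG__ it would typically be driven
 * from a GST_MESSAGE_TAG handler roughly as below. Only print_tag() and the
 * GStreamer calls come from this file; the helper name is hypothetical. */
#if 0	/* example only, never built */
static void
__example_dump_tags(GstMessage *msg)
{
	GstTagList *tags = NULL;

	gst_message_parse_tag(msg, &tags);           /* caller owns the returned tag list */
	gst_tag_list_foreach(tags, print_tag, NULL); /* calls print_tag once per tag */
	gst_tag_list_unref(tags);
}
#endif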
+static gboolean +__mmplayer_check_error_posted_from_activated_track(mmplayer_t *player, gchar *src_element_name) +{ + /* check whether the error is posted from not-activated track or not */ + int msg_src_pos = 0; + gint active_index = 0; -/*=========================================================================================== -| | -| FUNCTION DEFINITIONS | -| | -========================================================================================== */ + MMPLAYER_RETURN_VAL_IF_FAIL(player->pipeline->mainbin[MMPLAYER_M_A_INPUT_SELECTOR].gst, TRUE); + + active_index = player->track[MM_PLAYER_TRACK_TYPE_AUDIO].active_track_index; + LOGD("current active pad index -%d", active_index); + + if (src_element_name) { + int idx = 0; + + if (player->audio_decoders) { + GList *adec = player->audio_decoders; + for (; adec ; adec = g_list_next(adec)) { + gchar *name = adec->data; + + LOGD("found audio decoder name = %s", name); + if (g_strrstr(name, src_element_name)) { + msg_src_pos = idx; + break; + } + idx++; + } + } + LOGD("active pad = %d, error src index = %d", active_index, msg_src_pos); + } + + if (active_index != msg_src_pos) { + LOGD("skip error because error is posted from no activated track"); + return FALSE; + } + + return TRUE; +} + +static int +__mmplayer_gst_transform_error_decode(mmplayer_t *player, const char *klass) +{ + /* Demuxer can't parse one track because it's corrupted. + * So, the decoder for it is not linked. + * But, it has one playable track. + */ + if (g_strrstr(klass, "Demux")) { + if (player->can_support_codec == FOUND_PLUGIN_VIDEO) { + return MM_ERROR_PLAYER_AUDIO_CODEC_NOT_FOUND; + } else if (player->can_support_codec == FOUND_PLUGIN_AUDIO) { + return MM_ERROR_PLAYER_VIDEO_CODEC_NOT_FOUND; + } else { + if (player->pipeline->audiobin) { // PCM + return MM_ERROR_PLAYER_VIDEO_CODEC_NOT_FOUND; + } else { + LOGD("not found any available codec. Player should be destroyed."); + return MM_ERROR_PLAYER_CODEC_NOT_FOUND; + } + } + } + + return MM_ERROR_PLAYER_INVALID_STREAM; +} + +static int +__mmplayer_gst_transform_error_type(mmplayer_t *player, GstElement *src_element) +{ + if (src_element == player->pipeline->mainbin[MMPLAYER_M_SUBPARSE].gst) { + LOGE("Not supported subtitle."); + return MM_ERROR_PLAYER_NOT_SUPPORTED_SUBTITLE; + } + return MM_ERROR_PLAYER_NOT_SUPPORTED_FORMAT; +} + +static int +__mmplayer_gst_transform_error_failed(mmplayer_t *player, const char *klass, GError *error) +{ + /* Decoder Custom Message */ + if (!strstr(error->message, "ongoing")) + return MM_ERROR_PLAYER_NOT_SUPPORTED_FORMAT; + + if (strncasecmp(klass, "audio", 5)) { + if ((player->can_support_codec & FOUND_PLUGIN_VIDEO)) { + LOGD("Video can keep playing."); + return MM_ERROR_PLAYER_AUDIO_CODEC_NOT_FOUND; + } + } else if (strncasecmp(klass, "video", 5)) { + if ((player->can_support_codec & FOUND_PLUGIN_AUDIO)) { + LOGD("Audio can keep playing."); + return MM_ERROR_PLAYER_VIDEO_CODEC_NOT_FOUND; + } + } + + LOGD("not found any available codec. 
Player should be destroyed."); + return MM_ERROR_PLAYER_CODEC_NOT_FOUND; +} + +static int +__mmplayer_gst_transform_error_decrypt(mmplayer_t *player, GError *error) +{ + if (strstr(error->message, "rights expired")) + return MM_ERROR_PLAYER_DRM_EXPIRED; + else if (strstr(error->message, "no rights")) + return MM_ERROR_PLAYER_DRM_NO_LICENSE; + else if (strstr(error->message, "has future rights")) + return MM_ERROR_PLAYER_DRM_FUTURE_USE; + else if (strstr(error->message, "opl violation")) + return MM_ERROR_PLAYER_DRM_OUTPUT_PROTECTION; + + return MM_ERROR_PLAYER_DRM_NOT_AUTHORIZED; +} /* NOTE : decide gstreamer state whether there is some playable track or not. */ static gint -__mmplayer_gst_transform_gsterror(mm_player_t* player, GstMessage * message, GError* error) +__mmplayer_gst_transform_gsterror(mmplayer_t *player, GstMessage *message, GError *error) { gchar *src_element_name = NULL; GstElement *src_element = NULL; GstElementFactory *factory = NULL; - const gchar* klass = NULL; + const gchar *klass = NULL; MMPLAYER_FENTER(); @@ -101,129 +212,38 @@ __mmplayer_gst_transform_gsterror(mm_player_t* player, GstMessage * message, GEr player->pipeline->mainbin, MM_ERROR_PLAYER_NOT_INITIALIZED); src_element = GST_ELEMENT_CAST(message->src); - if (!src_element) - goto INTERNAL_ERROR; - src_element_name = GST_ELEMENT_NAME(src_element); if (!src_element_name) - goto INTERNAL_ERROR; + return MM_ERROR_PLAYER_INTERNAL; factory = gst_element_get_factory(src_element); if (!factory) - goto INTERNAL_ERROR; + return MM_ERROR_PLAYER_INTERNAL; klass = gst_element_factory_get_metadata(factory, GST_ELEMENT_METADATA_KLASS); if (!klass) - goto INTERNAL_ERROR; + return MM_ERROR_PLAYER_INTERNAL; - LOGD("error code=%d, msg=%s, src element=%s, class=%s\n", + LOGD("error code=%d, msg=%s, src element=%s, class=%s", error->code, error->message, src_element_name, klass); - /* check whether the error is posted from not-activated track or not */ - if (player->pipeline->mainbin[MMPLAYER_M_A_INPUT_SELECTOR].gst) { - int msg_src_pos = 0; - gint active_pad_index = player->selector[MM_PLAYER_TRACK_TYPE_AUDIO].active_pad_index; - LOGD("current active pad index -%d", active_pad_index); - - if (src_element_name) { - int idx = 0; - - if (player->audio_decoders) { - GList *adec = player->audio_decoders; - for (; adec ; adec = g_list_next(adec)) { - gchar *name = adec->data; - - LOGD("found audio decoder name = %s", name); - if (g_strrstr(name, src_element_name)) { - msg_src_pos = idx; - break; - } - idx++; - } - } - LOGD("active pad = %d, error src index = %d", active_pad_index, msg_src_pos); - } - - if (active_pad_index != msg_src_pos) { - LOGD("skip error because error is posted from no activated track"); - return MM_ERROR_NONE; - } - } + if (MMPLAYER_USE_DECODEBIN(player) && + !__mmplayer_check_error_posted_from_activated_track(player, src_element_name)) + return MM_ERROR_NONE; switch (error->code) { case GST_STREAM_ERROR_DECODE: - { - /* Demuxer can't parse one track because it's corrupted. - * So, the decoder for it is not linked. - * But, it has one playable track. 
- */ - if (g_strrstr(klass, "Demux")) { - if (player->can_support_codec == FOUND_PLUGIN_VIDEO) { - return MM_ERROR_PLAYER_AUDIO_CODEC_NOT_FOUND; - } else if (player->can_support_codec == FOUND_PLUGIN_AUDIO) { - return MM_ERROR_PLAYER_VIDEO_CODEC_NOT_FOUND; - } else { - if (player->pipeline->audiobin) // PCM - return MM_ERROR_PLAYER_VIDEO_CODEC_NOT_FOUND; - else - goto CODEC_NOT_FOUND; - } - } - return MM_ERROR_PLAYER_INVALID_STREAM; - } - break; - + return __mmplayer_gst_transform_error_decode(player, klass); case GST_STREAM_ERROR_CODEC_NOT_FOUND: case GST_STREAM_ERROR_TYPE_NOT_FOUND: case GST_STREAM_ERROR_WRONG_TYPE: - { - if (src_element == player->pipeline->mainbin[MMPLAYER_M_SUBPARSE].gst) { - LOGE("Not supported subtitle."); - return MM_ERROR_PLAYER_NOT_SUPPORTED_SUBTITLE; - } - return MM_ERROR_PLAYER_NOT_SUPPORTED_FORMAT; - } - + return __mmplayer_gst_transform_error_type(player, src_element); case GST_STREAM_ERROR_FAILED: - { - /* Decoder Custom Message */ - if (strstr(error->message, "ongoing")) { - if (strncasecmp(klass, "audio", 5)) { - if ((player->can_support_codec & FOUND_PLUGIN_VIDEO)) { - LOGD("Video can keep playing.\n"); - return MM_ERROR_PLAYER_AUDIO_CODEC_NOT_FOUND; - } else - goto CODEC_NOT_FOUND; - - } else if (strncasecmp(klass, "video", 5)) { - if ((player->can_support_codec & FOUND_PLUGIN_AUDIO)) { - LOGD("Audio can keep playing.\n"); - return MM_ERROR_PLAYER_VIDEO_CODEC_NOT_FOUND; - } else - goto CODEC_NOT_FOUND; - } - } - return MM_ERROR_PLAYER_NOT_SUPPORTED_FORMAT; - } - break; - + return __mmplayer_gst_transform_error_failed(player, klass, error); case GST_STREAM_ERROR_DECRYPT: case GST_STREAM_ERROR_DECRYPT_NOKEY: - { - LOGE("decryption error, [%s] failed, reason : [%s]\n", src_element_name, error->message); - - if (strstr(error->message, "rights expired")) - return MM_ERROR_PLAYER_DRM_EXPIRED; - else if (strstr(error->message, "no rights")) - return MM_ERROR_PLAYER_DRM_NO_LICENSE; - else if (strstr(error->message, "has future rights")) - return MM_ERROR_PLAYER_DRM_FUTURE_USE; - else if (strstr(error->message, "opl violation")) - return MM_ERROR_PLAYER_DRM_OUTPUT_PROTECTION; - return MM_ERROR_PLAYER_DRM_NOT_AUTHORIZED; - } - break; - + LOGE("decryption error, [%s] failed, reason : [%s]", src_element_name, error->message); + return __mmplayer_gst_transform_error_decrypt(player, error); default: break; } @@ -231,17 +251,10 @@ __mmplayer_gst_transform_gsterror(mm_player_t* player, GstMessage * message, GEr MMPLAYER_FLEAVE(); return MM_ERROR_PLAYER_INVALID_STREAM; - -INTERNAL_ERROR: - return MM_ERROR_PLAYER_INTERNAL; - -CODEC_NOT_FOUND: - LOGD("not found any available codec. 
Player should be destroyed.\n"); - return MM_ERROR_PLAYER_CODEC_NOT_FOUND; } gint -__mmplayer_gst_handle_core_error(mm_player_t* player, int code) +__mmplayer_gst_handle_core_error(mmplayer_t *player, int code) { gint trans_err = MM_ERROR_NONE; @@ -276,7 +289,7 @@ __mmplayer_gst_handle_core_error(mm_player_t* player, int code) } gint -__mmplayer_gst_handle_library_error(mm_player_t* player, int code) +__mmplayer_gst_handle_library_error(mmplayer_t *player, int code) { gint trans_err = MM_ERROR_NONE; @@ -302,7 +315,7 @@ __mmplayer_gst_handle_library_error(mm_player_t* player, int code) } gint -__mmplayer_gst_handle_resource_error(mm_player_t* player, int code, GstMessage * message) +__mmplayer_gst_handle_resource_error(mmplayer_t *player, int code, GstMessage *message) { gint trans_err = MM_ERROR_NONE; @@ -328,7 +341,7 @@ __mmplayer_gst_handle_resource_error(mm_player_t* player, int code, GstMessage * break; } else if (message != NULL && message->src != NULL) { storage_state_e storage_state = STORAGE_STATE_UNMOUNTABLE; - MMPlayerPathType path_type = MMPLAYER_PATH_MAX; + mmplayer_path_type_e path_type = MMPLAYER_PATH_MAX; if (message->src == (GstObject *)player->pipeline->mainbin[MMPLAYER_M_SRC].gst) path_type = MMPLAYER_PATH_VOD; @@ -363,7 +376,7 @@ __mmplayer_gst_handle_resource_error(mm_player_t* player, int code, GstMessage * } gint -__mmplayer_gst_handle_stream_error(mm_player_t* player, GError* error, GstMessage * message) +__mmplayer_gst_handle_stream_error(mmplayer_t *player, GError *error, GstMessage *message) { gint trans_err = MM_ERROR_NONE; @@ -401,7 +414,7 @@ __mmplayer_gst_handle_stream_error(mm_player_t* player, GError* error, GstMessag } gboolean -__mmplayer_handle_gst_error(mm_player_t* player, GstMessage * message, GError* error) +__mmplayer_handle_gst_error(mmplayer_t *player, GstMessage *message, GError *error) { MMMessageParamType msg_param; gchar *msg_src_element; @@ -424,7 +437,7 @@ __mmplayer_handle_gst_error(mm_player_t* player, GstMessage * message, GError* e } else if (error->domain == GST_STREAM_ERROR) { msg_param.code = __mmplayer_gst_handle_stream_error(player, error, message); } else { - LOGW("This error domain is not defined.\n"); + LOGW("This error domain is not defined."); /* we treat system error as an internal error */ msg_param.code = MM_ERROR_PLAYER_INVALID_STREAM; @@ -433,9 +446,9 @@ __mmplayer_handle_gst_error(mm_player_t* player, GstMessage * message, GError* e if (message->src) { msg_src_element = GST_ELEMENT_NAME(GST_ELEMENT_CAST(message->src)); - msg_param.data = (void *) error->message; + msg_param.data = (void *)error->message; - LOGE("-Msg src : [%s] Domain : [%s] Error : [%s] Code : [%d] is tranlated to error code : [0x%x]\n", + LOGE("-Msg src : [%s] Domain : [%s] Error : [%s] Code : [%d] is tranlated to error code : [0x%x]", msg_src_element, g_quark_to_string(error->domain), error->message, error->code, msg_param.code); } @@ -461,8 +474,9 @@ __mmplayer_handle_gst_error(mm_player_t* player, GstMessage * message, GError* e MMPLAYER_POST_MSG(player, MM_MESSAGE_ERROR, &msg_param); /* don't post more if one was sent already */ player->msg_posted = TRUE; - } else - LOGD("skip error post because it's sent already.\n"); + } else { + LOGD("skip error post because it's sent already."); + } MMPLAYER_FLEAVE(); @@ -470,7 +484,7 @@ __mmplayer_handle_gst_error(mm_player_t* player, GstMessage * message, GError* e } static gboolean -__mmplayer_handle_streaming_error(mm_player_t* player, GstMessage * message) +__mmplayer_handle_streaming_error(mmplayer_t 
*player, GstMessage *message) { LOGD("\n"); MMMessageParamType msg_param; @@ -635,13 +649,13 @@ __mmplayer_handle_streaming_error(mm_player_t* player, GstMessage * message) error_string = g_strdup(gst_structure_get_string(s, "error_string")); if (error_string) - msg_param.data = (void *) error_string; + msg_param.data = (void *)error_string; if (message->src) { msg_src_element = GST_ELEMENT_NAME(GST_ELEMENT_CAST(message->src)); - LOGE("-Msg src : [%s] Code : [%x] Error : [%s] \n", - msg_src_element, msg_param.code, (char*)msg_param.data); + LOGE("-Msg src : [%s] Code : [0x%x] Error : [%s]", + msg_src_element, msg_param.code, (char *)msg_param.data); } /* post error to application */ @@ -650,19 +664,20 @@ __mmplayer_handle_streaming_error(mm_player_t* player, GstMessage * message) /* don't post more if one was sent already */ player->msg_posted = TRUE; - } else - LOGD("skip error post because it's sent already.\n"); + } else { + LOGD("skip error post because it's sent already."); + } gst_structure_free(s); - MMPLAYER_FLEAVE(); - g_free(error_string); + MMPLAYER_FREEIF(error_string); + MMPLAYER_FLEAVE(); return TRUE; } static void -__mmplayer_get_metadata_360_from_tags(GstTagList *tags, mm_player_spherical_metadata_t *metadata) +__mmplayer_get_metadata_360_from_tags(GstTagList *tags, mmplayer_spherical_metadata_t *metadata) { gst_tag_list_get_int(tags, "is_spherical", &metadata->is_spherical); gst_tag_list_get_int(tags, "is_stitched", &metadata->is_stitched); @@ -694,140 +709,130 @@ __mmplayer_get_metadata_360_from_tags(GstTagList *tags, mm_player_spherical_meta } static gboolean -__mmplayer_gst_extract_tag_from_msg(mm_player_t* player, GstMessage* msg) +__mmplayer_gst_extract_tag_from_msg(mmplayer_t *player, GstMessage *msg) { /* macro for better code readability */ -#define MMPLAYER_UPDATE_TAG_STRING(gsttag, attribute, playertag) \ -if (gst_tag_list_get_string(tag_list, gsttag, &string)) {\ - if (string != NULL) { \ - SECURE_LOGD("update tag string : %s\n", string); \ - if (strlen(string) > MM_MAX_STRING_LENGTH) { \ - char *new_string = malloc(MM_MAX_STRING_LENGTH); \ - strncpy(new_string, string, MM_MAX_STRING_LENGTH-1); \ - new_string[MM_MAX_STRING_LENGTH-1] = '\0'; \ - mm_attrs_set_string_by_name(attribute, playertag, new_string); \ - g_free(new_string); \ - new_string = NULL; \ - } else { \ - mm_attrs_set_string_by_name(attribute, playertag, string); \ +#define MMPLAYER_UPDATE_TAG_STRING(gsttag, player, playertag) \ + do { \ + if (gst_tag_list_get_string(tag_list, gsttag, &string)) {\ + if (string != NULL) { \ + SECURE_LOGD("update tag string : %s", string); \ + if (strlen(string) > MM_MAX_STRING_LENGTH) { \ + char *new_string = g_malloc(MM_MAX_STRING_LENGTH); \ + strncpy(new_string, string, MM_MAX_STRING_LENGTH - 1); \ + new_string[MM_MAX_STRING_LENGTH - 1] = '\0'; \ + mm_player_set_attribute((MMHandleType)player, NULL,\ + playertag, new_string, strlen(new_string), NULL); \ + MMPLAYER_FREEIF(new_string); \ + } else { \ + mm_player_set_attribute((MMHandleType)player, NULL,\ + playertag, string, strlen(string), NULL); \ + } \ + MMPLAYER_FREEIF(string); \ + } \ } \ - g_free(string); \ - string = NULL; \ - } \ -} - -#define MMPLAYER_UPDATE_TAG_IMAGE(gsttag, attribute, playertag) \ -do { \ - GstSample *sample = NULL;\ - if (gst_tag_list_get_sample_index(tag_list, gsttag, index, &sample)) {\ - GstMapInfo info = GST_MAP_INFO_INIT;\ - buffer = gst_sample_get_buffer(sample);\ - if (!gst_buffer_map(buffer, &info, GST_MAP_READ)) {\ - LOGD("failed to get image data from tag");\ + } while (0) + 
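/* Illustrative note, not part of this patch: the reworked tag-update macros are
 * wrapped in do { ... } while (0) so that each invocation expands to a single
 * statement. The generic macro below (hypothetical name) shows the idiom this
 * change relies on: without the wrapper, a bare if-block macro followed by ';'
 * misbehaves when used as the body of an if/else. */
#if 0	/* example only, never built */
#define EXAMPLE_LOG_NONZERO(val) \
	do { \
		if ((val) != 0) \
			LOGD("value = %d", (val)); \
	} while (0)

static void
__example_usage(int v)
{
	if (v > 0)
		EXAMPLE_LOG_NONZERO(v);	/* expands to one statement, trailing ';' is safe */
	else
		LOGD("non-positive value");	/* else still pairs with the outer if */
}
#endif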
+#define MMPLAYER_UPDATE_TAG_IMAGE(gsttag, player, playertag) \ + do { \ + GstSample *sample = NULL;\ + if (gst_tag_list_get_sample_index(tag_list, gsttag, index, &sample)) {\ + GstMapInfo info = GST_MAP_INFO_INIT;\ + buffer = gst_sample_get_buffer(sample);\ + if (!gst_buffer_map(buffer, &info, GST_MAP_READ)) {\ + LOGD("failed to get image data from tag");\ + gst_sample_unref(sample);\ + return FALSE;\ + } \ + SECURE_LOGD("update album cover data : %p, size : %zu", info.data, info.size);\ + MMPLAYER_FREEIF(player->album_art);\ + player->album_art = (gchar *)g_malloc(info.size);\ + if (player->album_art) {\ + memcpy(player->album_art, info.data, info.size);\ + mm_player_set_attribute((MMHandleType)player, NULL,\ + playertag, (void *)player->album_art, info.size, NULL); \ + if (MMPLAYER_IS_HTTP_LIVE_STREAMING(player)) {\ + msg_param.data = (void *)player->album_art;\ + msg_param.size = info.size;\ + MMPLAYER_POST_MSG(player, MM_MESSAGE_IMAGE_BUFFER, &msg_param);\ + SECURE_LOGD("post message image buffer data : %p, size : %zu", info.data, info.size);\ + } \ + } \ + gst_buffer_unmap(buffer, &info);\ gst_sample_unref(sample);\ - return FALSE;\ + } \ + } while (0) + +#define MMPLAYER_UPDATE_TAG_UINT(gsttag, player, playertag) \ + do { \ + if (gst_tag_list_get_uint(tag_list, gsttag, &v_uint)) { \ + if (v_uint) { \ + int i = 0; \ + mmplayer_track_type_e track_type = MM_PLAYER_TRACK_TYPE_AUDIO; \ + if (strstr(GST_OBJECT_NAME(msg->src), "audio")) \ + track_type = MM_PLAYER_TRACK_TYPE_AUDIO; \ + else if (strstr(GST_OBJECT_NAME(msg->src), "video")) \ + track_type = MM_PLAYER_TRACK_TYPE_VIDEO; \ + else \ + track_type = MM_PLAYER_TRACK_TYPE_TEXT; \ + if (!strncmp(gsttag, GST_TAG_BITRATE, strlen(GST_TAG_BITRATE))) { \ + if (track_type == MM_PLAYER_TRACK_TYPE_AUDIO) \ + mm_player_set_attribute((MMHandleType)player, NULL,\ + "content_audio_bitrate", v_uint, NULL); \ + player->bitrate[track_type] = v_uint; \ + player->total_bitrate = 0; \ + for (i = 0; i < MM_PLAYER_STREAM_COUNT_MAX; i++) \ + player->total_bitrate += player->bitrate[i]; \ + mm_player_set_attribute((MMHandleType)player, NULL,\ + playertag, player->total_bitrate, NULL); \ + SECURE_LOGD("update bitrate %d[bps] of stream #%d.", v_uint, (int)track_type); \ + } else if (!strncmp(gsttag, GST_TAG_MAXIMUM_BITRATE, strlen(GST_TAG_MAXIMUM_BITRATE))) { \ + player->maximum_bitrate[track_type] = v_uint; \ + player->total_maximum_bitrate = 0; \ + for (i = 0; i < MM_PLAYER_STREAM_COUNT_MAX; i++) \ + player->total_maximum_bitrate += player->maximum_bitrate[i]; \ + mm_player_set_attribute((MMHandleType)player, NULL,\ + playertag, player->total_maximum_bitrate, NULL); \ + SECURE_LOGD("update maximum bitrate %d[bps] of stream #%d", v_uint, (int)track_type);\ + } else { \ + mm_player_set_attribute((MMHandleType)player, NULL, playertag, v_uint, NULL); \ + } \ + v_uint = 0;\ + } \ } \ - SECURE_LOGD("update album cover data : %p, size : %d\n", info.data, info.size);\ - MMPLAYER_FREEIF(player->album_art);\ - player->album_art = (gchar *)g_malloc(info.size);\ - if (player->album_art) {\ - memcpy(player->album_art, info.data, info.size);\ - mm_attrs_set_data_by_name(attribute, playertag, (void *)player->album_art, info.size);\ - if (MMPLAYER_IS_HTTP_LIVE_STREAMING(player)) {\ - msg_param.data = (void *)player->album_art;\ - msg_param.size = info.size;\ - MMPLAYER_POST_MSG(player, MM_MESSAGE_IMAGE_BUFFER, &msg_param);\ - SECURE_LOGD("post message image buffer data : %p, size : %d\n", info.data, info.size);\ + } while (0) + +#define MMPLAYER_UPDATE_TAG_DATE(gsttag, 
player, playertag) \ + do { \ + if (gst_tag_list_get_date(tag_list, gsttag, &date)) {\ + if (date != NULL) {\ + string = g_strdup_printf("%d", g_date_get_year(date));\ + mm_player_set_attribute((MMHandleType)player, NULL,\ + playertag, string, strlen(string), NULL); \ + SECURE_LOGD("metainfo year : %s", string);\ + MMPLAYER_FREEIF(string);\ + g_date_free(date);\ } \ } \ - gst_buffer_unmap(buffer, &info);\ - gst_sample_unref(sample);\ - } \ -} while (0) - -#define MMPLAYER_UPDATE_TAG_UINT(gsttag, attribute, playertag) \ -do { \ - if (gst_tag_list_get_uint(tag_list, gsttag, &v_uint)) { \ - if (v_uint) { \ - int i = 0; \ - gchar *tag_list_str = NULL; \ - MMPlayerTrackType track_type = MM_PLAYER_TRACK_TYPE_AUDIO; \ - if (strstr(GST_OBJECT_NAME(msg->src), "audio")) \ - track_type = MM_PLAYER_TRACK_TYPE_AUDIO; \ - else if (strstr(GST_OBJECT_NAME(msg->src), "video")) \ - track_type = MM_PLAYER_TRACK_TYPE_VIDEO; \ - else \ - track_type = MM_PLAYER_TRACK_TYPE_TEXT; \ - if (!strncmp(gsttag, GST_TAG_BITRATE, strlen(GST_TAG_BITRATE))) { \ - if (track_type == MM_PLAYER_TRACK_TYPE_AUDIO) \ - mm_attrs_set_int_by_name(attribute, "content_audio_bitrate", v_uint); \ - player->bitrate[track_type] = v_uint; \ - player->total_bitrate = 0; \ - for (i = 0; i < MM_PLAYER_STREAM_COUNT_MAX; i++) \ - player->total_bitrate += player->bitrate[i]; \ - mm_attrs_set_int_by_name(attribute, playertag, player->total_bitrate); \ - SECURE_LOGD("update bitrate %d[bps] of stream #%d.\n", v_uint, (int)track_type); \ - } else if (!strncmp(gsttag, GST_TAG_MAXIMUM_BITRATE, strlen(GST_TAG_MAXIMUM_BITRATE))) { \ - player->maximum_bitrate[track_type] = v_uint; \ - player->total_maximum_bitrate = 0; \ - for (i = 0; i < MM_PLAYER_STREAM_COUNT_MAX; i++) \ - player->total_maximum_bitrate += player->maximum_bitrate[i]; \ - mm_attrs_set_int_by_name(attribute, playertag, player->total_maximum_bitrate);\ - SECURE_LOGD("update maximum bitrate %d[bps] of stream #%d\n", v_uint, (int)track_type);\ - } else { \ - mm_attrs_set_int_by_name(attribute, playertag, v_uint); \ + } while (0) + +#define MMPLAYER_UPDATE_TAG_DATE_TIME(gsttag, player, playertag) \ + do { \ + if (gst_tag_list_get_date_time(tag_list, gsttag, &datetime)) {\ + if (datetime != NULL) {\ + string = g_strdup_printf("%d", gst_date_time_get_year(datetime));\ + mm_player_set_attribute((MMHandleType)player, NULL,\ + playertag, string, strlen(string), NULL); \ + SECURE_LOGD("metainfo year : %s", string);\ + MMPLAYER_FREEIF(string);\ + gst_date_time_unref(datetime);\ } \ - v_uint = 0;\ - g_free(tag_list_str); \ } \ - } \ -} while (0) - -#define MMPLAYER_UPDATE_TAG_DATE(gsttag, attribute, playertag) \ -if (gst_tag_list_get_date(tag_list, gsttag, &date)) {\ - if (date != NULL) {\ - string = g_strdup_printf("%d", g_date_get_year(date));\ - mm_attrs_set_string_by_name(attribute, playertag, string);\ - SECURE_LOGD("metainfo year : %s\n", string);\ - MMPLAYER_FREEIF(string);\ - g_date_free(date);\ - } \ -} - -#define MMPLAYER_UPDATE_TAG_DATE_TIME(gsttag, attribute, playertag) \ -if (gst_tag_list_get_date_time(tag_list, gsttag, &datetime)) {\ - if (datetime != NULL) {\ - string = g_strdup_printf("%d", gst_date_time_get_year(datetime));\ - mm_attrs_set_string_by_name(attribute, playertag, string);\ - SECURE_LOGD("metainfo year : %s\n", string);\ - MMPLAYER_FREEIF(string);\ - gst_date_time_unref(datetime);\ - } \ -} - -#define MMPLAYER_UPDATE_TAG_UINT64(gsttag, attribute, playertag) \ -if (gst_tag_list_get_uint64(tag_list, gsttag, &v_uint64)) {\ - if (v_uint64) {\ - /* FIXIT : don't know how to 
store date */\ - g_assert(1);\ - v_uint64 = 0;\ - } \ -} - -#define MMPLAYER_UPDATE_TAG_DOUBLE(gsttag, attribute, playertag) \ -if (gst_tag_list_get_double(tag_list, gsttag, &v_double)) {\ - if (v_double) {\ - /* FIXIT : don't know how to store date */\ - g_assert(1);\ - v_double = 0;\ - } \ -} + } while (0) /* function start */ - GstTagList* tag_list = NULL; - - MMHandleType attrs = 0; + GstTagList *tag_list = NULL; char *string = NULL; guint v_uint = 0; @@ -844,69 +849,34 @@ if (gst_tag_list_get_double(tag_list, gsttag, &v_double)) {\ MMPLAYER_RETURN_VAL_IF_FAIL(player && msg, FALSE); - attrs = MMPLAYER_GET_ATTRS(player); - - MMPLAYER_RETURN_VAL_IF_FAIL(attrs, FALSE); - /* get tag list from gst message */ gst_message_parse_tag(msg, &tag_list); /* store tags to player attributes */ - MMPLAYER_UPDATE_TAG_STRING(GST_TAG_TITLE, attrs, "tag_title"); - /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_TITLE_SORTNAME, ?, ?); */ - MMPLAYER_UPDATE_TAG_STRING(GST_TAG_ARTIST, attrs, "tag_artist"); - /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_ARTIST_SORTNAME, ?, ?); */ - MMPLAYER_UPDATE_TAG_STRING(GST_TAG_ALBUM, attrs, "tag_album"); - /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_ALBUM_SORTNAME, ?, ?); */ - MMPLAYER_UPDATE_TAG_STRING(GST_TAG_COMPOSER, attrs, "tag_author"); - MMPLAYER_UPDATE_TAG_DATE(GST_TAG_DATE, attrs, "tag_date"); - MMPLAYER_UPDATE_TAG_DATE_TIME(GST_TAG_DATE_TIME, attrs, "tag_date"); - MMPLAYER_UPDATE_TAG_STRING(GST_TAG_GENRE, attrs, "tag_genre"); - /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_COMMENT, ?, ?); */ - /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_EXTENDED_COMMENT, ?, ?); */ - MMPLAYER_UPDATE_TAG_UINT(GST_TAG_TRACK_NUMBER, attrs, "tag_track_num"); - /* MMPLAYER_UPDATE_TAG_UINT(GST_TAG_TRACK_COUNT, ?, ?); */ - /* MMPLAYER_UPDATE_TAG_UINT(GST_TAG_ALBUM_VOLUME_NUMBER, ?, ?); */ - /* MMPLAYER_UPDATE_TAG_UINT(GST_TAG_ALBUM_VOLUME_COUNT, ?, ?); */ - /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_LOCATION, ?, ?); */ - MMPLAYER_UPDATE_TAG_STRING(GST_TAG_DESCRIPTION, attrs, "tag_description"); - /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_VERSION, ?, ?); */ - /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_ISRC, ?, ?); */ - /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_ORGANIZATION, ?, ?); */ - MMPLAYER_UPDATE_TAG_STRING(GST_TAG_COPYRIGHT, attrs, "tag_copyright"); - /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_COPYRIGHT_URI, ?, ?); */ - /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_CONTACT, ?, ?); */ - /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_LICENSE, ?, ?); */ - /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_LICENSE_URI, ?, ?); */ - /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_PERFORMER, ?, ?); */ - /* MMPLAYER_UPDATE_TAG_UINT64(GST_TAG_DURATION, ?, ?); */ - /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_CODEC, ?, ?); */ - MMPLAYER_UPDATE_TAG_STRING(GST_TAG_VIDEO_CODEC, attrs, "content_video_codec"); - MMPLAYER_UPDATE_TAG_STRING(GST_TAG_AUDIO_CODEC, attrs, "content_audio_codec"); - MMPLAYER_UPDATE_TAG_UINT(GST_TAG_BITRATE, attrs, "content_bitrate"); - MMPLAYER_UPDATE_TAG_UINT(GST_TAG_MAXIMUM_BITRATE, attrs, "content_max_bitrate"); + MMPLAYER_UPDATE_TAG_STRING(GST_TAG_TITLE, player, "tag_title"); + MMPLAYER_UPDATE_TAG_STRING(GST_TAG_ARTIST, player, "tag_artist"); + MMPLAYER_UPDATE_TAG_STRING(GST_TAG_ALBUM, player, "tag_album"); + MMPLAYER_UPDATE_TAG_STRING(GST_TAG_COMPOSER, player, "tag_author"); + MMPLAYER_UPDATE_TAG_DATE(GST_TAG_DATE, player, "tag_date"); + MMPLAYER_UPDATE_TAG_DATE_TIME(GST_TAG_DATE_TIME, player, "tag_date"); + MMPLAYER_UPDATE_TAG_STRING(GST_TAG_GENRE, player, "tag_genre"); + MMPLAYER_UPDATE_TAG_UINT(GST_TAG_TRACK_NUMBER, player, "tag_track_num"); + 
MMPLAYER_UPDATE_TAG_STRING(GST_TAG_DESCRIPTION, player, "tag_description"); + MMPLAYER_UPDATE_TAG_STRING(GST_TAG_COPYRIGHT, player, "tag_copyright"); + MMPLAYER_UPDATE_TAG_STRING(GST_TAG_VIDEO_CODEC, player, "content_video_codec"); + MMPLAYER_UPDATE_TAG_STRING(GST_TAG_AUDIO_CODEC, player, "content_audio_codec"); + MMPLAYER_UPDATE_TAG_UINT(GST_TAG_BITRATE, player, "content_bitrate"); + MMPLAYER_UPDATE_TAG_UINT(GST_TAG_MAXIMUM_BITRATE, player, "content_max_bitrate"); MMPLAYER_UPDATE_TAG_LOCK(player); - MMPLAYER_UPDATE_TAG_IMAGE(GST_TAG_IMAGE, attrs, "tag_album_cover"); + MMPLAYER_UPDATE_TAG_IMAGE(GST_TAG_IMAGE, player, "tag_album_cover"); MMPLAYER_UPDATE_TAG_UNLOCK(player); - /* MMPLAYER_UPDATE_TAG_UINT(GST_TAG_NOMINAL_BITRATE, ?, ?); */ - /* MMPLAYER_UPDATE_TAG_UINT(GST_TAG_MINIMUM_BITRATE, ?, ?); */ - /* MMPLAYER_UPDATE_TAG_UINT(GST_TAG_SERIAL, ?, ?); */ - /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_ENCODER, ?, ?); */ - /* MMPLAYER_UPDATE_TAG_UINT(GST_TAG_ENCODER_VERSION, ?, ?); */ - /* MMPLAYER_UPDATE_TAG_DOUBLE(GST_TAG_TRACK_GAIN, ?, ?); */ - /* MMPLAYER_UPDATE_TAG_DOUBLE(GST_TAG_TRACK_PEAK, ?, ?); */ - /* MMPLAYER_UPDATE_TAG_DOUBLE(GST_TAG_ALBUM_GAIN, ?, ?); */ - /* MMPLAYER_UPDATE_TAG_DOUBLE(GST_TAG_ALBUM_PEAK, ?, ?); */ - /* MMPLAYER_UPDATE_TAG_DOUBLE(GST_TAG_REFERENCE_LEVEL, ?, ?); */ - /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_LANGUAGE_CODE, ?, ?); */ - /* MMPLAYER_UPDATE_TAG_DOUBLE(GST_TAG_BEATS_PER_MINUTE, ?, ?); */ - MMPLAYER_UPDATE_TAG_STRING(GST_TAG_IMAGE_ORIENTATION, attrs, "content_video_orientation"); + MMPLAYER_UPDATE_TAG_STRING(GST_TAG_IMAGE_ORIENTATION, player, "content_video_orientation"); if (strstr(GST_OBJECT_NAME(msg->src), "demux")) { if (player->video360_metadata.is_spherical == -1) { __mmplayer_get_metadata_360_from_tags(tag_list, &player->video360_metadata); - mm_attrs_set_int_by_name(attrs, "content_video_is_spherical", - player->video360_metadata.is_spherical); + mm_player_set_attribute((MMHandleType)player, NULL, + "content_video_is_spherical", player->video360_metadata.is_spherical, NULL); if (player->video360_metadata.is_spherical == 1) { LOGD("This is spherical content for 360 playback."); player->is_content_spherical = TRUE; @@ -919,7 +889,7 @@ if (gst_tag_list_get_double(tag_list, gsttag, &v_double)) {\ if (!strcmp(player->video360_metadata.projection_type_string, "equirectangular")) { player->video360_metadata.projection_type = VIDEO360_PROJECTION_TYPE_EQUIRECTANGULAR; } else { - LOGE("Projection %s: code not implemented.\n", player->video360_metadata.projection_type_string); + LOGE("Projection %s: code not implemented.", player->video360_metadata.projection_type_string); player->is_content_spherical = player->is_video360_enabled = FALSE; } } @@ -932,24 +902,21 @@ if (gst_tag_list_get_double(tag_list, gsttag, &v_double)) {\ } else if (!strcmp(player->video360_metadata.stereo_mode_string, "top-bottom")) { player->video360_metadata.stereo_mode = VIDEO360_MODE_STEREOSCOPIC_TOP_BOTTOM; } else { - LOGE("Stereo mode %s: code not implemented.\n", player->video360_metadata.stereo_mode_string); + LOGE("Stereo mode %s: code not implemented.", player->video360_metadata.stereo_mode_string); player->is_content_spherical = player->is_video360_enabled = FALSE; } } } } - if (mmf_attrs_commit(attrs)) - LOGE("failed to commit.\n"); - - gst_tag_list_free(tag_list); + gst_tag_list_unref(tag_list); return TRUE; } /* if retval is FALSE, it will be dropped for perfomance. 
*/ static gboolean -__mmplayer_gst_check_useful_message(mm_player_t *player, GstMessage * message) +__mmplayer_gst_check_useful_message(mmplayer_t *player, GstMessage *message) { gboolean retval = FALSE; @@ -968,6 +935,7 @@ __mmplayer_gst_check_useful_message(mm_player_t *player, GstMessage * message) case GST_MESSAGE_ELEMENT: case GST_MESSAGE_DURATION_CHANGED: case GST_MESSAGE_ASYNC_START: + case GST_MESSAGE_STREAM_COLLECTION: retval = TRUE; break; case GST_MESSAGE_ASYNC_DONE: @@ -985,7 +953,7 @@ __mmplayer_gst_check_useful_message(mm_player_t *player, GstMessage * message) retval = TRUE; gst_message_parse_buffering(message, &buffer_percent); if (buffer_percent != MAX_BUFFER_PERCENT) { - LOGD("[%s] buffering msg %d%%!!\n", GST_OBJECT_NAME(GST_MESSAGE_SRC(message)), buffer_percent); + LOGD("[%s] buffering msg %d%%!!", GST_OBJECT_NAME(GST_MESSAGE_SRC(message)), buffer_percent); break; } @@ -995,7 +963,7 @@ __mmplayer_gst_check_useful_message(mm_player_t *player, GstMessage * message) } if ((player->streamer) && (player->streamer->buffering_state & MM_PLAYER_BUFFERING_IN_PROGRESS)) { - LOGD("[%s] Buffering DONE is detected !!\n", GST_OBJECT_NAME(GST_MESSAGE_SRC(message))); + LOGD("[%s] Buffering DONE is detected !", GST_OBJECT_NAME(GST_MESSAGE_SRC(message))); player->streamer->buffering_state |= MM_PLAYER_BUFFERING_COMPLETE; } @@ -1003,6 +971,36 @@ __mmplayer_gst_check_useful_message(mm_player_t *player, GstMessage * message) break; } + case GST_MESSAGE_STREAMS_SELECTED: + { + if (MMPLAYER_USE_DECODEBIN(player)) + break; /* drop msg */ + + if ((MMPLAYER_IS_HTTP_STREAMING(player)) && + (!player->pipeline->mainbin[MMPLAYER_M_DEMUXED_S_BUFFER].gst) && + (player->pipeline->mainbin[MMPLAYER_M_MUXED_S_BUFFER].gst)) { + + gint64 dur_bytes = 0L; + + if (!gst_element_query_duration(player->pipeline->mainbin[MMPLAYER_M_SRC].gst, GST_FORMAT_BYTES, &dur_bytes)) + LOGE("fail to get duration."); + + /* there is no mq, enable use-buffering on queue2 (ex) wav streaming + * use file information was already set on Q2 when it was created. */ + _mm_player_streaming_set_queue2(player->streamer, + player->pipeline->mainbin[MMPLAYER_M_MUXED_S_BUFFER].gst, + TRUE, /* use_buffering */ + MUXED_BUFFER_TYPE_MAX, /* use previous buffer type setting */ + ((dur_bytes > 0) ? 
((guint64)dur_bytes) : 0)); + } + + LOGD("GST_MESSAGE_STREAMS_SELECTED"); + player->no_more_pad = TRUE; + _mmplayer_set_reconfigure_state(player, FALSE); + _mmplayer_pipeline_complete(NULL, player); + retval = TRUE; + break; + } default: retval = FALSE; break; @@ -1012,49 +1010,36 @@ __mmplayer_gst_check_useful_message(mm_player_t *player, GstMessage * message) } static void -__mmplayer_update_buffer_setting(mm_player_t *player, GstMessage *buffering_msg) +__mmplayer_update_buffer_setting(mmplayer_t *player, GstMessage *buffering_msg) { - MMHandleType attrs = 0; guint64 data_size = 0; - gchar* path = NULL; gint64 pos_nsec = 0; - struct stat sb; MMPLAYER_RETURN_IF_FAIL(player && player->pipeline && player->pipeline->mainbin); - __mmplayer_gst_get_position(player, MM_PLAYER_POS_FORMAT_TIME, &pos_nsec); /* to update player->last_position */ - - attrs = MMPLAYER_GET_ATTRS(player); - if (!attrs) { - LOGE("fail to get attributes.\n"); - return; - } + _mmplayer_gst_get_position(player, &pos_nsec); /* to update player->last_position */ - if (!MMPLAYER_IS_STREAMING(player) && (player->can_support_codec & FOUND_PLUGIN_VIDEO)) { - mm_attrs_get_string_by_name(attrs, "profile_uri", &path); - - if (stat(path, &sb) == 0) - data_size = (guint64)sb.st_size; - } else if (MMPLAYER_IS_HTTP_STREAMING(player)) + if (MMPLAYER_IS_HTTP_STREAMING(player)) { data_size = player->http_content_size; + } - __mm_player_streaming_buffering(player->streamer, buffering_msg, data_size, player->last_position, player->duration); - __mm_player_streaming_sync_property(player->streamer, player->pipeline->mainbin[MMPLAYER_M_AUTOPLUG].gst); + _mm_player_streaming_buffering(player->streamer, buffering_msg, data_size, player->last_position, player->duration); + _mm_player_streaming_sync_property(player->streamer, player->pipeline->mainbin[MMPLAYER_M_AUTOPLUG].gst); return; } static int -__mmplayer_handle_buffering_message(mm_player_t* player) +__mmplayer_handle_buffering_playback(mmplayer_t *player) { int ret = MM_ERROR_NONE; - MMPlayerStateType prev_state = MM_PLAYER_STATE_NONE; - MMPlayerStateType current_state = MM_PLAYER_STATE_NONE; - MMPlayerStateType target_state = MM_PLAYER_STATE_NONE; - MMPlayerStateType pending_state = MM_PLAYER_STATE_NONE; + mmplayer_state_e prev_state = MM_PLAYER_STATE_NONE; + mmplayer_state_e current_state = MM_PLAYER_STATE_NONE; + mmplayer_state_e target_state = MM_PLAYER_STATE_NONE; + mmplayer_state_e pending_state = MM_PLAYER_STATE_NONE; if (!player || !player->streamer || (MMPLAYER_IS_LIVE_STREAMING(player) && MMPLAYER_IS_RTSP_STREAMING(player))) { - LOGW("do nothing for buffering msg\n"); + LOGW("do nothing for buffering msg"); ret = MM_ERROR_PLAYER_INVALID_STATE; goto exit; } @@ -1078,18 +1063,18 @@ __mmplayer_handle_buffering_message(mm_player_t* player) { switch (pending_state) { case MM_PLAYER_STATE_PLAYING: - __mmplayer_gst_pause(player, TRUE); + _mmplayer_gst_pause(player, TRUE); break; case MM_PLAYER_STATE_PAUSED: - LOGD("player is already going to paused state, there is nothing to do.\n"); + LOGD("player is already going to paused state, there is nothing to do."); break; case MM_PLAYER_STATE_NONE: case MM_PLAYER_STATE_NULL: case MM_PLAYER_STATE_READY: default: - LOGW("invalid pending state [%s].\n", MMPLAYER_STATE_GET_NAME(pending_state)); + LOGW("invalid pending state [%s].", MMPLAYER_STATE_GET_NAME(pending_state)); break; } } @@ -1101,7 +1086,7 @@ __mmplayer_handle_buffering_message(mm_player_t* player) case MM_PLAYER_STATE_NONE: { if (current_state != MM_PLAYER_STATE_PLAYING) - 
__mmplayer_gst_resume(player, TRUE); + _mmplayer_gst_resume(player, TRUE); } break; @@ -1110,22 +1095,22 @@ __mmplayer_handle_buffering_message(mm_player_t* player) * Because, buffering can be completed during autoplugging when pipeline would try to go playing state directly. */ if (current_state == MM_PLAYER_STATE_PLAYING) { - /* NOTE: If the current state is PLAYING, it means, async __mmplayer_gst_pause() is not completed yet. + /* NOTE: If the current state is PLAYING, it means, async _mmplayer_gst_pause() is not completed yet. * The current state should be changed to paused purposely to prevent state conflict. */ MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_PAUSED); } - __mmplayer_gst_resume(player, TRUE); + _mmplayer_gst_resume(player, TRUE); break; case MM_PLAYER_STATE_PLAYING: - LOGD("player is already going to playing state, there is nothing to do.\n"); + LOGD("player is already going to playing state, there is nothing to do."); break; case MM_PLAYER_STATE_NULL: case MM_PLAYER_STATE_READY: default: - LOGW("invalid pending state [%s].\n", MMPLAYER_STATE_GET_NAME(pending_state)); + LOGW("invalid pending state [%s].", MMPLAYER_STATE_GET_NAME(pending_state)); break; } } @@ -1135,7 +1120,7 @@ __mmplayer_handle_buffering_message(mm_player_t* player) case MM_PLAYER_STATE_READY: case MM_PLAYER_STATE_NONE: default: - LOGW("invalid target state [%s].\n", MMPLAYER_STATE_GET_NAME(target_state)); + LOGW("invalid target state [%s].", MMPLAYER_STATE_GET_NAME(target_state)); break; } } else { @@ -1148,8 +1133,8 @@ __mmplayer_handle_buffering_message(mm_player_t* player) if (current_state != MM_PLAYER_STATE_PAUSED) { /* rtsp streaming pause makes rtsp server stop sending data. */ if (!MMPLAYER_IS_RTSP_STREAMING(player)) { - LOGD("set pause state during buffering\n"); - __mmplayer_gst_pause(player, TRUE); + LOGD("set pause state during buffering"); + _mmplayer_gst_pause(player, TRUE); } } } @@ -1158,7 +1143,7 @@ __mmplayer_handle_buffering_message(mm_player_t* player) case MM_PLAYER_STATE_PLAYING: /* rtsp streaming pause makes rtsp server stop sending data. 
*/ if (!MMPLAYER_IS_RTSP_STREAMING(player)) - __mmplayer_gst_pause(player, TRUE); + _mmplayer_gst_pause(player, TRUE); break; case MM_PLAYER_STATE_PAUSED: @@ -1167,7 +1152,7 @@ __mmplayer_handle_buffering_message(mm_player_t* player) case MM_PLAYER_STATE_NULL: case MM_PLAYER_STATE_READY: default: - LOGW("invalid pending state [%s].\n", MMPLAYER_STATE_GET_NAME(pending_state)); + LOGW("invalid pending state [%s].", MMPLAYER_STATE_GET_NAME(pending_state)); break; } } @@ -1176,13 +1161,13 @@ exit: return ret; } -static VariantData * -__mmplayer_adaptive_var_info(const VariantData *self, gpointer user_data) +static stream_variant_t * +__mmplayer_adaptive_var_info(const stream_variant_t *self, gpointer user_data) { - VariantData *var_info = NULL; + stream_variant_t *var_info = NULL; g_return_val_if_fail(self != NULL, NULL); - var_info = g_new0(VariantData, 1); + var_info = g_new0(stream_variant_t, 1); if (!var_info) return NULL; var_info->bandwidth = self->bandwidth; var_info->width = self->width; @@ -1191,7 +1176,7 @@ __mmplayer_adaptive_var_info(const VariantData *self, gpointer user_data) } static gboolean -__mmplayer_gst_handle_duration(mm_player_t* player, GstMessage* msg) +__mmplayer_gst_handle_duration(mmplayer_t *player, GstMessage *msg) { gint64 bytes = 0; @@ -1206,11 +1191,11 @@ __mmplayer_gst_handle_duration(mm_player_t* player, GstMessage* msg) if (gst_element_query_duration(GST_ELEMENT_CAST(msg->src), GST_FORMAT_BYTES, &bytes)) { LOGD("data total size of http content: %"G_GINT64_FORMAT, bytes); - player->http_content_size = (bytes > 0) ? (bytes) : (0); + player->http_content_size = (bytes > 0) ? bytes : 0; } } else { /* handling audio clip which has vbr. means duration is keep changing */ - __mmplayer_update_content_attrs(player, ATTR_DURATION); + _mmplayer_update_content_attrs(player, ATTR_DURATION); } MMPLAYER_FLEAVE(); @@ -1221,20 +1206,20 @@ __mmplayer_gst_handle_duration(mm_player_t* player, GstMessage* msg) static gboolean __mmplayer_eos_timer_cb(gpointer u_data) { - mm_player_t* player = NULL; + mmplayer_t *player = NULL; MMHandleType attrs = 0; int count = 0; MMPLAYER_RETURN_VAL_IF_FAIL(u_data, FALSE); - player = (mm_player_t*) u_data; + player = (mmplayer_t *)u_data; attrs = MMPLAYER_GET_ATTRS(player); mm_attrs_get_int_by_name(attrs, "profile_play_count", &count); if (count == -1) { gint ret_value = 0; - ret_value = __mmplayer_gst_set_position(player, MM_PLAYER_POS_FORMAT_TIME, 0, TRUE); + ret_value = _mmplayer_gst_set_position(player, 0, TRUE); if (ret_value != MM_ERROR_NONE) LOGE("seeking to 0 failed in repeat play"); } else { @@ -1247,27 +1232,27 @@ __mmplayer_eos_timer_cb(gpointer u_data) } static void -__mmplayer_handle_eos_delay(mm_player_t* player, int delay_in_ms) +__mmplayer_handle_eos_delay(mmplayer_t *player, int delay_in_ms) { MMPLAYER_RETURN_IF_FAIL(player); /* post now if delay is zero */ - if (delay_in_ms == 0 || player->set_mode.pcm_extraction) { - LOGD("eos delay is zero. posting EOS now\n"); + if (delay_in_ms == 0 || player->audio_decoded_cb) { + LOGD("eos delay is zero. 
posting EOS now"); MMPLAYER_POST_MSG(player, MM_MESSAGE_END_OF_STREAM, NULL); - if (player->set_mode.pcm_extraction) - __mmplayer_cancel_eos_timer(player); + if (player->audio_decoded_cb) + _mmplayer_cancel_eos_timer(player); return; } /* cancel if existing */ - __mmplayer_cancel_eos_timer(player); + _mmplayer_cancel_eos_timer(player); /* init new timeout */ /* NOTE : consider give high priority to this timer */ - LOGD("posting EOS message after [%d] msec\n", delay_in_ms); + LOGD("posting EOS message after [%d] msec", delay_in_ms); player->eos_timer = g_timeout_add(delay_in_ms, __mmplayer_eos_timer_cb, player); @@ -1277,14 +1262,15 @@ __mmplayer_handle_eos_delay(mm_player_t* player, int delay_in_ms) /* check timer is valid. if not, send EOS now */ if (player->eos_timer == 0) { - LOGW("creating timer for delayed EOS has failed. sending EOS now\n"); + LOGW("creating timer for delayed EOS has failed. sending EOS now"); MMPLAYER_POST_MSG(player, MM_MESSAGE_END_OF_STREAM, NULL); } } -static int __mmplayer_gst_pending_seek(mm_player_t* player) +static int +__mmplayer_gst_pending_seek(mmplayer_t *player) { - MMPlayerStateType current_state = MM_PLAYER_STATE_NONE; + mmplayer_state_e current_state = MM_PLAYER_STATE_NONE; int ret = MM_ERROR_NONE; MMPLAYER_FENTER(); @@ -1292,7 +1278,7 @@ static int __mmplayer_gst_pending_seek(mm_player_t* player) MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED); if (!player->pending_seek.is_pending) { - LOGD("pending seek is not reserved. nothing to do.\n"); + LOGD("pending seek is not reserved. nothing to do."); return ret; } @@ -1300,19 +1286,18 @@ static int __mmplayer_gst_pending_seek(mm_player_t* player) current_state = MMPLAYER_CURRENT_STATE(player); if (current_state != MM_PLAYER_STATE_PAUSED && current_state != MM_PLAYER_STATE_PLAYING) { - LOGW("try to pending seek in %s state, try next time. \n", + LOGW("try to pending seek in %s state, try next time. ", MMPLAYER_STATE_GET_NAME(current_state)); return ret; } - LOGD("trying to play from(%"G_GINT64_FORMAT") pending position\n", player->pending_seek.pos); - - ret = __mmplayer_gst_set_position(player, player->pending_seek.format, player->pending_seek.pos, FALSE); + LOGD("trying to play from(%"G_GINT64_FORMAT") pending position", player->pending_seek.pos); - if (MM_ERROR_NONE != ret) - LOGE("failed to seek pending postion. just keep staying current position.\n"); + ret = _mmplayer_gst_set_position(player, player->pending_seek.pos, FALSE); + if (ret != MM_ERROR_NONE) + LOGE("failed to seek pending postion. 
just keep staying current position."); - player->pending_seek.is_pending = FALSE; + player->pending_seek.is_pending = false; MMPLAYER_FLEAVE(); @@ -1320,9 +1305,9 @@ static int __mmplayer_gst_pending_seek(mm_player_t* player) } static void -__mmplayer_gst_handle_async(mm_player_t* player, gboolean async, enum MMPlayerSinkType type) +__mmplayer_gst_set_async(mmplayer_t *player, gboolean async, enum mmplayer_sink_type type) { - MMPlayerGstElement *videobin = NULL, *audiobin = NULL, *textbin = NULL; + mmplayer_gst_element_t *videobin = NULL, *audiobin = NULL, *textbin = NULL; MMPLAYER_RETURN_IF_FAIL(player && player->pipeline); @@ -1345,9 +1330,9 @@ __mmplayer_gst_handle_async(mm_player_t* player, gboolean async, enum MMPlayerSi } static void -__mmplayer_drop_subtitle(mm_player_t* player, gboolean is_drop) +__mmplayer_drop_subtitle(mmplayer_t *player, gboolean is_drop) { - MMPlayerGstElement *textbin; + mmplayer_gst_element_t *textbin; MMPLAYER_FENTER(); MMPLAYER_RETURN_IF_FAIL(player && @@ -1359,18 +1344,18 @@ __mmplayer_drop_subtitle(mm_player_t* player, gboolean is_drop) textbin = player->pipeline->textbin; if (is_drop) { - LOGD("Drop subtitle text after getting EOS\n"); + LOGD("Drop subtitle text after getting EOS"); - __mmplayer_gst_handle_async(player, FALSE, MMPLAYER_TEXT_SINK); + __mmplayer_gst_set_async(player, FALSE, MMPLAYER_TEXT_SINK); g_object_set(textbin[MMPLAYER_T_IDENTITY].gst, "drop-probability", (gfloat)1.0, NULL); player->is_subtitle_force_drop = TRUE; } else { if (player->is_subtitle_force_drop == TRUE) { - LOGD("Enable subtitle data path without drop\n"); + LOGD("Enable subtitle data path without drop"); g_object_set(textbin[MMPLAYER_T_IDENTITY].gst, "drop-probability", (gfloat)0.0, NULL); - __mmplayer_gst_handle_async(player, TRUE, MMPLAYER_TEXT_SINK); + __mmplayer_gst_set_async(player, TRUE, MMPLAYER_TEXT_SINK); LOGD("non-connected with external display"); @@ -1379,495 +1364,675 @@ __mmplayer_drop_subtitle(mm_player_t* player, gboolean is_drop) } } - -#if 0 -#endif - -int -__mmplayer_gst_set_state(mm_player_t* player, GstElement * element, GstState state, gboolean async, gint timeout) +static void +__mmplayer_gst_handle_eos_message(mmplayer_t *player, GstMessage *msg) { - GstState element_state = GST_STATE_VOID_PENDING; - GstState element_pending_state = GST_STATE_VOID_PENDING; - GstStateChangeReturn ret = GST_STATE_CHANGE_FAILURE; + MMHandleType attrs = 0; + gint count = 0; MMPLAYER_FENTER(); - MMPLAYER_RETURN_VAL_IF_FAIL(player, MM_ERROR_PLAYER_NOT_INITIALIZED); - MMPLAYER_RETURN_VAL_IF_FAIL(element, MM_ERROR_INVALID_ARGUMENT); - - LOGD("setting [%s] element state to : %s\n", GST_ELEMENT_NAME(element), gst_element_state_get_name(state)); + /* NOTE : EOS event is comming multiple time. watch out it */ + /* check state. we only process EOS when pipeline state goes to PLAYING */ + if (!(player->cmd == MMPLAYER_COMMAND_START || player->cmd == MMPLAYER_COMMAND_RESUME)) { + LOGD("EOS received on non-playing state. 
ignoring it"); + return; + } - /* set state */ - ret = gst_element_set_state(element, state); + if (player->pipeline && player->pipeline->textbin) + __mmplayer_drop_subtitle(player, TRUE); - if (ret == GST_STATE_CHANGE_FAILURE) { - LOGE("failed to set [%s] state\n", GST_ELEMENT_NAME(element)); + if ((player->audio_decoded_cb) && (player->audio_extract_opt & MM_PLAYER_AUDIO_EXTRACT_NO_SYNC_WITH_CLOCK)) + _mmplayer_audio_stream_clear_buffer(player, TRUE); - /* dump state of all element */ - __mmplayer_dump_pipeline_state(player); + /* rewind if repeat count is greater then zero */ + /* get play count */ + attrs = MMPLAYER_GET_ATTRS(player); + if (attrs) { + mm_attrs_get_int_by_name(attrs, "profile_play_count", &count); - return MM_ERROR_PLAYER_INTERNAL; - } + LOGD("play count: %d, playback rate: %f", count, player->playback_rate); - /* return here so state transition to be done in async mode */ - if (async) { - LOGD("async state transition. not waiting for state complete.\n"); - return MM_ERROR_NONE; - } + if (count == -1 || player->playback_rate < 0.0) /* default value is 1 */ { + if (player->playback_rate < 0.0) { + player->resumed_by_rewind = TRUE; + _mmplayer_set_mute((MMHandleType)player, false); + MMPLAYER_POST_MSG(player, MM_MESSAGE_RESUMED_BY_REW, NULL); + } - /* wait for state transition */ - ret = gst_element_get_state(element, &element_state, &element_pending_state, timeout * GST_SECOND); + __mmplayer_handle_eos_delay(player, player->ini.delay_before_repeat); - if (ret == GST_STATE_CHANGE_FAILURE || (state != element_state)) { - LOGE("failed to change [%s] element state to [%s] within %d sec\n", - GST_ELEMENT_NAME(element), - gst_element_state_get_name(state), timeout); + /* initialize */ + player->sent_bos = FALSE; - LOGE(" [%s] state : %s pending : %s \n", - GST_ELEMENT_NAME(element), - gst_element_state_get_name(element_state), - gst_element_state_get_name(element_pending_state)); + LOGD("do not post eos msg for repeating"); + return; + } + } - /* dump state of all element */ - __mmplayer_dump_pipeline_state(player); + if (player->pipeline) + MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-status-eos"); - return MM_ERROR_PLAYER_INTERNAL; - } + /* post eos message to application */ + __mmplayer_handle_eos_delay(player, player->ini.eos_delay); - LOGD("[%s] element state has changed\n", GST_ELEMENT_NAME(element)); + /* reset last position */ + player->last_position = 0; MMPLAYER_FLEAVE(); - - return MM_ERROR_NONE; + return; } -void -__mmplayer_gst_callback(GstMessage *msg, gpointer data) +static void +__mmplayer_gst_handle_error_message(mmplayer_t *player, GstMessage *msg) { - mm_player_t* player = (mm_player_t*)(data); + GError *error = NULL; + gchar *debug = NULL; - MMPLAYER_RETURN_IF_FAIL(player); - MMPLAYER_RETURN_IF_FAIL(msg && GST_IS_MESSAGE(msg)); + MMPLAYER_FENTER(); - switch (GST_MESSAGE_TYPE(msg)) { - case GST_MESSAGE_UNKNOWN: - LOGD("unknown message received\n"); - break; + /* generating debug info before returning error */ + MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-status-error"); - case GST_MESSAGE_EOS: - { - MMHandleType attrs = 0; - gint count = 0; + /* get error code */ + gst_message_parse_error(msg, &error, &debug); - LOGD("GST_MESSAGE_EOS received\n"); + if (gst_structure_has_name(gst_message_get_structure(msg), "streaming_error")) { + /* Note : the streaming error from the streaming source is handled + * using __mmplayer_handle_streaming_error. + */ + __mmplayer_handle_streaming_error(player, msg); - /* NOTE : EOS event is comming multiple time. 
watch out it */ - /* check state. we only process EOS when pipeline state goes to PLAYING */ - if (!(player->cmd == MMPLAYER_COMMAND_START || player->cmd == MMPLAYER_COMMAND_RESUME)) { - LOGD("EOS received on non-playing state. ignoring it\n"); - break; - } + /* dump state of all element */ + _mmplayer_dump_pipeline_state(player); + } else { + /* traslate gst error code to msl error code. then post it + * to application if needed + */ + __mmplayer_handle_gst_error(player, msg, error); - if (player->pipeline) { - if (player->pipeline->textbin) - __mmplayer_drop_subtitle(player, TRUE); + if (debug) + LOGE("error debug : %s", debug); + } - if ((player->audio_stream_cb) && (player->set_mode.pcm_extraction) && (!player->audio_stream_render_cb_ex)) { - GstPad *pad = NULL; + MMPLAYER_FREEIF(debug); + g_error_free(error); - pad = gst_element_get_static_pad(player->pipeline->audiobin[MMPLAYER_A_SINK].gst, "sink"); + MMPLAYER_FLEAVE(); + return; +} - LOGD("release audio callback\n"); +static void +__mmplayer_gst_handle_buffering_message(mmplayer_t *player, GstMessage *msg) +{ + MMMessageParamType msg_param = {0, }; + int bRet = MM_ERROR_NONE; - /* release audio callback */ - gst_pad_remove_probe(pad, player->audio_cb_probe_id); - player->audio_cb_probe_id = 0; - /* audio callback should be free because it can be called even though probe remove.*/ - player->audio_stream_cb = NULL; - player->audio_stream_cb_user_param = NULL; + MMPLAYER_FENTER(); + MMPLAYER_RETURN_IF_FAIL(player && player->pipeline && player->pipeline->mainbin); - } - } - if ((player->audio_stream_render_cb_ex) && (!player->audio_stream_sink_sync)) - __mmplayer_audio_stream_clear_buffer(player, TRUE); + if (!MMPLAYER_IS_STREAMING(player)) { + LOGW("this is not streaming playback."); + return; + } - /* rewind if repeat count is greater then zero */ - /* get play count */ - attrs = MMPLAYER_GET_ATTRS(player); + MMPLAYER_CMD_LOCK(player); - if (attrs) { - mm_attrs_get_int_by_name(attrs, "profile_play_count", &count); + if (!player->streamer) { + LOGW("Pipeline is shutting down"); + MMPLAYER_CMD_UNLOCK(player); + return; + } - LOGD("play count: %d, playback rate: %f\n", count, player->playback_rate); + /* ignore the remained buffering message till getting 100% msg */ + if (player->streamer->buffering_state == MM_PLAYER_BUFFERING_COMPLETE) { + gint buffer_percent = 0; - if (count == -1 || player->playback_rate < 0.0) /* default value is 1 */ { - if (player->playback_rate < 0.0) { - player->resumed_by_rewind = TRUE; - _mmplayer_set_mute((MMHandleType)player, 0); - MMPLAYER_POST_MSG(player, MM_MESSAGE_RESUMED_BY_REW, NULL); - } + gst_message_parse_buffering(msg, &buffer_percent); - __mmplayer_handle_eos_delay(player, player->ini.delay_before_repeat); + if (buffer_percent == MAX_BUFFER_PERCENT) { + LOGD("Ignored all the previous buffering msg!(got %d%%)", buffer_percent); + __mmplayer_update_buffer_setting(player, NULL); /* update buffering size for next buffering */ + player->streamer->buffering_state = MM_PLAYER_BUFFERING_DEFAULT; + } + MMPLAYER_CMD_UNLOCK(player); + return; + } - /* initialize */ - player->sent_bos = FALSE; + /* ignore the remained buffering message */ + if (player->streamer->buffering_state == MM_PLAYER_BUFFERING_ABORT) { + gint buffer_percent = 0; - /* not posting eos when repeating */ - break; - } - } + gst_message_parse_buffering(msg, &buffer_percent); - if (player->pipeline) - MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-status-eos"); + LOGD("interrupted buffering -last posted %d %%, new per %d %%", + 
player->streamer->buffering_percent, buffer_percent); - /* post eos message to application */ - __mmplayer_handle_eos_delay(player, player->ini.eos_delay); + if (player->streamer->buffering_percent > buffer_percent || buffer_percent <= 0) { + player->streamer->buffering_state = MM_PLAYER_BUFFERING_DEFAULT; + player->streamer->buffering_req.is_pre_buffering = FALSE; - /* reset last position */ - player->last_position = 0; + LOGD("interrupted buffering - need to enter the buffering mode again - %d %%", buffer_percent); + } else { + LOGD("interrupted buffering - ignored the remained buffering msg!"); + MMPLAYER_CMD_UNLOCK(player); + return; } - break; + } - case GST_MESSAGE_ERROR: - { - GError *error = NULL; - gchar* debug = NULL; + __mmplayer_update_buffer_setting(player, msg); - /* generating debug info before returning error */ - MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-status-error"); + bRet = __mmplayer_handle_buffering_playback(player); /* playback control */ - /* get error code */ - gst_message_parse_error(msg, &error, &debug); + if (bRet == MM_ERROR_NONE) { + msg_param.connection.buffering = player->streamer->buffering_percent; + MMPLAYER_POST_MSG(player, MM_MESSAGE_BUFFERING, &msg_param); - if (gst_structure_has_name(gst_message_get_structure(msg), "streaming_error")) { - /* Note : the streaming error from the streaming source is handled - * using __mmplayer_handle_streaming_error. - */ - __mmplayer_handle_streaming_error(player, msg); + if (MMPLAYER_IS_RTSP_STREAMING(player) && + player->pending_resume && + (player->streamer->buffering_percent >= MAX_BUFFER_PERCENT)) { - /* dump state of all element */ - __mmplayer_dump_pipeline_state(player); - } else { - /* traslate gst error code to msl error code. then post it - * to application if needed - */ - __mmplayer_handle_gst_error(player, msg, error); + player->is_external_subtitle_added_now = FALSE; + player->pending_resume = FALSE; + _mmplayer_resume((MMHandleType)player); + } + + if (MMPLAYER_IS_RTSP_STREAMING(player) && + (player->streamer->buffering_percent >= MAX_BUFFER_PERCENT)) { - if (debug) - LOGE("error debug : %s", debug); + if (player->seek_state == MMPLAYER_SEEK_IN_PROGRESS) { + if (MMPLAYER_TARGET_STATE(player) == MM_PLAYER_STATE_PAUSED) { + player->seek_state = MMPLAYER_SEEK_NONE; + MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL); + } else if (MMPLAYER_TARGET_STATE(player) == MM_PLAYER_STATE_PLAYING) { + /* Considering the async state trasition in case of RTSP. + After getting state change gst msg, seek cmpleted msg will be posted. 
*/ + player->seek_state = MMPLAYER_SEEK_COMPLETED; + } } + } + } else if (bRet == MM_ERROR_PLAYER_INVALID_STATE) { + if (!player->streamer) { + LOGW("player->streamer is NULL, so discarding the buffering percent update"); + MMPLAYER_CMD_UNLOCK(player); + return; + } - if (MMPLAYER_IS_HTTP_PD(player)) - _mmplayer_unrealize_pd_downloader((MMHandleType)player); + if ((MMPLAYER_IS_LIVE_STREAMING(player)) && (MMPLAYER_IS_RTSP_STREAMING(player))) { - MMPLAYER_FREEIF(debug); - g_error_free(error); + LOGD("player->last_position=%"G_GINT64_FORMAT" , player->streamer->buffering_percent=%d", + GST_TIME_AS_SECONDS(player->last_position), player->streamer->buffering_percent); + + if ((GST_TIME_AS_SECONDS(player->last_position) <= 0) && (MMPLAYER_CURRENT_STATE(player) == MM_PLAYER_STATE_PAUSED)) { + msg_param.connection.buffering = player->streamer->buffering_percent; + MMPLAYER_POST_MSG(player, MM_MESSAGE_BUFFERING, &msg_param); + } else { + LOGD("Not updating Buffering Message for Live RTSP case !!!"); + } + } else { + msg_param.connection.buffering = player->streamer->buffering_percent; + MMPLAYER_POST_MSG(player, MM_MESSAGE_BUFFERING, &msg_param); } - break; + } + MMPLAYER_CMD_UNLOCK(player); - case GST_MESSAGE_WARNING: - { - char* debug = NULL; - GError* error = NULL; + MMPLAYER_FLEAVE(); + return; - gst_message_parse_warning(msg, &error, &debug); +} - LOGD("warning : %s\n", error->message); - LOGD("debug : %s\n", debug); +static void +__mmplayer_gst_handle_state_message(mmplayer_t *player, GstMessage *msg) +{ + mmplayer_gst_element_t *mainbin; + const GValue *voldstate, *vnewstate, *vpending; + GstState oldstate = GST_STATE_NULL; + GstState newstate = GST_STATE_NULL; + GstState pending = GST_STATE_NULL; - MMPLAYER_POST_MSG(player, MM_MESSAGE_WARNING, NULL); + MMPLAYER_FENTER(); + MMPLAYER_RETURN_IF_FAIL(player && player->pipeline && player->pipeline->mainbin); - MMPLAYER_FREEIF(debug); - g_error_free(error); - } - break; + mainbin = player->pipeline->mainbin; - case GST_MESSAGE_TAG: - { - LOGD("GST_MESSAGE_TAG\n"); - if (!__mmplayer_gst_extract_tag_from_msg(player, msg)) - LOGW("failed to extract tags from gstmessage\n"); - } - break; + /* we only handle messages from pipeline */ + if (msg->src != (GstObject *)mainbin[MMPLAYER_M_PIPE].gst) + return; - case GST_MESSAGE_BUFFERING: - { - MMMessageParamType msg_param = {0, }; - int bRet = MM_ERROR_NONE; + /* get state info from msg */ + voldstate = gst_structure_get_value(gst_message_get_structure(msg), "old-state"); + vnewstate = gst_structure_get_value(gst_message_get_structure(msg), "new-state"); + vpending = gst_structure_get_value(gst_message_get_structure(msg), "pending-state"); - if (!(player->pipeline && player->pipeline->mainbin)) { - LOGE("Pipeline is not initialized"); - break; - } + if (!voldstate || !vnewstate) { + LOGE("received msg has wrong format."); + return; + } - if (!MMPLAYER_IS_STREAMING(player)) - break; + oldstate = (GstState)voldstate->data[0].v_int; + newstate = (GstState)vnewstate->data[0].v_int; + if (vpending) + pending = (GstState)vpending->data[0].v_int; - if (player->pd_mode == MM_PLAYER_PD_MODE_URI) { - if (!MMPLAYER_CMD_TRYLOCK(player)) { - /* skip the playback control by buffering msg while user request is handled. 
*/ - gint per = 0; + LOGD("state changed [%s] : %s ---> %s final : %s", + GST_OBJECT_NAME(GST_MESSAGE_SRC(msg)), + gst_element_state_get_name((GstState)oldstate), + gst_element_state_get_name((GstState)newstate), + gst_element_state_get_name((GstState)pending)); - LOGW("[PD mode] can't get cmd lock, only post buffering msg"); + if (newstate == GST_STATE_PLAYING) { + if ((MMPLAYER_IS_RTSP_STREAMING(player)) && (player->pending_seek.is_pending)) { - gst_message_parse_buffering(msg, &per); - LOGD("[PD mode][%s] buffering %d %%....", GST_OBJECT_NAME(GST_MESSAGE_SRC(msg)), per); + int retVal = MM_ERROR_NONE; + LOGD("trying to play from (%"G_GINT64_FORMAT") pending position", player->pending_seek.pos); - msg_param.connection.buffering = per; - MMPLAYER_POST_MSG(player, MM_MESSAGE_BUFFERING, &msg_param); - break; - } - } else { - MMPLAYER_CMD_LOCK(player); - } + retVal = _mmplayer_gst_set_position(player, player->pending_seek.pos, TRUE); - if (!player->streamer) { - LOGW("Pipeline is shutting down"); - MMPLAYER_CMD_UNLOCK(player); - break; + if (MM_ERROR_NONE != retVal) + LOGE("failed to seek pending postion. just keep staying current position."); + + player->pending_seek.is_pending = false; + } + } + + if (oldstate == newstate) { + LOGD("pipeline reports state transition to old state"); + return; + } + + switch (newstate) { + case GST_STATE_PAUSED: + { + gboolean prepare_async = FALSE; + + if (!player->sent_bos && oldstate == GST_STATE_READY) { + // managed prepare async case + mm_attrs_get_int_by_name(player->attrs, "profile_prepare_async", &prepare_async); + LOGD("checking prepare mode for async transition - %d", prepare_async); } - /* ignore the remained buffering message till getting 100% msg */ - if (player->streamer->buffering_state == MM_PLAYER_BUFFERING_COMPLETE) { - gint buffer_percent = 0; + if (MMPLAYER_IS_STREAMING(player) || MMPLAYER_IS_MS_BUFF_SRC(player) || prepare_async) { + MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_PAUSED); - gst_message_parse_buffering(msg, &buffer_percent); + if (MMPLAYER_IS_STREAMING(player) && (player->streamer)) + _mm_player_streaming_set_content_bitrate(player->streamer, + player->total_maximum_bitrate, player->total_bitrate); - if (buffer_percent == MAX_BUFFER_PERCENT) { - LOGD("Ignored all the previous buffering msg!(got %d%%)\n", buffer_percent); - player->streamer->buffering_state = MM_PLAYER_BUFFERING_DEFAULT; + if (player->pending_seek.is_pending) { + LOGW("trying to do pending seek"); + MMPLAYER_CMD_LOCK(player); + __mmplayer_gst_pending_seek(player); + MMPLAYER_CMD_UNLOCK(player); } - MMPLAYER_CMD_UNLOCK(player); - break; } + } + break; - /* ignore the remained buffering message */ - if (player->streamer->buffering_state == MM_PLAYER_BUFFERING_ABORT) { - gint buffer_percent = 0; + case GST_STATE_PLAYING: + { + if (MMPLAYER_IS_STREAMING(player)) { + // managed prepare async case when buffering is completed + // pending state should be reset otherwise, it's still playing even though it's resumed after bufferging. 
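			/* Illustrative aside, a minimal sketch only (not taken from this patch): instead of
			 * reading the GValue internals as this handler does (voldstate->data[0].v_int), the
			 * same three states can be obtained with the public GstMessage helper:
			 *
			 *     GstState old_state, new_state, pending_state;
			 *     gst_message_parse_state_changed(msg, &old_state, &new_state, &pending_state);
			 *
			 * Once buffering completes and the pipeline reports PLAYING again, the MSL state is
			 * forced back to MM_PLAYER_STATE_PLAYING below, so the player does not keep
			 * reporting the pre-buffering state.
			 */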
+ if ((MMPLAYER_CURRENT_STATE(player) != MM_PLAYER_STATE_PLAYING) || + (MMPLAYER_PENDING_STATE(player) == MM_PLAYER_STATE_PLAYING)) + MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_PLAYING); - gst_message_parse_buffering(msg, &buffer_percent); + if (MMPLAYER_IS_RTSP_STREAMING(player) && (MMPLAYER_IS_LIVE_STREAMING(player))) { - LOGD("interrupted buffering -last posted %d %%, new per %d %%", - player->streamer->buffering_percent, buffer_percent); + LOGD("Current Buffering Percent = %d", player->streamer->buffering_percent); + if (player->streamer->buffering_percent < 100) { - if (player->streamer->buffering_percent > buffer_percent || buffer_percent <= 0) { - player->streamer->buffering_state = MM_PLAYER_BUFFERING_DEFAULT; - player->streamer->buffering_req.is_pre_buffering = FALSE; + MMMessageParamType msg_param = {0, }; + LOGW("Posting Buffering Completed Message to Application !!!"); - LOGD("interrupted buffering - need to enter the buffering mode again - %d %%", buffer_percent); - } else { - LOGD("interrupted buffering - ignored the remained buffering msg!"); - MMPLAYER_CMD_UNLOCK(player); - break; + msg_param.connection.buffering = 100; + MMPLAYER_POST_MSG(player, MM_MESSAGE_BUFFERING, &msg_param); + } } } - __mmplayer_update_buffer_setting(player, msg); + if (player->gapless.stream_changed) { + _mmplayer_update_content_attrs(player, ATTR_ALL); + player->gapless.stream_changed = FALSE; + } - bRet = __mmplayer_handle_buffering_message(player); /* playback control */ + if (player->seek_state == MMPLAYER_SEEK_COMPLETED) { + player->seek_state = MMPLAYER_SEEK_NONE; + MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL); + } + } + break; + case GST_STATE_VOID_PENDING: + case GST_STATE_NULL: + case GST_STATE_READY: + default: + break; + } - if (bRet == MM_ERROR_NONE) { - msg_param.connection.buffering = player->streamer->buffering_percent; - MMPLAYER_POST_MSG(player, MM_MESSAGE_BUFFERING, &msg_param); + MMPLAYER_FLEAVE(); + return; +} - if (MMPLAYER_IS_RTSP_STREAMING(player) && - player->pending_resume && - (player->streamer->buffering_percent >= MAX_BUFFER_PERCENT)) { +static void +__mmplayer_gst_handle_element_message(mmplayer_t *player, GstMessage *msg) +{ + const gchar *structure_name; + gint count = 0, idx = 0; - player->is_external_subtitle_added_now = FALSE; - player->pending_resume = FALSE; - _mmplayer_resume((MMHandleType)player); - } + MMPLAYER_FENTER(); + MMPLAYER_RETURN_IF_FAIL(player && player->pipeline && player->pipeline->mainbin); - if (MMPLAYER_IS_RTSP_STREAMING(player) && - (player->streamer->buffering_percent >= MAX_BUFFER_PERCENT)) { + if (gst_message_get_structure(msg) == NULL) + return; - if (player->seek_state == MMPLAYER_SEEK_IN_PROGRESS) { - if (MMPLAYER_TARGET_STATE(player) == MM_PLAYER_STATE_PAUSED) { - player->seek_state = MMPLAYER_SEEK_NONE; - MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL); - } else if (MMPLAYER_TARGET_STATE(player) == MM_PLAYER_STATE_PLAYING) { - /* Considering the async state trasition in case of RTSP. - After getting state change gst msg, seek cmpleted msg will be posted. 
*/ - player->seek_state = MMPLAYER_SEEK_COMPLETED; - } - } - } - } else if (bRet == MM_ERROR_PLAYER_INVALID_STATE) { - if (!player->streamer) { - LOGW("player->streamer is NULL, so discarding the buffering percent update\n"); - MMPLAYER_CMD_UNLOCK(player); - break; - } + structure_name = gst_structure_get_name(gst_message_get_structure(msg)); + if (!structure_name) + return; - if ((MMPLAYER_IS_LIVE_STREAMING(player)) && (MMPLAYER_IS_RTSP_STREAMING(player))) { + LOGD("GST_MESSAGE_ELEMENT %s from %s", structure_name, GST_OBJECT_NAME(GST_MESSAGE_SRC(msg))); - LOGD("player->last_position=%"G_GINT64_FORMAT" , player->streamer->buffering_percent=%d \n", - GST_TIME_AS_SECONDS(player->last_position), player->streamer->buffering_percent); + if (!strcmp(structure_name, "adaptive-streaming-variant")) { + const GValue *var_info = NULL; - if ((GST_TIME_AS_SECONDS(player->last_position) <= 0) && (MMPLAYER_CURRENT_STATE(player) == MM_PLAYER_STATE_PAUSED)) { - msg_param.connection.buffering = player->streamer->buffering_percent; - MMPLAYER_POST_MSG(player, MM_MESSAGE_BUFFERING, &msg_param); - } else { - LOGD("Not updating Buffering Message for Live RTSP case !!!\n"); - } - } else { - msg_param.connection.buffering = player->streamer->buffering_percent; - MMPLAYER_POST_MSG(player, MM_MESSAGE_BUFFERING, &msg_param); + var_info = gst_structure_get_value(gst_message_get_structure(msg), "video-variant-info"); + if (var_info != NULL) { + if (player->adaptive_info.var_list) + g_list_free_full(player->adaptive_info.var_list, g_free); + + /* share addr or copy the list */ + player->adaptive_info.var_list = + g_list_copy_deep((GList *)g_value_get_pointer(var_info), (GCopyFunc)__mmplayer_adaptive_var_info, NULL); + + count = g_list_length(player->adaptive_info.var_list); + if (count > 0) { + stream_variant_t *temp = NULL; + + /* print out for debug */ + LOGD("num of variant_info %d", count); + for (idx = 0; idx < count; idx++) { + temp = g_list_nth_data(player->adaptive_info.var_list, idx); + if (temp) + LOGD("variant(%d) [b]%d [w]%d [h]%d ", idx, temp->bandwidth, temp->width, temp->height); } } - MMPLAYER_CMD_UNLOCK(player); } - break; + } - case GST_MESSAGE_STATE_CHANGED: - { - MMPlayerGstElement *mainbin; - const GValue *voldstate, *vnewstate, *vpending; - GstState oldstate = GST_STATE_NULL; - GstState newstate = GST_STATE_NULL; - GstState pending = GST_STATE_NULL; - - if (!(player->pipeline && player->pipeline->mainbin)) { - LOGE("player pipeline handle is null"); - break; - } + if (!strcmp(structure_name, "prepare-decode-buffers")) { + gint num_buffers = 0; + gint extra_num_buffers = 0; - mainbin = player->pipeline->mainbin; + if (gst_structure_get_int(gst_message_get_structure(msg), "num_buffers", &num_buffers)) { + LOGD("video_num_buffers : %d", num_buffers); + mm_player_set_attribute((MMHandleType)player, NULL, + MM_PLAYER_VIDEO_BUFFER_TOTAL_SIZE, num_buffers, NULL); + } - /* we only handle messages from pipeline */ - if (msg->src != (GstObject *)mainbin[MMPLAYER_M_PIPE].gst) - break; + if (gst_structure_get_int(gst_message_get_structure(msg), "extra_num_buffers", &extra_num_buffers)) { + LOGD("num_of_vout_extra num buffers : %d", extra_num_buffers); + mm_player_set_attribute((MMHandleType)player, NULL, + MM_PLAYER_VIDEO_BUFFER_EXTRA_SIZE, extra_num_buffers, NULL); + } + return; + } - /* get state info from msg */ - voldstate = gst_structure_get_value(gst_message_get_structure(msg), "old-state"); - vnewstate = gst_structure_get_value(gst_message_get_structure(msg), "new-state"); - vpending = 
gst_structure_get_value(gst_message_get_structure(msg), "pending-state"); + if (!strcmp(structure_name, "Ext_Sub_Language_List")) + _mmplayer_track_update_text_attr_info(player, msg); - if (!voldstate || !vnewstate) { - LOGE("received msg has wrong format."); - break; - } + /* custom message */ + if (!strcmp(structure_name, "audio_codec_not_supported")) { + MMMessageParamType msg_param = {0,}; + msg_param.code = MM_ERROR_PLAYER_AUDIO_CODEC_NOT_FOUND; + MMPLAYER_POST_MSG(player, MM_MESSAGE_ERROR, &msg_param); + } + + /* custom message for RTSP attribute : + RTSP case, buffer is not come from server before PLAYING state. However,we have to get attribute after PAUSE state chaged. + sdp which has contents info is received when rtsp connection is opened. + extract duration ,codec info , resolution from sdp and get it by GstMessage */ + if (!strcmp(structure_name, "rtspsrc_properties")) { + gchar *audio_codec = NULL; + gchar *video_codec = NULL; + gchar *video_frame_size = NULL; + + gst_structure_get(gst_message_get_structure(msg), + "rtsp_duration", G_TYPE_UINT64, &player->duration, NULL); + LOGD("rtsp duration : %"G_GINT64_FORMAT" msec", GST_TIME_AS_MSECONDS(player->duration)); + player->streaming_type = _mmplayer_get_stream_service_type(player); + + gst_structure_get(gst_message_get_structure(msg), + "rtsp_audio_codec", G_TYPE_STRING, &audio_codec, NULL); + LOGD("rtsp_audio_codec : %s", audio_codec); + if (audio_codec) + mm_player_set_attribute((MMHandleType)player, NULL, + "content_audio_codec", audio_codec, strlen(audio_codec), NULL); + + gst_structure_get(gst_message_get_structure(msg), + "rtsp_video_codec", G_TYPE_STRING, &video_codec, NULL); + LOGD("rtsp_video_codec : %s", video_codec); + if (video_codec) + mm_player_set_attribute((MMHandleType)player, NULL, + "content_video_codec", video_codec, strlen(video_codec), NULL); + + gst_structure_get(gst_message_get_structure(msg), + "rtsp_video_frame_size", G_TYPE_STRING, &video_frame_size, NULL); + LOGD("rtsp_video_frame_size : %s", video_frame_size); + if (video_frame_size) { + gchar **res_str = g_strsplit(video_frame_size, "-", 0); + mm_player_set_attribute((MMHandleType)player, NULL, + MM_PLAYER_VIDEO_WIDTH, atoi(res_str[0]), + MM_PLAYER_VIDEO_HEIGHT, atoi(res_str[1]), + NULL); + g_strfreev(res_str); + } + } - oldstate = (GstState)voldstate->data[0].v_int; - newstate = (GstState)vnewstate->data[0].v_int; - if (vpending) - pending = (GstState)vpending->data[0].v_int; + MMPLAYER_FLEAVE(); + return; +} - LOGD("state changed [%s] : %s ---> %s final : %s\n", - GST_OBJECT_NAME(GST_MESSAGE_SRC(msg)), - gst_element_state_get_name((GstState)oldstate), - gst_element_state_get_name((GstState)newstate), - gst_element_state_get_name((GstState)pending)); +static void +__mmplayer_gst_handle_async_done_message(mmplayer_t *player, GstMessage *msg) +{ + mmplayer_gst_element_t *mainbin; - if (newstate == GST_STATE_PLAYING) { - if ((MMPLAYER_IS_RTSP_STREAMING(player)) && (player->pending_seek.is_pending)) { + MMPLAYER_FENTER(); + MMPLAYER_RETURN_IF_FAIL(player && player->pipeline && player->pipeline->mainbin); - int retVal = MM_ERROR_NONE; - LOGD("trying to play from (%"G_GINT64_FORMAT") pending position\n", player->pending_seek.pos); + mainbin = player->pipeline->mainbin; - retVal = __mmplayer_gst_set_position(player, player->pending_seek.format, player->pending_seek.pos, TRUE); + LOGD("GST_MESSAGE_ASYNC_DONE : %s", GST_ELEMENT_NAME(GST_MESSAGE_SRC(msg))); - if (MM_ERROR_NONE != retVal) - LOGE("failed to seek pending postion. 
just keep staying current position.\n"); + /* we only handle messages from pipeline */ + if (msg->src != (GstObject *)mainbin[MMPLAYER_M_PIPE].gst) + return; - player->pending_seek.is_pending = FALSE; - } + if (player->seek_state == MMPLAYER_SEEK_IN_PROGRESS) { + if (MMPLAYER_TARGET_STATE(player) == MM_PLAYER_STATE_PAUSED) { + player->seek_state = MMPLAYER_SEEK_NONE; + MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL); + } else if (MMPLAYER_TARGET_STATE(player) == MM_PLAYER_STATE_PLAYING) { + if (mainbin[MMPLAYER_M_AUTOPLUG].gst) { + LOGD("sync %s state(%s) with parent state(%s)", + GST_ELEMENT_NAME(mainbin[MMPLAYER_M_AUTOPLUG].gst), + gst_element_state_get_name(GST_STATE(mainbin[MMPLAYER_M_AUTOPLUG].gst)), + gst_element_state_get_name(GST_STATE(mainbin[MMPLAYER_M_PIPE].gst))); + + /* In case of streaming, pause is required before finishing seeking by buffering. + After completing the seek(during buffering), the player and sink elems has paused state but others in playing state. + Because the buffering state is controlled according to the state transition for force resume, + the decodebin state should be paused as player state. */ + gst_element_sync_state_with_parent(mainbin[MMPLAYER_M_AUTOPLUG].gst); } - if (oldstate == newstate) { - LOGD("pipeline reports state transition to old state"); - break; + if ((MMPLAYER_IS_HTTP_STREAMING(player)) && + (player->streamer) && + (player->streamer->streaming_buffer_type == BUFFER_TYPE_MUXED) && + !(player->streamer->buffering_state & MM_PLAYER_BUFFERING_IN_PROGRESS)) { + GstQuery *query = NULL; + gboolean busy = FALSE; + gint percent = 0; + + if (player->streamer->buffer_handle[BUFFER_TYPE_MUXED].buffer) { + query = gst_query_new_buffering(GST_FORMAT_PERCENT); + if (gst_element_query(player->streamer->buffer_handle[BUFFER_TYPE_MUXED].buffer, query)) + gst_query_parse_buffering_percent(query, &busy, &percent); + gst_query_unref(query); + + LOGD("buffered percent(%s): %d", + GST_ELEMENT_NAME(player->streamer->buffer_handle[BUFFER_TYPE_MUXED].buffer), percent); + } + + if (percent >= 100) + __mmplayer_handle_buffering_playback(player); } - switch (newstate) { - case GST_STATE_VOID_PENDING: - break; + player->seek_state = MMPLAYER_SEEK_COMPLETED; + } + } - case GST_STATE_NULL: - break; + MMPLAYER_FLEAVE(); + return; +} - case GST_STATE_READY: - break; +static void +__mmplayer_print_tag_foreach(const GstTagList *tags, const gchar *tag, gpointer user_data) +{ + GValue val = { 0, }; + gchar *str = NULL; + guint indent = GPOINTER_TO_UINT(user_data); - case GST_STATE_PAUSED: - { - gboolean prepare_async = FALSE; + if (!gst_tag_list_copy_value(&val, tags, tag)) + return; - if (!player->audio_cb_probe_id && player->set_mode.pcm_extraction && !player->audio_stream_render_cb_ex) - __mmplayer_configure_audio_callback(player); + if (G_VALUE_HOLDS_STRING(&val)) + str = g_value_dup_string(&val); + else + str = gst_value_serialize(&val); - if (!player->sent_bos && oldstate == GST_STATE_READY) { - // managed prepare async case - mm_attrs_get_int_by_name(player->attrs, "profile_prepare_async", &prepare_async); - LOGD("checking prepare mode for async transition - %d", prepare_async); - } + LOGD("%*s%s: %s\n", 2 * indent, " ", gst_tag_get_nick(tag), str); + g_free(str); + g_value_unset(&val); +} - if (MMPLAYER_IS_STREAMING(player) || MMPLAYER_IS_MS_BUFF_SRC(player) || prepare_async) { - MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_PAUSED); +static void +__mmplayer_dump_collection(GstStreamCollection * collection) +{ + guint i = 0; + GstTagList *tags = NULL; + 
GstCaps *caps = NULL; + + for (i = 0; i < gst_stream_collection_get_size(collection); i++) { + GstStream *stream = gst_stream_collection_get_stream(collection, i); + LOGD ("collection: Stream %u type %s flags 0x%x\n", i, + gst_stream_type_get_name(gst_stream_get_stream_type(stream)), + gst_stream_get_stream_flags(stream)); + LOGD (" ID: %s\n", gst_stream_get_stream_id(stream)); + + caps = gst_stream_get_caps(stream); + if (caps) { + gchar *caps_str = gst_caps_to_string(caps); + LOGD (" caps: %s\n", caps_str); + g_free(caps_str); + gst_caps_unref(caps); + } - if (MMPLAYER_IS_STREAMING(player) && (player->streamer)) - __mm_player_streaming_set_content_bitrate(player->streamer, - player->total_maximum_bitrate, player->total_bitrate); + tags = gst_stream_get_tags(stream); + if (tags) { + LOGD (" tags:\n"); + gst_tag_list_foreach(tags, __mmplayer_print_tag_foreach, GUINT_TO_POINTER(MMPLAYER_TAG_INDENT)); + gst_tag_list_unref(tags); + } + } +} - if (player->pending_seek.is_pending) { - LOGW("trying to do pending seek"); - MMPLAYER_CMD_LOCK(player); - __mmplayer_gst_pending_seek(player); - MMPLAYER_CMD_UNLOCK(player); - } - } - } - break; +static void +__mmplayer_stream_notify_cb(GstStreamCollection *collection, + GstStream *stream, GParamSpec *pspec, gpointer data) +{ + LOGD ("Got stream-notify from stream %s for %s (collection %p)\n", + gst_stream_get_stream_id(stream), pspec->name, collection); + if (g_str_equal(pspec->name, "caps")) { + GstCaps *caps = gst_stream_get_caps(stream); + gchar *caps_str = gst_caps_to_string(caps); + LOGD (" New caps: %s\n", caps_str); + g_free(caps_str); + gst_caps_unref(caps); + } - case GST_STATE_PLAYING: - { - if (MMPLAYER_IS_STREAMING(player)) { - // managed prepare async case when buffering is completed - // pending state should be reset otherwise, it's still playing even though it's resumed after bufferging. 
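/* Related illustration (a generic GStreamer sketch, not code from this patch): once a
 * GstStreamCollection has been received and dumped as in __mmplayer_dump_collection()
 * above, an application can request a different selection from uridecodebin3/decodebin3
 * by sending a select-streams event built from the desired stream IDs:
 *
 *     GList *selected = NULL;
 *     selected = g_list_append(selected, (gpointer)gst_stream_get_stream_id(stream));
 *     gst_element_send_event(pipeline, gst_event_new_select_streams(selected));
 *     g_list_free(selected);
 *
 * Here "pipeline" and "stream" are placeholders for the playback pipeline and a
 * GstStream taken from the collection.
 */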
- if ((MMPLAYER_CURRENT_STATE(player) != MM_PLAYER_STATE_PLAYING) || - (MMPLAYER_PENDING_STATE(player) == MM_PLAYER_STATE_PLAYING)) - MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_PLAYING); + if (g_str_equal (pspec->name, "tags")) { + GstTagList *tags = gst_stream_get_tags(stream); + if (tags) { + LOGD (" tags:\n"); + gst_tag_list_foreach(tags, __mmplayer_print_tag_foreach, GUINT_TO_POINTER(MMPLAYER_TAG_INDENT)); + gst_tag_list_unref(tags); + } + } +} - if (MMPLAYER_IS_RTSP_STREAMING(player) && (MMPLAYER_IS_LIVE_STREAMING(player))) { +static void +__mmplayer_gst_bus_msg_callback(GstMessage *msg, gpointer data) +{ + mmplayer_t *player = (mmplayer_t *)(data); - LOGD("Current Buffering Percent = %d", player->streamer->buffering_percent); - if (player->streamer->buffering_percent < 100) { + MMPLAYER_RETURN_IF_FAIL(player); + MMPLAYER_RETURN_IF_FAIL(msg && GST_IS_MESSAGE(msg)); - MMMessageParamType msg_param = {0, }; - LOGW("Posting Buffering Completed Message to Application !!!"); + switch (GST_MESSAGE_TYPE(msg)) { + case GST_MESSAGE_UNKNOWN: + LOGD("unknown message received"); + break; - msg_param.connection.buffering = 100; - MMPLAYER_POST_MSG(player, MM_MESSAGE_BUFFERING, &msg_param); - } - } - } + case GST_MESSAGE_EOS: + LOGD("GST_MESSAGE_EOS received"); + __mmplayer_gst_handle_eos_message(player, msg); + break; - if (player->gapless.stream_changed) { - __mmplayer_update_content_attrs(player, ATTR_ALL); - player->gapless.stream_changed = FALSE; - } + case GST_MESSAGE_ERROR: + _mmplayer_set_reconfigure_state(player, FALSE); + __mmplayer_gst_handle_error_message(player, msg); + break; - if (player->seek_state == MMPLAYER_SEEK_COMPLETED) { - player->seek_state = MMPLAYER_SEEK_NONE; - MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL); - } - } - break; + case GST_MESSAGE_WARNING: + { + char *debug = NULL; + GError *error = NULL; - default: - break; - } + gst_message_parse_warning(msg, &error, &debug); + + LOGD("warning : %s", error->message); + LOGD("debug : %s", debug); + + MMPLAYER_POST_MSG(player, MM_MESSAGE_WARNING, NULL); + + MMPLAYER_FREEIF(debug); + g_error_free(error); + } + break; + + case GST_MESSAGE_TAG: + { + LOGD("GST_MESSAGE_TAG"); + if (!__mmplayer_gst_extract_tag_from_msg(player, msg)) + LOGW("failed to extract tags from gstmessage"); } break; + case GST_MESSAGE_BUFFERING: + __mmplayer_gst_handle_buffering_message(player, msg); + break; + + case GST_MESSAGE_STATE_CHANGED: + __mmplayer_gst_handle_state_message(player, msg); + break; + case GST_MESSAGE_CLOCK_LOST: { GstClock *clock = NULL; gboolean need_new_clock = FALSE; gst_message_parse_clock_lost(msg, &clock); - LOGD("GST_MESSAGE_CLOCK_LOST : %s\n", (clock ? GST_OBJECT_NAME(clock) : "NULL")); + LOGD("GST_MESSAGE_CLOCK_LOST : %s", (clock ? GST_OBJECT_NAME(clock) : "NULL")); if (!player->videodec_linked) need_new_clock = TRUE; @@ -1875,9 +2040,9 @@ __mmplayer_gst_callback(GstMessage *msg, gpointer data) need_new_clock = TRUE; if (need_new_clock) { - LOGD("Provide clock is TRUE, do pause->resume\n"); - __mmplayer_gst_pause(player, FALSE); - __mmplayer_gst_resume(player, FALSE); + LOGD("Provide clock is TRUE, do pause->resume"); + _mmplayer_gst_pause(player, FALSE); + _mmplayer_gst_resume(player, FALSE); } } break; @@ -1886,335 +2051,1445 @@ __mmplayer_gst_callback(GstMessage *msg, gpointer data) { GstClock *clock = NULL; gst_message_parse_new_clock(msg, &clock); - LOGD("GST_MESSAGE_NEW_CLOCK : %s\n", (clock ? GST_OBJECT_NAME(clock) : "NULL")); + LOGD("GST_MESSAGE_NEW_CLOCK : %s", (clock ? 
GST_OBJECT_NAME(clock) : "NULL")); } break; case GST_MESSAGE_ELEMENT: - { - const gchar *structure_name; - gint count = 0, idx = 0; - MMHandleType attrs = 0; + __mmplayer_gst_handle_element_message(player, msg); + break; + + case GST_MESSAGE_DURATION_CHANGED: + { + LOGD("GST_MESSAGE_DURATION_CHANGED"); + if (!__mmplayer_gst_handle_duration(player, msg)) + LOGW("failed to update duration"); + } + break; + + case GST_MESSAGE_ASYNC_START: + LOGD("GST_MESSAGE_ASYNC_START : %s", GST_ELEMENT_NAME(GST_MESSAGE_SRC(msg))); + break; + + case GST_MESSAGE_ASYNC_DONE: + __mmplayer_gst_handle_async_done_message(player, msg); + break; + case GST_MESSAGE_STREAM_COLLECTION: + { + GstStreamCollection *collection = NULL; + LOGD("GST_MESSAGE_STREAM_COLLECTION : %s", GST_ELEMENT_NAME(GST_MESSAGE_SRC(msg))); + + gst_message_parse_stream_collection(msg, &collection); + if (collection) { + __mmplayer_dump_collection(collection); + if (player->collection && player->stream_notify_id) { + g_signal_handler_disconnect(player->collection, player->stream_notify_id); + player->stream_notify_id = 0; + } + gst_object_replace((GstObject **)&player->collection, (GstObject *)collection); + if (player->collection) { + player->stream_notify_id = g_signal_connect(player->collection, "stream-notify", + (GCallback)__mmplayer_stream_notify_cb, player); + } + gst_object_unref(collection); + } + } break; + case GST_MESSAGE_STREAMS_SELECTED: + { + GstStreamCollection *collection = NULL; + LOGD("GST_MESSAGE_STREAMS_SELECTED : %s", GST_ELEMENT_NAME(GST_MESSAGE_SRC(msg))); + + gst_message_parse_streams_selected(msg, &collection); + if (collection) { + guint i = 0, len = 0; + len = gst_message_streams_selected_get_size(msg); + for (i = 0; i < len; i++) { + GstStream *stream = gst_message_streams_selected_get_stream(msg, i); + LOGD (" Stream #%d : %s\n", i, gst_stream_get_stream_id(stream)); + gst_object_unref(stream); + } + gst_object_unref (collection); + } + } break; + +#ifdef __DEBUG__ + case GST_MESSAGE_REQUEST_STATE: LOGD("GST_MESSAGE_REQUEST_STATE"); break; + case GST_MESSAGE_STEP_START: LOGD("GST_MESSAGE_STEP_START"); break; + case GST_MESSAGE_QOS: LOGD("GST_MESSAGE_QOS"); break; + case GST_MESSAGE_PROGRESS: LOGD("GST_MESSAGE_PROGRESS"); break; + case GST_MESSAGE_ANY: LOGD("GST_MESSAGE_ANY"); break; + case GST_MESSAGE_INFO: LOGD("GST_MESSAGE_STATE_DIRTY"); break; + case GST_MESSAGE_STATE_DIRTY: LOGD("GST_MESSAGE_STATE_DIRTY"); break; + case GST_MESSAGE_STEP_DONE: LOGD("GST_MESSAGE_STEP_DONE"); break; + case GST_MESSAGE_CLOCK_PROVIDE: LOGD("GST_MESSAGE_CLOCK_PROVIDE"); break; + case GST_MESSAGE_STRUCTURE_CHANGE: LOGD("GST_MESSAGE_STRUCTURE_CHANGE"); break; + case GST_MESSAGE_STREAM_STATUS: LOGD("GST_MESSAGE_STREAM_STATUS"); break; + case GST_MESSAGE_APPLICATION: LOGD("GST_MESSAGE_APPLICATION"); break; + case GST_MESSAGE_SEGMENT_START: LOGD("GST_MESSAGE_SEGMENT_START"); break; + case GST_MESSAGE_SEGMENT_DONE: LOGD("GST_MESSAGE_SEGMENT_DONE"); break; + case GST_MESSAGE_LATENCY: LOGD("GST_MESSAGE_LATENCY"); break; +#endif + + default: + break; + } + + /* should not call 'gst_message_unref(msg)' */ + return; +} + +static GstBusSyncReply +__mmplayer_gst_bus_sync_callback(GstBus *bus, GstMessage *message, gpointer data) +{ + mmplayer_t *player = (mmplayer_t *)data; + GstBusSyncReply reply = GST_BUS_DROP; + + if (!(player->pipeline && player->pipeline->mainbin)) { + LOGE("player pipeline handle is null"); + return GST_BUS_PASS; + } + + if (!__mmplayer_gst_check_useful_message(player, message)) { + gst_message_unref(message); + return 
GST_BUS_DROP; + } + + switch (GST_MESSAGE_TYPE(message)) { + case GST_MESSAGE_TAG: + __mmplayer_gst_extract_tag_from_msg(player, message); + +#ifdef __DEBUG__ + { + GstTagList *tags = NULL; + + gst_message_parse_tag(message, &tags); + if (tags) { + LOGE("TAGS received from element \"%s\".", + GST_STR_NULL(GST_ELEMENT_NAME(GST_MESSAGE_SRC(message)))); + + gst_tag_list_foreach(tags, print_tag, NULL); + gst_tag_list_unref(tags); + tags = NULL; + } + break; + } +#endif + break; + + case GST_MESSAGE_DURATION_CHANGED: + __mmplayer_gst_handle_duration(player, message); + break; + case GST_MESSAGE_ELEMENT: + { + const gchar *klass = NULL; + klass = gst_element_factory_get_metadata + (gst_element_get_factory((GstElement *)message->src), GST_ELEMENT_METADATA_KLASS); + if (!klass || !g_strrstr(klass, "Codec/Decoder")) { + reply = GST_BUS_PASS; + break; + } + __mmplayer_gst_handle_element_message(player, message); + } + break; + case GST_MESSAGE_ASYNC_DONE: + /* NOTE:Don't call gst_callback directly + * because previous frame can be showed even though this message is received for seek. + */ + default: + reply = GST_BUS_PASS; + break; + } + + if (reply == GST_BUS_DROP) + gst_message_unref(message); + + return reply; +} + +static void +__mmplayer_gst_appsrc_feed_data_mem(GstElement *element, guint size, gpointer user_data) +{ + GstElement *appsrc = element; + mmplayer_input_buffer_t *buf = (mmplayer_input_buffer_t *)user_data; + GstBuffer *buffer = NULL; + GstFlowReturn ret = GST_FLOW_OK; + gint len = size; + + MMPLAYER_RETURN_IF_FAIL(element); + MMPLAYER_RETURN_IF_FAIL(buf); + + buffer = gst_buffer_new(); + + if (buf->offset < 0 || buf->len < 0) { + LOGE("invalid buf info %d %d", buf->offset, buf->len); + return; + } + + if (buf->offset >= buf->len) { + LOGD("call eos appsrc"); + g_signal_emit_by_name(appsrc, "end-of-stream", &ret); + return; + } + + if (buf->len - buf->offset < size) + len = buf->len - buf->offset; + + gst_buffer_insert_memory(buffer, -1, gst_memory_new_wrapped(0, (guint8 *)(buf->buf + buf->offset), len, 0, len, NULL, NULL)); + GST_BUFFER_OFFSET(buffer) = (guint64)buf->offset; + GST_BUFFER_OFFSET_END(buffer) = (guint64)(buf->offset + len); + +#ifdef __DEBUG__ + LOGD("feed buffer %p, offset %u-%u length %u", buffer, buf->offset, (buf->offset+len), len); +#endif + g_signal_emit_by_name(appsrc, "push-buffer", buffer, &ret); + + buf->offset += len; +} + +static gboolean +__mmplayer_gst_appsrc_seek_data_mem(GstElement *element, guint64 size, gpointer user_data) +{ + mmplayer_input_buffer_t *buf = (mmplayer_input_buffer_t *)user_data; + + MMPLAYER_RETURN_VAL_IF_FAIL(buf, FALSE); + + buf->offset = (int)size; + + return TRUE; +} + +void +__mmplayer_gst_appsrc_feed_data(GstElement *element, guint size, gpointer user_data) +{ + mmplayer_t *player = (mmplayer_t *)user_data; + mmplayer_stream_type_e stream_type = MM_PLAYER_STREAM_TYPE_DEFAULT; + MMMessageParamType msg_param = {0,}; + guint64 current_level_bytes = 0; + + MMPLAYER_RETURN_IF_FAIL(player); + + if (g_strrstr(GST_ELEMENT_NAME(element), "audio")) { + stream_type = MM_PLAYER_STREAM_TYPE_AUDIO; + } else if (g_strrstr(GST_ELEMENT_NAME(element), "video")) { + stream_type = MM_PLAYER_STREAM_TYPE_VIDEO; + } else { + LOGW("invalid feed-data signal from %s", GST_ELEMENT_NAME(element)); + return; + } + + g_object_get(G_OBJECT(element), "current-level-bytes", ¤t_level_bytes, NULL); + + LOGI("stream type: %d, level: %"G_GUINT64_FORMAT, stream_type, current_level_bytes); + + msg_param.union_type = MM_MSG_UNION_BUFFER_STATUS; + 
msg_param.buffer_status.stream_type = stream_type; + msg_param.buffer_status.status = MM_PLAYER_MEDIA_STREAM_BUFFER_UNDERRUN; + msg_param.buffer_status.bytes = current_level_bytes; + + MMPLAYER_POST_MSG(player, MM_MESSAGE_PUSH_BUFFER_STATUS, &msg_param); +} + +void +__mmplayer_gst_appsrc_enough_data(GstElement *element, gpointer user_data) +{ + mmplayer_t *player = (mmplayer_t *)user_data; + mmplayer_stream_type_e stream_type = MM_PLAYER_STREAM_TYPE_DEFAULT; + MMMessageParamType msg_param = {0,}; + guint64 current_level_bytes = 0; + + MMPLAYER_RETURN_IF_FAIL(player); + + if (g_strrstr(GST_ELEMENT_NAME(element), "audio")) { + stream_type = MM_PLAYER_STREAM_TYPE_AUDIO; + } else if (g_strrstr(GST_ELEMENT_NAME(element), "video")) { + stream_type = MM_PLAYER_STREAM_TYPE_VIDEO; + } else { + LOGW("invalid enough-data signal from %s", GST_ELEMENT_NAME(element)); + return; + } + + g_object_get(G_OBJECT(element), "current-level-bytes", ¤t_level_bytes, NULL); + + LOGI("stream type: %d, level: %"G_GUINT64_FORMAT, stream_type, current_level_bytes); + + msg_param.union_type = MM_MSG_UNION_BUFFER_STATUS; + msg_param.buffer_status.stream_type = stream_type; + msg_param.buffer_status.status = MM_PLAYER_MEDIA_STREAM_BUFFER_OVERFLOW; + msg_param.buffer_status.bytes = current_level_bytes; + + MMPLAYER_POST_MSG(player, MM_MESSAGE_PUSH_BUFFER_STATUS, &msg_param); +} + +gboolean +__mmplayer_gst_appsrc_seek_data(GstElement *element, guint64 position, gpointer user_data) +{ + mmplayer_t *player = (mmplayer_t *)user_data; + mmplayer_stream_type_e stream_type = MM_PLAYER_STREAM_TYPE_DEFAULT; + MMMessageParamType msg_param = {0,}; + + MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE); + + if (g_strrstr(GST_ELEMENT_NAME(element), "audio")) { + stream_type = MM_PLAYER_STREAM_TYPE_AUDIO; + } else if (g_strrstr(GST_ELEMENT_NAME(element), "video")) { + stream_type = MM_PLAYER_STREAM_TYPE_VIDEO; + } else { + LOGW("invalid seek-data signal from %s", GST_ELEMENT_NAME(element)); + return TRUE; + } + + LOGD("stream type: %d, pos: %"G_GUINT64_FORMAT, stream_type, position); + + msg_param.union_type = MM_MSG_UNION_SEEK_DATA; + msg_param.seek_data.stream_type = stream_type; + msg_param.seek_data.offset = position; + + MMPLAYER_POST_MSG(player, MM_MESSAGE_PUSH_BUFFER_SEEK_DATA, &msg_param); + + return TRUE; +} + +static gboolean +__mmplayer_gst_create_es_decoder(mmplayer_t *player, mmplayer_stream_type_e type, GstPad *srcpad) +{ +#define MAX_LEN_NAME 20 + + gboolean ret = FALSE; + GstPad *sinkpad = NULL; + gchar *prefix = NULL; + gchar dec_name[MAX_LEN_NAME] = {0, }; + main_element_id_e elem_id = MMPLAYER_M_NUM; + + mmplayer_gst_element_t *mainbin = NULL; + GstElement *decodebin = NULL; + GstCaps *dec_caps = NULL; + + MMPLAYER_FENTER(); + + MMPLAYER_RETURN_VAL_IF_FAIL(player && + player->pipeline && + player->pipeline->mainbin, FALSE); + MMPLAYER_RETURN_VAL_IF_FAIL(srcpad, FALSE); + + mainbin = player->pipeline->mainbin; + switch (type) { + case MM_PLAYER_STREAM_TYPE_AUDIO: + prefix = "audio"; + elem_id = MMPLAYER_M_AUTOPLUG_A_DEC; + break; + case MM_PLAYER_STREAM_TYPE_VIDEO: + prefix = "video"; + elem_id = MMPLAYER_M_AUTOPLUG_V_DEC; + break; + default: + LOGE("invalid type %d", type); + return FALSE; + } + + if (mainbin[elem_id].gst) { + LOGE("elem(%d) is already created", elem_id); + return FALSE; + } + + snprintf(dec_name, sizeof(dec_name), "%s_decodebin", prefix); + + /* create decodebin */ + decodebin = gst_element_factory_make("decodebin", dec_name); + if (!decodebin) { + LOGE("failed to create %s", dec_name); + return FALSE; + } + 
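	/* For orientation, a decodebin "pad-added" handler generally follows the pattern
	 * sketched below: inspect the caps of the newly exposed source pad and link it to a
	 * downstream sink pad. This is only a generic GStreamer illustration; the callback
	 * actually wired up a few lines further down is _mmplayer_gst_decode_pad_added().
	 *
	 *     static void
	 *     on_decodebin_pad_added(GstElement *dbin, GstPad *new_pad, gpointer user_data)
	 *     {
	 *         GstPad *sinkpad = (GstPad *)user_data;   // sink pad of the next element (placeholder)
	 *         GstCaps *pad_caps = gst_pad_get_current_caps(new_pad);
	 *
	 *         if (pad_caps) {
	 *             gchar *caps_str = gst_caps_to_string(pad_caps);
	 *             LOGD("exposed pad caps : %s", caps_str);
	 *             g_free(caps_str);
	 *             gst_caps_unref(pad_caps);
	 *         }
	 *
	 *         if (!gst_pad_is_linked(sinkpad) && gst_pad_link(new_pad, sinkpad) != GST_PAD_LINK_OK)
	 *             LOGE("failed to link new decodebin pad");
	 *     }
	 */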
+ mainbin[elem_id].id = elem_id; + mainbin[elem_id].gst = decodebin; + + /* raw pad handling signal */ + _mmplayer_add_signal_connection(player, G_OBJECT(decodebin), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "pad-added", + G_CALLBACK(_mmplayer_gst_decode_pad_added), (gpointer)player); + + /* This signal is emitted whenever decodebin finds a new stream. It is emitted + before looking for any elements that can handle that stream.*/ + _mmplayer_add_signal_connection(player, G_OBJECT(decodebin), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "autoplug-select", + G_CALLBACK(_mmplayer_gst_decode_autoplug_select), (gpointer)player); + + if (player->need_video_dec_sorting || player->need_audio_dec_sorting) + _mmplayer_add_signal_connection(player, G_OBJECT(decodebin), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "autoplug-sort", + G_CALLBACK(_mmplayer_gst_decode_autoplug_sort), (gpointer)player); + + /* This signal is emitted when a element is added to the bin.*/ + _mmplayer_add_signal_connection(player, G_OBJECT(decodebin), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "element-added", + G_CALLBACK(_mmplayer_gst_element_added), (gpointer)player); + + if (!gst_bin_add(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), decodebin)) { + LOGE("failed to add new decodebin"); + return FALSE; + } + + dec_caps = gst_pad_query_caps(srcpad, NULL); + if (dec_caps) { +#ifdef __DEBUG__ + LOGD("got pad %s:%s , dec_caps %" GST_PTR_FORMAT, GST_DEBUG_PAD_NAME(srcpad), dec_caps); +#endif + g_object_set(G_OBJECT(decodebin), "sink-caps", dec_caps, NULL); + gst_caps_unref(dec_caps); + } + + sinkpad = gst_element_get_static_pad(decodebin, "sink"); + + if (!sinkpad || gst_pad_link(srcpad, sinkpad) != GST_PAD_LINK_OK) { + LOGE("failed to link [%s:%s] to decoder", GST_DEBUG_PAD_NAME(srcpad)); + goto ERROR; + } + gst_object_unref(GST_OBJECT(sinkpad)); + + gst_element_sync_state_with_parent(decodebin); + MMPLAYER_FLEAVE(); + return TRUE; + +ERROR: + if (sinkpad) + gst_object_unref(GST_OBJECT(sinkpad)); + + if (mainbin[elem_id].gst) { + gst_element_set_state(mainbin[elem_id].gst, GST_STATE_NULL); + gst_bin_remove(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), mainbin[elem_id].gst); + gst_object_unref(mainbin[elem_id].gst); + mainbin[elem_id].gst = NULL; + } + + MMPLAYER_FLEAVE(); + return ret; +} + +static gboolean +__mmplayer_gst_create_es_path(mmplayer_t *player, mmplayer_stream_type_e type, GstCaps *caps) +{ +#define MAX_LEN_NAME 20 + mmplayer_gst_element_t *mainbin = NULL; + gchar *prefix = NULL; + main_element_id_e src_id = MMPLAYER_M_NUM, queue_id = MMPLAYER_M_NUM; + + gchar src_name[MAX_LEN_NAME] = {0, }, queue_name[MAX_LEN_NAME] = {0, }; + GstElement *src = NULL, *queue = NULL; + GstPad *srcpad = NULL; + + MMPLAYER_FENTER(); + MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline && + player->pipeline->mainbin, FALSE); + + mainbin = player->pipeline->mainbin; + + LOGD("type(%d) path is creating", type); + switch (type) { + case MM_PLAYER_STREAM_TYPE_AUDIO: + prefix = "audio"; + if (mainbin[MMPLAYER_M_SRC].gst) + src_id = MMPLAYER_M_2ND_SRC; + else + src_id = MMPLAYER_M_SRC; + queue_id = MMPLAYER_M_A_BUFFER; + break; + case MM_PLAYER_STREAM_TYPE_VIDEO: + prefix = "video"; + src_id = MMPLAYER_M_SRC; + queue_id = MMPLAYER_M_V_BUFFER; + break; + case MM_PLAYER_STREAM_TYPE_TEXT: + prefix = "subtitle"; + src_id = MMPLAYER_M_SUBSRC; + queue_id = MMPLAYER_M_S_BUFFER; + break; + default: + LOGE("invalid type %d", type); + return FALSE; + } + + snprintf(src_name, sizeof(src_name), "%s_appsrc", prefix); + snprintf(queue_name, sizeof(queue_name), "%s_queue", prefix); + + /* create source */ + src = 
gst_element_factory_make("appsrc", src_name); + if (!src) { + LOGF("failed to create %s", src_name); + goto ERROR; + } + + mainbin[src_id].id = src_id; + mainbin[src_id].gst = src; + + g_object_set(G_OBJECT(src), "format", GST_FORMAT_TIME, + "caps", caps, NULL); + + /* size of many video frames are larger than default blocksize as 4096 */ + if (type == MM_PLAYER_STREAM_TYPE_VIDEO) + g_object_set(G_OBJECT(src), "blocksize", (guint)1048576, NULL); + + if (player->media_stream_buffer_max_size[type] > 0) + g_object_set(G_OBJECT(src), "max-bytes", player->media_stream_buffer_max_size[type], NULL); + + if (player->media_stream_buffer_min_percent[type] > 0) + g_object_set(G_OBJECT(src), "min-percent", player->media_stream_buffer_min_percent[type], NULL); + + /*Fix Seek External Demuxer: set audio and video appsrc as seekable */ + gst_app_src_set_stream_type((GstAppSrc*)G_OBJECT(src), GST_APP_STREAM_TYPE_SEEKABLE); + + _mmplayer_add_signal_connection(player, G_OBJECT(src), MM_PLAYER_SIGNAL_TYPE_OTHERS, "seek-data", + G_CALLBACK(__mmplayer_gst_appsrc_seek_data), (gpointer)player); + _mmplayer_add_signal_connection(player, G_OBJECT(src), MM_PLAYER_SIGNAL_TYPE_OTHERS, "need-data", + G_CALLBACK(__mmplayer_gst_appsrc_feed_data), (gpointer)player); + _mmplayer_add_signal_connection(player, G_OBJECT(src), MM_PLAYER_SIGNAL_TYPE_OTHERS, "enough-data", + G_CALLBACK(__mmplayer_gst_appsrc_enough_data), (gpointer)player); + + /* create queue */ + queue = gst_element_factory_make("queue2", queue_name); + if (!queue) { + LOGE("failed to create %s", queue_name); + goto ERROR; + } + g_object_set(G_OBJECT(queue), "max-size-buffers", 2, NULL); + + mainbin[queue_id].id = queue_id; + mainbin[queue_id].gst = queue; + + if (!gst_bin_add(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), mainbin[src_id].gst)) { + LOGE("failed to add src"); + goto ERROR; + } + + if (!gst_bin_add(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), mainbin[queue_id].gst)) { + LOGE("failed to add queue"); + goto ERROR; + } + + if (!gst_element_link(mainbin[src_id].gst, mainbin[queue_id].gst)) { + LOGE("failed to link src and queue"); + goto ERROR; + } + + /* create decoder */ + srcpad = gst_element_get_static_pad(mainbin[queue_id].gst, "src"); + if (!srcpad) { + LOGE("failed to get srcpad of queue"); + goto ERROR; + } + + if (type == MM_PLAYER_STREAM_TYPE_TEXT) { + _mmplayer_gst_create_decoder(player, srcpad, caps); + } else { + if (!__mmplayer_gst_create_es_decoder(player, type, srcpad)) { + LOGE("failed to create decoder"); + gst_object_unref(GST_OBJECT(srcpad)); + goto ERROR; + } + } + gst_object_unref(GST_OBJECT(srcpad)); + return TRUE; + +ERROR: + if (mainbin[src_id].gst) { + gst_element_set_state(mainbin[src_id].gst, GST_STATE_NULL); + gst_bin_remove(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), mainbin[src_id].gst); + gst_object_unref(mainbin[src_id].gst); + mainbin[src_id].gst = NULL; + } + + if (mainbin[queue_id].gst) { + gst_element_set_state(mainbin[queue_id].gst, GST_STATE_NULL); + gst_bin_remove(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), mainbin[queue_id].gst); + gst_object_unref(mainbin[queue_id].gst); + mainbin[queue_id].gst = NULL; + } + + return FALSE; +} + +static void +__mmplayer_gst_rtp_dynamic_pad(GstElement *element, GstPad *pad, gpointer data) +{ + GstPad *sinkpad = NULL; + GstCaps *caps = NULL; + GstElement *new_element = NULL; + GstStructure *str = NULL; + const gchar *name = NULL; + + mmplayer_t *player = (mmplayer_t *)data; + + MMPLAYER_FENTER(); + + MMPLAYER_RETURN_IF_FAIL(element && pad); + MMPLAYER_RETURN_IF_FAIL(player && + player->pipeline && + 
player->pipeline->mainbin); + + /* payload type is recognizable. increase num_dynamic and wait for sinkbin creation. + * num_dynamic_pad will decreased after creating a sinkbin. + */ + player->num_dynamic_pad++; + LOGD("stream count inc : %d", player->num_dynamic_pad); + + caps = gst_pad_query_caps(pad, NULL); + MMPLAYER_CHECK_NULL(caps); + + str = gst_caps_get_structure(caps, 0); + name = gst_structure_get_string(str, "media"); + if (!name) { + LOGE("cannot get mimetype from structure."); + goto ERROR; + } + + if (strstr(name, "video")) { + gint stype = 0; + mm_attrs_get_int_by_name(player->attrs, "display_surface_type", &stype); + + if ((stype == MM_DISPLAY_SURFACE_NULL) && (!player->set_mode.video_export)) { + if (player->v_stream_caps) { + gst_caps_unref(player->v_stream_caps); + player->v_stream_caps = NULL; + } + + new_element = gst_element_factory_make("fakesink", NULL); + player->num_dynamic_pad--; + goto NEW_ELEMENT; + } + } + + if (!_mmplayer_gst_create_decoder(player, pad, caps)) { + LOGE("failed to autoplug for caps"); + goto ERROR; + } + + gst_caps_unref(caps); + caps = NULL; + +NEW_ELEMENT: + + /* excute new_element if created*/ + if (new_element) { + LOGD("adding new element to pipeline"); + + /* set state to READY before add to bin */ + MMPLAYER_ELEMENT_SET_STATE(new_element, GST_STATE_READY); + + /* add new element to the pipeline */ + if (FALSE == gst_bin_add(GST_BIN(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst), new_element)) { + LOGE("failed to add autoplug element to bin"); + goto ERROR; + } + + /* get pad from element */ + sinkpad = gst_element_get_static_pad(GST_ELEMENT(new_element), "sink"); + if (!sinkpad) { + LOGE("failed to get sinkpad from autoplug element"); + goto ERROR; + } + + /* link it */ + if (GST_PAD_LINK_OK != gst_pad_link(pad, sinkpad)) { + LOGE("failed to link autoplug element"); + goto ERROR; + } + + gst_object_unref(sinkpad); + sinkpad = NULL; + + /* run. setting PLAYING here since streamming source is live source */ + MMPLAYER_ELEMENT_SET_STATE(new_element, GST_STATE_PLAYING); + } + + if (caps) + gst_caps_unref(caps); + + MMPLAYER_FLEAVE(); + + return; + +STATE_CHANGE_FAILED: +ERROR: + /* FIXIT : take care if new_element has already added to pipeline */ + if (new_element) + gst_object_unref(GST_OBJECT(new_element)); + + if (sinkpad) + gst_object_unref(GST_OBJECT(sinkpad)); + + if (caps) + gst_caps_unref(caps); + + /* FIXIT : how to inform this error to MSL ????? */ + /* FIXIT : I think we'd better to use g_idle_add() to destroy pipeline and + * then post an error to application + */ +} + +static void +__mmplayer_gst_rtp_no_more_pads(GstElement *element, gpointer data) +{ + mmplayer_t *player = (mmplayer_t *)data; + + MMPLAYER_FENTER(); + + /* NOTE : we can remove fakesink here if there's no rtp_dynamic_pad. because whenever + * we connect autoplugging element to the pad which is just added to rtspsrc, we increase + * num_dynamic_pad. and this is no-more-pad situation which means no more pad will be added. + * So we can say this. if num_dynamic_pad is zero, it must be one of followings + + * [1] audio and video will be dumped with filesink. + * [2] autoplugging is done by just using pad caps. + * [3] typefinding has happend in audio but audiosink is created already before no-more-pad signal + * and the video will be dumped via filesink. + */ + if (player->num_dynamic_pad == 0) { + LOGD("it seems pad caps is directely used for autoplugging. 
removing fakesink now"); + + if (!_mmplayer_gst_remove_fakesink(player, + &player->pipeline->mainbin[MMPLAYER_M_SRC_FAKESINK])) + /* NOTE : _mmplayer_pipeline_complete() can be called several time. because + * signaling mechanism(pad-added, no-more-pad, new-decoded-pad) from various + * source element are not same. To overcome this situation, this function will called + * several places and several times. Therefore, this is not an error case. + */ + return; + } + + /* create dot before error-return. for debugging */ + MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-no-more-pad"); + + player->no_more_pad = TRUE; + + MMPLAYER_FLEAVE(); +} + +static GstElement * +__mmplayer_gst_make_rtsp_src(mmplayer_t *player) +{ + GstElement *element = NULL; + gchar *user_agent = NULL; + MMHandleType attrs = 0; + + MMPLAYER_FENTER(); + MMPLAYER_RETURN_VAL_IF_FAIL(player, NULL); + + /* get profile attribute */ + attrs = MMPLAYER_GET_ATTRS(player); + if (!attrs) { + LOGE("failed to get content attribute"); + return NULL; + } + + element = gst_element_factory_make("rtspsrc", "rtsp source"); + if (!element) { + LOGE("failed to create rtspsrc element"); + return NULL; + } + + /* get attribute */ + mm_attrs_get_string_by_name(attrs, "streaming_user_agent", &user_agent); + + SECURE_LOGD("user_agent : %s", user_agent); + + /* setting property to streaming source */ + g_object_set(G_OBJECT(element), "location", player->profile.uri, NULL); + if (user_agent) + g_object_set(G_OBJECT(element), "user-agent", user_agent, NULL); + + _mmplayer_add_signal_connection(player, G_OBJECT(element), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "pad-added", + G_CALLBACK(__mmplayer_gst_rtp_dynamic_pad), (gpointer)player); + _mmplayer_add_signal_connection(player, G_OBJECT(element), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "no-more-pads", + G_CALLBACK(__mmplayer_gst_rtp_no_more_pads), (gpointer)player); + + MMPLAYER_FLEAVE(); + return element; +} + +void __mmplayer_http_src_setup(GstElement *source, gpointer data) +{ +#define HTTP_SOURCE_BLOCK_SIZE (64 * 1024) + + mmplayer_t *player = (mmplayer_t *)data; + MMHandleType attrs = 0; + gchar *user_agent, *cookies, **cookie_list; + gint http_timeout = DEFAULT_HTTP_TIMEOUT; + user_agent = cookies = NULL; + cookie_list = NULL; + + MMPLAYER_FENTER(); + MMPLAYER_RETURN_IF_FAIL(player); + + LOGD("source element %s", GST_ELEMENT_NAME(source)); + + /* get profile attribute */ + attrs = MMPLAYER_GET_ATTRS(player); + if (!attrs) { + LOGE("failed to get content attribute"); + return; + } + + /* get attribute */ + mm_attrs_get_string_by_name(attrs, "streaming_cookie", &cookies); + mm_attrs_get_string_by_name(attrs, "streaming_user_agent", &user_agent); + + if (player->ini.http_timeout != DEFAULT_HTTP_TIMEOUT) + http_timeout = player->ini.http_timeout; + + /* get attribute */ + SECURE_LOGD("cookies : %s", cookies); + SECURE_LOGD("user_agent : %s", user_agent); + LOGD("timeout : %d", http_timeout); + + /* setting property to streaming source */ + g_object_set(G_OBJECT(source), "timeout", http_timeout, "blocksize", (unsigned long)(HTTP_SOURCE_BLOCK_SIZE), NULL); + + /* parsing cookies */ + if ((cookie_list = _mmplayer_get_cookie_list((const char *)cookies))) { + g_object_set(G_OBJECT(source), "cookies", cookie_list, NULL); + g_strfreev(cookie_list); + } + + if (user_agent) + g_object_set(G_OBJECT(source), "user-agent", user_agent, NULL); + + MMPLAYER_FLEAVE(); + return; +} + +static void +__mmplayer_gst_found_source(GObject *object, GObject *orig, GParamSpec *pspec, gpointer data) +{ + mmplayer_t *player = (mmplayer_t 
*)data; + GstElement *source = NULL; + + MMPLAYER_FENTER(); + LOGD("%s >> %s", GST_ELEMENT_NAME(object), pspec->name); + + g_object_get(orig, pspec->name, &source, NULL); + + player->pipeline->mainbin[MMPLAYER_M_SRC].id = MMPLAYER_M_SRC; + player->pipeline->mainbin[MMPLAYER_M_SRC].gst = source; + + if (MMPLAYER_IS_HTTP_STREAMING(player)) { + __mmplayer_http_src_setup(source, data); + } else if (MMPLAYER_IS_RTSP_STREAMING(player)) { + gchar *user_agent = NULL; + + /* get attribute */ + mm_attrs_get_string_by_name(player->attrs, "streaming_user_agent", &user_agent); + + SECURE_LOGD("user_agent : %s", user_agent); + + /* setting property to streaming source */ + if (user_agent) + g_object_set(G_OBJECT(source), "user-agent", user_agent, NULL); + } else if (MMPLAYER_IS_SMOOTH_STREAMING(player)) { + g_object_set(G_OBJECT(source), "timeout", DEFAULT_HTTP_TIMEOUT, NULL); + } else if (player->profile.uri_type == MM_PLAYER_URI_TYPE_MEM) { + g_object_set(source, "stream-type", GST_APP_STREAM_TYPE_RANDOM_ACCESS, + "size", (gint64)player->profile.input_mem.len, "blocksize", 20480, NULL); + + _mmplayer_add_signal_connection(player, G_OBJECT(source), MM_PLAYER_SIGNAL_TYPE_OTHERS, "seek-data", + G_CALLBACK(__mmplayer_gst_appsrc_seek_data_mem), (gpointer)&player->profile.input_mem); + _mmplayer_add_signal_connection(player, G_OBJECT(source), MM_PLAYER_SIGNAL_TYPE_OTHERS, "need-data", + G_CALLBACK(__mmplayer_gst_appsrc_feed_data_mem), (gpointer)&player->profile.input_mem); + } + MMPLAYER_FLEAVE(); +} + +static gint +__mmplayer_gst_select_stream (GstElement * uridecodebin, GstStreamCollection * collection, + GstStream * stream, gpointer data) +{ + gint ret = 0; /* 1: select, 0: skip, -1: depends on decodebin */ + GstStreamType stype = gst_stream_get_stream_type(stream); + mmplayer_t *player = (mmplayer_t *)data; + mmplayer_track_type_e type = MM_PLAYER_TRACK_TYPE_MAX; + GstCaps *caps = gst_stream_get_caps(stream); + gchar *caps_str = NULL; + + LOGD("Stream type %s flags 0x%x", + gst_stream_type_get_name(stype), + gst_stream_get_stream_flags(stream)); + LOGD(" ID: %s", gst_stream_get_stream_id(stream)); + + if (caps) { + caps_str = gst_caps_to_string(caps); + LOGD(" caps: %s", caps_str); + } + + switch (stype) { + case GST_STREAM_TYPE_AUDIO: + { + GstStructure *caps_structure = NULL; + gint samplerate = 0; + gint channels = 0; + + type = MM_PLAYER_TRACK_TYPE_AUDIO; + + if (caps) { + caps_structure = gst_caps_get_structure(caps, 0); + gst_structure_get_int(caps_structure, "rate", &samplerate); + gst_structure_get_int(caps_structure, "channels", &channels); + + if (channels > 0 && samplerate == 0) { + LOGW("Skip corrupted audio stream"); + goto EXIT; + } + + if (g_strrstr(caps_str, "mobile-xmf")) + mm_player_set_attribute((MMHandleType)player, NULL, + "content_audio_codec", "mobile-xmf", strlen("mobile-xmf"), NULL); + } + break; + } + case GST_STREAM_TYPE_VIDEO: + { + GstStructure *caps_structure = NULL; + gint stype = 0; + gint width = 0; + + type = MM_PLAYER_TRACK_TYPE_VIDEO; + + /* do not support multi track video */ + if (player->track[MM_PLAYER_TRACK_TYPE_VIDEO].total_track_num >= 1) + goto EXIT; + + mm_attrs_get_int_by_name(player->attrs, "display_surface_type", &stype); + + /* don't make video because of not required */ + if ((stype == MM_DISPLAY_SURFACE_NULL) && + (!player->set_mode.video_export)) { + LOGD("no need video decoding, skip video stream"); + goto EXIT; + } + + if (caps) { + caps_structure = gst_caps_get_structure(caps, 0); + gst_structure_get_int(caps_structure, "width", &width); + + if 
(width != 0) { + if (player->v_stream_caps) { + gst_caps_unref(player->v_stream_caps); + player->v_stream_caps = NULL; + } + + player->v_stream_caps = gst_caps_copy(caps); + MMPLAYER_LOG_GST_CAPS_TYPE(player->v_stream_caps); + } + } + break; + } + case GST_STREAM_TYPE_TEXT: + type = MM_PLAYER_TRACK_TYPE_TEXT; + break; + default: + LOGW("Skip not supported stream type"); + goto EXIT; + } + + _mmplayer_track_update_stream(player, type, stream); + + if (player->track[type].active_track_index == (player->track[type].total_track_num - 1)) { + LOGD("select this stream, active idx : %d", player->track[type].active_track_index); + if (type == MM_PLAYER_TRACK_TYPE_AUDIO) + _mmplayer_set_audio_attrs(player, caps); + ret = 1; + } + +EXIT: + g_free(caps_str); + if (caps) + gst_caps_unref(caps); + + LOGD("ret %d", ret); + return ret; +} + +static gboolean +__mmplayer_gst_decode_request_resource(GstElement * uridecodebin, GstStreamCollection * collection, + GstStream * stream, gpointer data) +{ + mmplayer_t *player = (mmplayer_t *)data; + GstStreamType stype = gst_stream_get_stream_type(stream); + + MMPLAYER_FENTER(); + MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE); + + LOGD("stream type %s", gst_stream_type_get_name(stype)); + + /* public does not support audio hw decoder at the moment */ + + if (player->hw_resource[MMPLAYER_RESOURCE_TYPE_VIDEO_DECODER] != NULL) { + LOGW("video decoder resource is already acquired, skip it."); + return TRUE; + } + + if (_mmplayer_acquire_hw_resource(player, MMPLAYER_RESOURCE_TYPE_VIDEO_DECODER) != MM_ERROR_NONE) { + LOGE("failed to acquire video decoder resource"); + return FALSE; + } + player->interrupted_by_resource = FALSE; + MMPLAYER_FLEAVE(); + return TRUE; +} + +static void +__mmplayer_gst_deep_element_added(GstElement *bin, GstBin *child, GstElement *element, gpointer data) +{ + gchar *factory_name = NULL; + mmplayer_t *player = (mmplayer_t *)data; + mmplayer_gst_element_t *mainbin = NULL; + + MMPLAYER_FENTER(); + MMPLAYER_RETURN_IF_FAIL(player && player->pipeline && player->pipeline->mainbin); + + factory_name = GST_OBJECT_NAME(gst_element_get_factory(element)); + mainbin = player->pipeline->mainbin; + + LOGD("%s > %s > %s : %s", GST_ELEMENT_NAME(bin), GST_ELEMENT_NAME(child), + factory_name, GST_ELEMENT_NAME(element)); + + /* keep the first typefind reference only */ + if (!mainbin[MMPLAYER_M_TYPEFIND].gst && g_strrstr(factory_name, "typefind")) { // FIXME : not required for local playback+ + mainbin[MMPLAYER_M_TYPEFIND].id = MMPLAYER_M_TYPEFIND; + mainbin[MMPLAYER_M_TYPEFIND].gst = element; + + _mmplayer_add_signal_connection(player, G_OBJECT(element), + MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "have-type", G_CALLBACK(_mmplayer_typefind_have_type), (gpointer)player); + LOGD("typefind reference is added"); + return; + } + + if ((MMPLAYER_IS_STREAMING(player)) && (!MMPLAYER_IS_RTSP_STREAMING(player))) { + /* update queue2 setting */ + if (g_strrstr(factory_name, "queue2") && (!mainbin[MMPLAYER_M_MUXED_S_BUFFER].gst)) { + gint64 dur_bytes = 0L; + muxed_buffer_type_e type = MUXED_BUFFER_TYPE_MEM_QUEUE; + + mainbin[MMPLAYER_M_MUXED_S_BUFFER].id = MMPLAYER_M_MUXED_S_BUFFER; + mainbin[MMPLAYER_M_MUXED_S_BUFFER].gst = element; + + if (!gst_element_query_duration(mainbin[MMPLAYER_M_SRC].gst, GST_FORMAT_BYTES, &dur_bytes)) + LOGW("failed to get duration from source %s", GST_ELEMENT_NAME(mainbin[MMPLAYER_M_SRC].gst)); + + LOGD("type %s, dur_bytes = %"G_GINT64_FORMAT, player->type, dur_bytes); + /* NOTE : in case of ts streaming, player could not get the correct duration info * + * 
skip the pull mode(file or ring buffering) setting. */ + if (dur_bytes > 0) { + if ((!g_strrstr(player->type, "video/mpegts")) && (!g_strrstr(player->type, "application/x-hls"))) { + type = MUXED_BUFFER_TYPE_MEM_RING_BUFFER; + player->streamer->ring_buffer_size = player->ini.http_ring_buffer_size; + } + } else { + dur_bytes = 0; + } + + _mm_player_streaming_set_queue2(player->streamer, + element, + FALSE, + type, + (guint64)dur_bytes); /* no meaning at the moment */ + return; + } + + /* update mq setting */ + if (g_strrstr(factory_name, "parsebin") && (!mainbin[MMPLAYER_M_DEMUXED_S_BUFFER].gst)) { + GstIterator *iter = NULL; + GValue item = {0, }; + GstElement *ch_element = NULL; + GstElementFactory *ch_factory = NULL; + + iter = gst_bin_iterate_recurse(child); + if (iter != NULL) { + while (gst_iterator_next(iter, &item) == GST_ITERATOR_OK) { + ch_element = g_value_get_object(&item); + ch_factory = gst_element_get_factory(ch_element); + LOGD("children factory %s", GST_OBJECT_NAME(ch_factory)); + if (g_strrstr(GST_OBJECT_NAME(ch_factory), "multiqueue")) { + LOGD("get multiqueue"); + player->pipeline->mainbin[MMPLAYER_M_DEMUXED_S_BUFFER].id = MMPLAYER_M_DEMUXED_S_BUFFER; + player->pipeline->mainbin[MMPLAYER_M_DEMUXED_S_BUFFER].gst = ch_element; + + /* in case of multiqueue, max bytes size is defined with fixed value in mm_player_streaming.h */ + _mm_player_streaming_set_multiqueue(player->streamer, ch_element); + g_value_reset(&item); + break; + } + g_value_reset(&item); + } + gst_iterator_free(iter); + } + } + } + + if (g_strrstr(factory_name, "parsebin")) { + int video_codec_type = 0; + int audio_codec_type = 0; + + g_object_set(G_OBJECT(child), "message-forward", TRUE, NULL); + g_object_set(G_OBJECT(element), "message-forward", TRUE, NULL); + + mm_attrs_get_int_by_name(player->attrs, MM_PLAYER_VIDEO_CODEC_TYPE, &video_codec_type); + mm_attrs_get_int_by_name(player->attrs, MM_PLAYER_AUDIO_CODEC_TYPE, &audio_codec_type); + + /* CAUTION: if there is hw decoder, the rank value has to be higher than sw decoder + and codec default type in ini has to be hw. 
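	 * As a rough illustration of that rank requirement (the autoplugger picks the
	 * highest-ranked decoder factory), a platform could raise its hw decoder above
	 * the sw ones at init time; "omxh264dec" below is only a placeholder name:
	 *
	 *     GstRegistry *reg = gst_registry_get();
	 *     GstPluginFeature *hw = gst_registry_lookup_feature(reg, "omxh264dec");
	 *     if (hw) {
	 *         gst_plugin_feature_set_rank(hw, GST_RANK_PRIMARY + 1);
	 *         gst_object_unref(hw);
	 *     }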
+ */ + if (video_codec_type == MM_PLAYER_CODEC_TYPE_SW) + g_object_set(G_OBJECT(child), "force-sw-decoder-for-video", TRUE, NULL); + if (audio_codec_type == MM_PLAYER_CODEC_TYPE_SW) + g_object_set(G_OBJECT(child), "force-sw-decoder-for-audio", TRUE, NULL); + + mainbin[MMPLAYER_M_AUTOPLUG_PARSEBIN].id = MMPLAYER_M_AUTOPLUG_PARSEBIN; + mainbin[MMPLAYER_M_AUTOPLUG_PARSEBIN].gst = element; + _mmplayer_add_signal_connection(player, G_OBJECT(element), + MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "unknown-type", G_CALLBACK(_mmplayer_gst_decode_unknown_type), (gpointer)player); + + _mmplayer_add_signal_connection(player, G_OBJECT(element), + MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "autoplug-continue", G_CALLBACK(_mmplayer_gst_decode_autoplug_continue), (gpointer)player); + + _mmplayer_add_signal_connection(player, G_OBJECT(element), + MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "autoplug-select", G_CALLBACK(_mmplayer_gst_decode_autoplug_select), (gpointer)player); + + _mmplayer_add_signal_connection(player, G_OBJECT(child), + MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "request-resource", G_CALLBACK(__mmplayer_gst_decode_request_resource), (gpointer)player); + + } else { + _mmplayer_gst_element_added((GstElement *)child, element, data); + } + return; +} + +void +__mmplayer_gst_deep_element_removed(GstElement *bin, GstBin *child, GstElement *element, gpointer data) +{ + LOGD("%s > %s > %s", GST_ELEMENT_NAME(bin), GST_ELEMENT_NAME(child), GST_ELEMENT_NAME(element)); + return; +} + +static GstElement * +__mmplayer_gst_make_uridecodebin(mmplayer_t *player) +{ + GstElement *uridecodebin3 = NULL; + + MMPLAYER_FENTER(); + MMPLAYER_RETURN_VAL_IF_FAIL(player, NULL); + + uridecodebin3 = gst_element_factory_make("uridecodebin3", "uridecodebin3"); + if (!uridecodebin3) { + LOGE("failed to create uridecodebin3"); + return NULL; + } + + /* get attribute */ + SECURE_LOGD("uri : %s", player->profile.uri); + + /* setting property to streaming source */ + g_object_set(G_OBJECT(uridecodebin3), "uri", player->profile.uri, + "message-forward", TRUE, + "buffer-size", DEFAULT_BUFFER_SIZE_BYTES, NULL); + + _mmplayer_add_signal_connection(player, G_OBJECT(uridecodebin3), + MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "deep-notify::source", G_CALLBACK(__mmplayer_gst_found_source), (gpointer)player); + + _mmplayer_add_signal_connection(player, G_OBJECT(uridecodebin3), + MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "pad-added", G_CALLBACK(_mmplayer_gst_decode_pad_added), (gpointer)player); + + _mmplayer_add_signal_connection(player, G_OBJECT(uridecodebin3), + MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "pad-removed", G_CALLBACK(_mmplayer_gst_decode_pad_removed), (gpointer)player); - attrs = MMPLAYER_GET_ATTRS(player); - if (!attrs) { - LOGE("cannot get content attribute"); - break; - } + _mmplayer_add_signal_connection(player, G_OBJECT(uridecodebin3), + MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "no-more-pads", G_CALLBACK(_mmplayer_gst_decode_no_more_pads), (gpointer)player); - if (gst_message_get_structure(msg) == NULL) - break; + _mmplayer_add_signal_connection(player, G_OBJECT(uridecodebin3), + MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "select-stream", G_CALLBACK(__mmplayer_gst_select_stream), (gpointer)player); - structure_name = gst_structure_get_name(gst_message_get_structure(msg)); - if (!structure_name) - break; + _mmplayer_add_signal_connection(player, G_OBJECT(uridecodebin3), + MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "about-to-finish", G_CALLBACK(_mmplayer_gst_about_to_finish), (gpointer)player); - LOGD("GST_MESSAGE_ELEMENT %s from %s", structure_name, GST_OBJECT_NAME(GST_MESSAGE_SRC(msg))); + 
_mmplayer_add_signal_connection(player, G_OBJECT(uridecodebin3), + MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "deep-element-added", G_CALLBACK(__mmplayer_gst_deep_element_added), (gpointer)player); - if (!strcmp(structure_name, "adaptive-streaming-variant")) { - const GValue *var_info = NULL; + _mmplayer_add_signal_connection(player, G_OBJECT(uridecodebin3), + MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "deep-element-removed", G_CALLBACK(__mmplayer_gst_deep_element_removed), (gpointer)player); - var_info = gst_structure_get_value(gst_message_get_structure(msg), "video-variant-info"); - if (var_info != NULL) { - if (player->adaptive_info.var_list) - g_list_free_full(player->adaptive_info.var_list, g_free); + if (MMPLAYER_URL_HAS_DASH_SUFFIX(player)) + LOGW("[DASH] this is still experimental feature"); - /* share addr or copy the list */ - player->adaptive_info.var_list = - g_list_copy_deep((GList *)g_value_get_pointer(var_info), (GCopyFunc)__mmplayer_adaptive_var_info, NULL); + MMPLAYER_FLEAVE(); + return uridecodebin3; +} - count = g_list_length(player->adaptive_info.var_list); - if (count > 0) { - VariantData *temp = NULL; +static GstElement * +__mmplayer_gst_make_http_src(mmplayer_t *player) +{ +#define MAX_RETRY_COUNT 10 + GstElement *element = NULL; + MMHandleType attrs = 0; + gchar *user_agent, *cookies, **cookie_list; + gint http_timeout = DEFAULT_HTTP_TIMEOUT; - /* print out for debug */ - LOGD("num of variant_info %d", count); - for (idx = 0; idx < count; idx++) { - temp = g_list_nth_data(player->adaptive_info.var_list, idx); - if (temp) - LOGD("variant(%d) [b]%d [w]%d [h]%d ", idx, temp->bandwidth, temp->width, temp->height); - } - } - } - } + user_agent = cookies = NULL; + cookie_list = NULL; - if (!strcmp(structure_name, "prepare-decode-buffers")) { - gint num_buffers = 0; - gint extra_num_buffers = 0; + MMPLAYER_FENTER(); + MMPLAYER_RETURN_VAL_IF_FAIL(player, NULL); - if (gst_structure_get_int(gst_message_get_structure(msg), "num_buffers", &num_buffers)) { - player->video_num_buffers = num_buffers; - LOGD("video_num_buffers : %d", player->video_num_buffers); - } + /* get profile attribute */ + attrs = MMPLAYER_GET_ATTRS(player); + if (!attrs) { + LOGE("failed to get content attribute"); + return NULL; + } - if (gst_structure_get_int(gst_message_get_structure(msg), "extra_num_buffers", &extra_num_buffers)) { - player->video_extra_num_buffers = extra_num_buffers; - LOGD("num_of_vout_extra num buffers : %d", extra_num_buffers); - } - break; - } + LOGD("using http streamming source [%s]", player->ini.httpsrc_element); - if (!strcmp(structure_name, "Language_list")) { - const GValue *lang_list = NULL; - lang_list = gst_structure_get_value(gst_message_get_structure(msg), "lang_list"); - if (lang_list != NULL) { - count = g_list_length((GList *)g_value_get_pointer(lang_list)); - if (count > 1) - LOGD("Total audio tracks(from parser) = %d \n", count); - } - } + element = gst_element_factory_make(player->ini.httpsrc_element, "http_streaming_source"); + if (!element) { + LOGE("failed to create http streaming source element[%s]", player->ini.httpsrc_element); + return NULL; + } - if (!strcmp(structure_name, "Ext_Sub_Language_List")) { - const GValue *lang_list = NULL; - MMPlayerLangStruct *temp = NULL; - - lang_list = gst_structure_get_value(gst_message_get_structure(msg), "lang_list"); - if (lang_list != NULL) { - count = g_list_length((GList *)g_value_get_pointer(lang_list)); - if (count) { - MMPLAYER_SUBTITLE_INFO_LOCK(player); - player->subtitle_language_list = (GList *)g_value_get_pointer(lang_list); - 
mm_attrs_set_int_by_name(attrs, "content_text_track_num", (gint)count); - if (mmf_attrs_commit(attrs)) - LOGE("failed to commit.\n"); - LOGD("Total subtitle tracks = %d \n", count); - - while (count) { - temp = g_list_nth_data(player->subtitle_language_list, count - 1); - if (temp) - LOGD("value of lang_key is %s and lang_code is %s", - temp->language_key, temp->language_code); - count--; - } - MMPLAYER_SUBTITLE_INFO_SIGNAL(player); - MMPLAYER_SUBTITLE_INFO_UNLOCK(player); - } - } - } + /* get attribute */ + mm_attrs_get_string_by_name(attrs, "streaming_cookie", &cookies); + mm_attrs_get_string_by_name(attrs, "streaming_user_agent", &user_agent); - /* custom message */ - if (!strcmp(structure_name, "audio_codec_not_supported")) { - MMMessageParamType msg_param = {0,}; - msg_param.code = MM_ERROR_PLAYER_AUDIO_CODEC_NOT_FOUND; - MMPLAYER_POST_MSG(player, MM_MESSAGE_ERROR, &msg_param); - } + if (player->ini.http_timeout != DEFAULT_HTTP_TIMEOUT) + http_timeout = player->ini.http_timeout; - /* custom message for RTSP attribute : - RTSP case, buffer is not come from server before PLAYING state. However,we have to get attribute after PAUSE state chaged. - sdp which has contents info is received when rtsp connection is opened. - extract duration ,codec info , resolution from sdp and get it by GstMessage */ - if (!strcmp(structure_name, "rtspsrc_properties")) { + /* get attribute */ + SECURE_LOGD("location : %s", player->profile.uri); + SECURE_LOGD("cookies : %s", cookies); + SECURE_LOGD("user_agent : %s", user_agent); + LOGD("timeout : %d", http_timeout); - gchar *audio_codec = NULL; - gchar *video_codec = NULL; - gchar *video_frame_size = NULL; + /* setting property to streaming source */ + g_object_set(G_OBJECT(element), "location", player->profile.uri, + "timeout", http_timeout, "blocksize", (unsigned long)(64 * 1024), + "retries", MAX_RETRY_COUNT, NULL); - gst_structure_get(gst_message_get_structure(msg), "rtsp_duration", G_TYPE_UINT64, &player->duration, NULL); - LOGD("rtsp duration : %"G_GINT64_FORMAT" msec", GST_TIME_AS_MSECONDS(player->duration)); - player->streaming_type = __mmplayer_get_stream_service_type(player); + /* parsing cookies */ + if ((cookie_list = _mmplayer_get_cookie_list((const char *)cookies))) { + g_object_set(G_OBJECT(element), "cookies", cookie_list, NULL); + g_strfreev(cookie_list); + } - gst_structure_get(gst_message_get_structure(msg), "rtsp_audio_codec", G_TYPE_STRING, &audio_codec, NULL); - LOGD("rtsp_audio_codec : %s", audio_codec); - if (audio_codec) - mm_attrs_set_string_by_name(player->attrs, "content_audio_codec", audio_codec); + if (user_agent) + g_object_set(G_OBJECT(element), "user-agent", user_agent, NULL); - gst_structure_get(gst_message_get_structure(msg), "rtsp_video_codec", G_TYPE_STRING, &video_codec, NULL); - LOGD("rtsp_video_codec : %s", video_codec); - if (video_codec) - mm_attrs_set_string_by_name(player->attrs, "content_video_codec", video_codec); + if (MMPLAYER_URL_HAS_DASH_SUFFIX(player)) + LOGW("[DASH] this is still experimental feature"); - gst_structure_get(gst_message_get_structure(msg), "rtsp_video_frame_size", G_TYPE_STRING, &video_frame_size, NULL); - LOGD("rtsp_video_frame_size : %s", video_frame_size); - if (video_frame_size) { + MMPLAYER_FLEAVE(); + return element; +} - char *seperator = strchr(video_frame_size, '-'); - if (seperator) { +static GstElement * +__mmplayer_gst_make_file_src(mmplayer_t *player) +{ + GstElement *element = NULL; - char video_width[10] = {0,}; - int frame_size_len = strlen(video_frame_size); - int 
separtor_len = strlen(seperator); + MMPLAYER_FENTER(); + MMPLAYER_RETURN_VAL_IF_FAIL(player, NULL); - strncpy(video_width, video_frame_size, (frame_size_len - separtor_len)); - mm_attrs_set_int_by_name(attrs, "content_video_width", atoi(video_width)); + LOGD("using filesrc for 'file://' handler"); + if (!_mmplayer_get_storage_info(player->profile.uri, &player->storage_info[MMPLAYER_PATH_VOD])) { + LOGE("failed to get storage info"); + return NULL; + } - seperator++; - mm_attrs_set_int_by_name(attrs, "content_video_height", atoi(seperator)); - } - } + element = gst_element_factory_make("filesrc", "source"); + if (!element) { + LOGE("failed to create filesrc"); + return NULL; + } - if (mmf_attrs_commit(attrs)) - LOGE("failed to commit.\n"); - } - } - break; + g_object_set(G_OBJECT(element), "location", (player->profile.uri) + 7, NULL); /* uri+7 -> remove "file:// */ - case GST_MESSAGE_DURATION_CHANGED: - { - LOGD("GST_MESSAGE_DURATION_CHANGED\n"); - if (!__mmplayer_gst_handle_duration(player, msg)) - LOGW("failed to update duration"); - } + MMPLAYER_FLEAVE(); + return element; +} - break; +static gboolean +__mmplayer_gst_msg_push(GstBus *bus, GstMessage *msg, gpointer data) +{ + mmplayer_t *player = (mmplayer_t *)data; - case GST_MESSAGE_ASYNC_START: - LOGD("GST_MESSAGE_ASYNC_START : %s\n", GST_ELEMENT_NAME(GST_MESSAGE_SRC(msg))); - break; + g_return_val_if_fail(player, FALSE); + g_return_val_if_fail(msg && GST_IS_MESSAGE(msg), FALSE); + gst_message_ref(msg); - case GST_MESSAGE_ASYNC_DONE: - { - MMPlayerGstElement *mainbin; + g_mutex_lock(&player->bus_msg_q_lock); + g_queue_push_tail(player->bus_msg_q, msg); + g_mutex_unlock(&player->bus_msg_q_lock); - if (!(player->pipeline && player->pipeline->mainbin)) { - LOGE("player pipeline handle is null"); - break; - } + MMPLAYER_BUS_MSG_THREAD_LOCK(player); + MMPLAYER_BUS_MSG_THREAD_SIGNAL(player); + MMPLAYER_BUS_MSG_THREAD_UNLOCK(player); + return TRUE; +} - mainbin = player->pipeline->mainbin; +static gpointer __mmplayer_gst_bus_msg_thread(gpointer data) +{ + mmplayer_t *player = (mmplayer_t *)(data); + GstMessage *msg = NULL; - LOGD("GST_MESSAGE_ASYNC_DONE : %s\n", GST_ELEMENT_NAME(GST_MESSAGE_SRC(msg))); + MMPLAYER_FENTER(); + MMPLAYER_RETURN_VAL_IF_FAIL(player && + player->pipeline && + player->pipeline->mainbin && + player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, + NULL); + + MMPLAYER_BUS_MSG_THREAD_LOCK(player); + + LOGD("[handle: %p] gst bus msg thread will be started.", player); + while (!player->bus_msg_thread_exit) { + g_mutex_lock(&player->bus_msg_q_lock); + msg = g_queue_pop_head(player->bus_msg_q); + g_mutex_unlock(&player->bus_msg_q_lock); + if (msg == NULL) { + MMPLAYER_BUS_MSG_THREAD_WAIT(player); + continue; + } + MMPLAYER_BUS_MSG_THREAD_UNLOCK(player); + /* handle the gst msg */ + __mmplayer_gst_bus_msg_callback(msg, player); + MMPLAYER_BUS_MSG_THREAD_LOCK(player); + gst_message_unref(msg); + } - /* we only handle messages from pipeline */ - if (msg->src != (GstObject *)mainbin[MMPLAYER_M_PIPE].gst) - break; + MMPLAYER_BUS_MSG_THREAD_UNLOCK(player); + MMPLAYER_FLEAVE(); - if (player->seek_state == MMPLAYER_SEEK_IN_PROGRESS) { - if (MMPLAYER_TARGET_STATE(player) == MM_PLAYER_STATE_PAUSED) { - player->seek_state = MMPLAYER_SEEK_NONE; - MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL); - } else if (MMPLAYER_TARGET_STATE(player) == MM_PLAYER_STATE_PLAYING) { - if (mainbin[MMPLAYER_M_AUTOPLUG].gst) { - LOGD("sync %s state(%s) with parent state(%s)", - GST_ELEMENT_NAME(mainbin[MMPLAYER_M_AUTOPLUG].gst), - 
gst_element_state_get_name(GST_STATE(mainbin[MMPLAYER_M_AUTOPLUG].gst)), - gst_element_state_get_name(GST_STATE(mainbin[MMPLAYER_M_PIPE].gst))); - - /* In case of streaming, pause is required before finishing seeking by buffering. - After completing the seek(during buffering), the player and sink elems has paused state but others in playing state. - Because the buffering state is controlled according to the state transition for force resume, - the decodebin state should be paused as player state. */ - gst_element_sync_state_with_parent(mainbin[MMPLAYER_M_AUTOPLUG].gst); - } + return NULL; +} - if ((MMPLAYER_IS_HTTP_STREAMING(player)) && - (player->streamer) && - (player->streamer->streaming_buffer_type == BUFFER_TYPE_MUXED) && - !(player->streamer->buffering_state & MM_PLAYER_BUFFERING_IN_PROGRESS)) { - GstQuery *query = NULL; - gboolean busy = FALSE; - gint percent = 0; - - if (player->streamer->buffer_handle[BUFFER_TYPE_MUXED].buffer) { - query = gst_query_new_buffering(GST_FORMAT_PERCENT); - if (gst_element_query(player->streamer->buffer_handle[BUFFER_TYPE_MUXED].buffer, query)) - gst_query_parse_buffering_percent(query, &busy, &percent); - gst_query_unref(query); - - LOGD("buffered percent(%s): %d\n", - GST_ELEMENT_NAME(player->streamer->buffer_handle[BUFFER_TYPE_MUXED].buffer), percent); - } +static int +__mmplayer_gst_check_duration(mmplayer_t *player, gint64 position) +{ + gint64 dur_nsec = 0; - if (percent >= 100) - __mmplayer_handle_buffering_message(player); - } + MMPLAYER_FENTER(); + MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED); - player->seek_state = MMPLAYER_SEEK_COMPLETED; - } + if (MMPLAYER_IS_MS_BUFF_SRC(player)) + return MM_ERROR_NONE; + + /* NOTE : duration cannot be zero except live streaming. + * Since some element could have some timing problemn with quering duration, try again. + */ + if (player->duration == 0) { + if (!gst_element_query_duration(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_FORMAT_TIME, &dur_nsec)) { + /* For RTSP Streaming , duration is not returned in READY state. So seek to the previous position does not work properly. + * Added a patch to postpone the actual seek when state changes to PLAY. Sending a fake SEEK_COMPLETED event to finish the current request. 
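	 * In GStreamer terms the check above is just the standard duration query; a
	 * minimal sketch, with "pipeline" standing in for mainbin[MMPLAYER_M_PIPE].gst:
	 *
	 *     gint64 dur = 0;
	 *     if (gst_element_query_duration(pipeline, GST_FORMAT_TIME, &dur) && dur > 0)
	 *         player->duration = dur;
	 *
	 * When the query fails (e.g. RTSP VOD still in READY) the seek is deferred and a
	 * SEEK_COMPLETED message is posted instead of reporting a seek error.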
*/ + if ((MMPLAYER_IS_RTSP_STREAMING(player)) && + (_mmplayer_get_stream_service_type(player) == STREAMING_SERVICE_VOD)) { + player->pending_seek.is_pending = true; + player->pending_seek.pos = position; + player->seek_state = MMPLAYER_SEEK_NONE; + MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL); + return MM_ERROR_PLAYER_NO_OP; + } else { + player->seek_state = MMPLAYER_SEEK_NONE; + return MM_ERROR_PLAYER_SEEK; } } - break; - - #if 0 /* delete unnecessary logs */ - case GST_MESSAGE_REQUEST_STATE: LOGD("GST_MESSAGE_REQUEST_STATE\n"); break; - case GST_MESSAGE_STEP_START: LOGD("GST_MESSAGE_STEP_START\n"); break; - case GST_MESSAGE_QOS: LOGD("GST_MESSAGE_QOS\n"); break; - case GST_MESSAGE_PROGRESS: LOGD("GST_MESSAGE_PROGRESS\n"); break; - case GST_MESSAGE_ANY: LOGD("GST_MESSAGE_ANY\n"); break; - case GST_MESSAGE_INFO: LOGD("GST_MESSAGE_STATE_DIRTY\n"); break; - case GST_MESSAGE_STATE_DIRTY: LOGD("GST_MESSAGE_STATE_DIRTY\n"); break; - case GST_MESSAGE_STEP_DONE: LOGD("GST_MESSAGE_STEP_DONE\n"); break; - case GST_MESSAGE_CLOCK_PROVIDE: LOGD("GST_MESSAGE_CLOCK_PROVIDE\n"); break; - case GST_MESSAGE_STRUCTURE_CHANGE: LOGD("GST_MESSAGE_STRUCTURE_CHANGE\n"); break; - case GST_MESSAGE_STREAM_STATUS: LOGD("GST_MESSAGE_STREAM_STATUS\n"); break; - case GST_MESSAGE_APPLICATION: LOGD("GST_MESSAGE_APPLICATION\n"); break; - case GST_MESSAGE_SEGMENT_START: LOGD("GST_MESSAGE_SEGMENT_START\n"); break; - case GST_MESSAGE_SEGMENT_DONE: LOGD("GST_MESSAGE_SEGMENT_DONE\n"); break; - case GST_MESSAGE_LATENCY: LOGD("GST_MESSAGE_LATENCY\n"); break; - #endif + player->duration = dur_nsec; + } - default: - break; + if (player->duration > 0 && player->duration < position) { + LOGE("invalid pos %"G_GINT64_FORMAT", dur: %"G_GINT64_FORMAT, position, player->duration); + return MM_ERROR_INVALID_ARGUMENT; } - /* should not call 'gst_message_unref(msg)' */ - return; + MMPLAYER_FLEAVE(); + return MM_ERROR_NONE; } -GstBusSyncReply -__mmplayer_bus_sync_callback(GstBus * bus, GstMessage * message, gpointer data) +static gboolean +__mmplayer_gst_check_seekable(mmplayer_t *player) { - mm_player_t *player = (mm_player_t *)data; - GstBusSyncReply reply = GST_BUS_DROP; + GstQuery *query = NULL; + gboolean seekable = FALSE; - if (!(player->pipeline && player->pipeline->mainbin)) { - LOGE("player pipeline handle is null"); - return GST_BUS_PASS; + if (MMPLAYER_IS_MS_BUFF_SRC(player)) { + return TRUE; } - if (!__mmplayer_gst_check_useful_message(player, message)) { - gst_message_unref(message); - return GST_BUS_DROP; + query = gst_query_new_seeking(GST_FORMAT_TIME); + if (gst_element_query(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, query)) { + gst_query_parse_seeking(query, NULL, &seekable, NULL, NULL); + gst_query_unref(query); + + if (!seekable) { + LOGW("non-seekable content"); + player->seek_state = MMPLAYER_SEEK_NONE; + return FALSE; + } + } else { + LOGW("failed to get seeking query"); + gst_query_unref(query); /* keep seeking operation */ } - switch (GST_MESSAGE_TYPE(message)) { - case GST_MESSAGE_STATE_CHANGED: - /* post directly for fast launch */ - if (player->sync_handler) { - __mmplayer_gst_callback(message, player); - reply = GST_BUS_DROP; - } else - reply = GST_BUS_PASS; - break; - case GST_MESSAGE_TAG: - __mmplayer_gst_extract_tag_from_msg(player, message); + return TRUE; +} - #if 0 // debug - { - GstTagList *tags = NULL; +int +_mmplayer_gst_set_state(mmplayer_t *player, GstElement *element, GstState state, gboolean async, gint timeout) +{ + GstState element_state = GST_STATE_VOID_PENDING; + GstState 
element_pending_state = GST_STATE_VOID_PENDING; + GstStateChangeReturn ret = GST_STATE_CHANGE_FAILURE; - gst_message_parse_tag(message, &tags); - if (tags) { - LOGE("TAGS received from element \"%s\".\n", - GST_STR_NULL(GST_ELEMENT_NAME(GST_MESSAGE_SRC(message)))); + MMPLAYER_FENTER(); - gst_tag_list_foreach(tags, print_tag, NULL); - gst_tag_list_free(tags); - tags = NULL; - } - break; - } - #endif - break; + MMPLAYER_RETURN_VAL_IF_FAIL(player, MM_ERROR_PLAYER_NOT_INITIALIZED); + MMPLAYER_RETURN_VAL_IF_FAIL(element, MM_ERROR_INVALID_ARGUMENT); - case GST_MESSAGE_DURATION_CHANGED: - __mmplayer_gst_handle_duration(player, message); - break; - case GST_MESSAGE_ASYNC_DONE: - /* NOTE:Don't call gst_callback directly - * because previous frame can be showed even though this message is received for seek. - */ - default: - reply = GST_BUS_PASS; - break; + LOGD("setting [%s] element state to : %s", GST_ELEMENT_NAME(element), gst_element_state_get_name(state)); + + /* set state */ + ret = gst_element_set_state(element, state); + if (ret == GST_STATE_CHANGE_FAILURE) { + LOGE("failed to set [%s] state", GST_ELEMENT_NAME(element)); + + /* dump state of all element */ + _mmplayer_dump_pipeline_state(player); + + return MM_ERROR_PLAYER_INTERNAL; } - if (reply == GST_BUS_DROP) - gst_message_unref(message); + /* return here so state transition to be done in async mode */ + if (async) { + LOGD("async state transition. not waiting for state complete."); + return MM_ERROR_NONE; + } - return reply; + /* wait for state transition */ + ret = gst_element_get_state(element, &element_state, &element_pending_state, timeout * GST_SECOND); + if (ret == GST_STATE_CHANGE_FAILURE || (state != element_state)) { + LOGE("failed to change [%s] element state to [%s] within %d sec", + GST_ELEMENT_NAME(element), + gst_element_state_get_name(state), timeout); + + LOGE(" [%s] state : %s pending : %s", + GST_ELEMENT_NAME(element), + gst_element_state_get_name(element_state), + gst_element_state_get_name(element_pending_state)); + + /* dump state of all element */ + _mmplayer_dump_pipeline_state(player); + + return MM_ERROR_PLAYER_INTERNAL; + } + + LOGD("[%s] element state has changed", GST_ELEMENT_NAME(element)); + + MMPLAYER_FLEAVE(); + + return MM_ERROR_NONE; } -int __mmplayer_gst_start(mm_player_t* player) +int +_mmplayer_gst_start(mmplayer_t *player) { int ret = MM_ERROR_NONE; gboolean async = FALSE; @@ -2223,12 +3498,13 @@ int __mmplayer_gst_start(mm_player_t* player) MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED); - /* NOTE : if SetPosition was called before Start. do it now */ - /* streaming doesn't support it. so it should be always sync */ - /* !!create one more api to check if there is pending seek rather than checking variables */ + /* NOTE : if SetPosition was called before Start. do it now + * streaming doesn't support it. 
so it should be always sync + * !!create one more api to check if there is pending seek rather than checking variables + */ if (player->pending_seek.is_pending && !MMPLAYER_IS_STREAMING(player)) { MMPLAYER_TARGET_STATE(player) = MM_PLAYER_STATE_PAUSED; - ret = __mmplayer_gst_pause(player, FALSE); + ret = _mmplayer_gst_pause(player, FALSE); if (ret != MM_ERROR_NONE) { LOGE("failed to set state to PAUSED for pending seek"); return ret; @@ -2244,16 +3520,15 @@ int __mmplayer_gst_start(mm_player_t* player) MMPLAYER_PRINT_STATE(player); /* set pipeline state to PLAYING */ - ret = __mmplayer_gst_set_state(player, + ret = _mmplayer_gst_set_state(player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_STATE_PLAYING, async, MMPLAYER_STATE_CHANGE_TIMEOUT(player)); - - if (ret == MM_ERROR_NONE) { - MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_PLAYING); - } else { + if (ret != MM_ERROR_NONE) { LOGE("failed to set state to PLAYING"); return ret; } + MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_PLAYING); + /* generating debug info before returning error */ MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-status-start"); @@ -2262,7 +3537,8 @@ int __mmplayer_gst_start(mm_player_t* player) return ret; } -int __mmplayer_gst_stop(mm_player_t* player) +int +_mmplayer_gst_stop(mmplayer_t *player) { GstStateChangeReturn change_ret = GST_STATE_CHANGE_SUCCESS; MMHandleType attrs = 0; @@ -2281,7 +3557,7 @@ int __mmplayer_gst_stop(mm_player_t* player) attrs = MMPLAYER_GET_ATTRS(player); if (!attrs) { - LOGE("cannot get content attribute\n"); + LOGE("cannot get content attribute"); return MM_ERROR_PLAYER_INTERNAL; } @@ -2292,31 +3568,30 @@ int __mmplayer_gst_stop(mm_player_t* player) (player->streaming_type == STREAMING_SERVICE_VOD && player->videodec_linked)) rewind = TRUE; - if (player->es_player_push_mode || MMPLAYER_IS_HTTP_PD(player)) { + if (player->es_player_push_mode) /* disable the async state transition because there could be no data in the pipeline */ - __mmplayer_gst_handle_async(player, FALSE, MMPLAYER_SINK_ALL); - } + __mmplayer_gst_set_async(player, FALSE, MMPLAYER_SINK_ALL); /* set gst state */ - ret = __mmplayer_gst_set_state(player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_STATE_PAUSED, FALSE, timeout); + ret = _mmplayer_gst_set_state(player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_STATE_PAUSED, FALSE, timeout); - if (player->es_player_push_mode || MMPLAYER_IS_HTTP_PD(player)) { + if (player->es_player_push_mode) { /* enable the async state transition as default operation */ - __mmplayer_gst_handle_async(player, TRUE, MMPLAYER_SINK_ALL); + __mmplayer_gst_set_async(player, TRUE, MMPLAYER_SINK_ALL); } /* return if set_state has failed */ if (ret != MM_ERROR_NONE) { - LOGE("failed to set state.\n"); + LOGE("failed to set state."); return ret; } /* rewind */ if (rewind) { - if (!__mmplayer_gst_seek(player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, player->playback_rate, + if (!_mmplayer_gst_seek(player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, player->playback_rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH, GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE)) { - LOGW("failed to rewind\n"); + LOGW("failed to rewind"); ret = MM_ERROR_PLAYER_SEEK; } } @@ -2332,9 +3607,9 @@ int __mmplayer_gst_stop(mm_player_t* player) if (change_ret == GST_STATE_CHANGE_SUCCESS || change_ret == GST_STATE_CHANGE_NO_PREROLL) { MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_READY); } else { - LOGE("fail to stop player.\n"); + LOGE("fail to stop player."); ret = 
MM_ERROR_PLAYER_INTERNAL; - __mmplayer_dump_pipeline_state(player); + _mmplayer_dump_pipeline_state(player); } /* generate dot file if enabled */ @@ -2345,7 +3620,8 @@ int __mmplayer_gst_stop(mm_player_t* player) return ret; } -int __mmplayer_gst_pause(mm_player_t* player, gboolean async) +int +_mmplayer_gst_pause(mmplayer_t *player, gboolean async) { int ret = MM_ERROR_NONE; @@ -2359,83 +3635,85 @@ int __mmplayer_gst_pause(mm_player_t* player, gboolean async) MMPLAYER_PRINT_STATE(player); /* set pipeline status to PAUSED */ - ret = __mmplayer_gst_set_state(player, + ret = _mmplayer_gst_set_state(player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_STATE_PAUSED, async, MMPLAYER_STATE_CHANGE_TIMEOUT(player)); - if (FALSE == async) { - if (ret != MM_ERROR_NONE) { - GstMessage *msg = NULL; - GTimer *timer = NULL; - gdouble MAX_TIMEOUT_SEC = 3; + if (async) + goto EXIT; - LOGE("failed to set state to PAUSED"); + if (ret != MM_ERROR_NONE) { + GstMessage *msg = NULL; + GTimer *timer = NULL; + gdouble MAX_TIMEOUT_SEC = 3; - if (!player->bus_watcher) { - LOGE("there is no bus msg thread. pipeline is shutting down."); - return ret; - } + LOGE("failed to set state to PAUSED"); - if (player->msg_posted) { - LOGE("error msg is already posted."); - return ret; - } + if (!player->bus_watcher) { + LOGE("there is no bus msg thread. pipeline is shutting down."); + return ret; + } - timer = g_timer_new(); - g_timer_start(timer); + if (player->msg_posted) { + LOGE("error msg is already posted."); + return ret; + } - GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst)); + timer = g_timer_new(); + g_timer_start(timer); - do { - msg = gst_bus_timed_pop(bus, 100 * GST_MSECOND); - if (msg) { - if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ERROR) { - GError *error = NULL; + GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst)); - /* parse error code */ - gst_message_parse_error(msg, &error, NULL); + do { + msg = gst_bus_timed_pop(bus, 100 * GST_MSECOND); + if (msg) { + if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ERROR) { + GError *error = NULL; - if (gst_structure_has_name(gst_message_get_structure(msg), "streaming_error")) { - /* Note : the streaming error from the streaming source is handled - * using __mmplayer_handle_streaming_error. - */ - __mmplayer_handle_streaming_error(player, msg); + /* parse error code */ + gst_message_parse_error(msg, &error, NULL); - } else if (error) { - LOGE("paring error posted from bus, domain : %s, code : %d", g_quark_to_string(error->domain), error->code); + if (gst_structure_has_name(gst_message_get_structure(msg), "streaming_error")) { + /* Note : the streaming error from the streaming source is handled + * using __mmplayer_handle_streaming_error. 
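	 * For any other bus error the code below falls back to the generic GStreamer
	 * pattern; a self-contained sketch of that pattern (no player specifics assumed):
	 *
	 *     GError *err = NULL;
	 *     gchar *dbg = NULL;
	 *     gst_message_parse_error(msg, &err, &dbg);
	 *     if (err) {
	 *         g_warning("%s error %d: %s",
	 *                   g_quark_to_string(err->domain), err->code, err->message);
	 *         g_error_free(err);
	 *     }
	 *     g_free(dbg);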
+ */ + __mmplayer_handle_streaming_error(player, msg); - if (error->domain == GST_STREAM_ERROR) - ret = __mmplayer_gst_handle_stream_error(player, error, msg); - else if (error->domain == GST_RESOURCE_ERROR) - ret = __mmplayer_gst_handle_resource_error(player, error->code, NULL); - else if (error->domain == GST_LIBRARY_ERROR) - ret = __mmplayer_gst_handle_library_error(player, error->code); - else if (error->domain == GST_CORE_ERROR) - ret = __mmplayer_gst_handle_core_error(player, error->code); + } else if (error) { + LOGE("paring error posted from bus, domain : %s, code : %d", g_quark_to_string(error->domain), error->code); - g_error_free(error); - } - player->msg_posted = TRUE; + if (error->domain == GST_STREAM_ERROR) + ret = __mmplayer_gst_handle_stream_error(player, error, msg); + else if (error->domain == GST_RESOURCE_ERROR) + ret = __mmplayer_gst_handle_resource_error(player, error->code, NULL); + else if (error->domain == GST_LIBRARY_ERROR) + ret = __mmplayer_gst_handle_library_error(player, error->code); + else if (error->domain == GST_CORE_ERROR) + ret = __mmplayer_gst_handle_core_error(player, error->code); + + g_error_free(error); } - gst_message_unref(msg); + player->msg_posted = TRUE; } - } while (!player->msg_posted && (g_timer_elapsed(timer, NULL) < MAX_TIMEOUT_SEC)); - /* clean */ - gst_object_unref(bus); - g_timer_stop(timer); - g_timer_destroy(timer); - - return ret; + gst_message_unref(msg); + } + } while (!player->msg_posted && (g_timer_elapsed(timer, NULL) < MAX_TIMEOUT_SEC)); + /* clean */ + gst_object_unref(bus); + g_timer_stop(timer); + g_timer_destroy(timer); - } else if ((!MMPLAYER_IS_RTSP_STREAMING(player)) && (!player->video_stream_cb) && - (!player->pipeline->videobin) && (!player->pipeline->audiobin)) { + return ret; + } + if (MMPLAYER_USE_DECODEBIN(player)) { + if ((!MMPLAYER_IS_RTSP_STREAMING(player)) && (!player->video_decoded_cb) && + (!player->pipeline->videobin) && (!player->pipeline->audiobin)) return MM_ERROR_PLAYER_CODEC_NOT_FOUND; - - } else { - MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_PAUSED); - } } + MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_PAUSED); + +EXIT: /* generate dot file before returning error */ MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-status-pause"); @@ -2444,7 +3722,8 @@ int __mmplayer_gst_pause(mm_player_t* player, gboolean async) return ret; } -int __mmplayer_gst_resume(mm_player_t* player, gboolean async) +int +_mmplayer_gst_resume(mmplayer_t *player, gboolean async) { int ret = MM_ERROR_NONE; gint timeout = 0; @@ -2464,16 +3743,16 @@ int __mmplayer_gst_resume(mm_player_t* player, gboolean async) /* set pipeline state to PLAYING */ timeout = MMPLAYER_STATE_CHANGE_TIMEOUT(player); - ret = __mmplayer_gst_set_state(player, + ret = _mmplayer_gst_set_state(player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_STATE_PLAYING, async, timeout); if (ret != MM_ERROR_NONE) { LOGE("failed to set state to PLAYING"); goto EXIT; - } else { - if (async == FALSE) - MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_PLAYING); } + if (!async) + MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_PLAYING); + EXIT: /* generate dot file */ MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-status-resume"); @@ -2485,9 +3764,9 @@ EXIT: /* sending event to one of sinkelements */ gboolean -__mmplayer_gst_send_event_to_sink(mm_player_t* player, GstEvent* event) +_mmplayer_gst_send_event_to_sink(mmplayer_t *player, GstEvent *event) { - GstEvent * event2 = NULL; + GstEvent *event2 = NULL; GList *sinks = NULL; gboolean res = FALSE; MMPLAYER_FENTER(); @@ 
-2516,14 +3795,14 @@ __mmplayer_gst_send_event_to_sink(mm_player_t* player, GstEvent* event) gst_event_ref(event); if ((res = gst_element_send_event(sink, event))) { - LOGD("sending event[%s] to sink element [%s] success!\n", + LOGD("sending event[%s] to sink element [%s] success!", GST_EVENT_TYPE_NAME(event), GST_ELEMENT_NAME(sink)); /* rtsp case, asyn_done is not called after seek during pause state */ if (MMPLAYER_IS_RTSP_STREAMING(player)) { if (GST_EVENT_TYPE(event) == GST_EVENT_SEEK) { if (MMPLAYER_TARGET_STATE(player) == MM_PLAYER_STATE_PAUSED) { - LOGD("RTSP seek completed, after pause state..\n"); + LOGD("RTSP seek completed, after pause state.."); player->seek_state = MMPLAYER_SEEK_NONE; MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL); } @@ -2539,7 +3818,7 @@ __mmplayer_gst_send_event_to_sink(mm_player_t* player, GstEvent* event) } } - LOGD("sending event[%s] to sink element [%s] failed. try with next one.\n", + LOGD("sending event[%s] to sink element [%s] failed. try with next one.", GST_EVENT_TYPE_NAME(event), GST_ELEMENT_NAME(sink)); } @@ -2549,7 +3828,7 @@ __mmplayer_gst_send_event_to_sink(mm_player_t* player, GstEvent* event) /* Note : Textbin is not linked to the video or audio bin. * It needs to send the event to the text sink seperatelly. */ - if (player->play_subtitle && player->pipeline) { + if (player->play_subtitle && player->pipeline) { GstElement *text_sink = GST_ELEMENT_CAST(player->pipeline->textbin[MMPLAYER_T_FAKE_SINK].gst); if (GST_IS_ELEMENT(text_sink)) { @@ -2557,15 +3836,15 @@ __mmplayer_gst_send_event_to_sink(mm_player_t* player, GstEvent* event) gst_event_ref(event2); if ((res = gst_element_send_event(text_sink, event2))) - LOGD("sending event[%s] to subtitle sink element [%s] success!\n", - GST_EVENT_TYPE_NAME(event2), GST_ELEMENT_NAME(text_sink)); + LOGD("sending event[%s] to subtitle sink element [%s] success!", + GST_EVENT_TYPE_NAME(event2), GST_ELEMENT_NAME(text_sink)); else - LOGE("sending event[%s] to subtitle sink element [%s] failed!\n", - GST_EVENT_TYPE_NAME(event2), GST_ELEMENT_NAME(text_sink)); + LOGE("sending event[%s] to subtitle sink element [%s] failed!", + GST_EVENT_TYPE_NAME(event2), GST_ELEMENT_NAME(text_sink)); gst_event_unref(event2); } - } + } gst_event_unref(event); @@ -2575,11 +3854,11 @@ __mmplayer_gst_send_event_to_sink(mm_player_t* player, GstEvent* event) } gboolean -__mmplayer_gst_seek(mm_player_t* player, GstElement * element, gdouble rate, +_mmplayer_gst_seek(mmplayer_t *player, GstElement *element, gdouble rate, GstFormat format, GstSeekFlags flags, GstSeekType cur_type, gint64 cur, GstSeekType stop_type, gint64 stop) { - GstEvent* event = NULL; + GstEvent *event = NULL; gboolean result = FALSE; MMPLAYER_FENTER(); @@ -2592,7 +3871,7 @@ __mmplayer_gst_seek(mm_player_t* player, GstElement * element, gdouble rate, event = gst_event_new_seek(rate, format, flags, cur_type, cur, stop_type, stop); - result = __mmplayer_gst_send_event_to_sink(player, event); + result = _mmplayer_gst_send_event_to_sink(player, event); MMPLAYER_FLEAVE(); @@ -2600,189 +3879,92 @@ __mmplayer_gst_seek(mm_player_t* player, GstElement * element, gdouble rate, } int -__mmplayer_gst_set_position(mm_player_t* player, int format, gint64 position, gboolean internal_called) +_mmplayer_gst_set_position(mmplayer_t *player, gint64 position, gboolean internal_called) { - gint64 dur_nsec = 0; + int ret = MM_ERROR_NONE; gint64 pos_nsec = 0; - gboolean ret = TRUE; gboolean accurated = FALSE; - GstSeekFlags seek_flags = GST_SEEK_FLAG_FLUSH; - - 
MMPLAYER_FENTER(); - MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED); - MMPLAYER_RETURN_VAL_IF_FAIL(!MMPLAYER_IS_LIVE_STREAMING(player), MM_ERROR_PLAYER_NO_OP); - - if (MMPLAYER_CURRENT_STATE(player) != MM_PLAYER_STATE_PLAYING - && MMPLAYER_CURRENT_STATE(player) != MM_PLAYER_STATE_PAUSED) - goto PENDING; - - if (!MMPLAYER_IS_MS_BUFF_SRC(player)) { - /* check duration */ - /* NOTE : duration cannot be zero except live streaming. - * Since some element could have some timing problemn with quering duration, try again. - */ - if (player->duration == 0) { - if (!gst_element_query_duration(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_FORMAT_TIME, &dur_nsec)) { - /* For RTSP Streaming , duration is not returned in READY state. So seek to the previous position does not work properly. - * Added a patch to postpone the actual seek when state changes to PLAY. Sending a fake SEEK_COMPLETED event to finish the current request. */ - if ((MMPLAYER_IS_RTSP_STREAMING(player)) && (__mmplayer_get_stream_service_type(player) == STREAMING_SERVICE_VOD)) { - player->pending_seek.is_pending = TRUE; - player->pending_seek.format = format; - player->pending_seek.pos = position; - player->seek_state = MMPLAYER_SEEK_NONE; - MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL); - return MM_ERROR_NONE; - } else { - goto SEEK_ERROR; - } - } - player->duration = dur_nsec; - } - } - LOGD("playback rate: %f\n", player->playback_rate); - - mm_attrs_get_int_by_name(player->attrs, "accurate_seek", &accurated); - if (accurated) - seek_flags |= GST_SEEK_FLAG_ACCURATE; - else - seek_flags |= GST_SEEK_FLAG_KEY_UNIT; - - /* do seek */ - switch (format) { - case MM_PLAYER_POS_FORMAT_TIME: - { - if (!MMPLAYER_IS_MS_BUFF_SRC(player)) { - GstQuery *query = NULL; - gboolean seekable = FALSE; - - /* check position is valid or not */ - if (position > player->duration) - goto INVALID_ARGS; - - query = gst_query_new_seeking(GST_FORMAT_TIME); - if (gst_element_query(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, query)) { - gst_query_parse_seeking(query, NULL, &seekable, NULL, NULL); - gst_query_unref(query); - - if (!seekable) { - LOGW("non-seekable content"); - player->seek_state = MMPLAYER_SEEK_NONE; - return MM_ERROR_PLAYER_NO_OP; - } - } else { - LOGW("failed to get seeking query"); - gst_query_unref(query); /* keep seeking operation */ - } - - LOGD("seeking to(%"G_GINT64_FORMAT") nsec, duration is %"G_GINT64_FORMAT" nsec\n", position, player->duration); - - /* For rtspsrc stack , npt-start value coming from server is used for finding the current position. - But when a rtsp clip (especially from Youtube Desktop View) is paused and kept for sometime,npt-start is still increasing. - This causes problem is position calculation during normal pause resume scenarios also. - Currently during seek , we are sending the current position to rtspsrc module for position saving for later use. 
*/ - if ((MMPLAYER_IS_RTSP_STREAMING(player)) && - (__mmplayer_get_stream_service_type(player) == STREAMING_SERVICE_VOD)) { - if (!gst_element_query_position(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_FORMAT_TIME, &pos_nsec)) - LOGW("getting current position failed in seek\n"); - - player->last_position = pos_nsec; - g_object_set(player->pipeline->mainbin[MMPLAYER_M_SRC].gst, "resume-position", player->last_position, NULL); - } - - if (player->seek_state != MMPLAYER_SEEK_NONE) { - LOGD("not completed seek"); - return MM_ERROR_PLAYER_DOING_SEEK; - } - } - - if (!internal_called) - player->seek_state = MMPLAYER_SEEK_IN_PROGRESS; + GstSeekFlags seek_flags = GST_SEEK_FLAG_FLUSH; - if ((MMPLAYER_IS_HTTP_STREAMING(player)) && (!player->videodec_linked)) { - gint64 cur_time = 0; + MMPLAYER_FENTER(); + MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED); + MMPLAYER_RETURN_VAL_IF_FAIL(!MMPLAYER_IS_LIVE_STREAMING(player), MM_ERROR_PLAYER_NO_OP); - /* get current position */ - gst_element_query_position(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_FORMAT_TIME, &cur_time); + if ((MMPLAYER_CURRENT_STATE(player) != MM_PLAYER_STATE_PLAYING) + && (MMPLAYER_CURRENT_STATE(player) != MM_PLAYER_STATE_PAUSED)) + goto PENDING; - /* flush */ - GstEvent *event = gst_event_new_seek(1.0, - GST_FORMAT_TIME, - (GstSeekFlags)GST_SEEK_FLAG_FLUSH, - GST_SEEK_TYPE_SET, cur_time, - GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE); - if (event) - __mmplayer_gst_send_event_to_sink(player, event); + ret = __mmplayer_gst_check_duration(player, position); + if (ret != MM_ERROR_NONE) { + LOGE("failed to check duration 0x%X", ret); + return (ret == MM_ERROR_PLAYER_NO_OP) ? MM_ERROR_NONE : ret; + } - if (!MMPLAYER_IS_RTSP_STREAMING(player)) - __mmplayer_gst_pause(player, FALSE); - } + if (!__mmplayer_gst_check_seekable(player)) + return MM_ERROR_PLAYER_NO_OP; - pos_nsec = position; + LOGD("seeking to(%"G_GINT64_FORMAT") nsec, rate: %f, dur: %"G_GINT64_FORMAT" nsec", + position, player->playback_rate, player->duration); - /* rtsp streaming case, there is no sink after READY TO PAUSE state(no preroll state change). - that's why set position through property. */ - if ((MMPLAYER_IS_RTSP_STREAMING(player)) && - (MMPLAYER_CURRENT_STATE(player) == MM_PLAYER_STATE_PAUSED) && - (MMPLAYER_PREV_STATE(player) == MM_PLAYER_STATE_READY) && - (!player->videodec_linked) && (!player->audiodec_linked)) { + /* For rtspsrc stack , npt-start value coming from server is used for finding the current position. + But when a rtsp clip (especially from Youtube Desktop View) is paused and kept for sometime,npt-start is still increasing. + This causes problem is position calculation during normal pause resume scenarios also. + Currently during seek , we are sending the current position to rtspsrc module for position saving for later use. 
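	 * The saved value comes from the standard position query; roughly (pipeline and
	 * rtsp_src stand for mainbin[MMPLAYER_M_PIPE].gst and mainbin[MMPLAYER_M_SRC].gst,
	 * and "resume-position" is assumed here to be provided by the platform's patched
	 * rtspsrc rather than the upstream element):
	 *
	 *     gint64 pos = 0;
	 *     if (gst_element_query_position(pipeline, GST_FORMAT_TIME, &pos))
	 *         g_object_set(rtsp_src, "resume-position", pos, NULL);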
*/ + if ((MMPLAYER_IS_RTSP_STREAMING(player)) && + (_mmplayer_get_stream_service_type(player) == STREAMING_SERVICE_VOD)) { + if (!gst_element_query_position(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_FORMAT_TIME, &pos_nsec)) + LOGW("getting current position failed in seek"); - g_object_set(player->pipeline->mainbin[MMPLAYER_M_SRC].gst, "pending-start-position", pos_nsec, NULL); - LOGD("[%s] set position =%"GST_TIME_FORMAT, - GST_ELEMENT_NAME(player->pipeline->mainbin[MMPLAYER_M_SRC].gst), GST_TIME_ARGS(pos_nsec)); - player->seek_state = MMPLAYER_SEEK_NONE; - MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL); - } else { - ret = __mmplayer_gst_seek(player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, player->playback_rate, - GST_FORMAT_TIME, seek_flags, - GST_SEEK_TYPE_SET, pos_nsec, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE); - } + player->last_position = pos_nsec; + g_object_set(player->pipeline->mainbin[MMPLAYER_M_SRC].gst, "resume-position", player->last_position, NULL); + } - if (!ret) { - LOGE("failed to set position."); - goto SEEK_ERROR; - } + if (player->seek_state != MMPLAYER_SEEK_NONE) { + LOGD("not completed seek"); + return MM_ERROR_PLAYER_DOING_SEEK; } - break; - case MM_PLAYER_POS_FORMAT_PERCENT: - { - LOGD("seeking to %"G_GINT64_FORMAT"%%", position); + if (!internal_called) + player->seek_state = MMPLAYER_SEEK_IN_PROGRESS; - if (player->seek_state != MMPLAYER_SEEK_NONE) { - LOGD("not completed seek"); - return MM_ERROR_PLAYER_DOING_SEEK; - } + /* rtsp streaming case, there is no sink after READY TO PAUSE state(no preroll state change). + that's why set position through property. */ + if ((MMPLAYER_IS_RTSP_STREAMING(player)) && + (MMPLAYER_CURRENT_STATE(player) == MM_PLAYER_STATE_PAUSED) && + (MMPLAYER_PREV_STATE(player) == MM_PLAYER_STATE_READY) && + (!player->videodec_linked) && (!player->audiodec_linked)) { + + LOGD("[%s] set position =%"GST_TIME_FORMAT, + GST_ELEMENT_NAME(player->pipeline->mainbin[MMPLAYER_M_SRC].gst), GST_TIME_ARGS(position)); - if (!internal_called) - player->seek_state = MMPLAYER_SEEK_IN_PROGRESS; + g_object_set(player->pipeline->mainbin[MMPLAYER_M_SRC].gst, "pending-start-position", position, NULL); + player->seek_state = MMPLAYER_SEEK_NONE; + MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL); + } else { + mm_attrs_get_int_by_name(player->attrs, "accurate_seek", &accurated); + if (accurated) + seek_flags |= GST_SEEK_FLAG_ACCURATE; + else + seek_flags |= GST_SEEK_FLAG_KEY_UNIT; - /* FIXIT : why don't we use 'GST_FORMAT_PERCENT' */ - pos_nsec = (gint64)((position * player->duration) / 100); - ret = __mmplayer_gst_seek(player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, player->playback_rate, + if (!_mmplayer_gst_seek(player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, player->playback_rate, GST_FORMAT_TIME, seek_flags, - GST_SEEK_TYPE_SET, pos_nsec, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE); - if (!ret) { - LOGE("failed to set position. 
pos[%"G_GINT64_FORMAT"] dur[%"G_GINT64_FORMAT"] ", pos_nsec, player->duration); + GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE)) { + LOGE("failed to set position"); goto SEEK_ERROR; } } - break; - - default: - goto INVALID_ARGS; - } /* NOTE : store last seeking point to overcome some bad operation - * (returning zero when getting current position) of some elements - */ - player->last_position = pos_nsec; + * (returning zero when getting current position) of some elements + */ + player->last_position = position; /* MSL should guarante playback rate when seek is selected during trick play of fast forward. */ if (player->playback_rate > 1.0) _mmplayer_set_playspeed((MMHandleType)player, player->playback_rate, FALSE); - if ((!internal_called) && - (player->streamer) && (player->streamer->buffering_state & MM_PLAYER_BUFFERING_IN_PROGRESS)) { + if ((player->streamer) && (player->streamer->buffering_state & MM_PLAYER_BUFFERING_IN_PROGRESS)) { LOGD("buffering should be reset after seeking"); player->streamer->buffering_state = MM_PLAYER_BUFFERING_ABORT; player->streamer->buffering_percent = 100; /* after seeking, new per can be non-zero. */ @@ -2792,32 +3974,27 @@ __mmplayer_gst_set_position(mm_player_t* player, int format, gint64 position, gb return MM_ERROR_NONE; PENDING: - player->pending_seek.is_pending = TRUE; - player->pending_seek.format = format; + player->pending_seek.is_pending = true; player->pending_seek.pos = position; - LOGW("player current-state : %s, pending-state : %s, just preserve pending position(%"G_GINT64_FORMAT").\n", + LOGW("player current-state : %s, pending-state : %s, just preserve pending position(%"G_GINT64_FORMAT")", MMPLAYER_STATE_GET_NAME(MMPLAYER_CURRENT_STATE(player)), MMPLAYER_STATE_GET_NAME(MMPLAYER_PENDING_STATE(player)), player->pending_seek.pos); return MM_ERROR_NONE; -INVALID_ARGS: - LOGE("invalid arguments, position: %"G_GINT64_FORMAT" dur : %"G_GINT64_FORMAT" format : %d \n", position, player->duration, format); - return MM_ERROR_INVALID_ARGUMENT; - SEEK_ERROR: player->seek_state = MMPLAYER_SEEK_NONE; return MM_ERROR_PLAYER_SEEK; } int -__mmplayer_gst_get_position(mm_player_t* player, int format, gint64* position) +_mmplayer_gst_get_position(mmplayer_t *player, gint64 *position) { #define TRICKPLAY_OFFSET GST_MSECOND - MMPlayerStateType current_state = MM_PLAYER_STATE_NONE; + mmplayer_state_e current_state = MM_PLAYER_STATE_NONE; gint64 pos_nsec = 0; gboolean ret = TRUE; @@ -2858,38 +4035,21 @@ __mmplayer_gst_get_position(mm_player_t* player, int format, gint64* position) player->last_position = pos_nsec; } - switch (format) { - case MM_PLAYER_POS_FORMAT_TIME: - *position = pos_nsec; - break; - - case MM_PLAYER_POS_FORMAT_PERCENT: - { - if (player->duration <= 0) { - LOGD("duration is [%"G_GINT64_FORMAT"], so returning position 0\n", player->duration); - *position = 0; - } else { - LOGD("position is [%"G_GINT64_FORMAT"] nsec , duration is [%"G_GINT64_FORMAT"] nsec", pos_nsec, player->duration); - *position = (gint64)(pos_nsec * 100 / player->duration); - } - break; - } - default: - return MM_ERROR_PLAYER_INTERNAL; - } + *position = pos_nsec; return MM_ERROR_NONE; } -int __mmplayer_gst_get_buffer_position(mm_player_t* player, int format, unsigned long* start_pos, unsigned long* stop_pos) +int +_mmplayer_gst_get_buffer_position(mmplayer_t *player, int *start_pos, int *end_pos) { #define STREAMING_IS_FINISHED 0 #define BUFFERING_MAX_PER 100 #define DEFAULT_PER_VALUE -1 #define CHECK_PERCENT_VALUE(a, min, max)(((a) > (min)) ? 
(((a) < (max)) ? (a) : (max)) : (min)) - MMPlayerGstElement *mainbin = NULL; - gint start_per = DEFAULT_PER_VALUE, stop_per = DEFAULT_PER_VALUE; + mmplayer_gst_element_t *mainbin = NULL; + gint start_per = DEFAULT_PER_VALUE, end_per = DEFAULT_PER_VALUE; gint64 buffered_total = 0; gint64 position = 0; gint buffered_sec = -1; @@ -2902,29 +4062,24 @@ int __mmplayer_gst_get_buffer_position(mm_player_t* player, int format, unsigned player->pipeline->mainbin, MM_ERROR_PLAYER_NOT_INITIALIZED); - MMPLAYER_RETURN_VAL_IF_FAIL(start_pos && stop_pos, MM_ERROR_INVALID_ARGUMENT); + MMPLAYER_RETURN_VAL_IF_FAIL(start_pos && end_pos, MM_ERROR_INVALID_ARGUMENT); *start_pos = 0; - *stop_pos = 0; + *end_pos = 0; if (!MMPLAYER_IS_HTTP_STREAMING(player)) { /* and rtsp is not ready yet. */ - LOGW("it's only used for http streaming case.\n"); + LOGW("it's only used for http streaming case"); return MM_ERROR_PLAYER_NO_OP; } - if (format != MM_PLAYER_POS_FORMAT_PERCENT) { - LOGW("Time format is not supported yet.\n"); - return MM_ERROR_INVALID_ARGUMENT; - } - if (content_size_time <= 0 || content_size_bytes <= 0) { - LOGW("there is no content size."); + LOGW("there is no content size"); return MM_ERROR_NONE; } - if (__mmplayer_gst_get_position(player, MM_PLAYER_POS_FORMAT_TIME, &position) != MM_ERROR_NONE) { - LOGW("fail to get current position."); + if (_mmplayer_gst_get_position(player, &position) != MM_ERROR_NONE) { + LOGW("fail to get current position"); return MM_ERROR_NONE; } @@ -2932,7 +4087,7 @@ int __mmplayer_gst_get_buffer_position(mm_player_t* player, int format, unsigned GST_TIME_AS_MSECONDS(position), (guint)GST_TIME_AS_SECONDS(content_size_time), content_size_bytes); mainbin = player->pipeline->mainbin; - start_per = (gint)(floor(100 *(gdouble)position / (gdouble)content_size_time)); + start_per = (gint)(floor(100 * (gdouble)position / (gdouble)content_size_time)); if (mainbin[MMPLAYER_M_MUXED_S_BUFFER].gst) { GstQuery *query = NULL; @@ -2955,7 +4110,7 @@ int __mmplayer_gst_get_buffer_position(mm_player_t* player, int format, unsigned if (gst_element_query_position(mainbin[MMPLAYER_M_SRC].gst, GST_FORMAT_BYTES, &buffered_total)) { LOGD("buffered_total %"G_GINT64_FORMAT, buffered_total); - stop_per = 100 * buffered_total / content_size_bytes; + end_per = 100 * buffered_total / content_size_bytes; } } else { /* GST_BUFFERING_TIMESHIFT or GST_BUFFERING_DOWNLOAD */ @@ -2973,16 +4128,17 @@ int __mmplayer_gst_get_buffer_position(mm_player_t* player, int format, unsigned buffered_total += (stop_byte - start_byte); } - } else - stop_per = BUFFERING_MAX_PER; + } else { + end_per = BUFFERING_MAX_PER; + } } gst_query_unref(query); } - if (stop_per == DEFAULT_PER_VALUE) { + if (end_per == DEFAULT_PER_VALUE) { guint dur_sec = (guint)(content_size_time/GST_SECOND); if (dur_sec > 0) { - guint avg_byterate = (guint)(content_size_bytes/dur_sec); + guint avg_byterate = (guint)(content_size_bytes / dur_sec); /* buffered size info from multiqueue */ if (mainbin[MMPLAYER_M_DEMUXED_S_BUFFER].gst) { @@ -2994,23 +4150,530 @@ int __mmplayer_gst_get_buffer_position(mm_player_t* player, int format, unsigned } if (avg_byterate > 0) - buffered_sec = (gint)(ceil((gdouble)buffered_total/(gdouble)avg_byterate)); + buffered_sec = (gint)(ceil((gdouble)buffered_total / (gdouble)avg_byterate)); else if (player->total_maximum_bitrate > 0) - buffered_sec = (gint)(ceil((gdouble)GET_BIT_FROM_BYTE(buffered_total)/(gdouble)player->total_maximum_bitrate)); + buffered_sec = (gint)(ceil((gdouble)GET_BIT_FROM_BYTE(buffered_total) / 
(gdouble)player->total_maximum_bitrate)); else if (player->total_bitrate > 0) - buffered_sec = (gint)(ceil((gdouble)GET_BIT_FROM_BYTE(buffered_total)/(gdouble)player->total_bitrate)); + buffered_sec = (gint)(ceil((gdouble)GET_BIT_FROM_BYTE(buffered_total) / (gdouble)player->total_bitrate)); if (buffered_sec >= 0) - stop_per = start_per +(gint)(ceil)(100*(gdouble)buffered_sec/(gdouble)dur_sec); + end_per = start_per + (gint)(ceil)(100 * (gdouble)buffered_sec / (gdouble)dur_sec); } } *start_pos = CHECK_PERCENT_VALUE(start_per, 0, 100); - *stop_pos = CHECK_PERCENT_VALUE(stop_per, *start_pos, 100); + *end_pos = CHECK_PERCENT_VALUE(end_per, *start_pos, 100); + + LOGD("buffered info: %"G_GINT64_FORMAT" bytes, %d sec, per %d~%d", + buffered_total, buffered_sec, *start_pos, *end_pos); + + return MM_ERROR_NONE; +} + +GstElement * +_mmplayer_gst_create_source(mmplayer_t *player) +{ + GstElement *element = NULL; + + MMPLAYER_FENTER(); + MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline && + player->pipeline->mainbin, NULL); + + /* setup source for gapless play */ + switch (player->profile.uri_type) { + /* file source */ + case MM_PLAYER_URI_TYPE_FILE: + element = __mmplayer_gst_make_file_src(player); + break; + case MM_PLAYER_URI_TYPE_URL_HTTP: + element = __mmplayer_gst_make_http_src(player); + break; + default: + LOGE("not support uri type %d", player->profile.uri_type); + break; + } + + if (!element) { + LOGE("failed to create source element"); + return NULL; + } + + MMPLAYER_FLEAVE(); + return element; +} + +int +_mmplayer_gst_build_es_pipeline(mmplayer_t *player) +{ + MMPLAYER_FENTER(); + MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline && + player->pipeline->mainbin, MM_ERROR_PLAYER_NOT_INITIALIZED); + + SECURE_LOGD("uri : %s", player->profile.uri); + + mm_player_set_attribute((MMHandleType)player, NULL, "profile_prepare_async", TRUE, NULL); + + if ((player->v_stream_caps) && + !(__mmplayer_gst_create_es_path(player, MM_PLAYER_STREAM_TYPE_VIDEO, player->v_stream_caps))) + return MM_ERROR_PLAYER_INTERNAL; + + if ((player->a_stream_caps) && + !(__mmplayer_gst_create_es_path(player, MM_PLAYER_STREAM_TYPE_AUDIO, player->a_stream_caps))) + return MM_ERROR_PLAYER_INTERNAL; + + if ((player->s_stream_caps) && + !(__mmplayer_gst_create_es_path(player, MM_PLAYER_STREAM_TYPE_TEXT, player->s_stream_caps))) + return MM_ERROR_PLAYER_INTERNAL; + + MMPLAYER_FLEAVE(); + return MM_ERROR_NONE; +} + +int +_mmplayer_gst_build_pipeline_with_src(mmplayer_t *player) +{ + mmplayer_gst_element_t *mainbin = NULL; + GstElement *autoplug_elem = NULL; + + MMPLAYER_FENTER(); + MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline && + player->pipeline->mainbin, MM_ERROR_PLAYER_NOT_INITIALIZED); + + mainbin = player->pipeline->mainbin; + + LOGD("uri type %d", player->profile.uri_type); + + if ((player->profile.uri_type == MM_PLAYER_URI_TYPE_FILE) && + (!_mmplayer_get_storage_info(player->profile.uri, &player->storage_info[MMPLAYER_PATH_VOD]))) { + return MM_ERROR_PLAYER_INTERNAL; + } + + if (player->profile.uri_type == MM_PLAYER_URI_TYPE_MEM) { + g_strlcpy(player->profile.uri, "appsrc://", MM_MAX_URL_LEN); + } + + autoplug_elem = __mmplayer_gst_make_uridecodebin(player); + if (!autoplug_elem) { + LOGE("failed to create uridecodebin3 element"); + goto ERROR; + } + + LOGD("autoplug elem is created %s", GST_ELEMENT_NAME(autoplug_elem)); + mainbin[MMPLAYER_M_AUTOPLUG].id = MMPLAYER_M_AUTOPLUG; + mainbin[MMPLAYER_M_AUTOPLUG].gst = autoplug_elem; + + if (!gst_bin_add(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), 
autoplug_elem)) { + LOGE("failed to add uridecodebin to pipeline"); + goto ERROR; + } + + /* FIXME: required ?*/ + /* create fakesink element for keeping the pipeline state PAUSED. if needed */ + mainbin[MMPLAYER_M_SRC_FAKESINK].id = MMPLAYER_M_SRC_FAKESINK; + mainbin[MMPLAYER_M_SRC_FAKESINK].gst = gst_element_factory_make("fakesink", "state-holder"); + + if (!mainbin[MMPLAYER_M_SRC_FAKESINK].gst) { + LOGE("failed to create fakesink"); + goto ERROR; + } + GST_OBJECT_FLAG_UNSET(mainbin[MMPLAYER_M_SRC_FAKESINK].gst, GST_ELEMENT_FLAG_SINK); + + /* take ownership of fakesink. we are reusing it */ + gst_object_ref(mainbin[MMPLAYER_M_SRC_FAKESINK].gst); + + if (!gst_bin_add(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), mainbin[MMPLAYER_M_SRC_FAKESINK].gst)) { + LOGE("failed to add fakesink to bin"); + gst_object_unref(mainbin[MMPLAYER_M_SRC_FAKESINK].gst); + goto ERROR; + } + + MMPLAYER_FLEAVE(); + return MM_ERROR_NONE; + +ERROR: + + if (mainbin[MMPLAYER_M_AUTOPLUG].gst) + gst_object_unref(GST_OBJECT(mainbin[MMPLAYER_M_AUTOPLUG].gst)); + + if (mainbin[MMPLAYER_M_SRC_FAKESINK].gst) + gst_object_unref(GST_OBJECT(mainbin[MMPLAYER_M_SRC_FAKESINK].gst)); + + mainbin[MMPLAYER_M_AUTOPLUG].gst = NULL; + mainbin[MMPLAYER_M_SRC_FAKESINK].gst = NULL; + + return MM_ERROR_PLAYER_INTERNAL; +} + +int +_mmplayer_gst_build_pipeline(mmplayer_t *player) +{ + mmplayer_gst_element_t *mainbin = NULL; + GstElement *src_elem = NULL; + GstElement *autoplug_elem = NULL; + GList *element_bucket = NULL; + main_element_id_e autoplug_elem_id = MMPLAYER_M_NUM; + + MMPLAYER_FENTER(); + MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline && + player->pipeline->mainbin, MM_ERROR_PLAYER_NOT_INITIALIZED); + + LOGD("uri type %d", player->profile.uri_type); + + /* create source element */ + switch (player->profile.uri_type) { + case MM_PLAYER_URI_TYPE_URL_RTSP: + src_elem = __mmplayer_gst_make_rtsp_src(player); + break; + case MM_PLAYER_URI_TYPE_URL_HTTP: + src_elem = __mmplayer_gst_make_http_src(player); + break; + case MM_PLAYER_URI_TYPE_FILE: + src_elem = __mmplayer_gst_make_file_src(player); + break; + case MM_PLAYER_URI_TYPE_SS: + { + gint http_timeout = DEFAULT_HTTP_TIMEOUT; + src_elem = gst_element_factory_make("souphttpsrc", "http streaming source"); + if (!src_elem) { + LOGE("failed to create http streaming source element[%s]", player->ini.httpsrc_element); + break; + } + + if (player->ini.http_timeout != DEFAULT_HTTP_TIMEOUT) { + LOGD("get timeout from ini"); + http_timeout = player->ini.http_timeout; + } + + /* setting property to streaming source */ + g_object_set(G_OBJECT(src_elem), "location", player->profile.uri, "timeout", http_timeout, NULL); + } + break; + case MM_PLAYER_URI_TYPE_MEM: + { + GstAppStreamType stream_type = GST_APP_STREAM_TYPE_RANDOM_ACCESS; + + src_elem = gst_element_factory_make("appsrc", "mem-source"); + if (!src_elem) { + LOGE("failed to create appsrc element"); + break; + } + + g_object_set(src_elem, "stream-type", stream_type, + "size", (gint64)player->profile.input_mem.len, "blocksize", 20480, NULL); + + _mmplayer_add_signal_connection(player, G_OBJECT(src_elem), MM_PLAYER_SIGNAL_TYPE_OTHERS, "seek-data", + G_CALLBACK(__mmplayer_gst_appsrc_seek_data_mem), (gpointer)&player->profile.input_mem); + _mmplayer_add_signal_connection(player, G_OBJECT(src_elem), MM_PLAYER_SIGNAL_TYPE_OTHERS, "need-data", + G_CALLBACK(__mmplayer_gst_appsrc_feed_data_mem), (gpointer)&player->profile.input_mem); + } + break; + default: + LOGE("not support uri type"); + break; + } + + if (!src_elem) { + LOGE("failed to create 
source element"); + return MM_ERROR_PLAYER_INTERNAL; + } + + mainbin = player->pipeline->mainbin; + + /* take source element */ + LOGD("source elem is created %s", GST_ELEMENT_NAME(src_elem)); + + mainbin[MMPLAYER_M_SRC].id = MMPLAYER_M_SRC; + mainbin[MMPLAYER_M_SRC].gst = src_elem; + element_bucket = g_list_append(element_bucket, &mainbin[MMPLAYER_M_SRC]); + + /* create next element for auto-plugging */ + if (MMPLAYER_IS_HTTP_STREAMING(player)) { + autoplug_elem_id = MMPLAYER_M_TYPEFIND; + autoplug_elem = gst_element_factory_make("typefind", "typefinder"); + if (!autoplug_elem) { + LOGE("failed to create typefind element"); + goto ERROR; + } + + _mmplayer_add_signal_connection(player, G_OBJECT(autoplug_elem), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "have-type", + G_CALLBACK(_mmplayer_typefind_have_type), (gpointer)player); + } else if (!MMPLAYER_IS_RTSP_STREAMING(player)) { + autoplug_elem_id = MMPLAYER_M_AUTOPLUG; + autoplug_elem = _mmplayer_gst_make_decodebin(player); + if (!autoplug_elem) { + LOGE("failed to create decodebin"); + goto ERROR; + } + + /* default size of mq in decodebin is 2M + * but it can cause blocking issue during seeking depends on content. */ + g_object_set(G_OBJECT(autoplug_elem), "max-size-bytes", (5 * 1024 * 1024), NULL); + } + + if (autoplug_elem) { + LOGD("autoplug elem is created %s", GST_ELEMENT_NAME(autoplug_elem)); + mainbin[autoplug_elem_id].id = autoplug_elem_id; + mainbin[autoplug_elem_id].gst = autoplug_elem; + + element_bucket = g_list_append(element_bucket, &mainbin[autoplug_elem_id]); + } + + /* add elements to pipeline */ + if (!_mmplayer_gst_element_add_bucket_to_bin(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), element_bucket)) { + LOGE("failed to add elements to pipeline"); + goto ERROR; + } + + /* linking elements in the bucket by added order. */ + if (_mmplayer_gst_element_link_bucket(element_bucket) == -1) { + LOGE("failed to link some elements"); + goto ERROR; + } + + /* FIXME: need to check whether this is required or not. */ + if (MMPLAYER_IS_HTTP_STREAMING(player) || MMPLAYER_IS_RTSP_STREAMING(player) || + (player->audio_extract_opt & MM_PLAYER_AUDIO_EXTRACT_DEINTERLEAVE)) { + /* create fakesink element for keeping the pipeline state PAUSED. if needed */ + mainbin[MMPLAYER_M_SRC_FAKESINK].id = MMPLAYER_M_SRC_FAKESINK; + mainbin[MMPLAYER_M_SRC_FAKESINK].gst = gst_element_factory_make("fakesink", "state-holder"); + + if (!mainbin[MMPLAYER_M_SRC_FAKESINK].gst) { + LOGE("failed to create fakesink"); + goto ERROR; + } + GST_OBJECT_FLAG_UNSET(mainbin[MMPLAYER_M_SRC_FAKESINK].gst, GST_ELEMENT_FLAG_SINK); + + /* take ownership of fakesink. 
we are reusing it */ + gst_object_ref(mainbin[MMPLAYER_M_SRC_FAKESINK].gst); + + if (!gst_bin_add(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), mainbin[MMPLAYER_M_SRC_FAKESINK].gst)) { + LOGE("failed to add fakesink to bin"); + gst_object_unref(mainbin[MMPLAYER_M_SRC_FAKESINK].gst); + goto ERROR; + } + } + + g_list_free(element_bucket); + + MMPLAYER_FLEAVE(); + return MM_ERROR_NONE; + +ERROR: + g_list_free(element_bucket); + + if (mainbin[MMPLAYER_M_SRC].gst) + gst_object_unref(GST_OBJECT(mainbin[MMPLAYER_M_SRC].gst)); + + if (mainbin[autoplug_elem_id].gst) + gst_object_unref(GST_OBJECT(mainbin[autoplug_elem_id].gst)); + + if (mainbin[MMPLAYER_M_SRC_FAKESINK].gst) + gst_object_unref(GST_OBJECT(mainbin[MMPLAYER_M_SRC_FAKESINK].gst)); + + mainbin[MMPLAYER_M_SRC].gst = NULL; + mainbin[autoplug_elem_id].gst = NULL; + mainbin[MMPLAYER_M_SRC_FAKESINK].gst = NULL; + + return MM_ERROR_PLAYER_INTERNAL; +} + +int +_mmplayer_gst_add_bus_watch(mmplayer_t *player) +{ + GstBus *bus = NULL; + mmplayer_gst_element_t *mainbin = NULL; + + MMPLAYER_FENTER(); + MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline && + player->pipeline->mainbin, MM_ERROR_PLAYER_NOT_INITIALIZED); + + mainbin = player->pipeline->mainbin; + + /* connect bus callback */ + bus = gst_pipeline_get_bus(GST_PIPELINE(mainbin[MMPLAYER_M_PIPE].gst)); + if (!bus) { + LOGE("cannot get bus from pipeline"); + return MM_ERROR_PLAYER_INTERNAL; + } - LOGD("buffered info: %"G_GINT64_FORMAT" bytes, %d sec, per %lu~%lu\n", - buffered_total, buffered_sec, *start_pos, *stop_pos); + player->bus_watcher = gst_bus_add_watch_full(bus, G_PRIORITY_DEFAULT, + (GstBusFunc)__mmplayer_gst_msg_push, player, + (GDestroyNotify)_mmplayer_watcher_removed_notify); + if (player->bus_watcher == 0) { + LOGE("failed to add bus watch"); + return MM_ERROR_PLAYER_INTERNAL; + } + + g_mutex_init(&player->bus_watcher_mutex); + g_cond_init(&player->bus_watcher_cond); + + player->context.thread_default = g_main_context_get_thread_default(); + if (player->context.thread_default == NULL) { + player->context.thread_default = g_main_context_default(); + LOGD("thread-default context is the global default context"); + } + LOGW("bus watcher thread context = %p, watcher : %d", player->context.thread_default, player->bus_watcher); + + /* set sync handler to get tag synchronously */ + gst_bus_set_sync_handler(bus, __mmplayer_gst_bus_sync_callback, player, NULL); + gst_object_unref(GST_OBJECT(bus)); + + /* create gst bus_msb_cb thread */ + g_mutex_init(&player->bus_msg_thread_mutex); + g_cond_init(&player->bus_msg_thread_cond); + player->bus_msg_thread_exit = FALSE; + player->bus_msg_thread = + g_thread_try_new("gst_bus_msg_thread", __mmplayer_gst_bus_msg_thread, (gpointer)player, NULL); + if (!player->bus_msg_thread) { + LOGE("failed to create gst BUS msg thread"); + g_mutex_clear(&player->bus_msg_thread_mutex); + g_cond_clear(&player->bus_msg_thread_cond); + return MM_ERROR_PLAYER_INTERNAL; + } + MMPLAYER_FLEAVE(); return MM_ERROR_NONE; } +void +_mmplayer_activate_next_source(mmplayer_t *player, GstState target) +{ + int ret = MM_ERROR_NONE; + mmplayer_gst_element_t *mainbin = NULL; + MMMessageParamType msg_param = {0,}; + GstElement *element = NULL; + MMHandleType attrs = 0; + char *uri = NULL; + main_element_id_e elem_idx = MMPLAYER_M_NUM; + + MMPLAYER_FENTER(); + + if (!player || !player->pipeline || !player->pipeline->mainbin) { + LOGE("player is not initialized"); + goto ERROR; + } + + mainbin = player->pipeline->mainbin; + msg_param.code = MM_ERROR_PLAYER_INTERNAL; + + attrs = 
MMPLAYER_GET_ATTRS(player); + if (!attrs) { + LOGE("fail to get attributes"); + goto ERROR; + } + + mm_attrs_get_string_by_name(attrs, "profile_uri", &uri); + + if (_mmplayer_parse_profile((const char *)uri, NULL, &player->profile) != MM_ERROR_NONE) { + LOGE("failed to parse profile"); + msg_param.code = MM_ERROR_PLAYER_INVALID_URI; + goto ERROR; + } + + if ((MMPLAYER_URL_HAS_DASH_SUFFIX(player)) || + (MMPLAYER_URL_HAS_HLS_SUFFIX(player))) { + LOGE("dash or hls is not supportable"); + msg_param.code = MM_ERROR_PLAYER_INVALID_URI; + goto ERROR; + } + + if (!MMPLAYER_USE_DECODEBIN(player)) { + ret = _mmplayer_gst_build_pipeline_with_src(player); + if (ret != MM_ERROR_NONE) + goto ERROR; + + if (gst_element_set_state(mainbin[MMPLAYER_M_AUTOPLUG].gst, target) == GST_STATE_CHANGE_FAILURE) { + LOGE("Failed to change state of uridecodebin3 element"); + goto ERROR; + } + goto DONE; + } + + element = _mmplayer_gst_create_source(player); + if (!element) { + LOGE("no source element was created"); + goto ERROR; + } + + if (gst_bin_add(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), element) == FALSE) { + LOGE("failed to add source element to pipeline"); + gst_object_unref(GST_OBJECT(element)); + element = NULL; + goto ERROR; + } + + /* take source element */ + mainbin[MMPLAYER_M_SRC].id = MMPLAYER_M_SRC; + mainbin[MMPLAYER_M_SRC].gst = element; + + element = NULL; + + if (MMPLAYER_IS_HTTP_STREAMING(player)) { + if (player->streamer == NULL) { + player->streamer = _mm_player_streaming_create(); + _mm_player_streaming_initialize(player->streamer, TRUE); + } + + elem_idx = MMPLAYER_M_TYPEFIND; + element = gst_element_factory_make("typefind", "typefinder"); + _mmplayer_add_signal_connection(player, G_OBJECT(element), + MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "have-type", G_CALLBACK(_mmplayer_typefind_have_type), (gpointer)player); + } else { + elem_idx = MMPLAYER_M_AUTOPLUG; + element = _mmplayer_gst_make_decodebin(player); + } + + /* check autoplug element is OK */ + if (!element) { + LOGE("can not create element(%d)", elem_idx); + goto ERROR; + } + + if (gst_bin_add(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), element) == FALSE) { + LOGE("failed to add %s to pipeline", GST_ELEMENT_NAME(element)); + gst_object_unref(GST_OBJECT(element)); + element = NULL; + goto ERROR; + } + + mainbin[elem_idx].id = elem_idx; + mainbin[elem_idx].gst = element; + + if (gst_element_link(mainbin[MMPLAYER_M_SRC].gst, mainbin[elem_idx].gst) == FALSE) { + LOGE("Failed to link src - autoplug(or typefind)"); + goto ERROR; + } + + if (MMPLAYER_IS_HTTP_STREAMING(player)) { + if (gst_element_set_state(mainbin[MMPLAYER_M_TYPEFIND].gst, target) == GST_STATE_CHANGE_FAILURE) { // ???? + LOGE("Failed to change state of src element"); + goto ERROR; + } + } else { + if (gst_element_set_state(mainbin[MMPLAYER_M_AUTOPLUG].gst, target) == GST_STATE_CHANGE_FAILURE) { + LOGE("Failed to change state of decodebin"); + goto ERROR; + } + } + + if (gst_element_set_state(mainbin[MMPLAYER_M_SRC].gst, target) == GST_STATE_CHANGE_FAILURE) { + LOGE("Failed to change state of src element"); + goto ERROR; + } + +DONE: + player->gapless.stream_changed = TRUE; + player->gapless.running = TRUE; + MMPLAYER_FLEAVE(); + return; + +ERROR: + if (player) { + _mmplayer_set_reconfigure_state(player, FALSE); + if (!player->msg_posted) { + MMPLAYER_POST_MSG(player, MM_MESSAGE_ERROR, &msg_param); + player->msg_posted = TRUE; + } + } + return; +}
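
The reworked __mmplayer_gst_get_buffer_position() reports the buffered range as two percentages: start_per comes from the current position against the content duration, and end_per either directly from the buffered byte count or, when only a byte total is available, from that total converted to seconds through an average byterate. The following is a minimal sketch of just that arithmetic, outside GStreamer; the helper name and the main() driver are illustrative and not part of the player.

#include <math.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical standalone helper mirroring the percent arithmetic in
 * __mmplayer_gst_get_buffer_position(): start from the playback position,
 * end from the buffered byte total converted via an average byterate. */
static void
estimate_buffered_range(int64_t position_ns, int64_t duration_ns,
	int64_t content_bytes, int64_t buffered_bytes, int *start_per, int *end_per)
{
	*start_per = 0;
	*end_per = 0;

	if (duration_ns <= 0 || content_bytes <= 0)
		return;

	/* current position as a percentage of the whole content */
	*start_per = (int)floor(100.0 * (double)position_ns / (double)duration_ns);

	/* buffered bytes -> seconds via average byterate, added on top of start */
	int64_t dur_sec = duration_ns / 1000000000LL;
	if (dur_sec > 0) {
		int64_t avg_byterate = content_bytes / dur_sec;
		int buffered_sec = (avg_byterate > 0) ?
			(int)ceil((double)buffered_bytes / (double)avg_byterate) : 0;
		*end_per = *start_per + (int)ceil(100.0 * buffered_sec / (double)dur_sec);
	}

	/* clamp to [0,100] and keep end >= start, like CHECK_PERCENT_VALUE() */
	if (*start_per < 0) *start_per = 0;
	if (*start_per > 100) *start_per = 100;
	if (*end_per < *start_per) *end_per = *start_per;
	if (*end_per > 100) *end_per = 100;
}

int main(void)
{
	int start = 0, end = 0;

	/* 30 s into a 120 s stream of 12000000 bytes, 3000000 bytes buffered */
	estimate_buffered_range(30LL * 1000000000LL, 120LL * 1000000000LL,
		12000000, 3000000, &start, &end);
	printf("buffered per %d~%d\n", start, end); /* prints 25~50 */
	return 0;
}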
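
For the MM_PLAYER_URI_TYPE_MEM case, _mmplayer_gst_build_pipeline() configures an appsrc with stream-type, size and blocksize and connects the seek-data/need-data signals to the player's feed callbacks. Below is a reduced, self-contained sketch of that wiring; mem_data_t and the handler bodies are assumptions standing in for player->profile.input_mem and the __mmplayer_gst_appsrc_*_mem callbacks.

#include <gst/gst.h>
#include <gst/app/app.h>

/* Illustrative in-memory source descriptor; the real player keeps the
 * equivalent state in player->profile.input_mem. */
typedef struct {
	const guint8 *buf;
	gsize len;
	gsize offset;
} mem_data_t;

/* "need-data": push the next chunk, or signal EOS when the buffer is drained
 * (stands in for __mmplayer_gst_appsrc_feed_data_mem). */
static void
on_need_data(GstElement *appsrc, guint length, gpointer user_data)
{
	mem_data_t *mem = user_data;
	GstFlowReturn ret;

	if (mem->offset >= mem->len) {
		g_signal_emit_by_name(appsrc, "end-of-stream", &ret);
		return;
	}

	gsize size = MIN((gsize)length, mem->len - mem->offset);
	GstBuffer *buffer = gst_buffer_new_allocate(NULL, size, NULL);
	gst_buffer_fill(buffer, 0, mem->buf + mem->offset, size);
	mem->offset += size;

	/* the action signal takes its own reference, so unref afterwards */
	g_signal_emit_by_name(appsrc, "push-buffer", buffer, &ret);
	gst_buffer_unref(buffer);
}

/* "seek-data": a random-access stream must honour the requested offset
 * (stands in for __mmplayer_gst_appsrc_seek_data_mem). */
static gboolean
on_seek_data(GstElement *appsrc, guint64 offset, gpointer user_data)
{
	mem_data_t *mem = user_data;

	if (offset > mem->len)
		return FALSE;
	mem->offset = (gsize)offset;
	return TRUE;
}

static GstElement *
make_mem_source(mem_data_t *mem)
{
	GstElement *appsrc = gst_element_factory_make("appsrc", "mem-source");
	if (!appsrc)
		return NULL;

	g_object_set(appsrc, "stream-type", GST_APP_STREAM_TYPE_RANDOM_ACCESS,
		"size", (gint64)mem->len, "blocksize", 20480, NULL);

	g_signal_connect(appsrc, "need-data", G_CALLBACK(on_need_data), mem);
	g_signal_connect(appsrc, "seek-data", G_CALLBACK(on_seek_data), mem);

	return appsrc;
}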
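
On the HTTP streaming path the builder inserts a typefind element between the source and the rest of the chain and reacts to its have-type signal via _mmplayer_typefind_have_type(). A generic version of that hook, with a placeholder callback that only prints the detected caps, might look like this:

#include <gst/gst.h>

/* "have-type" fires once typefind has identified the stream; the player's
 * _mmplayer_typefind_have_type() uses it to decide what to autoplug next. */
static void
on_have_type(GstElement *typefind, guint probability, GstCaps *caps, gpointer user_data)
{
	gchar *desc = gst_caps_to_string(caps);

	g_print("typefound: %s (probability %u%%)\n", desc, probability);
	g_free(desc);
}

static GstElement *
make_typefind(gpointer user_data)
{
	GstElement *typefind = gst_element_factory_make("typefind", "typefinder");

	if (typefind)
		g_signal_connect(typefind, "have-type", G_CALLBACK(on_have_type), user_data);

	return typefind;
}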
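
_mmplayer_gst_add_bus_watch() combines an asynchronous bus watch, whose messages are dispatched in the thread-default main context, with a synchronous handler that runs in the posting thread. The standalone program below shows the same registration pattern on a throwaway pipeline; the handler bodies are placeholders rather than the player's __mmplayer_gst_msg_push()/__mmplayer_gst_bus_sync_callback().

#include <gst/gst.h>

/* Async handler: runs in the thread that owns the main context holding
 * the watch. */
static gboolean
bus_msg_cb(GstBus *bus, GstMessage *msg, gpointer user_data)
{
	GMainLoop *loop = user_data;

	switch (GST_MESSAGE_TYPE(msg)) {
	case GST_MESSAGE_EOS:
	case GST_MESSAGE_ERROR:
		g_main_loop_quit(loop);
		break;
	default:
		break;
	}
	return TRUE; /* keep the watch installed */
}

/* Sync handler: runs in the posting (streaming) thread, which is why the
 * player uses it to pick up tags without waiting for the main loop. */
static GstBusSyncReply
bus_sync_cb(GstBus *bus, GstMessage *msg, gpointer user_data)
{
	if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_TAG)
		g_print("tag from %s\n", GST_OBJECT_NAME(GST_MESSAGE_SRC(msg)));

	return GST_BUS_PASS; /* still deliver the message to the async watch */
}

int main(int argc, char *argv[])
{
	gst_init(&argc, &argv);

	GMainLoop *loop = g_main_loop_new(NULL, FALSE);
	GstElement *pipeline = gst_parse_launch("videotestsrc num-buffers=100 ! fakesink", NULL);
	GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
	guint watch_id = gst_bus_add_watch_full(bus, G_PRIORITY_DEFAULT,
		bus_msg_cb, loop, NULL);

	gst_bus_set_sync_handler(bus, bus_sync_cb, NULL, NULL);
	gst_object_unref(bus);

	gst_element_set_state(pipeline, GST_STATE_PLAYING);
	g_main_loop_run(loop);

	gst_element_set_state(pipeline, GST_STATE_NULL);
	g_source_remove(watch_id);
	gst_object_unref(pipeline);
	g_main_loop_unref(loop);
	return 0;
}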
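
_mmplayer_activate_next_source() rebuilds only the front of the pipeline for gapless playback: a fresh source element (plus typefind or decodebin) is added to the existing pipeline, linked, and then raised to the target state. A condensed sketch of that add/link/set-state sequence, under the assumptions noted in the comments:

#include <gst/gst.h>

/* Condensed sketch of the gapless source swap. `pipeline` is assumed to
 * already hold the sink bins, the previous source branch is assumed to be
 * gone, and dynamic decodebin pads are assumed to be linked from a
 * pad-added handler elsewhere. filesrc/decodebin and next_uri are
 * illustrative choices, not the player's exact elements. */
static gboolean
activate_next_source(GstElement *pipeline, const gchar *next_uri, GstState target)
{
	GstElement *src = gst_element_factory_make("filesrc", "next-src");
	GstElement *decode = gst_element_factory_make("decodebin", "next-decode");

	if (!src || !decode) {
		if (src)
			gst_object_unref(src);
		if (decode)
			gst_object_unref(decode);
		return FALSE;
	}

	g_object_set(src, "location", next_uri, NULL);

	gst_bin_add_many(GST_BIN(pipeline), src, decode, NULL);

	if (!gst_element_link(src, decode))
		return FALSE;

	/* bring the new elements up downstream-first, as the diff does
	 * (autoplug element before the source element) */
	if (gst_element_set_state(decode, target) == GST_STATE_CHANGE_FAILURE)
		return FALSE;
	if (gst_element_set_state(src, target) == GST_STATE_CHANGE_FAILURE)
		return FALSE;

	return TRUE;
}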